diff --git a/.github/workflows/test_branches.yml b/.github/workflows/test_branches.yml index 55f903a37f9..75db5d66431 100644 --- a/.github/workflows/test_branches.yml +++ b/.github/workflows/test_branches.yml @@ -40,7 +40,8 @@ jobs: python-version: '3.10' - name: Black Formatting Check run: | - pip install black + # Note v24.4.1 fails due to a bug in the parser + pip install 'black!=24.4.1' black . -S -C --check --diff --exclude examples/pyomobook/python-ch/BadIndent.py - name: Spell Check uses: crate-ci/typos@master @@ -92,7 +93,7 @@ jobs: skip_doctest: 1 TARGET: linux PYENV: conda - PACKAGES: mpi4py + PACKAGES: openmpi mpi4py - os: ubuntu-latest python: '3.10' diff --git a/.github/workflows/test_pr_and_main.yml b/.github/workflows/test_pr_and_main.yml index 76ec6de951a..5a484dccbc8 100644 --- a/.github/workflows/test_pr_and_main.yml +++ b/.github/workflows/test_pr_and_main.yml @@ -7,6 +7,11 @@ on: pull_request: branches: - main + types: + - opened + - reopened + - synchronize + - ready_for_review workflow_dispatch: inputs: git-ref: @@ -34,6 +39,8 @@ jobs: lint: name: lint/style-and-typos runs-on: ubuntu-latest + if: | + contains(github.event.pull_request.title, '[WIP]') != true && !github.event.pull_request.draft steps: - name: Checkout Pyomo source uses: actions/checkout@v4 @@ -43,7 +50,8 @@ jobs: python-version: '3.10' - name: Black Formatting Check run: | - pip install black + # Note v24.4.1 fails due to a bug in the parser + pip install 'black!=24.4.1' black . -S -C --check --diff --exclude examples/pyomobook/python-ch/BadIndent.py - name: Spell Check uses: crate-ci/typos@master @@ -93,7 +101,7 @@ jobs: skip_doctest: 1 TARGET: linux PYENV: conda - PACKAGES: mpi4py + PACKAGES: openmpi mpi4py - os: ubuntu-latest python: '3.11' @@ -733,7 +741,7 @@ jobs: cover: name: process-coverage-${{ matrix.TARGET }} needs: build - if: always() # run even if a build job fails + if: success() || failure() # run even if a build job fails, but not if cancelled runs-on: ${{ matrix.os }} timeout-minutes: 10 strategy: diff --git a/.github/workflows/typos.toml b/.github/workflows/typos.toml index 23f94fc8afd..4d69cde34e1 100644 --- a/.github/workflows/typos.toml +++ b/.github/workflows/typos.toml @@ -40,4 +40,28 @@ WRONLY = "WRONLY" Hax = "Hax" # Big Sur Sur = "Sur" +# Ignore the shorthand ans for answer +ans = "ans" +# Ignore the keyword arange +arange = "arange" +# Ignore IIS +IIS = "IIS" +iis = "iis" +# Ignore PN +PN = "PN" +# Ignore hd +hd = "hd" +# Ignore opf +opf = "opf" +# Ignore FRE +FRE = "FRE" +# Ignore MCH +MCH = "MCH" +# Ignore RO +ro = "ro" +RO = "RO" +# Ignore EOF - end of file +EOF = "EOF" +# Ignore lst as shorthand for list +lst = "lst" # AS NEEDED: Add More Words Below diff --git a/README.md b/README.md index 12c3ce8ed9a..707f1a06c5a 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,7 @@ version, we will remove testing for that Python version. ### Tutorials and Examples +* [Pyomo — Optimization Modeling in Python](https://link.springer.com/book/10.1007/978-3-030-68928-5) * [Pyomo Workshop Slides](https://github.com/Pyomo/pyomo-tutorials/blob/main/Pyomo-Workshop-December-2023.pdf) * [Prof. Jeffrey Kantor's Pyomo Cookbook](https://jckantor.github.io/ND-Pyomo-Cookbook/) * The [companion notebooks](https://mobook.github.io/MO-book/intro.html) diff --git a/doc/OnlineDocs/bibliography.rst b/doc/OnlineDocs/bibliography.rst index 6cbb96d3bfb..c12d3f81d8c 100644 --- a/doc/OnlineDocs/bibliography.rst +++ b/doc/OnlineDocs/bibliography.rst @@ -39,6 +39,8 @@ Bibliography John D. 
Siirola, Jean-Paul Watson, and David L. Woodruff. Pyomo - Optimization Modeling in Python, 3rd Edition. Vol. 67. Springer, 2021. + doi: `10.1007/978-3-030-68928-5 + `_ .. [PyomoJournal] William E. Hart, Jean-Paul Watson, David L. Woodruff. "Pyomo: modeling and solving mathematical programs in diff --git a/doc/OnlineDocs/contributed_packages/pynumero/backward_compatibility.rst b/doc/OnlineDocs/contributed_packages/pynumero/backward_compatibility.rst new file mode 100644 index 00000000000..036a00bee62 --- /dev/null +++ b/doc/OnlineDocs/contributed_packages/pynumero/backward_compatibility.rst @@ -0,0 +1,14 @@ +Backward Compatibility +====================== + +While PyNumero is a third-party contribution to Pyomo, we intend to maintain +the stability of its core functionality. The core functionality of PyNumero +consists of: + +1. The ``NLP`` API and ``PyomoNLP`` implementation of this API +2. HSL and MUMPS linear solver interfaces +3. ``BlockVector`` and ``BlockMatrix`` classes +4. CyIpopt and SciPy solver interfaces + +Other parts of PyNumero, such as ``ExternalGreyBoxBlock`` and +``ImplicitFunctionSolver``, are experimental and subject to change without notice. diff --git a/doc/OnlineDocs/contributed_packages/pynumero/index.rst b/doc/OnlineDocs/contributed_packages/pynumero/index.rst index 6ff8b29f812..711bb83eb3b 100644 --- a/doc/OnlineDocs/contributed_packages/pynumero/index.rst +++ b/doc/OnlineDocs/contributed_packages/pynumero/index.rst @@ -13,6 +13,7 @@ PyNumero. For more details, see the API documentation (:ref:`pynumero_api`). installation.rst tutorial.rst api.rst + backward_compatibility.rst Developers diff --git a/doc/OnlineDocs/contributed_packages/pyros.rst b/doc/OnlineDocs/contributed_packages/pyros.rst index 76a751dd994..95049eded8a 100644 --- a/doc/OnlineDocs/contributed_packages/pyros.rst +++ b/doc/OnlineDocs/contributed_packages/pyros.rst @@ -903,10 +903,10 @@ Observe that the log contains the following information: :linenos: ============================================================================== - PyROS: The Pyomo Robust Optimization Solver, v1.2.9. - Pyomo version: 6.7.0 + PyROS: The Pyomo Robust Optimization Solver, v1.2.11. + Pyomo version: 6.7.2 Commit hash: unknown - Invoked at UTC 2023-12-16T00:00:00.000000 + Invoked at UTC 2024-03-28T00:00:00.000000 Developed by: Natalie M. Isenberg (1), Jason A. F. Sherman (1), John D. Siirola (2), Chrysanthos E. Gounaris (1) @@ -926,6 +926,7 @@ Observe that the log contains the following information: keepfiles=False tee=False load_solution=True + symbolic_solver_labels=False objective_focus= nominal_uncertain_param_vals=[0.13248000000000001, 4.97, 4.97, 1800] decision_rule_order=1 diff --git a/doc/OnlineDocs/contribution_guide.rst b/doc/OnlineDocs/contribution_guide.rst index 10670627546..b98dcc3d014 100644 --- a/doc/OnlineDocs/contribution_guide.rst +++ b/doc/OnlineDocs/contribution_guide.rst @@ -71,6 +71,10 @@ at least 70% coverage of the lines modified in the PR and prefer coverage closer to 90%. We also require that all tests pass before a PR will be merged. +.. note:: + If you are having issues getting tests to pass on your Pull Request, + please tag any of the core developers to ask for help. + The Pyomo main branch provides a Github Actions workflow (configured in the ``.github/`` directory) that will test any changes pushed to a branch with a subset of the complete test harness that includes @@ -82,13 +86,16 @@ This will enable the tests to run automatically with each push to your fork. 
At any point in the development cycle, a "work in progress" pull request may be opened by including '[WIP]' at the beginning of the PR -title. This allows your code changes to be tested by the full suite of -Pyomo's automatic -testing infrastructure. Any pull requests marked '[WIP]' will not be +title. Any pull requests marked '[WIP]' or draft will not be reviewed or merged by the core development team. However, any '[WIP]' pull request left open for an extended period of time without active development may be marked 'stale' and closed. +.. note:: + Draft and WIP Pull Requests will **NOT** trigger tests. This is an effort to + reduce our CI backlog. Please make use of the provided + branch test suite for evaluating / testing draft functionality. + Python Version Support ++++++++++++++++++++++ diff --git a/doc/OnlineDocs/developer_reference/solvers.rst b/doc/OnlineDocs/developer_reference/solvers.rst index 6168da3480e..94fb684236f 100644 --- a/doc/OnlineDocs/developer_reference/solvers.rst +++ b/doc/OnlineDocs/developer_reference/solvers.rst @@ -84,6 +84,37 @@ be used with other Pyomo tools / capabilities. ... 3 Declarations: x y obj +In keeping with our commitment to backwards compatibility, both the legacy and +future methods of specifying solver options are supported: + +.. testcode:: + :skipif: not ipopt_available + + import pyomo.environ as pyo + + model = pyo.ConcreteModel() + model.x = pyo.Var(initialize=1.5) + model.y = pyo.Var(initialize=1.5) + + def rosenbrock(model): + return (1.0 - model.x) ** 2 + 100.0 * (model.y - model.x**2) ** 2 + + model.obj = pyo.Objective(rule=rosenbrock, sense=pyo.minimize) + + # Backwards compatible + status = pyo.SolverFactory('ipopt_v2').solve(model, options={'max_iter' : 6}) + # Forwards compatible + status = pyo.SolverFactory('ipopt_v2').solve(model, solver_options={'max_iter' : 6}) + model.pprint() + +.. testoutput:: + :skipif: not ipopt_available + :hide: + + 2 Var Declarations + ... + 3 Declarations: x y obj + Using the new interfaces directly ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/OnlineDocs/installation.rst b/doc/OnlineDocs/installation.rst index ecba05e13fb..83cd08e7a4a 100644 --- a/doc/OnlineDocs/installation.rst +++ b/doc/OnlineDocs/installation.rst @@ -12,7 +12,7 @@ version, Pyomo will remove testing for that Python version. Using CONDA ~~~~~~~~~~~ -We recommend installation with *conda*, which is included with the +We recommend installation with ``conda``, which is included with the Anaconda distribution of Python. You can install Pyomo in your system Python installation by executing the following in a shell: @@ -21,7 +21,7 @@ Python installation by executing the following in a shell: conda install -c conda-forge pyomo Optimization solvers are not installed with Pyomo, but some open source -optimization solvers can be installed with conda as well: +optimization solvers can be installed with ``conda`` as well: :: @@ -31,7 +31,7 @@ optimization solvers can be installed with conda as well: Using PIP ~~~~~~~~~ -The standard utility for installing Python packages is *pip*. You +The standard utility for installing Python packages is ``pip``. 
You can install Pyomo in your system Python installation by executing the following in a shell: @@ -43,14 +43,14 @@ the following in a shell: Conditional Dependencies ~~~~~~~~~~~~~~~~~~~~~~~~ -Extensions to Pyomo, and many of the contributions in `pyomo.contrib`, +Extensions to Pyomo, and many of the contributions in ``pyomo.contrib``, often have conditional dependencies on a variety of third-party Python packages including but not limited to: matplotlib, networkx, numpy, openpyxl, pandas, pint, pymysql, pyodbc, pyro4, scipy, sympy, and xlrd. A full list of conditional dependencies can be found in Pyomo's -`setup.py` and displayed using: +``setup.py`` and displayed using: :: @@ -72,3 +72,28 @@ with the standard Anaconda installation. You can check which Python packages you have installed using the command ``conda list`` or ``pip list``. Additional Python packages may be installed as needed. + + +Installation with Cython +~~~~~~~~~~~~~~~~~~~~~~~~ + +Users can opt to install Pyomo with +`cython `_ +initialized. + +.. note:: + This can only be done via ``pip`` or from source. + +Via ``pip``: + +:: + + pip install pyomo --global-option="--with-cython" + +From source (recommended for advanced users only): + +:: + + git clone https://github.com/Pyomo/pyomo.git + cd pyomo + python setup.py install --with-cython diff --git a/doc/OnlineDocs/library_reference/common/enums.rst b/doc/OnlineDocs/library_reference/common/enums.rst new file mode 100644 index 00000000000..5ed2dbb1e80 --- /dev/null +++ b/doc/OnlineDocs/library_reference/common/enums.rst @@ -0,0 +1,7 @@ + +pyomo.common.enums +================== + +.. automodule:: pyomo.common.enums + :members: + :member-order: bysource diff --git a/doc/OnlineDocs/library_reference/common/index.rst b/doc/OnlineDocs/library_reference/common/index.rst index c9c99008250..c03436600f2 100644 --- a/doc/OnlineDocs/library_reference/common/index.rst +++ b/doc/OnlineDocs/library_reference/common/index.rst @@ -11,6 +11,7 @@ or rely on any other parts of Pyomo. config.rst dependencies.rst deprecation.rst + enums.rst errors.rst fileutils.rst formatting.rst diff --git a/doc/OnlineDocs/tutorial_examples.rst b/doc/OnlineDocs/tutorial_examples.rst index 6a40949ef90..a18f9d77d42 100644 --- a/doc/OnlineDocs/tutorial_examples.rst +++ b/doc/OnlineDocs/tutorial_examples.rst @@ -3,15 +3,18 @@ Pyomo Tutorial Examples Additional Pyomo tutorials and examples can be found at the following links: -`Pyomo Workshop Slides and Exercises -`_ +* `Pyomo — Optimization Modeling in Python + `_ ([PyomoBookIII]_) -`Prof. Jeffrey Kantor's Pyomo Cookbook -`_ +* `Pyomo Workshop Slides and Exercises + `_ -The `companion notebooks `_ -for *Hands-On Mathematical Optimization with Python* +* `Prof. 
Jeffrey Kantor's Pyomo Cookbook + `_ -`Pyomo Gallery `_ +* The `companion notebooks `_ + for *Hands-On Mathematical Optimization with Python* + +* `Pyomo Gallery `_ diff --git a/examples/pyomobook/pyomo-components-ch/obj_declaration.txt b/examples/pyomobook/pyomo-components-ch/obj_declaration.txt index 607586a1fb3..e4d4b02a252 100644 --- a/examples/pyomobook/pyomo-components-ch/obj_declaration.txt +++ b/examples/pyomobook/pyomo-components-ch/obj_declaration.txt @@ -55,7 +55,7 @@ Model unknown None value x[Q] + 2*x[R] -1 +minimize 6.5 Model unknown diff --git a/pyomo/common/enums.py b/pyomo/common/enums.py new file mode 100644 index 00000000000..4d969bf7a9e --- /dev/null +++ b/pyomo/common/enums.py @@ -0,0 +1,162 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +"""This module provides standard :py:class:`enum.Enum` definitions used in +Pyomo, along with additional utilities for working with custom Enums + +Utilities: + +.. autosummary:: + + ExtendedEnumType + +Standard Enums: + +.. autosummary:: + + ObjectiveSense + +""" + +import enum +import itertools +import sys + +if sys.version_info[:2] < (3, 11): + _EnumType = enum.EnumMeta +else: + _EnumType = enum.EnumType + + +class ExtendedEnumType(_EnumType): + """Metaclass for creating an :py:class:`enum.Enum` that extends another Enum + + In general, :py:class:`enum.Enum` classes are not extensible: that is, + they are frozen when defined and cannot be the base class of another + Enum. This Metaclass provides a workaround for creating a new Enum + that extends an existing enum. Members in the base Enum are all + present as members on the extended enum. + + Example + ------- + + .. testcode:: + :hide: + + import enum + from pyomo.common.enums import ExtendedEnumType + + .. testcode:: + + class ObjectiveSense(enum.IntEnum): + minimize = 1 + maximize = -1 + + class ProblemSense(enum.IntEnum, metaclass=ExtendedEnumType): + __base_enum__ = ObjectiveSense + + unknown = 0 + + .. 
doctest:: + + >>> list(ProblemSense) + [, , ] + >>> ProblemSense.unknown + + >>> ProblemSense.maximize + + >>> ProblemSense(0) + + >>> ProblemSense(1) + + >>> ProblemSense('unknown') + + >>> ProblemSense('maximize') + + >>> hasattr(ProblemSense, 'minimize') + True + >>> ProblemSense.minimize is ObjectiveSense.minimize + True + >>> ProblemSense.minimize in ProblemSense + True + + """ + + def __getattr__(cls, attr): + try: + return getattr(cls.__base_enum__, attr) + except: + return super().__getattr__(attr) + + def __iter__(cls): + # The members of this Enum are the base enum members joined with + # the local members + return itertools.chain(super().__iter__(), cls.__base_enum__.__iter__()) + + def __contains__(cls, member): + # This enum "contains" both its local members and the members in + # the __base_enum__ (necessary for good auto-enum[sphinx] docs) + return super().__contains__(member) or member in cls.__base_enum__ + + def __instancecheck__(cls, instance): + if cls.__subclasscheck__(type(instance)): + return True + # Also pretend that members of the extended enum are subclasses + # of the __base_enum__. This is needed to circumvent error + # checking in enum.__new__ (e.g., for `ProblemSense('minimize')`) + return cls.__base_enum__.__subclasscheck__(type(instance)) + + def _missing_(cls, value): + # Support attribute lookup by value or name + for attr in ('value', 'name'): + for member in cls: + if getattr(member, attr) == value: + return member + return None + + def __new__(metacls, cls, bases, classdict, **kwds): + # Support lookup by name - but only if the new Enum doesn't + # specify its own implementation of _missing_ + if '_missing_' not in classdict: + classdict['_missing_'] = classmethod(ExtendedEnumType._missing_) + return super().__new__(metacls, cls, bases, classdict, **kwds) + + +class ObjectiveSense(enum.IntEnum): + """Flag indicating if an objective is minimizing (1) or maximizing (-1). + + While the numeric values are arbitrary, there are parts of Pyomo + that rely on this particular choice of value. These values are also + consistent with some solvers (notably Gurobi). + + """ + + minimize = 1 + maximize = -1 + + # Overloading __str__ is needed to match the behavior of the old + # pyutilib.enum class (removed June 2020). There are spots in the + # code base that expect the string representation for items in the + # enum to not include the class name. New uses of enum shouldn't + # need to do this. + def __str__(self): + return self.name + + @classmethod + def _missing_(cls, value): + for member in cls: + if member.name == value: + return member + return None + + +minimize = ObjectiveSense.minimize +maximize = ObjectiveSense.maximize diff --git a/pyomo/common/tests/test_enums.py b/pyomo/common/tests/test_enums.py new file mode 100644 index 00000000000..80d081505e9 --- /dev/null +++ b/pyomo/common/tests/test_enums.py @@ -0,0 +1,97 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +import enum + +import pyomo.common.unittest as unittest + +from pyomo.common.enums import ExtendedEnumType, ObjectiveSense + + +class ProblemSense(enum.IntEnum, metaclass=ExtendedEnumType): + __base_enum__ = ObjectiveSense + + unknown = 0 + + +class TestExtendedEnumType(unittest.TestCase): + def test_members(self): + self.assertEqual( + list(ProblemSense), + [ProblemSense.unknown, ObjectiveSense.minimize, ObjectiveSense.maximize], + ) + + def test_isinstance(self): + self.assertIsInstance(ProblemSense.unknown, ProblemSense) + self.assertIsInstance(ProblemSense.minimize, ProblemSense) + self.assertIsInstance(ProblemSense.maximize, ProblemSense) + + self.assertTrue(ProblemSense.__instancecheck__(ProblemSense.unknown)) + self.assertTrue(ProblemSense.__instancecheck__(ProblemSense.minimize)) + self.assertTrue(ProblemSense.__instancecheck__(ProblemSense.maximize)) + + def test_getattr(self): + self.assertIs(ProblemSense.unknown, ProblemSense.unknown) + self.assertIs(ProblemSense.minimize, ObjectiveSense.minimize) + self.assertIs(ProblemSense.maximize, ObjectiveSense.maximize) + + def test_hasattr(self): + self.assertTrue(hasattr(ProblemSense, 'unknown')) + self.assertTrue(hasattr(ProblemSense, 'minimize')) + self.assertTrue(hasattr(ProblemSense, 'maximize')) + + def test_call(self): + self.assertIs(ProblemSense(0), ProblemSense.unknown) + self.assertIs(ProblemSense(1), ObjectiveSense.minimize) + self.assertIs(ProblemSense(-1), ObjectiveSense.maximize) + + self.assertIs(ProblemSense('unknown'), ProblemSense.unknown) + self.assertIs(ProblemSense('minimize'), ObjectiveSense.minimize) + self.assertIs(ProblemSense('maximize'), ObjectiveSense.maximize) + + with self.assertRaisesRegex(ValueError, "'foo' is not a valid ProblemSense"): + ProblemSense('foo') + with self.assertRaisesRegex(ValueError, "2 is not a valid ProblemSense"): + ProblemSense(2) + + def test_contains(self): + self.assertIn(ProblemSense.unknown, ProblemSense) + self.assertIn(ProblemSense.minimize, ProblemSense) + self.assertIn(ProblemSense.maximize, ProblemSense) + + self.assertNotIn(ProblemSense.unknown, ObjectiveSense) + self.assertIn(ProblemSense.minimize, ObjectiveSense) + self.assertIn(ProblemSense.maximize, ObjectiveSense) + + +class TestObjectiveSense(unittest.TestCase): + def test_members(self): + self.assertEqual( + list(ObjectiveSense), [ObjectiveSense.minimize, ObjectiveSense.maximize] + ) + + def test_hasattr(self): + self.assertTrue(hasattr(ProblemSense, 'minimize')) + self.assertTrue(hasattr(ProblemSense, 'maximize')) + + def test_call(self): + self.assertIs(ObjectiveSense(1), ObjectiveSense.minimize) + self.assertIs(ObjectiveSense(-1), ObjectiveSense.maximize) + + self.assertIs(ObjectiveSense('minimize'), ObjectiveSense.minimize) + self.assertIs(ObjectiveSense('maximize'), ObjectiveSense.maximize) + + with self.assertRaisesRegex(ValueError, "'foo' is not a valid ObjectiveSense"): + ObjectiveSense('foo') + + def test_str(self): + self.assertEqual(str(ObjectiveSense.minimize), 'minimize') + self.assertEqual(str(ObjectiveSense.maximize), 'maximize') diff --git a/pyomo/common/tests/test_timing.py b/pyomo/common/tests/test_timing.py index 48288746882..90f4cdcd034 100644 --- a/pyomo/common/tests/test_timing.py +++ b/pyomo/common/tests/test_timing.py @@ -35,7 +35,7 @@ Any, TransformationFactory, ) -from pyomo.core.base.var import _VarData +from pyomo.core.base.var import VarData class _pseudo_component(Var): @@ -62,7 +62,7 @@ def 
test_raw_construction_timer(self): ) v = Var() v.construct() - a = ConstructionTimer(_VarData(v)) + a = ConstructionTimer(VarData(v)) self.assertRegex( str(a), r"ConstructionTimer object for Var ScalarVar\[NOTSET\]; " diff --git a/pyomo/contrib/appsi/base.py b/pyomo/contrib/appsi/base.py index 201e5975ac9..6655ec26524 100644 --- a/pyomo/contrib/appsi/base.py +++ b/pyomo/contrib/appsi/base.py @@ -21,12 +21,12 @@ Tuple, MutableMapping, ) -from pyomo.core.base.constraint import _GeneralConstraintData, Constraint -from pyomo.core.base.sos import _SOSConstraintData, SOSConstraint -from pyomo.core.base.var import _GeneralVarData, Var -from pyomo.core.base.param import _ParamData, Param -from pyomo.core.base.block import _BlockData, Block -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.constraint import ConstraintData, Constraint +from pyomo.core.base.sos import SOSConstraintData, SOSConstraint +from pyomo.core.base.var import VarData, Var +from pyomo.core.base.param import ParamData, Param +from pyomo.core.base.block import BlockData, Block +from pyomo.core.base.objective import ObjectiveData from pyomo.common.collections import ComponentMap from .utils.get_objective import get_objective from .utils.collect_vars_and_named_exprs import collect_vars_and_named_exprs @@ -179,9 +179,7 @@ def __init__( class SolutionLoaderBase(abc.ABC): - def load_vars( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> NoReturn: + def load_vars(self, vars_to_load: Optional[Sequence[VarData]] = None) -> NoReturn: """ Load the solution of the primal variables into the value attribute of the variables. @@ -197,8 +195,8 @@ def load_vars( @abc.abstractmethod def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Returns a ComponentMap mapping variable to var value. @@ -216,8 +214,8 @@ def get_primals( pass def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Returns a dictionary mapping constraint to dual value. @@ -235,8 +233,8 @@ def get_duals( raise NotImplementedError(f'{type(self)} does not support the get_duals method') def get_slacks( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Returns a dictionary mapping constraint to slack. @@ -256,8 +254,8 @@ def get_slacks( ) def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Returns a ComponentMap mapping variable to reduced cost. @@ -303,8 +301,8 @@ def __init__( self._reduced_costs = reduced_costs def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._primals is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. 
Please ' @@ -319,8 +317,8 @@ def get_primals( return primals def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: if self._duals is None: raise RuntimeError( 'Solution loader does not currently have valid duals. Please ' @@ -336,8 +334,8 @@ def get_duals( return duals def get_slacks( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: if self._slacks is None: raise RuntimeError( 'Solution loader does not currently have valid slacks. Please ' @@ -353,8 +351,8 @@ def get_slacks( return slacks def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._reduced_costs is None: raise RuntimeError( 'Solution loader does not currently have valid reduced costs. Please ' @@ -621,13 +619,13 @@ def __str__(self): return self.name @abc.abstractmethod - def solve(self, model: _BlockData, timer: HierarchicalTimer = None) -> Results: + def solve(self, model: BlockData, timer: HierarchicalTimer = None) -> Results: """ Solve a Pyomo model. Parameters ---------- - model: _BlockData + model: BlockData The Pyomo model to be solved timer: HierarchicalTimer An option timer for reporting timing @@ -708,9 +706,7 @@ class PersistentSolver(Solver): def is_persistent(self): return True - def load_vars( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> NoReturn: + def load_vars(self, vars_to_load: Optional[Sequence[VarData]] = None) -> NoReturn: """ Load the solution of the primal variables into the value attribute of the variables. @@ -726,13 +722,13 @@ def load_vars( @abc.abstractmethod def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: pass def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Declare sign convention in docstring here. 
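# --- Illustrative sketch, not part of the patch above. The hunks in this file swap the
# --- private type names (_GeneralVarData, _GeneralConstraintData, ...) for their public
# --- aliases (VarData, ConstraintData, ...) in the appsi type annotations. A minimal,
# --- hedged example of the renamed classes, assuming a Pyomo release (>= 6.7.2) where
# --- the public names exist, as the imports in this diff indicate:
import pyomo.environ as pyo
from pyomo.core.base.var import VarData
from pyomo.core.base.constraint import ConstraintData

m = pyo.ConcreteModel()
m.x = pyo.Var()
m.c = pyo.Constraint(expr=m.x >= 1)

# Scalar components are instances of the public *Data classes, so existing model
# objects satisfy the annotations used by the appsi persistent interfaces above.
assert isinstance(m.x, VarData)
assert isinstance(m.c, ConstraintData)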
@@ -752,8 +748,8 @@ def get_duals( ) def get_slacks( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Parameters ---------- @@ -771,8 +767,8 @@ def get_slacks( ) def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Parameters ---------- @@ -799,43 +795,43 @@ def set_instance(self, model): pass @abc.abstractmethod - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): pass @abc.abstractmethod - def add_params(self, params: List[_ParamData]): + def add_params(self, params: List[ParamData]): pass @abc.abstractmethod - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): pass @abc.abstractmethod - def add_block(self, block: _BlockData): + def add_block(self, block: BlockData): pass @abc.abstractmethod - def remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): pass @abc.abstractmethod - def remove_params(self, params: List[_ParamData]): + def remove_params(self, params: List[ParamData]): pass @abc.abstractmethod - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): pass @abc.abstractmethod - def remove_block(self, block: _BlockData): + def remove_block(self, block: BlockData): pass @abc.abstractmethod - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): pass @abc.abstractmethod - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): pass @abc.abstractmethod @@ -857,20 +853,20 @@ def get_primals(self, vars_to_load=None): return self._solver.get_primals(vars_to_load=vars_to_load) def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: self._assert_solution_still_valid() return self._solver.get_duals(cons_to_load=cons_to_load) def get_slacks( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: self._assert_solution_still_valid() return self._solver.get_slacks(cons_to_load=cons_to_load) def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: self._assert_solution_still_valid() return self._solver.get_reduced_costs(vars_to_load=vars_to_load) @@ -954,10 +950,10 @@ def set_instance(self, model): self.set_objective(None) @abc.abstractmethod - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): pass - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): for v in variables: if id(v) in self._referenced_variables: raise ValueError( @@ -975,19 +971,19 @@ def add_variables(self, variables: 
List[_GeneralVarData]): self._add_variables(variables) @abc.abstractmethod - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: List[ParamData]): pass - def add_params(self, params: List[_ParamData]): + def add_params(self, params: List[ParamData]): for p in params: self._params[id(p)] = p self._add_params(params) @abc.abstractmethod - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): pass - def _check_for_new_vars(self, variables: List[_GeneralVarData]): + def _check_for_new_vars(self, variables: List[VarData]): new_vars = dict() for v in variables: v_id = id(v) @@ -995,7 +991,7 @@ def _check_for_new_vars(self, variables: List[_GeneralVarData]): new_vars[v_id] = v self.add_variables(list(new_vars.values())) - def _check_to_remove_vars(self, variables: List[_GeneralVarData]): + def _check_to_remove_vars(self, variables: List[VarData]): vars_to_remove = dict() for v in variables: v_id = id(v) @@ -1004,7 +1000,7 @@ def _check_to_remove_vars(self, variables: List[_GeneralVarData]): vars_to_remove[v_id] = v self.remove_variables(list(vars_to_remove.values())) - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): all_fixed_vars = dict() for con in cons: if con in self._named_expressions: @@ -1034,10 +1030,10 @@ def add_constraints(self, cons: List[_GeneralConstraintData]): v.fix() @abc.abstractmethod - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): pass - def add_sos_constraints(self, cons: List[_SOSConstraintData]): + def add_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: if con in self._vars_referenced_by_con: raise ValueError( @@ -1054,10 +1050,10 @@ def add_sos_constraints(self, cons: List[_SOSConstraintData]): self._add_sos_constraints(cons) @abc.abstractmethod - def _set_objective(self, obj: _GeneralObjectiveData): + def _set_objective(self, obj: ObjectiveData): pass - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): if self._objective is not None: for v in self._vars_referenced_by_obj: self._referenced_variables[id(v)][2] = None @@ -1132,10 +1128,10 @@ def add_block(self, block): self.set_objective(obj) @abc.abstractmethod - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): pass - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): self._remove_constraints(cons) for con in cons: if con not in self._named_expressions: @@ -1154,10 +1150,10 @@ def remove_constraints(self, cons: List[_GeneralConstraintData]): del self._vars_referenced_by_con[con] @abc.abstractmethod - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): pass - def remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def remove_sos_constraints(self, cons: List[SOSConstraintData]): self._remove_sos_constraints(cons) for con in cons: if con not in self._vars_referenced_by_con: @@ -1174,10 +1170,10 @@ def remove_sos_constraints(self, cons: List[_SOSConstraintData]): del self._vars_referenced_by_con[con] @abc.abstractmethod - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): pass - def 
remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): self._remove_variables(variables) for v in variables: v_id = id(v) @@ -1198,10 +1194,10 @@ def remove_variables(self, variables: List[_GeneralVarData]): del self._vars[v_id] @abc.abstractmethod - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): pass - def remove_params(self, params: List[_ParamData]): + def remove_params(self, params: List[ParamData]): self._remove_params(params) for p in params: del self._params[id(p)] @@ -1246,10 +1242,10 @@ def remove_block(self, block): ) @abc.abstractmethod - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): pass - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): for v in variables: self._vars[id(v)] = ( v, @@ -1334,12 +1330,12 @@ def update(self, timer: HierarchicalTimer = None): for c in self._vars_referenced_by_con.keys(): if c not in current_cons_dict and c not in current_sos_dict: if (c.ctype is Constraint) or ( - c.ctype is None and isinstance(c, _GeneralConstraintData) + c.ctype is None and isinstance(c, ConstraintData) ): old_cons.append(c) else: assert (c.ctype is SOSConstraint) or ( - c.ctype is None and isinstance(c, _SOSConstraintData) + c.ctype is None and isinstance(c, SOSConstraintData) ) old_sos.append(c) self.remove_constraints(old_cons) @@ -1529,7 +1525,7 @@ def update(self, timer: HierarchicalTimer = None): class LegacySolverInterface(object): def solve( self, - model: _BlockData, + model: BlockData, tee: bool = False, load_solutions: bool = True, logfile: Optional[str] = None, diff --git a/pyomo/contrib/appsi/cmodel/src/expression.hpp b/pyomo/contrib/appsi/cmodel/src/expression.hpp index 0c0777ef468..e91ca0af3b3 100644 --- a/pyomo/contrib/appsi/cmodel/src/expression.hpp +++ b/pyomo/contrib/appsi/cmodel/src/expression.hpp @@ -680,10 +680,10 @@ class PyomoExprTypes { expr_type_map[np_float32] = py_float; expr_type_map[np_float64] = py_float; expr_type_map[ScalarVar] = var; - expr_type_map[_GeneralVarData] = var; + expr_type_map[VarData] = var; expr_type_map[AutoLinkedBinaryVar] = var; expr_type_map[ScalarParam] = param; - expr_type_map[_ParamData] = param; + expr_type_map[ParamData] = param; expr_type_map[MonomialTermExpression] = product; expr_type_map[ProductExpression] = product; expr_type_map[NPV_ProductExpression] = product; @@ -700,7 +700,7 @@ class PyomoExprTypes { expr_type_map[UnaryFunctionExpression] = unary_func; expr_type_map[NPV_UnaryFunctionExpression] = unary_func; expr_type_map[LinearExpression] = linear; - expr_type_map[_GeneralExpressionData] = named_expr; + expr_type_map[ExpressionData] = named_expr; expr_type_map[ScalarExpression] = named_expr; expr_type_map[Integral] = named_expr; expr_type_map[ScalarIntegral] = named_expr; @@ -728,12 +728,12 @@ class PyomoExprTypes { py::type np_float64 = np.attr("float64"); py::object ScalarParam = py::module_::import("pyomo.core.base.param").attr("ScalarParam"); - py::object _ParamData = - py::module_::import("pyomo.core.base.param").attr("_ParamData"); + py::object ParamData = + py::module_::import("pyomo.core.base.param").attr("ParamData"); py::object ScalarVar = py::module_::import("pyomo.core.base.var").attr("ScalarVar"); - py::object _GeneralVarData = - py::module_::import("pyomo.core.base.var").attr("_GeneralVarData"); + py::object VarData = + 
py::module_::import("pyomo.core.base.var").attr("VarData"); py::object AutoLinkedBinaryVar = py::module_::import("pyomo.gdp.disjunct").attr("AutoLinkedBinaryVar"); py::object numeric_expr = py::module_::import("pyomo.core.expr.numeric_expr"); @@ -765,8 +765,8 @@ class PyomoExprTypes { py::object NumericConstant = py::module_::import("pyomo.core.expr.numvalue").attr("NumericConstant"); py::object expr_module = py::module_::import("pyomo.core.base.expression"); - py::object _GeneralExpressionData = - expr_module.attr("_GeneralExpressionData"); + py::object ExpressionData = + expr_module.attr("ExpressionData"); py::object ScalarExpression = expr_module.attr("ScalarExpression"); py::object ScalarIntegral = py::module_::import("pyomo.dae.integral").attr("ScalarIntegral"); diff --git a/pyomo/contrib/appsi/fbbt.py b/pyomo/contrib/appsi/fbbt.py index 8b6cc52d2aa..8e0c74b00e9 100644 --- a/pyomo/contrib/appsi/fbbt.py +++ b/pyomo/contrib/appsi/fbbt.py @@ -18,12 +18,12 @@ ) from .cmodel import cmodel, cmodel_available from typing import List, Optional -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.objective import _GeneralObjectiveData, minimize, maximize -from pyomo.core.base.block import _BlockData +from pyomo.core.base.var import VarData +from pyomo.core.base.param import ParamData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.objective import ObjectiveData, minimize, maximize +from pyomo.core.base.block import BlockData from pyomo.core.base import SymbolMap, TextLabeler from pyomo.common.errors import InfeasibleConstraintException @@ -121,7 +121,7 @@ def set_instance(self, model, symbolic_solver_labels: Optional[bool] = None): if self._objective is None: self.set_objective(None) - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): if self._symbolic_solver_labels: set_name = True symbol_map = self._symbol_map @@ -143,7 +143,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): False, ) - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: List[ParamData]): cparams = cmodel.create_params(len(params)) for ndx, p in enumerate(params): cp = cparams[ndx] @@ -154,7 +154,7 @@ def _add_params(self, params: List[_ParamData]): cp = cparams[ndx] cp.name = self._symbol_map.getSymbol(p, self._param_labeler) - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): cmodel.process_fbbt_constraints( self._cmodel, self._pyomo_expr_types, @@ -169,13 +169,13 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): for c, cc in self._con_map.items(): cc.name = self._symbol_map.getSymbol(c, self._con_labeler) - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError( 'IntervalTightener does not support SOS constraints' ) - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): if self._symbolic_solver_labels: for c in cons: self._symbol_map.removeSymbol(c) @@ -184,13 +184,13 @@ def _remove_constraints(self, cons: List[_GeneralConstraintData]): self._cmodel.remove_constraint(cc) del 
self._rcon_map[cc] - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError( 'IntervalTightener does not support SOS constraints' ) - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): if self._symbolic_solver_labels: for v in variables: self._symbol_map.removeSymbol(v) @@ -198,14 +198,14 @@ def _remove_variables(self, variables: List[_GeneralVarData]): cvar = self._var_map.pop(id(v)) del self._rvar_map[cvar] - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): if self._symbolic_solver_labels: for p in params: self._symbol_map.removeSymbol(p) for p in params: del self._param_map[id(p)] - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): cmodel.process_pyomo_vars( self._pyomo_expr_types, variables, @@ -224,13 +224,13 @@ def update_params(self): cp = self._param_map[p_id] cp.value = p.value - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): if self._symbolic_solver_labels: if self._objective is not None: self._symbol_map.removeSymbol(self._objective) super().set_objective(obj) - def _set_objective(self, obj: _GeneralObjectiveData): + def _set_objective(self, obj: ObjectiveData): if obj is None: ce = cmodel.Constant(0) sense = 0 @@ -275,7 +275,7 @@ def _deactivate_satisfied_cons(self): c.deactivate() def perform_fbbt( - self, model: _BlockData, symbolic_solver_labels: Optional[bool] = None + self, model: BlockData, symbolic_solver_labels: Optional[bool] = None ): if model is not self._model: self.set_instance(model, symbolic_solver_labels=symbolic_solver_labels) @@ -304,7 +304,7 @@ def perform_fbbt( self._deactivate_satisfied_cons() return n_iter - def perform_fbbt_with_seed(self, model: _BlockData, seed_var: _GeneralVarData): + def perform_fbbt_with_seed(self, model: BlockData, seed_var: VarData): if model is not self._model: self.set_instance(model) else: diff --git a/pyomo/contrib/appsi/solvers/cbc.py b/pyomo/contrib/appsi/solvers/cbc.py index 7f04ffbfce7..08833e747e2 100644 --- a/pyomo/contrib/appsi/solvers/cbc.py +++ b/pyomo/contrib/appsi/solvers/cbc.py @@ -26,11 +26,11 @@ import math from pyomo.common.collections import ComponentMap from typing import Optional, Sequence, NoReturn, List, Mapping -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.block import _BlockData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.block import BlockData +from pyomo.core.base.param import ParamData +from pyomo.core.base.objective import ObjectiveData from pyomo.common.timing import HierarchicalTimer from pyomo.common.tee import TeeStream import sys @@ -164,34 +164,34 @@ def symbol_map(self): def set_instance(self, model): self._writer.set_instance(model) - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): self._writer.add_variables(variables) - def add_params(self, params: List[_ParamData]): + def add_params(self, params: List[ParamData]): self._writer.add_params(params) - def add_constraints(self, cons: 
List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): self._writer.add_constraints(cons) - def add_block(self, block: _BlockData): + def add_block(self, block: BlockData): self._writer.add_block(block) - def remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): self._writer.remove_variables(variables) - def remove_params(self, params: List[_ParamData]): + def remove_params(self, params: List[ParamData]): self._writer.remove_params(params) - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): self._writer.remove_constraints(cons) - def remove_block(self, block: _BlockData): + def remove_block(self, block: BlockData): self._writer.remove_block(block) - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): self._writer.set_objective(obj) - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): self._writer.update_variables(variables) def update_params(self): @@ -440,8 +440,8 @@ def _check_and_escape_options(): return results def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._last_results_object is None or self._last_results_object.best_feasible_objective is None @@ -477,8 +477,8 @@ def get_duals(self, cons_to_load=None): return {c: self._dual_sol[c] for c in cons_to_load} def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._last_results_object is None or self._last_results_object.termination_condition diff --git a/pyomo/contrib/appsi/solvers/cplex.py b/pyomo/contrib/appsi/solvers/cplex.py index 1b7ab5000d2..10de981ce7d 100644 --- a/pyomo/contrib/appsi/solvers/cplex.py +++ b/pyomo/contrib/appsi/solvers/cplex.py @@ -22,11 +22,11 @@ import math from pyomo.common.collections import ComponentMap from typing import Optional, Sequence, NoReturn, List, Mapping, Dict -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.block import _BlockData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.block import BlockData +from pyomo.core.base.param import ParamData +from pyomo.core.base.objective import ObjectiveData from pyomo.common.timing import HierarchicalTimer import sys import time @@ -179,34 +179,34 @@ def update_config(self): def set_instance(self, model): self._writer.set_instance(model) - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): self._writer.add_variables(variables) - def add_params(self, params: List[_ParamData]): + def add_params(self, params: List[ParamData]): self._writer.add_params(params) - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): self._writer.add_constraints(cons) - def add_block(self, block: _BlockData): + def add_block(self, block: BlockData): self._writer.add_block(block) - def 
remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): self._writer.remove_variables(variables) - def remove_params(self, params: List[_ParamData]): + def remove_params(self, params: List[ParamData]): self._writer.remove_params(params) - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): self._writer.remove_constraints(cons) - def remove_block(self, block: _BlockData): + def remove_block(self, block: BlockData): self._writer.remove_block(block) - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): self._writer.set_objective(obj) - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): self._writer.update_variables(variables) def update_params(self): @@ -362,8 +362,8 @@ def _postsolve(self, timer: HierarchicalTimer, solve_time): return results def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._cplex_model.solution.get_solution_type() == self._cplex_model.solution.type.none @@ -389,8 +389,8 @@ def get_primals( return res def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: if ( self._cplex_model.solution.get_solution_type() == self._cplex_model.solution.type.none @@ -440,8 +440,8 @@ def get_duals( return res def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._cplex_model.solution.get_solution_type() == self._cplex_model.solution.type.none diff --git a/pyomo/contrib/appsi/solvers/gurobi.py b/pyomo/contrib/appsi/solvers/gurobi.py index 1e18862e3bd..2719ecc2a00 100644 --- a/pyomo/contrib/appsi/solvers/gurobi.py +++ b/pyomo/contrib/appsi/solvers/gurobi.py @@ -23,10 +23,10 @@ from pyomo.common.config import ConfigValue, NonNegativeInt from pyomo.core.kernel.objective import minimize, maximize from pyomo.core.base import SymbolMap, NumericLabeler, TextLabeler -from pyomo.core.base.var import Var, _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.param import _ParamData +from pyomo.core.base.var import Var, VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.param import ParamData from pyomo.core.expr.numvalue import value, is_constant, is_fixed, native_numeric_types from pyomo.repn import generate_standard_repn from pyomo.core.expr.numeric_expr import NPV_MaxExpression, NPV_MinExpression @@ -458,7 +458,7 @@ def _process_domain_and_bounds( return lb, ub, vtype - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): var_names = list() vtypes = list() lbs = list() @@ -489,7 +489,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): self._vars_added_since_update.update(variables) self._needs_updated = True - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: 
List[ParamData]): pass def _reinit(self): @@ -579,7 +579,7 @@ def _get_expr_from_pyomo_expr(self, expr): mutable_quadratic_coefficients, ) - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): for con in cons: conname = self._symbol_map.getSymbol(con, self._labeler) ( @@ -709,7 +709,7 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): self._constraints_added_since_update.update(cons) self._needs_updated = True - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: conname = self._symbol_map.getSymbol(con, self._labeler) level = con.level @@ -735,7 +735,7 @@ def _add_sos_constraints(self, cons: List[_SOSConstraintData]): self._constraints_added_since_update.update(cons) self._needs_updated = True - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): for con in cons: if con in self._constraints_added_since_update: self._update_gurobi_model() @@ -749,7 +749,7 @@ def _remove_constraints(self, cons: List[_GeneralConstraintData]): self._mutable_quadratic_helpers.pop(con, None) self._needs_updated = True - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: if con in self._constraints_added_since_update: self._update_gurobi_model() @@ -759,7 +759,7 @@ def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): del self._pyomo_sos_to_solver_sos_map[con] self._needs_updated = True - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): for var in variables: v_id = id(var) if var in self._vars_added_since_update: @@ -771,10 +771,10 @@ def _remove_variables(self, variables: List[_GeneralVarData]): self._mutable_bounds.pop(v_id, None) self._needs_updated = True - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): pass - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): for var in variables: var_id = id(var) if var_id not in self._pyomo_var_to_solver_var_map: @@ -1195,7 +1195,7 @@ def set_linear_constraint_attr(self, con, attr, val): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be modified. attr: str @@ -1221,7 +1221,7 @@ def set_var_attr(self, var, attr, val): Parameters ---------- - var: pyomo.core.base.var._GeneralVarData + var: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be modified. attr: str @@ -1256,7 +1256,7 @@ def get_var_attr(self, var, attr): Parameters ---------- - var: pyomo.core.base.var._GeneralVarData + var: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be retrieved. attr: str @@ -1272,7 +1272,7 @@ def get_linear_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. 
attr: str @@ -1288,7 +1288,7 @@ def get_sos_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.sos._SOSConstraintData + con: pyomo.core.base.sos.SOSConstraintData The pyomo SOS constraint for which the corresponding gurobi SOS constraint attribute should be retrieved. attr: str @@ -1304,7 +1304,7 @@ def get_quadratic_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. attr: str @@ -1425,7 +1425,7 @@ def cbCut(self, con): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The cut to add """ if not con.active: @@ -1510,7 +1510,7 @@ def cbLazy(self, con): """ Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The lazy constraint to add """ if not con.active: diff --git a/pyomo/contrib/appsi/solvers/highs.py b/pyomo/contrib/appsi/solvers/highs.py index 3612b9d5014..6410700c569 100644 --- a/pyomo/contrib/appsi/solvers/highs.py +++ b/pyomo/contrib/appsi/solvers/highs.py @@ -20,10 +20,10 @@ from pyomo.common.log import LogStream from pyomo.core.kernel.objective import minimize, maximize from pyomo.core.base import SymbolMap -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.param import _ParamData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.param import ParamData from pyomo.core.expr.numvalue import value, is_constant from pyomo.repn import generate_standard_repn from pyomo.core.expr.numeric_expr import NPV_MaxExpression, NPV_MinExpression @@ -308,7 +308,7 @@ def _process_domain_and_bounds(self, var_id): return lb, ub, vtype - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): self._sol = None if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() @@ -335,7 +335,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): len(vtypes), np.array(indices), np.array(vtypes) ) - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: List[ParamData]): pass def _reinit(self): @@ -376,7 +376,7 @@ def set_instance(self, model): if self._objective is None: self.set_objective(None) - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): self._sol = None if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() @@ -456,13 +456,13 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): np.array(coef_values, dtype=np.double), ) - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): if cons: raise NotImplementedError( 'Highs interface does not support SOS constraints' ) - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): self._sol = None if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() @@ -487,13 +487,13 @@ def _remove_constraints(self, 
cons: List[_GeneralConstraintData]): {v: k for k, v in self._pyomo_con_to_solver_con_map.items()} ) - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): if cons: raise NotImplementedError( 'Highs interface does not support SOS constraints' ) - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): self._sol = None if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() @@ -515,10 +515,10 @@ def _remove_variables(self, variables: List[_GeneralVarData]): self._pyomo_var_to_solver_var_map.clear() self._pyomo_var_to_solver_var_map.update(new_var_map) - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): pass - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): self._sol = None if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() diff --git a/pyomo/contrib/appsi/solvers/ipopt.py b/pyomo/contrib/appsi/solvers/ipopt.py index 54e21d333e5..76cd204e36d 100644 --- a/pyomo/contrib/appsi/solvers/ipopt.py +++ b/pyomo/contrib/appsi/solvers/ipopt.py @@ -28,11 +28,11 @@ from pyomo.core.expr.numvalue import value from pyomo.core.expr.visitor import replace_expressions from typing import Optional, Sequence, NoReturn, List, Mapping -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.block import _BlockData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.block import BlockData +from pyomo.core.base.param import ParamData +from pyomo.core.base.objective import ObjectiveData from pyomo.common.timing import HierarchicalTimer from pyomo.common.tee import TeeStream import sys @@ -228,34 +228,34 @@ def set_instance(self, model): self._writer.config.symbolic_solver_labels = self.config.symbolic_solver_labels self._writer.set_instance(model) - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): self._writer.add_variables(variables) - def add_params(self, params: List[_ParamData]): + def add_params(self, params: List[ParamData]): self._writer.add_params(params) - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): self._writer.add_constraints(cons) - def add_block(self, block: _BlockData): + def add_block(self, block: BlockData): self._writer.add_block(block) - def remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): self._writer.remove_variables(variables) - def remove_params(self, params: List[_ParamData]): + def remove_params(self, params: List[ParamData]): self._writer.remove_params(params) - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): self._writer.remove_constraints(cons) - def remove_block(self, block: _BlockData): + def remove_block(self, block: BlockData): self._writer.remove_block(block) - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): self._writer.set_objective(obj) - def 
update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): self._writer.update_variables(variables) def update_params(self): @@ -514,8 +514,8 @@ def _apply_solver(self, timer: HierarchicalTimer): return results def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._last_results_object is None or self._last_results_object.best_feasible_objective is None @@ -534,9 +534,7 @@ def get_primals( res[v] = self._primal_sol[v] return res - def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ): + def get_duals(self, cons_to_load: Optional[Sequence[ConstraintData]] = None): if ( self._last_results_object is None or self._last_results_object.termination_condition @@ -553,8 +551,8 @@ def get_duals( return {c: self._dual_sol[c] for c in cons_to_load} def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if ( self._last_results_object is None or self._last_results_object.termination_condition diff --git a/pyomo/contrib/appsi/solvers/wntr.py b/pyomo/contrib/appsi/solvers/wntr.py index 00c0598c687..0a66cc640e5 100644 --- a/pyomo/contrib/appsi/solvers/wntr.py +++ b/pyomo/contrib/appsi/solvers/wntr.py @@ -39,10 +39,10 @@ from pyomo.common.collections import ComponentMap from pyomo.core.expr.numvalue import native_numeric_types from typing import Dict, Optional, List -from pyomo.core.base.block import _BlockData -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.constraint import _GeneralConstraintData +from pyomo.core.base.block import BlockData +from pyomo.core.base.var import VarData +from pyomo.core.base.param import ParamData +from pyomo.core.base.constraint import ConstraintData from pyomo.common.timing import HierarchicalTimer from pyomo.core.base import SymbolMap, NumericLabeler, TextLabeler from pyomo.common.dependencies import attempt_import @@ -178,7 +178,7 @@ def _solve(self, timer: HierarchicalTimer): ) return results - def solve(self, model: _BlockData, timer: HierarchicalTimer = None) -> Results: + def solve(self, model: BlockData, timer: HierarchicalTimer = None) -> Results: StaleFlagManager.mark_all_as_stale() if self._last_results_object is not None: self._last_results_object.solution_loader.invalidate() @@ -239,7 +239,7 @@ def set_instance(self, model): self.add_block(model) - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): aml = wntr.sim.aml.aml for var in variables: varname = self._symbol_map.getSymbol(var, self._labeler) @@ -270,7 +270,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): ) self._needs_updated = True - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: List[ParamData]): aml = wntr.sim.aml.aml for p in params: pname = self._symbol_map.getSymbol(p, self._labeler) @@ -278,7 +278,7 @@ def _add_params(self, params: List[_ParamData]): setattr(self._solver_model, pname, wntr_p) self._pyomo_param_to_solver_param_map[id(p)] = wntr_p - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): aml = wntr.sim.aml.aml for con in cons: if 
not con.equality: @@ -294,7 +294,7 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): self._pyomo_con_to_solver_con_map[con] = wntr_con self._needs_updated = True - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): for con in cons: solver_con = self._pyomo_con_to_solver_con_map[con] delattr(self._solver_model, solver_con.name) @@ -302,7 +302,7 @@ def _remove_constraints(self, cons: List[_GeneralConstraintData]): del self._pyomo_con_to_solver_con_map[con] self._needs_updated = True - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): for var in variables: v_id = id(var) solver_var = self._pyomo_var_to_solver_var_map[v_id] @@ -314,7 +314,7 @@ def _remove_variables(self, variables: List[_GeneralVarData]): del self._solver_model._wntr_fixed_var_cons[v_id] self._needs_updated = True - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): for p in params: p_id = id(p) solver_param = self._pyomo_param_to_solver_param_map[p_id] @@ -322,7 +322,7 @@ def _remove_params(self, params: List[_ParamData]): self._symbol_map.removeSymbol(p) del self._pyomo_param_to_solver_param_map[p_id] - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): aml = wntr.sim.aml.aml for var in variables: v_id = id(var) diff --git a/pyomo/contrib/appsi/writers/lp_writer.py b/pyomo/contrib/appsi/writers/lp_writer.py index 9984cb7465d..788dfde7892 100644 --- a/pyomo/contrib/appsi/writers/lp_writer.py +++ b/pyomo/contrib/appsi/writers/lp_writer.py @@ -10,12 +10,12 @@ # ___________________________________________________________________________ from typing import List -from pyomo.core.base.param import _ParamData -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.objective import _GeneralObjectiveData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.block import _BlockData +from pyomo.core.base.param import ParamData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.objective import ObjectiveData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.block import BlockData from pyomo.repn.standard_repn import generate_standard_repn from pyomo.core.expr.numvalue import value from pyomo.contrib.appsi.base import PersistentBase @@ -77,7 +77,7 @@ def set_instance(self, model): if self._objective is None: self.set_objective(None) - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): cmodel.process_pyomo_vars( self._expr_types, variables, @@ -91,7 +91,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): False, ) - def _add_params(self, params: List[_ParamData]): + def _add_params(self, params: List[ParamData]): cparams = cmodel.create_params(len(params)) for ndx, p in enumerate(params): cp = cparams[ndx] @@ -99,36 +99,36 @@ def _add_params(self, params: List[_ParamData]): cp.value = p.value self._pyomo_param_to_solver_param_map[id(p)] = cp - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): cmodel.process_lp_constraints(cons, self) - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def 
_add_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError('LP writer does not yet support SOS constraints') - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): for c in cons: cc = self._pyomo_con_to_solver_con_map.pop(c) self._writer.remove_constraint(cc) self._symbol_map.removeSymbol(c) del self._solver_con_to_pyomo_con_map[cc] - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError('LP writer does not yet support SOS constraints') - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): for v in variables: cvar = self._pyomo_var_to_solver_var_map.pop(id(v)) del self._solver_var_to_pyomo_var_map[cvar] self._symbol_map.removeSymbol(v) - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): for p in params: del self._pyomo_param_to_solver_param_map[id(p)] self._symbol_map.removeSymbol(p) - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): cmodel.process_pyomo_vars( self._expr_types, variables, @@ -147,7 +147,7 @@ def update_params(self): cp = self._pyomo_param_to_solver_param_map[p_id] cp.value = p.value - def _set_objective(self, obj: _GeneralObjectiveData): + def _set_objective(self, obj: ObjectiveData): cobj = cmodel.process_lp_objective( self._expr_types, obj, @@ -167,7 +167,7 @@ def _set_objective(self, obj: _GeneralObjectiveData): cobj.name = cname self._writer.objective = cobj - def write(self, model: _BlockData, filename: str, timer: HierarchicalTimer = None): + def write(self, model: BlockData, filename: str, timer: HierarchicalTimer = None): if timer is None: timer = HierarchicalTimer() if model is not self._model: diff --git a/pyomo/contrib/appsi/writers/nl_writer.py b/pyomo/contrib/appsi/writers/nl_writer.py index bd24a86216a..27cdca004cb 100644 --- a/pyomo/contrib/appsi/writers/nl_writer.py +++ b/pyomo/contrib/appsi/writers/nl_writer.py @@ -10,12 +10,12 @@ # ___________________________________________________________________________ from typing import List -from pyomo.core.base.param import _ParamData -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.objective import _GeneralObjectiveData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.block import _BlockData +from pyomo.core.base.param import ParamData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.objective import ObjectiveData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.block import BlockData from pyomo.repn.standard_repn import generate_standard_repn from pyomo.core.expr.numvalue import value from pyomo.contrib.appsi.base import PersistentBase @@ -78,7 +78,7 @@ def set_instance(self, model): self.set_objective(None) self._set_pyomo_amplfunc_env() - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): if self.config.symbolic_solver_labels: set_name = True symbol_map = self._symbol_map @@ -100,7 +100,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): False, ) - def _add_params(self, params: List[_ParamData]): + def 
_add_params(self, params: List[ParamData]): cparams = cmodel.create_params(len(params)) for ndx, p in enumerate(params): cp = cparams[ndx] @@ -111,7 +111,7 @@ def _add_params(self, params: List[_ParamData]): cp = cparams[ndx] cp.name = self._symbol_map.getSymbol(p, self._param_labeler) - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): cmodel.process_nl_constraints( self._writer, self._expr_types, @@ -126,11 +126,11 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): for c, cc in self._pyomo_con_to_solver_con_map.items(): cc.name = self._symbol_map.getSymbol(c, self._con_labeler) - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError('NL writer does not support SOS constraints') - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): if self.config.symbolic_solver_labels: for c in cons: self._symbol_map.removeSymbol(c) @@ -140,11 +140,11 @@ def _remove_constraints(self, cons: List[_GeneralConstraintData]): self._writer.remove_constraint(cc) del self._solver_con_to_pyomo_con_map[cc] - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): if len(cons) != 0: raise NotImplementedError('NL writer does not support SOS constraints') - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): if self.config.symbolic_solver_labels: for v in variables: self._symbol_map.removeSymbol(v) @@ -153,7 +153,7 @@ def _remove_variables(self, variables: List[_GeneralVarData]): cvar = self._pyomo_var_to_solver_var_map.pop(id(v)) del self._solver_var_to_pyomo_var_map[cvar] - def _remove_params(self, params: List[_ParamData]): + def _remove_params(self, params: List[ParamData]): if self.config.symbolic_solver_labels: for p in params: self._symbol_map.removeSymbol(p) @@ -161,7 +161,7 @@ def _remove_params(self, params: List[_ParamData]): for p in params: del self._pyomo_param_to_solver_param_map[id(p)] - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): cmodel.process_pyomo_vars( self._expr_types, variables, @@ -180,7 +180,7 @@ def update_params(self): cp = self._pyomo_param_to_solver_param_map[p_id] cp.value = p.value - def _set_objective(self, obj: _GeneralObjectiveData): + def _set_objective(self, obj: ObjectiveData): if obj is None: const = cmodel.Constant(0) lin_vars = list() @@ -232,7 +232,7 @@ def _set_objective(self, obj: _GeneralObjectiveData): cobj.sense = sense self._writer.objective = cobj - def write(self, model: _BlockData, filename: str, timer: HierarchicalTimer = None): + def write(self, model: BlockData, filename: str, timer: HierarchicalTimer = None): if timer is None: timer = HierarchicalTimer() if model is not self._model: diff --git a/pyomo/contrib/benders/benders_cuts.py b/pyomo/contrib/benders/benders_cuts.py index cf96ba26164..0653be55986 100644 --- a/pyomo/contrib/benders/benders_cuts.py +++ b/pyomo/contrib/benders/benders_cuts.py @@ -9,7 +9,7 @@ # This software is distributed under the 3-clause BSD License. 
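Because the hunks above (and several below) all make the same mechanical change, replacing the private component-data classes (``_GeneralVarData``, ``_GeneralConstraintData``, ``_ParamData``, ``_SOSConstraintData``, ``_BlockData``, ``_GeneralObjectiveData``) with their public counterparts in imports, type hints, and docstrings, a minimal sketch of how the public names behave may help reviewers. It assumes a Pyomo build that already exposes the renamed classes (the same imports this patch adds):

    import pyomo.environ as pyo
    from pyomo.core.base.var import VarData
    from pyomo.core.base.constraint import ConstraintData

    m = pyo.ConcreteModel()
    m.x = pyo.Var()
    m.c = pyo.Constraint(expr=m.x >= 1)

    # The component data objects Pyomo creates are instances of the public
    # classes used in the updated type hints; no private _General* names
    # are needed.
    assert isinstance(m.x, VarData)
    assert isinstance(m.c, ConstraintData)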
# ___________________________________________________________________________ -from pyomo.core.base.block import _BlockData, declare_custom_block +from pyomo.core.base.block import BlockData, declare_custom_block import pyomo.environ as pyo from pyomo.solvers.plugins.solvers.persistent_solver import PersistentSolver from pyomo.core.expr.visitor import identify_variables @@ -166,13 +166,13 @@ def _setup_subproblem(b, root_vars, relax_subproblem_cons): @declare_custom_block(name='BendersCutGenerator') -class BendersCutGeneratorData(_BlockData): +class BendersCutGeneratorData(BlockData): def __init__(self, component): if not mpi4py_available: raise ImportError('BendersCutGenerator requires mpi4py.') if not numpy_available: raise ImportError('BendersCutGenerator requires numpy.') - _BlockData.__init__(self, component) + BlockData.__init__(self, component) self.num_subproblems_by_rank = 0 # np.zeros(self.comm.Get_size()) self.subproblems = list() diff --git a/pyomo/contrib/community_detection/detection.py b/pyomo/contrib/community_detection/detection.py index 5bf8187a243..0e2c3912e06 100644 --- a/pyomo/contrib/community_detection/detection.py +++ b/pyomo/contrib/community_detection/detection.py @@ -31,7 +31,7 @@ Objective, ConstraintList, ) -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.objective import ObjectiveData from pyomo.core.expr.visitor import replace_expressions, identify_variables from pyomo.contrib.community_detection.community_graph import generate_model_graph from pyomo.common.dependencies import networkx as nx @@ -750,7 +750,7 @@ def generate_structured_model(self): # Check to see whether 'stored_constraint' is actually an objective (since constraints and objectives # grouped together) if self.with_objective and isinstance( - stored_constraint, (_GeneralObjectiveData, Objective) + stored_constraint, (ObjectiveData, Objective) ): # If the constraint is actually an objective, we add it to the block as an objective new_objective = Objective( diff --git a/pyomo/contrib/cp/interval_var.py b/pyomo/contrib/cp/interval_var.py index 953b859ea20..ff11d6e3a9f 100644 --- a/pyomo/contrib/cp/interval_var.py +++ b/pyomo/contrib/cp/interval_var.py @@ -18,7 +18,7 @@ from pyomo.core import Integers, value from pyomo.core.base import Any, ScalarVar, ScalarBooleanVar -from pyomo.core.base.block import _BlockData, Block +from pyomo.core.base.block import BlockData, Block from pyomo.core.base.component import ModelComponentFactory from pyomo.core.base.global_set import UnindexedComponent_index from pyomo.core.base.indexed_component import IndexedComponent, UnindexedComponent_set @@ -87,14 +87,14 @@ def get_associated_interval_var(self): return self.parent_block() -class IntervalVarData(_BlockData): +class IntervalVarData(BlockData): """This class defines the abstract interface for a single interval variable.""" # We will put our four variables on this, and everything else is off limits. 
_Block_reserved_words = Any def __init__(self, component=None): - _BlockData.__init__(self, component) + BlockData.__init__(self, component) with self._declare_reserved_components(): self.is_present = IntervalVarPresence() diff --git a/pyomo/contrib/cp/repn/docplex_writer.py b/pyomo/contrib/cp/repn/docplex_writer.py index 8356a1e752f..37429d420d2 100644 --- a/pyomo/contrib/cp/repn/docplex_writer.py +++ b/pyomo/contrib/cp/repn/docplex_writer.py @@ -60,12 +60,12 @@ ) from pyomo.core.base.boolean_var import ( ScalarBooleanVar, - _GeneralBooleanVarData, + BooleanVarData, IndexedBooleanVar, ) -from pyomo.core.base.expression import ScalarExpression, _GeneralExpressionData -from pyomo.core.base.param import IndexedParam, ScalarParam, _ParamData -from pyomo.core.base.var import ScalarVar, _GeneralVarData, IndexedVar +from pyomo.core.base.expression import ScalarExpression, ExpressionData +from pyomo.core.base.param import IndexedParam, ScalarParam, ParamData +from pyomo.core.base.var import ScalarVar, VarData, IndexedVar import pyomo.core.expr as EXPR from pyomo.core.expr.visitor import StreamBasedExpressionVisitor, identify_variables from pyomo.core.base import Set, RangeSet @@ -949,7 +949,7 @@ class LogicalToDoCplex(StreamBasedExpressionVisitor): BeforeExpression: _handle_before_expression_node, AtExpression: _handle_at_expression_node, AlwaysIn: _handle_always_in_node, - _GeneralExpressionData: _handle_named_expression_node, + ExpressionData: _handle_named_expression_node, ScalarExpression: _handle_named_expression_node, } _var_handles = { @@ -961,16 +961,16 @@ class LogicalToDoCplex(StreamBasedExpressionVisitor): IntervalVarData: _before_interval_var, IndexedIntervalVar: _before_indexed_interval_var, ScalarVar: _before_var, - _GeneralVarData: _before_var, + VarData: _before_var, IndexedVar: _before_indexed_var, ScalarBooleanVar: _before_boolean_var, - _GeneralBooleanVarData: _before_boolean_var, + BooleanVarData: _before_boolean_var, IndexedBooleanVar: _before_indexed_boolean_var, - _GeneralExpressionData: _before_named_expression, + ExpressionData: _before_named_expression, ScalarExpression: _before_named_expression, IndexedParam: _before_indexed_param, # Because of indirection ScalarParam: _before_param, - _ParamData: _before_param, + ParamData: _before_param, } def __init__(self, cpx_model, symbolic_solver_labels=False): diff --git a/pyomo/contrib/cp/transform/logical_to_disjunctive_program.py b/pyomo/contrib/cp/transform/logical_to_disjunctive_program.py index e318e621e88..c29bf3f2675 100644 --- a/pyomo/contrib/cp/transform/logical_to_disjunctive_program.py +++ b/pyomo/contrib/cp/transform/logical_to_disjunctive_program.py @@ -26,7 +26,7 @@ Transformation, NonNegativeIntegers, ) -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base import SortComponents from pyomo.core.util import target_list from pyomo.gdp import Disjunct, Disjunction @@ -73,7 +73,7 @@ def _apply_to(self, model, **kwds): transBlocks = {} visitor = LogicalToDisjunctiveVisitor() for t in targets: - if t.ctype is Block or isinstance(t, _BlockData): + if t.ctype is Block or isinstance(t, BlockData): self._transform_block(t, model, visitor, transBlocks) elif t.ctype is LogicalConstraint: if t.is_indexed(): diff --git a/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py b/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py index d5f13e91535..fdcfd5a8308 100644 --- a/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py +++ 
b/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py @@ -27,9 +27,9 @@ value, ) import pyomo.core.base.boolean_var as BV -from pyomo.core.base.expression import ScalarExpression, _GeneralExpressionData -from pyomo.core.base.param import ScalarParam, _ParamData -from pyomo.core.base.var import ScalarVar, _GeneralVarData +from pyomo.core.base.expression import ScalarExpression, ExpressionData +from pyomo.core.base.param import ScalarParam, ParamData +from pyomo.core.base.var import ScalarVar, VarData from pyomo.gdp.disjunct import AutoLinkedBooleanVar, Disjunct, Disjunction @@ -209,15 +209,15 @@ def _dispatch_atmost(visitor, node, *args): _before_child_dispatcher = {} _before_child_dispatcher[BV.ScalarBooleanVar] = _dispatch_boolean_var -_before_child_dispatcher[BV._GeneralBooleanVarData] = _dispatch_boolean_var +_before_child_dispatcher[BV.BooleanVarData] = _dispatch_boolean_var _before_child_dispatcher[AutoLinkedBooleanVar] = _dispatch_boolean_var -_before_child_dispatcher[_ParamData] = _dispatch_param +_before_child_dispatcher[ParamData] = _dispatch_param _before_child_dispatcher[ScalarParam] = _dispatch_param # for the moment, these are all just so we can get good error messages when we # don't handle them: _before_child_dispatcher[ScalarVar] = _dispatch_var -_before_child_dispatcher[_GeneralVarData] = _dispatch_var -_before_child_dispatcher[_GeneralExpressionData] = _dispatch_expression +_before_child_dispatcher[VarData] = _dispatch_var +_before_child_dispatcher[ExpressionData] = _dispatch_expression _before_child_dispatcher[ScalarExpression] = _dispatch_expression diff --git a/pyomo/contrib/fbbt/fbbt.py b/pyomo/contrib/fbbt/fbbt.py index bde33b3caa0..1507c4a3cc5 100644 --- a/pyomo/contrib/fbbt/fbbt.py +++ b/pyomo/contrib/fbbt/fbbt.py @@ -24,9 +24,10 @@ import math from pyomo.core.base.block import Block from pyomo.core.base.constraint import Constraint +from pyomo.core.base.expression import ExpressionData, ScalarExpression +from pyomo.core.base.objective import ObjectiveData, ScalarObjective from pyomo.core.base.var import Var from pyomo.gdp import Disjunct -from pyomo.core.base.expression import _GeneralExpressionData, ScalarExpression import logging from pyomo.common.errors import InfeasibleConstraintException, PyomoException from pyomo.common.config import ( @@ -333,15 +334,15 @@ def _prop_bnds_leaf_to_root_UnaryFunctionExpression(visitor, node, arg): _unary_leaf_to_root_map[node.getname()](visitor, node, arg) -def _prop_bnds_leaf_to_root_GeneralExpression(visitor, node, expr): +def _prop_bnds_leaf_to_root_NamedExpression(visitor, node, expr): """ Propagate bounds from children to parent Parameters ---------- visitor: _FBBTVisitorLeafToRoot - node: pyomo.core.base.expression._GeneralExpressionData - expr: GeneralExpression arg + node: pyomo.core.base.expression.NamedExpressionData + expr: NamedExpressionData arg """ bnds_dict = visitor.bnds_dict if node in bnds_dict: @@ -366,8 +367,10 @@ def _prop_bnds_leaf_to_root_GeneralExpression(visitor, node, expr): numeric_expr.UnaryFunctionExpression: _prop_bnds_leaf_to_root_UnaryFunctionExpression, numeric_expr.LinearExpression: _prop_bnds_leaf_to_root_SumExpression, numeric_expr.AbsExpression: _prop_bnds_leaf_to_root_abs, - _GeneralExpressionData: _prop_bnds_leaf_to_root_GeneralExpression, - ScalarExpression: _prop_bnds_leaf_to_root_GeneralExpression, + ExpressionData: _prop_bnds_leaf_to_root_NamedExpression, + ScalarExpression: _prop_bnds_leaf_to_root_NamedExpression, + ObjectiveData: _prop_bnds_leaf_to_root_NamedExpression, + 
ScalarObjective: _prop_bnds_leaf_to_root_NamedExpression, }, ) @@ -898,13 +901,13 @@ def _prop_bnds_root_to_leaf_UnaryFunctionExpression(node, bnds_dict, feasibility ) -def _prop_bnds_root_to_leaf_GeneralExpression(node, bnds_dict, feasibility_tol): +def _prop_bnds_root_to_leaf_NamedExpression(node, bnds_dict, feasibility_tol): """ Propagate bounds from parent to children. Parameters ---------- - node: pyomo.core.base.expression._GeneralExpressionData + node: pyomo.core.base.expression.NamedExpressionData bnds_dict: ComponentMap feasibility_tol: float If the bounds computed on the body of a constraint violate the bounds of the constraint by more than @@ -945,12 +948,10 @@ def _prop_bnds_root_to_leaf_GeneralExpression(node, bnds_dict, feasibility_tol): ) _prop_bnds_root_to_leaf_map[numeric_expr.AbsExpression] = _prop_bnds_root_to_leaf_abs -_prop_bnds_root_to_leaf_map[_GeneralExpressionData] = ( - _prop_bnds_root_to_leaf_GeneralExpression -) -_prop_bnds_root_to_leaf_map[ScalarExpression] = ( - _prop_bnds_root_to_leaf_GeneralExpression -) +_prop_bnds_root_to_leaf_map[ExpressionData] = _prop_bnds_root_to_leaf_NamedExpression +_prop_bnds_root_to_leaf_map[ScalarExpression] = _prop_bnds_root_to_leaf_NamedExpression +_prop_bnds_root_to_leaf_map[ObjectiveData] = _prop_bnds_root_to_leaf_NamedExpression +_prop_bnds_root_to_leaf_map[ScalarObjective] = _prop_bnds_root_to_leaf_NamedExpression def _check_and_reset_bounds(var, lb, ub): diff --git a/pyomo/contrib/fme/fourier_motzkin_elimination.py b/pyomo/contrib/fme/fourier_motzkin_elimination.py index a1b5d744cf4..4636450c58e 100644 --- a/pyomo/contrib/fme/fourier_motzkin_elimination.py +++ b/pyomo/contrib/fme/fourier_motzkin_elimination.py @@ -23,7 +23,7 @@ value, ConstraintList, ) -from pyomo.core.base import TransformationFactory, _VarData +from pyomo.core.base import TransformationFactory, VarData from pyomo.core.plugins.transform.hierarchy import Transformation from pyomo.common.config import ConfigBlock, ConfigValue, NonNegativeFloat from pyomo.common.modeling import unique_component_name @@ -58,7 +58,7 @@ def _check_var_bounds_filter(constraint): def vars_to_eliminate_list(x): - if isinstance(x, (Var, _VarData)): + if isinstance(x, (Var, VarData)): if not x.is_indexed(): return ComponentSet([x]) ans = ComponentSet() diff --git a/pyomo/contrib/gdp_bounds/info.py b/pyomo/contrib/gdp_bounds/info.py index 6f39af5908d..e65df2bfab0 100644 --- a/pyomo/contrib/gdp_bounds/info.py +++ b/pyomo/contrib/gdp_bounds/info.py @@ -35,10 +35,10 @@ def disjunctive_bound(var, scope): """Compute the disjunctive bounds for a variable in a given scope. Args: - var (_VarData): Variable for which to compute bound + var (VarData): Variable for which to compute bound scope (Component): The scope in which to compute the bound. If not a - _DisjunctData, it will walk up the tree and use the scope of the - most immediate enclosing _DisjunctData. + DisjunctData, it will walk up the tree and use the scope of the + most immediate enclosing DisjunctData. 
Returns: numeric: the tighter of either the disjunctive lower bound, the diff --git a/pyomo/contrib/gdpopt/tests/test_LBB.py b/pyomo/contrib/gdpopt/tests/test_LBB.py index 273327b02a4..8a553398fa6 100644 --- a/pyomo/contrib/gdpopt/tests/test_LBB.py +++ b/pyomo/contrib/gdpopt/tests/test_LBB.py @@ -59,6 +59,7 @@ def test_infeasible_GDP(self): self.assertIsNone(m.d.disjuncts[0].indicator_var.value) self.assertIsNone(m.d.disjuncts[1].indicator_var.value) + @unittest.skipUnless(z3_available, "Z3 SAT solver is not available") def test_infeasible_GDP_check_sat(self): """Test for infeasible GDP with check_sat option True.""" m = ConcreteModel() diff --git a/pyomo/contrib/gdpopt/tests/test_gdpopt.py b/pyomo/contrib/gdpopt/tests/test_gdpopt.py index 005df56ced5..873bafabc76 100644 --- a/pyomo/contrib/gdpopt/tests/test_gdpopt.py +++ b/pyomo/contrib/gdpopt/tests/test_gdpopt.py @@ -22,7 +22,6 @@ from pyomo.common.collections import Bunch from pyomo.common.config import ConfigDict, ConfigValue from pyomo.common.fileutils import import_file, PYOMO_ROOT_DIR -from pyomo.contrib.appsi.solvers.gurobi import Gurobi from pyomo.contrib.gdpopt.create_oa_subproblems import ( add_util_block, add_disjunct_list, @@ -767,6 +766,9 @@ def test_time_limit(self): results.solver.termination_condition, TerminationCondition.maxTimeLimit ) + @unittest.skipUnless( + license_available, "No BARON license--8PP logical problem exceeds demo size" + ) def test_LOA_8PP_logical_default_init(self): """Test logic-based outer approximation with 8PP.""" exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py')) @@ -870,6 +872,9 @@ def test_LOA_8PP_maxBinary(self): ) ct.check_8PP_solution(self, eight_process, results) + @unittest.skipUnless( + license_available, "No BARON license--8PP logical problem exceeds demo size" + ) def test_LOA_8PP_logical_maxBinary(self): """Test logic-based OA with max_binary initialization.""" exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py')) @@ -1050,7 +1055,11 @@ def assert_correct_disjuncts_active( self.assertTrue(fabs(value(eight_process.profit.expr) - 68) <= 1e-2) - @unittest.skipUnless(Gurobi().available(), "APPSI Gurobi solver is not available") + @unittest.skipUnless( + SolverFactory('appsi_gurobi').available(exception_flag=False) + and SolverFactory('appsi_gurobi').license_is_valid(), + "Legacy APPSI Gurobi solver is not available", + ) def test_auto_persistent_solver(self): exfile = import_file(join(exdir, 'eight_process', 'eight_proc_model.py')) m = exfile.build_eight_process_flowsheet() @@ -1126,6 +1135,9 @@ def test_RIC_8PP_default_init(self): ) ct.check_8PP_solution(self, eight_process, results) + @unittest.skipUnless( + license_available, "No BARON license--8PP logical problem exceeds demo size" + ) def test_RIC_8PP_logical_default_init(self): """Test logic-based outer approximation with 8PP.""" exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py')) diff --git a/pyomo/contrib/gdpopt/util.py b/pyomo/contrib/gdpopt/util.py index 2cb70f0ea60..babe0245d57 100644 --- a/pyomo/contrib/gdpopt/util.py +++ b/pyomo/contrib/gdpopt/util.py @@ -553,6 +553,13 @@ def _add_bigm_constraint_to_transformed_model(m, constraint, block): # making a Reference to the ComponentData so that it will look like an # indexed component for now. If I redesign bigm at some point, then this # could be prettier. 
- bigm._transform_constraint(Reference(constraint), parent_disjunct, None, [], []) + bigm._transform_constraint( + Reference(constraint), + parent_disjunct, + None, + [], + [], + 1 - parent_disjunct.binary_indicator_var, + ) # Now get rid of it because this is a class attribute! del bigm._config diff --git a/pyomo/contrib/incidence_analysis/config.py b/pyomo/contrib/incidence_analysis/config.py index 128273b4dec..2a7734ba433 100644 --- a/pyomo/contrib/incidence_analysis/config.py +++ b/pyomo/contrib/incidence_analysis/config.py @@ -36,6 +36,13 @@ class IncidenceMethod(enum.Enum): """Use ``pyomo.repn.plugins.nl_writer.AMPLRepnVisitor``""" +class IncidenceOrder(enum.Enum): + + dulmage_mendelsohn_upper = 0 + + dulmage_mendelsohn_lower = 1 + + _include_fixed = ConfigValue( default=False, domain=bool, diff --git a/pyomo/contrib/incidence_analysis/interface.py b/pyomo/contrib/incidence_analysis/interface.py index 50cb84daaf5..b73ec17f36c 100644 --- a/pyomo/contrib/incidence_analysis/interface.py +++ b/pyomo/contrib/incidence_analysis/interface.py @@ -15,7 +15,7 @@ import enum import textwrap -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base.var import Var from pyomo.core.base.constraint import Constraint from pyomo.core.base.objective import Objective @@ -28,7 +28,7 @@ scipy as sp, plotly, ) -from pyomo.common.deprecation import deprecated +from pyomo.common.deprecation import deprecated, deprecation_warning from pyomo.contrib.incidence_analysis.config import get_config_from_kwds from pyomo.contrib.incidence_analysis.matching import maximum_matching from pyomo.contrib.incidence_analysis.connected import get_independent_submatrices @@ -279,7 +279,7 @@ def __init__(self, model=None, active=True, include_inequality=True, **kwds): self._incidence_graph = None self._variables = None self._constraints = None - elif isinstance(model, _BlockData): + elif isinstance(model, BlockData): self._constraints = [ con for con in model.component_data_objects(Constraint, active=active) @@ -348,7 +348,7 @@ def __init__(self, model=None, active=True, include_inequality=True, **kwds): else: raise TypeError( "Unsupported type for incidence graph. Expected PyomoNLP" - " or _BlockData but got %s." % type(model) + " or BlockData but got %s." % type(model) ) @property @@ -453,11 +453,29 @@ def _validate_input(self, variables, constraints): raise ValueError("Neither variables nor a model have been provided.") else: variables = self.variables + elif self._incidence_graph is not None: + # If variables were provided and an incidence graph is cached, + # make sure the provided variables exist in the graph. + for var in variables: + if var not in self._var_index_map: + raise KeyError( + f"Variable {var} does not exist in the cached" + " incidence graph." + ) if constraints is None: if self._incidence_graph is None: raise ValueError("Neither constraints nor a model have been provided.") else: constraints = self.constraints + elif self._incidence_graph is not None: + # If constraints were provided and an incidence graph is cached, + # make sure the provided constraints exist in the graph. + for con in constraints: + if con not in self._con_index_map: + raise KeyError( + f"Constraint {con} does not exist in the cached" + " incidence graph." + ) _check_unindexed(variables + constraints) return variables, constraints @@ -854,7 +872,7 @@ def dulmage_mendelsohn(self, variables=None, constraints=None): # Hopefully this does not get too confusing... 
return var_partition, con_partition - def remove_nodes(self, nodes, constraints=None): + def remove_nodes(self, variables=None, constraints=None): """Removes the specified variables and constraints (columns and rows) from the cached incidence matrix. @@ -866,35 +884,76 @@ def remove_nodes(self, nodes, constraints=None): Parameters ---------- - nodes: list - VarData or ConData objects whose columns or rows will be - removed from the incidence matrix. + variables: list + VarData objects whose nodes will be removed from the incidence graph constraints: list - VarData or ConData objects whose columns or rows will be - removed from the incidence matrix. + ConData objects whose nodes will be removed from the incidence graph + + .. note:: + + **Deprecation in Pyomo v6.7.2.dev0** + + The pre-6.7.2.dev0 implementation of ``remove_nodes`` allowed variables and + constraints to remove to be specified in a single list. This made + error checking difficult, and indeed, if invalid components were + provided, we carried on silently instead of throwing an error or + warning. As part of a fix to raise an error if an invalid component + (one that is not part of the incidence graph) is provided, we now require + variables and constraints to be specified separately. """ if constraints is None: constraints = [] + if variables is None: + variables = [] if self._incidence_graph is None: raise RuntimeError( "Attempting to remove variables and constraints from cached " "incidence matrix,\nbut no incidence matrix has been cached." ) - to_exclude = ComponentSet(nodes) - to_exclude.update(constraints) - vars_to_include = [v for v in self.variables if v not in to_exclude] - cons_to_include = [c for c in self.constraints if c not in to_exclude] + + vars_to_validate = [] + cons_to_validate = [] + depr_msg = ( + "In IncidenceGraphInterface.remove_nodes, passing variables and" + " constraints in the same list is deprecated. Please separate your" + " variables and constraints and pass them in the order variables," + " constraints." + ) + if any(var in self._con_index_map for var in variables) or any( + con in self._var_index_map for con in constraints + ): + deprecation_warning(depr_msg, version="6.7.2.dev0") + # If we received variables/constraints in the same list, sort them. + # Any unrecognized objects will be caught by _validate_input. 
+ for var in variables: + if var in self._con_index_map: + cons_to_validate.append(var) + else: + vars_to_validate.append(var) + for con in constraints: + if con in self._var_index_map: + vars_to_validate.append(con) + else: + cons_to_validate.append(con) + + variables, constraints = self._validate_input( + vars_to_validate, cons_to_validate + ) + v_exclude = ComponentSet(variables) + c_exclude = ComponentSet(constraints) + vars_to_include = [v for v in self.variables if v not in v_exclude] + cons_to_include = [c for c in self.constraints if c not in c_exclude] incidence_graph = self._extract_subgraph(vars_to_include, cons_to_include) # update attributes self._variables = vars_to_include self._constraints = cons_to_include self._incidence_graph = incidence_graph self._var_index_map = ComponentMap( - (var, i) for i, var in enumerate(self.variables) + (var, i) for i, var in enumerate(vars_to_include) ) self._con_index_map = ComponentMap( - (con, i) for i, con in enumerate(self._constraints) + (con, i) for i, con in enumerate(cons_to_include) ) def plot(self, variables=None, constraints=None, title=None, show=True): diff --git a/pyomo/contrib/incidence_analysis/scc_solver.py b/pyomo/contrib/incidence_analysis/scc_solver.py index 0c59fe8703e..378647c190c 100644 --- a/pyomo/contrib/incidence_analysis/scc_solver.py +++ b/pyomo/contrib/incidence_analysis/scc_solver.py @@ -27,7 +27,7 @@ def generate_strongly_connected_components( constraints, variables=None, include_fixed=False, igraph=None ): - """Yield in order ``_BlockData`` that each contain the variables and + """Yield in order ``BlockData`` that each contain the variables and constraints of a single diagonal block in a block lower triangularization of the incidence matrix of constraints and variables @@ -51,7 +51,7 @@ def generate_strongly_connected_components( Yields ------ - Tuple of ``_BlockData``, list-of-variables + Tuple of ``BlockData``, list-of-variables Blocks containing the variables and constraints of every strongly connected component, in a topological order. The variables are the "input variables" for that block. 
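Since ``IncidenceGraphInterface.remove_nodes`` now takes variables and constraints as two separate arguments (the old single-list form is kept only behind a deprecation warning), a short usage sketch may help; it reuses the small model from the tests below and assumes only that networkx is available:

    import pyomo.environ as pyo
    from pyomo.contrib.incidence_analysis import IncidenceGraphInterface

    m = pyo.ConcreteModel()
    m.x = pyo.Var([1, 2, 3])
    m.eq = pyo.Constraint(pyo.PositiveIntegers)
    m.eq[1] = m.x[1] * m.x[2] == m.x[3]
    m.eq[2] = m.x[1] + 2 * m.x[2] == 3 * m.x[3]

    igraph = IncidenceGraphInterface(m)
    # New calling convention: variables first, then constraints.
    igraph.remove_nodes([m.x[1]], [m.eq[1]])
    # The old form, igraph.remove_nodes([m.x[1], m.eq[1]]), still works but
    # now emits a deprecation warning; components that are not part of the
    # cached incidence graph raise a KeyError instead of being ignored.
    assert len(igraph.constraints) == 1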
diff --git a/pyomo/contrib/incidence_analysis/tests/test_interface.py b/pyomo/contrib/incidence_analysis/tests/test_interface.py index 4b77d60d8ba..9957e78168b 100644 --- a/pyomo/contrib/incidence_analysis/tests/test_interface.py +++ b/pyomo/contrib/incidence_analysis/tests/test_interface.py @@ -634,17 +634,15 @@ def test_exception(self): nlp = PyomoNLP(model) igraph = IncidenceGraphInterface(nlp) - with self.assertRaises(RuntimeError) as exc: + with self.assertRaisesRegex(KeyError, "does not exist"): variables = [model.P] constraints = [model.ideal_gas] igraph.maximum_matching(variables, constraints) - self.assertIn("must be unindexed", str(exc.exception)) - with self.assertRaises(RuntimeError) as exc: + with self.assertRaisesRegex(KeyError, "does not exist"): variables = [model.P] constraints = [model.ideal_gas] igraph.block_triangularize(variables, constraints) - self.assertIn("must be unindexed", str(exc.exception)) @unittest.skipUnless(networkx_available, "networkx is not available.") @@ -885,17 +883,15 @@ def test_exception(self): model = make_gas_expansion_model() igraph = IncidenceGraphInterface(model) - with self.assertRaises(RuntimeError) as exc: + with self.assertRaisesRegex(KeyError, "does not exist"): variables = [model.P] constraints = [model.ideal_gas] igraph.maximum_matching(variables, constraints) - self.assertIn("must be unindexed", str(exc.exception)) - with self.assertRaises(RuntimeError) as exc: + with self.assertRaisesRegex(KeyError, "does not exist"): variables = [model.P] constraints = [model.ideal_gas] igraph.block_triangularize(variables, constraints) - self.assertIn("must be unindexed", str(exc.exception)) @unittest.skipUnless(scipy_available, "scipy is not available.") def test_remove(self): @@ -923,7 +919,7 @@ def test_remove(self): # Say we know that these variables and constraints should # be matched... vars_to_remove = [model.F[0], model.F[2]] - cons_to_remove = (model.mbal[1], model.mbal[2]) + cons_to_remove = [model.mbal[1], model.mbal[2]] igraph.remove_nodes(vars_to_remove, cons_to_remove) variable_set = ComponentSet(igraph.variables) self.assertNotIn(model.F[0], variable_set) @@ -1309,7 +1305,7 @@ def test_remove(self): # matrix. vars_to_remove = [m.flow_comp[1]] cons_to_remove = [m.flow_eqn[1]] - igraph.remove_nodes(vars_to_remove + cons_to_remove) + igraph.remove_nodes(vars_to_remove, cons_to_remove) var_dmp, con_dmp = igraph.dulmage_mendelsohn() var_con_set = ComponentSet(igraph.variables + igraph.constraints) underconstrained_set = ComponentSet( @@ -1460,6 +1456,42 @@ def test_remove_no_matrix(self): with self.assertRaisesRegex(RuntimeError, "no incidence matrix"): igraph.remove_nodes([m.v1]) + def test_remove_bad_node(self): + m = pyo.ConcreteModel() + m.x = pyo.Var([1, 2, 3]) + m.eq = pyo.Constraint(pyo.PositiveIntegers) + m.eq[1] = m.x[1] * m.x[2] == m.x[3] + m.eq[2] = m.x[1] + 2 * m.x[2] == 3 * m.x[3] + igraph = IncidenceGraphInterface(m) + with self.assertRaisesRegex(KeyError, "does not exist"): + # Suppose we think something like this should work. We should get + # an error, and not silently do nothing. 
+ igraph.remove_nodes([m.x], [m.eq[1]]) + + with self.assertRaisesRegex(KeyError, "does not exist"): + igraph.remove_nodes(None, [m.eq]) + + with self.assertRaisesRegex(KeyError, "does not exist"): + igraph.remove_nodes([[m.x[1], m.x[2]], [m.eq[1]]]) + + def test_remove_varcon_samelist_deprecated(self): + m = pyo.ConcreteModel() + m.x = pyo.Var([1, 2, 3]) + m.eq = pyo.Constraint(pyo.PositiveIntegers) + m.eq[1] = m.x[1] * m.x[2] == m.x[3] + m.eq[2] = m.x[1] + 2 * m.x[2] == 3 * m.x[3] + + igraph = IncidenceGraphInterface(m) + # This raises a deprecation warning. When the deprecated functionality + # is removed, this will fail, and this test should be updated accordingly. + igraph.remove_nodes([m.eq[1], m.x[1]]) + self.assertEqual(len(igraph.variables), 2) + self.assertEqual(len(igraph.constraints), 1) + + igraph.remove_nodes([], [m.eq[2], m.x[2]]) + self.assertEqual(len(igraph.variables), 1) + self.assertEqual(len(igraph.constraints), 0) + @unittest.skipUnless(networkx_available, "networkx is not available.") @unittest.skipUnless(scipy_available, "scipy is not available.") @@ -1840,7 +1872,7 @@ def test_var_elim(self): for adj_con in igraph.get_adjacent_to(m.x[1]): for adj_var in igraph.get_adjacent_to(m.eq4): igraph.add_edge(adj_var, adj_con) - igraph.remove_nodes([m.x[1], m.eq4]) + igraph.remove_nodes([m.x[1]], [m.eq4]) assert ComponentSet(igraph.variables) == ComponentSet([m.x[2], m.x[3], m.x[4]]) assert ComponentSet(igraph.constraints) == ComponentSet([m.eq1, m.eq2, m.eq3]) @@ -1888,7 +1920,7 @@ def test_block_data_obj(self): self.assertEqual(len(var_dmp.unmatched), 1) self.assertEqual(len(con_dmp.unmatched), 1) - msg = "Unsupported type.*_BlockData" + msg = "Unsupported type.*BlockData" with self.assertRaisesRegex(TypeError, msg): igraph = IncidenceGraphInterface(m.block) diff --git a/pyomo/contrib/incidence_analysis/tests/test_visualize.py b/pyomo/contrib/incidence_analysis/tests/test_visualize.py new file mode 100644 index 00000000000..7c5538b671f --- /dev/null +++ b/pyomo/contrib/incidence_analysis/tests/test_visualize.py @@ -0,0 +1,47 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import pyomo.common.unittest as unittest +from pyomo.common.dependencies import ( + matplotlib, + matplotlib_available, + scipy_available, + networkx_available, +) +from pyomo.contrib.incidence_analysis.visualize import spy_dulmage_mendelsohn +from pyomo.contrib.incidence_analysis.tests.models_for_testing import ( + make_gas_expansion_model, + make_dynamic_model, + make_degenerate_solid_phase_model, +) + + +@unittest.skipUnless(matplotlib_available, "Matplotlib is not available") +@unittest.skipUnless(scipy_available, "SciPy is not available") +@unittest.skipUnless(networkx_available, "NetworkX is not available") +class TestSpy(unittest.TestCase): + def test_spy_dulmage_mendelsohn(self): + models = [ + make_gas_expansion_model(), + make_dynamic_model(), + make_degenerate_solid_phase_model(), + ] + for m in models: + fig, ax = spy_dulmage_mendelsohn(m) + # Note that this is a weak test. 
We just test that we can call the + # plot method, it doesn't raise an error, and gives us back the + # types we expect. We don't attempt to validate the resulting plot. + self.assertTrue(isinstance(fig, matplotlib.pyplot.Figure)) + self.assertTrue(isinstance(ax, matplotlib.pyplot.Axes)) + + +if __name__ == "__main__": + unittest.main() diff --git a/pyomo/contrib/incidence_analysis/visualize.py b/pyomo/contrib/incidence_analysis/visualize.py new file mode 100644 index 00000000000..af1bdbbb918 --- /dev/null +++ b/pyomo/contrib/incidence_analysis/visualize.py @@ -0,0 +1,219 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ +"""Module for visualizing results of incidence graph or matrix analysis + +""" +from pyomo.contrib.incidence_analysis.config import IncidenceOrder +from pyomo.contrib.incidence_analysis.interface import ( + IncidenceGraphInterface, + get_structural_incidence_matrix, +) +from pyomo.common.dependencies import matplotlib + + +def _partition_variables_and_constraints( + model, order=IncidenceOrder.dulmage_mendelsohn_upper, **kwds +): + """Partition variables and constraints in an incidence graph""" + igraph = IncidenceGraphInterface(model, **kwds) + vdmp, cdmp = igraph.dulmage_mendelsohn() + + ucv = vdmp.unmatched + vdmp.underconstrained + ucc = cdmp.underconstrained + + ocv = vdmp.overconstrained + occ = cdmp.overconstrained + cdmp.unmatched + + ucvblocks, uccblocks = igraph.get_connected_components( + variables=ucv, constraints=ucc + ) + ocvblocks, occblocks = igraph.get_connected_components( + variables=ocv, constraints=occ + ) + wcvblocks, wccblocks = igraph.block_triangularize( + variables=vdmp.square, constraints=cdmp.square + ) + # By default, we block-*lower* triangularize. By default, however, we want + # the Dulmage-Mendelsohn decomposition to be block-*upper* triangular. 
+ wcvblocks.reverse() + wccblocks.reverse() + vpartition = [ucvblocks, wcvblocks, ocvblocks] + cpartition = [uccblocks, wccblocks, occblocks] + + if order == IncidenceOrder.dulmage_mendelsohn_lower: + # If a block-lower triangular matrix was requested, we need to reverse + # both the inner and outer partitions + vpartition.reverse() + cpartition.reverse() + for vb in vpartition: + vb.reverse() + for cb in cpartition: + cb.reverse() + + return vpartition, cpartition + + +def _get_rectangle_around_coords(ij1, ij2, linewidth=2, linestyle="-"): + i1, j1 = ij1 + i2, j2 = ij2 + buffer = 0.5 + ll_corner = (min(i1, i2) - buffer, min(j1, j2) - buffer) + width = abs(i1 - i2) + 2 * buffer + height = abs(j1 - j2) + 2 * buffer + rect = matplotlib.patches.Rectangle( + ll_corner, + width, + height, + clip_on=False, + fill=False, + edgecolor="orange", + linewidth=linewidth, + linestyle=linestyle, + ) + return rect + + +def spy_dulmage_mendelsohn( + model, + *, + incidence_kwds=None, + order=IncidenceOrder.dulmage_mendelsohn_upper, + highlight_coarse=True, + highlight_fine=True, + skip_wellconstrained=False, + ax=None, + linewidth=2, + spy_kwds=None, +): + """Plot sparsity structure in Dulmage-Mendelsohn order on Matplotlib axes + + This is a wrapper around the Matplotlib ``Axes.spy`` method for plotting + an incidence matrix in Dulmage-Mendelsohn order, with coarse and/or fine + partitions highlighted. The coarse partition refers to the under-constrained, + over-constrained, and well-constrained subsystems, while the fine partition + refers to block diagonal or block triangular partitions of the former + subsystems. + + Parameters + ---------- + + model: ``ConcreteModel`` + Input model to plot sparsity structure of + + incidence_kwds: dict, optional + Config options for ``IncidenceGraphInterface`` + + order: ``IncidenceOrder``, optional + Order in which to plot sparsity structure. Default is + ``IncidenceOrder.dulmage_mendelsohn_upper`` for a block-upper triangular + matrix. Set to ``IncidenceOrder.dulmage_mendelsohn_lower`` for a + block-lower triangular matrix. + + highlight_coarse: bool, optional + Whether to draw a rectangle around the coarse partition. Default True + + highlight_fine: bool, optional + Whether to draw a rectangle around the fine partition. Default True + + skip_wellconstrained: bool, optional + Whether to skip highlighting the well-constrained subsystem of the + coarse partition. Default False + + ax: ``matplotlib.pyplot.Axes``, optional + Axes object on which to plot. If not provided, new figure + and axes are created. + + linewidth: int, optional + Line width of for rectangle used to highlight. Default 2 + + spy_kwds: dict, optional + Keyword arguments for ``Axes.spy`` + + Returns + ------- + + fig: ``matplotlib.pyplot.Figure`` or ``None`` + Figure on which the sparsity structure is plotted. 
``None`` if axes + are provided + + ax: ``matplotlib.pyplot.Axes`` + Axes on which the sparsity structure is plotted + + """ + plt = matplotlib.pyplot + if incidence_kwds is None: + incidence_kwds = {} + if spy_kwds is None: + spy_kwds = {} + + vpart, cpart = _partition_variables_and_constraints(model, order=order) + vpart_fine = sum(vpart, []) + cpart_fine = sum(cpart, []) + vorder = sum(vpart_fine, []) + corder = sum(cpart_fine, []) + + imat = get_structural_incidence_matrix(vorder, corder) + nvar = len(vorder) + ncon = len(corder) + + if ax is None: + fig, ax = plt.subplots() + else: + fig = None + + markersize = spy_kwds.pop("markersize", None) + if markersize is None: + # At 10000 vars/cons, we want markersize=0.2 + # At 20 vars/cons, we want markersize=10 + # We assume we want a linear relationship between 1/nvar + # and the markersize. + markersize = (10.0 - 0.2) / (1 / 20 - 1 / 10000) * ( + 1 / max(nvar, ncon) - 1 / 10000 + ) + 0.2 + + ax.spy(imat, markersize=markersize, **spy_kwds) + ax.tick_params(length=0) + if highlight_coarse: + start = (0, 0) + for i, (vblocks, cblocks) in enumerate(zip(vpart, cpart)): + # Get the total number of variables/constraints in this part + # of the coarse partition + nv = sum(len(vb) for vb in vblocks) + nc = sum(len(cb) for cb in cblocks) + stop = (start[0] + nv - 1, start[1] + nc - 1) + if not (i == 1 and skip_wellconstrained) and nv > 0 and nc > 0: + # Regardless of whether we are plotting in upper or lower + # triangular order, the well-constrained subsystem is at + # position 1 + # + # The get-rectangle function doesn't look good if we give it + # an "empty region" to box. + ax.add_patch( + _get_rectangle_around_coords(start, stop, linewidth=linewidth) + ) + start = (stop[0] + 1, stop[1] + 1) + + if highlight_fine: + # Use dashed lines to distinguish inner from outer partitions + # if we are highlighting both + linestyle = "--" if highlight_coarse else "-" + start = (0, 0) + for vb, cb in zip(vpart_fine, cpart_fine): + stop = (start[0] + len(vb) - 1, start[1] + len(cb) - 1) + # Note that the subset's we're boxing here can't be empty. + ax.add_patch( + _get_rectangle_around_coords( + start, stop, linestyle=linestyle, linewidth=linewidth + ) + ) + start = (stop[0] + 1, stop[1] + 1) + + return fig, ax diff --git a/pyomo/contrib/latex_printer/__init__.py b/pyomo/contrib/latex_printer/__init__.py index c434b53dfe1..02eaa636a36 100644 --- a/pyomo/contrib/latex_printer/__init__.py +++ b/pyomo/contrib/latex_printer/__init__.py @@ -9,22 +9,11 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ -# ___________________________________________________________________________ -# -# Pyomo: Python Optimization Modeling Objects -# Copyright (c) 2008-2023 -# National Technology and Engineering Solutions of Sandia, LLC -# Under the terms of Contract DE-NA0003525 with National Technology and -# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain -# rights in this software. -# This software is distributed under the 3-clause BSD License. 
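The new ``spy_dulmage_mendelsohn`` helper defined above is easiest to grasp from a small usage sketch. This is only a sketch: it assumes Matplotlib, SciPy, and NetworkX are installed and uses a throwaway two-constraint model rather than one of the test models:

    import pyomo.environ as pyo
    from pyomo.contrib.incidence_analysis.visualize import spy_dulmage_mendelsohn

    m = pyo.ConcreteModel()
    m.x = pyo.Var([1, 2, 3])
    m.eq1 = pyo.Constraint(expr=m.x[1] + m.x[2] == 1)
    m.eq2 = pyo.Constraint(expr=m.x[2] * m.x[3] == 2)

    # Plot the incidence matrix in (block-upper triangular)
    # Dulmage-Mendelsohn order with the coarse partition highlighted.
    fig, ax = spy_dulmage_mendelsohn(m)
    # fig is None only when an existing Axes object is passed in.
    fig.savefig("dm_structure.png")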
-# ___________________________________________________________________________ - # Recommended just to build all of the appropriate things import pyomo.environ # Remove one layer of .latex_printer -# import statemnt is now: +# import statement is now: # from pyomo.contrib.latex_printer import latex_printer try: from pyomo.contrib.latex_printer.latex_printer import latex_printer diff --git a/pyomo/contrib/latex_printer/latex_printer.py b/pyomo/contrib/latex_printer/latex_printer.py index 0a595dd8e1b..cf286472a66 100644 --- a/pyomo/contrib/latex_printer/latex_printer.py +++ b/pyomo/contrib/latex_printer/latex_printer.py @@ -34,8 +34,8 @@ from pyomo.core.expr.visitor import identify_components from pyomo.core.expr.base import ExpressionBase -from pyomo.core.base.expression import ScalarExpression, _GeneralExpressionData -from pyomo.core.base.objective import ScalarObjective, _GeneralObjectiveData +from pyomo.core.base.expression import ScalarExpression, ExpressionData +from pyomo.core.base.objective import ScalarObjective, ObjectiveData import pyomo.core.kernel as kernel from pyomo.core.expr.template_expr import ( GetItemExpression, @@ -47,9 +47,9 @@ resolve_template, templatize_rule, ) -from pyomo.core.base.var import ScalarVar, _GeneralVarData, IndexedVar -from pyomo.core.base.param import _ParamData, ScalarParam, IndexedParam -from pyomo.core.base.set import _SetData, SetOperator +from pyomo.core.base.var import ScalarVar, VarData, IndexedVar +from pyomo.core.base.param import ParamData, ScalarParam, IndexedParam +from pyomo.core.base.set import SetData, SetOperator from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint from pyomo.common.collections.component_map import ComponentMap from pyomo.common.collections.component_set import ComponentSet @@ -64,7 +64,7 @@ from pyomo.core.base.external import _PythonCallbackFunctionID from pyomo.core.base.enums import SortComponents -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.repn.util import ExprType @@ -399,12 +399,12 @@ def __init__(self): EqualityExpression: handle_equality_node, InequalityExpression: handle_inequality_node, RangedExpression: handle_ranged_inequality_node, - _GeneralExpressionData: handle_named_expression_node, + ExpressionData: handle_named_expression_node, ScalarExpression: handle_named_expression_node, kernel.expression.expression: handle_named_expression_node, kernel.expression.noclone: handle_named_expression_node, - _GeneralObjectiveData: handle_named_expression_node, - _GeneralVarData: handle_var_node, + ObjectiveData: handle_named_expression_node, + VarData: handle_var_node, ScalarObjective: handle_named_expression_node, kernel.objective.objective: handle_named_expression_node, ExternalFunctionExpression: handle_external_function_node, @@ -417,7 +417,7 @@ def __init__(self): Numeric_GetItemExpression: handle_numericGetItemExpression_node, TemplateSumExpression: handle_templateSumExpression_node, ScalarParam: handle_param_node, - _ParamData: handle_param_node, + ParamData: handle_param_node, IndexedParam: handle_param_node, NPV_Numeric_GetItemExpression: handle_numericGetItemExpression_node, IndexedBlock: handle_indexedBlock_node, @@ -587,7 +587,7 @@ def latex_printer( Parameters ---------- - pyomo_component: _BlockData or Model or Objective or Constraint or Expression + pyomo_component: BlockData or Model or Objective or Constraint or Expression The Pyomo component to be printed latex_component_map: 
pyomo.common.collections.component_map.ComponentMap @@ -674,7 +674,7 @@ def latex_printer( use_equation_environment = True isSingle = True - elif isinstance(pyomo_component, _BlockData): + elif isinstance(pyomo_component, BlockData): objectives = [ obj for obj in pyomo_component.component_data_objects( @@ -705,10 +705,8 @@ def latex_printer( if isSingle: temp_comp, temp_indexes = templatize_fcn(pyomo_component) variableList = [] - for v in identify_components( - temp_comp, [ScalarVar, _GeneralVarData, IndexedVar] - ): - if isinstance(v, _GeneralVarData): + for v in identify_components(temp_comp, [ScalarVar, VarData, IndexedVar]): + if isinstance(v, VarData): v_write = v.parent_component() if v_write not in ComponentSet(variableList): variableList.append(v_write) @@ -717,10 +715,8 @@ def latex_printer( variableList.append(v) parameterList = [] - for p in identify_components( - temp_comp, [ScalarParam, _ParamData, IndexedParam] - ): - if isinstance(p, _ParamData): + for p in identify_components(temp_comp, [ScalarParam, ParamData, IndexedParam]): + if isinstance(p, ParamData): p_write = p.parent_component() if p_write not in ComponentSet(parameterList): parameterList.append(p_write) @@ -1275,17 +1271,17 @@ def get_index_names(st, lcm): rep_dict = {} for ky in reversed(list(latex_component_map)): - if isinstance(ky, (pyo.Var, _GeneralVarData)): + if isinstance(ky, (pyo.Var, VarData)): overwrite_value = latex_component_map[ky] if ky not in existing_components: overwrite_value = overwrite_value.replace('_', '\\_') rep_dict[variableMap[ky]] = overwrite_value - elif isinstance(ky, (pyo.Param, _ParamData)): + elif isinstance(ky, (pyo.Param, ParamData)): overwrite_value = latex_component_map[ky] if ky not in existing_components: overwrite_value = overwrite_value.replace('_', '\\_') rep_dict[parameterMap[ky]] = overwrite_value - elif isinstance(ky, _SetData): + elif isinstance(ky, SetData): # already handled pass elif isinstance(ky, (float, int)): diff --git a/pyomo/contrib/mcpp/pyomo_mcpp.py b/pyomo/contrib/mcpp/pyomo_mcpp.py index 35e883f98da..0ef0237681b 100644 --- a/pyomo/contrib/mcpp/pyomo_mcpp.py +++ b/pyomo/contrib/mcpp/pyomo_mcpp.py @@ -20,7 +20,7 @@ from pyomo.common.fileutils import Library from pyomo.core import value, Expression from pyomo.core.base.block import SubclassOf -from pyomo.core.base.expression import _ExpressionData +from pyomo.core.base.expression import NamedExpressionData from pyomo.core.expr.numvalue import nonpyomo_leaf_types from pyomo.core.expr.numeric_expr import ( AbsExpression, @@ -307,7 +307,9 @@ def exitNode(self, node, data): ans = self.mcpp.newConstant(node) elif not node.is_expression_type(): ans = self.register_num(node) - elif type(node) in SubclassOf(Expression) or isinstance(node, _ExpressionData): + elif type(node) in SubclassOf(Expression) or isinstance( + node, NamedExpressionData + ): ans = data[0] else: raise RuntimeError("Unhandled expression type: %s" % (type(node))) diff --git a/pyomo/contrib/mindtpy/algorithm_base_class.py b/pyomo/contrib/mindtpy/algorithm_base_class.py index 785a89d8982..8c703f8d842 100644 --- a/pyomo/contrib/mindtpy/algorithm_base_class.py +++ b/pyomo/contrib/mindtpy/algorithm_base_class.py @@ -27,13 +27,7 @@ from operator import itemgetter from pyomo.common.errors import DeveloperError from pyomo.solvers.plugins.solvers.gurobi_direct import gurobipy -from pyomo.opt import ( - SolverFactory, - SolverResults, - ProblemSense, - SolutionStatus, - SolverStatus, -) +from pyomo.opt import SolverFactory, SolverResults, SolutionStatus, 
SolverStatus from pyomo.core import ( minimize, maximize, @@ -633,9 +627,7 @@ def process_objective(self, update_var_con_list=True): raise ValueError('Model has multiple active objectives.') else: main_obj = active_objectives[0] - self.results.problem.sense = ( - ProblemSense.minimize if main_obj.sense == 1 else ProblemSense.maximize - ) + self.results.problem.sense = main_obj.sense self.objective_sense = main_obj.sense # Move the objective to the constraints if it is nonlinear or move_objective is True. diff --git a/pyomo/contrib/mindtpy/util.py b/pyomo/contrib/mindtpy/util.py index 1543497838f..7345af8a3e2 100644 --- a/pyomo/contrib/mindtpy/util.py +++ b/pyomo/contrib/mindtpy/util.py @@ -29,7 +29,6 @@ from pyomo.contrib.mcpp.pyomo_mcpp import mcpp_available, McCormick from pyomo.contrib.fbbt.fbbt import compute_bounds_on_expr import pyomo.core.expr as EXPR -from pyomo.opt import ProblemSense from pyomo.contrib.gdpopt.util import get_main_elapsed_time, time_code from pyomo.util.model_size import build_model_size_report from pyomo.common.dependencies import attempt_import diff --git a/pyomo/contrib/parmest/utils/scenario_tree.py b/pyomo/contrib/parmest/utils/scenario_tree.py index e71f51877b5..f245e053cad 100644 --- a/pyomo/contrib/parmest/utils/scenario_tree.py +++ b/pyomo/contrib/parmest/utils/scenario_tree.py @@ -25,7 +25,7 @@ def build_vardatalist(self, model, varlist=None): """ - Convert a list of pyomo variables to a list of ScalarVar and _GeneralVarData. If varlist is none, builds a + Convert a list of pyomo variables to a list of ScalarVar and VarData. If varlist is none, builds a list of all variables in the model. The new list is stored in the vars_to_tighten attribute. By CD Laird Parameters diff --git a/pyomo/contrib/piecewise/piecewise_linear_function.py b/pyomo/contrib/piecewise/piecewise_linear_function.py index 66ca02ad125..e92edacc756 100644 --- a/pyomo/contrib/piecewise/piecewise_linear_function.py +++ b/pyomo/contrib/piecewise/piecewise_linear_function.py @@ -20,7 +20,7 @@ PiecewiseLinearExpression, ) from pyomo.core import Any, NonNegativeIntegers, value, Var -from pyomo.core.base.block import _BlockData, Block +from pyomo.core.base.block import BlockData, Block from pyomo.core.base.component import ModelComponentFactory from pyomo.core.base.expression import Expression from pyomo.core.base.global_set import UnindexedComponent_index @@ -36,11 +36,11 @@ logger = logging.getLogger(__name__) -class PiecewiseLinearFunctionData(_BlockData): +class PiecewiseLinearFunctionData(BlockData): _Block_reserved_words = Any def __init__(self, component=None): - _BlockData.__init__(self, component) + BlockData.__init__(self, component) with self._declare_reserved_components(): self._expressions = Expression(NonNegativeIntegers) diff --git a/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py b/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py index 2e056c47a15..5417cbc17f4 100644 --- a/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py +++ b/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py @@ -33,7 +33,7 @@ Any, ) from pyomo.core.base import Transformation -from pyomo.core.base.block import _BlockData, Block +from pyomo.core.base.block import Block from pyomo.core.util import target_list from pyomo.gdp import Disjunct, Disjunction from pyomo.gdp.util import is_child_of @@ -147,7 +147,7 @@ def _apply_to_impl(self, instance, **kwds): self._transform_piecewise_linear_function( t, config.descend_into_expressions ) - 
elif t.ctype is Block or isinstance(t, _BlockData): + elif issubclass(t.ctype, Block): self._transform_block(t, config.descend_into_expressions) elif t.ctype is Constraint: if not config.descend_into_expressions: diff --git a/pyomo/contrib/preprocessing/plugins/var_aggregator.py b/pyomo/contrib/preprocessing/plugins/var_aggregator.py index d862f167fd7..3430d29de3a 100644 --- a/pyomo/contrib/preprocessing/plugins/var_aggregator.py +++ b/pyomo/contrib/preprocessing/plugins/var_aggregator.py @@ -13,7 +13,14 @@ from pyomo.common.collections import ComponentMap, ComponentSet -from pyomo.core.base import Block, Constraint, VarList, Objective, TransformationFactory +from pyomo.core.base import ( + Block, + Constraint, + VarList, + Objective, + Reals, + TransformationFactory, +) from pyomo.core.expr import ExpressionReplacementVisitor from pyomo.core.expr.numvalue import value from pyomo.core.plugins.transform.hierarchy import IsomorphicTransformation @@ -248,6 +255,12 @@ def _apply_to(self, model, detect_fixed_vars=True): # the variables in its equality set. z_agg.setlb(max_if_not_None(v.lb for v in eq_set if v.has_lb())) z_agg.setub(min_if_not_None(v.ub for v in eq_set if v.has_ub())) + # Set the domain of the aggregate variable to the intersection of + # the domains of the variables in its equality set + domain = Reals + for v in eq_set: + domain = domain & v.domain + z_agg.domain = domain # Set the fixed status of the aggregate var fixed_vars = [v for v in eq_set if v.fixed] diff --git a/pyomo/contrib/preprocessing/tests/test_var_aggregator.py b/pyomo/contrib/preprocessing/tests/test_var_aggregator.py index 6f6d02f2180..b0b672b76b0 100644 --- a/pyomo/contrib/preprocessing/tests/test_var_aggregator.py +++ b/pyomo/contrib/preprocessing/tests/test_var_aggregator.py @@ -19,12 +19,16 @@ max_if_not_None, min_if_not_None, ) +from pyomo.core.expr.compare import assertExpressionsEqual from pyomo.environ import ( + Binary, ConcreteModel, Constraint, ConstraintList, + maximize, Objective, RangeSet, + Reals, SolverFactory, TransformationFactory, Var, @@ -210,6 +214,36 @@ def test_var_update(self): self.assertEqual(m.x.value, 0) self.assertEqual(m.y.value, 0) + def test_binary_inequality(self): + m = ConcreteModel() + m.x = Var(domain=Binary) + m.y = Var(domain=Binary) + m.c = Constraint(expr=m.x == m.y) + m.o = Objective(expr=0.5 * m.x + m.y, sense=maximize) + TransformationFactory('contrib.aggregate_vars').apply_to(m) + var_to_z = m._var_aggregator_info.var_to_z + z = var_to_z[m.x] + self.assertIs(var_to_z[m.y], z) + self.assertEqual(z.domain, Binary) + self.assertEqual(z.lb, 0) + self.assertEqual(z.ub, 1) + assertExpressionsEqual(self, m.o.expr, 0.5 * z + z) + + def test_equality_different_domains(self): + m = ConcreteModel() + m.x = Var(domain=Reals, bounds=(1, 2)) + m.y = Var(domain=Binary) + m.c = Constraint(expr=m.x == m.y) + m.o = Objective(expr=0.5 * m.x + m.y, sense=maximize) + TransformationFactory('contrib.aggregate_vars').apply_to(m) + var_to_z = m._var_aggregator_info.var_to_z + z = var_to_z[m.x] + self.assertIs(var_to_z[m.y], z) + self.assertEqual(z.lb, 1) + self.assertEqual(z.ub, 1) + self.assertEqual(z.domain, Binary) + assertExpressionsEqual(self, m.o.expr, 0.5 * z + z) + if __name__ == '__main__': unittest.main() diff --git a/pyomo/contrib/pynumero/README.md b/pyomo/contrib/pynumero/README.md index 0d165dbc39c..f881e400d51 100644 --- a/pyomo/contrib/pynumero/README.md +++ b/pyomo/contrib/pynumero/README.md @@ -71,3 +71,75 @@ Prerequisites - cmake - a C/C++ compiler - MA57 library or 
COIN-HSL Full
+
+Code organization
+=================
+
+PyNumero was initially designed around three core components: linear solver
+interfaces, an interface for function and derivative callbacks, and block
+vector and matrix classes. Since then, it has incorporated additional
+functionality in an ad-hoc manner. The original "core functionality" of
+PyNumero, as well as the solver interfaces accessible through
+`SolverFactory`, should be considered stable and will only change after
+appropriate deprecation warnings. Other functionality should be considered
+experimental and subject to change without warning.
+
+The following is a rough overview of PyNumero, by directory:
+
+`linalg`
+--------
+
+Python interfaces to linear solvers. This is core functionality.
+
+`interfaces`
+------------
+
+- Classes that define and implement an API for function and derivative callbacks
+required by nonlinear optimization solvers, e.g. `nlp.py` and `pyomo_nlp.py`
+- Various wrappers around these NLP classes to support "hybrid" implementations,
+e.g. `PyomoNLPWithGreyBoxBlocks`
+- The `ExternalGreyBoxBlock` Pyomo modeling component and
+`ExternalGreyBoxModel` API
+- The `ExternalPyomoModel` implementation of `ExternalGreyBoxModel`, which allows
+definition of an external grey box via an implicit function
+- The `CyIpoptNLP` class, which wraps an object implementing the NLP API in
+the interface required by CyIpopt
+
+Of the above, only `PyomoNLP` and the `NLP` base class should be considered core
+functionality.
+
+`src`
+-----
+
+C++ interfaces to ASL, MA27, and MA57. The ASL and MA27 interfaces are
+core functionality.
+
+`sparse`
+--------
+
+Block vector and block matrix classes, including MPI variations.
+These are core functionality.
+
+`algorithms`
+------------
+
+Originally intended to hold various useful algorithms implemented
+on NLP objects rather than Pyomo models. Any files added here should
+be considered experimental.
+
+`algorithms/solvers`
+--------------------
+
+Interfaces to Python solvers using the NLP API defined in `interfaces`.
+Only the solvers accessible through `SolverFactory`, e.g. `PyomoCyIpoptSolver`
+and `PyomoFsolveSolver`, should be considered core functionality.
+The supported way to access these solvers is via `SolverFactory`. *The locations
+of the underlying solver objects are subject to change without warning.*
+
+`examples`
+----------
+
+The examples demonstrated in `nlp_interface.py`, `nlp_interface_2.py`,
+`feasibility.py`, `mumps_example.py`, `sensitivity.py`, `sqp.py`,
+`parallel_matvec.py`, and `parallel_vector_ops.py` are stable. All other
+examples should be considered experimental.
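As a quick orientation for the stable entry points named in the README section above, here is a minimal sketch of wrapping a Pyomo model in `PyomoNLP` and reaching an NLP-based solver through `SolverFactory`. The toy model, variable names, and printed quantities are illustrative assumptions only; the sketch presumes the `pynumero_ASL` extension is built and, for the final solve, that CyIpopt is installed.

```python
# Minimal sketch of the stable PyNumero workflow (illustrative model only).
import pyomo.environ as pyo
from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP

m = pyo.ConcreteModel()
m.x = pyo.Var([1, 2], initialize=1.0)
m.c = pyo.Constraint(expr=m.x[1] + m.x[2] == 1)
m.obj = pyo.Objective(expr=m.x[1] ** 2 + m.x[2] ** 2)

# The NLP API exposes vector/matrix callbacks over the model
# (requires the pynumero_ASL extension)
nlp = PyomoNLP(m)
print(nlp.n_primals(), nlp.n_constraints())
print(nlp.evaluate_objective())       # objective at the current primal values
print(nlp.evaluate_grad_objective())  # dense objective gradient (numpy array)
print(nlp.evaluate_jacobian())        # constraint Jacobian (scipy sparse matrix)

# The supported handle on the NLP-based solvers is SolverFactory
# (requires CyIpopt)
pyo.SolverFactory("cyipopt").solve(m, tee=True)
```

Going through `SolverFactory`, rather than importing the solver classes from their module paths, is the supported pattern, since those internal locations may move without warning.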
diff --git a/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py b/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py index cdea542295b..0999550711c 100644 --- a/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py +++ b/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py @@ -65,7 +65,7 @@ from pyomo.common.config import ConfigBlock, ConfigValue from pyomo.common.timing import TicTocTimer from pyomo.core.base import Block, Objective, minimize -from pyomo.opt import SolverStatus, SolverResults, TerminationCondition, ProblemSense +from pyomo.opt import SolverStatus, SolverResults, TerminationCondition from pyomo.opt.results.solution import Solution logger = logging.getLogger(__name__) @@ -319,7 +319,13 @@ def license_is_valid(self): return True def version(self): - return tuple(int(_) for _ in cyipopt.__version__.split(".")) + def _int(x): + try: + return int(x) + except: + return x + + return tuple(_int(_) for _ in cyipopt_interface.cyipopt.__version__.split(".")) def solve(self, model, **kwds): config = self.config(kwds, preserve_implicit=True) @@ -441,11 +447,10 @@ def solve(self, model, **kwds): results.problem.name = model.name obj = next(model.component_data_objects(Objective, active=True)) + results.problem.sense = obj.sense if obj.sense == minimize: - results.problem.sense = ProblemSense.minimize results.problem.upper_bound = info["obj_val"] else: - results.problem.sense = ProblemSense.maximize results.problem.lower_bound = info["obj_val"] results.problem.number_of_objectives = 1 results.problem.number_of_constraints = ng diff --git a/pyomo/contrib/pynumero/algorithms/solvers/pyomo_ext_cyipopt.py b/pyomo/contrib/pynumero/algorithms/solvers/pyomo_ext_cyipopt.py index 16c5a19a5c6..7f43f6ac7c0 100644 --- a/pyomo/contrib/pynumero/algorithms/solvers/pyomo_ext_cyipopt.py +++ b/pyomo/contrib/pynumero/algorithms/solvers/pyomo_ext_cyipopt.py @@ -16,7 +16,7 @@ from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP from pyomo.contrib.pynumero.sparse.block_vector import BlockVector from pyomo.environ import Var, Constraint, value -from pyomo.core.base.var import _VarData +from pyomo.core.base.var import VarData from pyomo.common.modeling import unique_component_name """ @@ -109,12 +109,12 @@ def __init__( An instance of a derived class (from ExternalInputOutputModel) that provides the methods to compute the outputs and the derivatives. - inputs : list of Pyomo variables (_VarData) + inputs : list of Pyomo variables (VarData) The Pyomo model needs to have variables to represent the inputs to the external model. This is the list of those input variables in the order that corresponds to the input_values vector provided in the set_inputs call. - outputs : list of Pyomo variables (_VarData) + outputs : list of Pyomo variables (VarData) The Pyomo model needs to have variables to represent the outputs from the external model. This is the list of those output variables in the order that corresponds to the numpy array returned from the evaluate_outputs call. @@ -130,7 +130,7 @@ def __init__( # verify that the inputs and outputs were passed correctly self._inputs = [v for v in inputs] for v in self._inputs: - if not isinstance(v, _VarData): + if not isinstance(v, VarData): raise RuntimeError( 'Argument inputs passed to PyomoExternalCyIpoptProblem must be' ' a list of VarData objects. 
Note: if you have an indexed variable, pass' @@ -139,7 +139,7 @@ def __init__( self._outputs = [v for v in outputs] for v in self._outputs: - if not isinstance(v, _VarData): + if not isinstance(v, VarData): raise RuntimeError( 'Argument outputs passed to PyomoExternalCyIpoptProblem must be' ' a list of VarData objects. Note: if you have an indexed variable, pass' diff --git a/pyomo/contrib/pynumero/examples/tests/test_cyipopt_examples.py b/pyomo/contrib/pynumero/examples/tests/test_cyipopt_examples.py index 408a0197382..2df43c1e797 100644 --- a/pyomo/contrib/pynumero/examples/tests/test_cyipopt_examples.py +++ b/pyomo/contrib/pynumero/examples/tests/test_cyipopt_examples.py @@ -44,11 +44,13 @@ raise unittest.SkipTest("Pynumero needs the ASL extension to run CyIpopt tests") import pyomo.contrib.pynumero.algorithms.solvers.cyipopt_solver as cyipopt_solver +from pyomo.contrib.pynumero.interfaces.cyipopt_interface import cyipopt_available -if not cyipopt_solver.cyipopt_available: +if not cyipopt_available: raise unittest.SkipTest("PyNumero needs CyIpopt installed to run CyIpopt tests") import cyipopt as cyipopt_core + example_dir = os.path.join(this_file_dir(), '..') @@ -266,6 +268,11 @@ def test_cyipopt_functor(self): s = df['ca_bal'] self.assertAlmostEqual(s.iloc[6], 0, places=3) + @unittest.skipIf( + cyipopt_solver.PyomoCyIpoptSolver().version() == (1, 4, 0), + "Terminating Ipopt through a user callback is broken in CyIpopt 1.4.0 " + "(see mechmotum/cyipopt#249)", + ) def test_cyipopt_callback_halt(self): ex = import_file( os.path.join(example_dir, 'callback', 'cyipopt_callback_halt.py') diff --git a/pyomo/contrib/pynumero/interfaces/external_grey_box.py b/pyomo/contrib/pynumero/interfaces/external_grey_box.py index 7e42f161bee..68e652575cc 100644 --- a/pyomo/contrib/pynumero/interfaces/external_grey_box.py +++ b/pyomo/contrib/pynumero/interfaces/external_grey_box.py @@ -18,7 +18,7 @@ from pyomo.common.log import is_debug_set from pyomo.common.timing import ConstructionTimer from pyomo.core.base import Var, Set, Constraint, value -from pyomo.core.base.block import _BlockData, Block, declare_custom_block +from pyomo.core.base.block import BlockData, Block, declare_custom_block from pyomo.core.base.global_set import UnindexedComponent_index from pyomo.core.base.initializer import Initializer from pyomo.core.base.set import UnindexedComponent_set @@ -316,7 +316,7 @@ def evaluate_jacobian_outputs(self): # -class ExternalGreyBoxBlockData(_BlockData): +class ExternalGreyBoxBlockData(BlockData): def set_external_model(self, external_grey_box_model, inputs=None, outputs=None): """ Parameters @@ -424,7 +424,7 @@ class ScalarExternalGreyBoxBlock(ExternalGreyBoxBlockData, ExternalGreyBoxBlock) def __init__(self, *args, **kwds): ExternalGreyBoxBlockData.__init__(self, component=self) ExternalGreyBoxBlock.__init__(self, *args, **kwds) - # The above inherit from Block and _BlockData, so it's not until here + # The above inherit from Block and BlockData, so it's not until here # that we know it's scalar. So we set the index accordingly. 
self._index = UnindexedComponent_index diff --git a/pyomo/contrib/pynumero/interfaces/pyomo_nlp.py b/pyomo/contrib/pynumero/interfaces/pyomo_nlp.py index 51edd09311a..e12d0cf568b 100644 --- a/pyomo/contrib/pynumero/interfaces/pyomo_nlp.py +++ b/pyomo/contrib/pynumero/interfaces/pyomo_nlp.py @@ -22,6 +22,7 @@ import pyomo.core.base as pyo from pyomo.common.collections import ComponentMap from pyomo.common.env import CtypesEnviron +from pyomo.solvers.amplfunc_merge import amplfunc_merge from ..sparse.block_matrix import BlockMatrix from pyomo.contrib.pynumero.interfaces.ampl_nlp import AslNLP from pyomo.contrib.pynumero.interfaces.nlp import NLP @@ -92,15 +93,8 @@ def __init__(self, pyomo_model, nl_file_options=None): # The NL writer advertises the external function libraries # through the PYOMO_AMPLFUNC environment variable; merge it # with any preexisting AMPLFUNC definitions - amplfunc = "\n".join( - filter( - None, - ( - os.environ.get('AMPLFUNC', None), - os.environ.get('PYOMO_AMPLFUNC', None), - ), - ) - ) + amplfunc = amplfunc_merge(os.environ) + with CtypesEnviron(AMPLFUNC=amplfunc): super(PyomoNLP, self).__init__(nl_file) diff --git a/pyomo/contrib/pyros/CHANGELOG.txt b/pyomo/contrib/pyros/CHANGELOG.txt index 94f4848edb2..52cd7a6db47 100644 --- a/pyomo/contrib/pyros/CHANGELOG.txt +++ b/pyomo/contrib/pyros/CHANGELOG.txt @@ -2,6 +2,17 @@ PyROS CHANGELOG =============== +------------------------------------------------------------------------------- +PyROS 1.2.11 17 Mar 2024 +------------------------------------------------------------------------------- +- Standardize calls to subordinate solvers across all PyROS subproblem types +- Account for user-specified subsolver time limits when automatically + adjusting subsolver time limits +- Add support for automatic adjustment of SCIP subsolver time limit +- Move start point of main PyROS solver timer to just before argument + validation begins + + ------------------------------------------------------------------------------- PyROS 1.2.10 07 Feb 2024 ------------------------------------------------------------------------------- diff --git a/pyomo/contrib/pyros/config.py b/pyomo/contrib/pyros/config.py index bc2bfd591e6..c02dcd7ed0f 100644 --- a/pyomo/contrib/pyros/config.py +++ b/pyomo/contrib/pyros/config.py @@ -16,8 +16,8 @@ Path, ) from pyomo.common.errors import ApplicationError, PyomoException -from pyomo.core.base import Var, _VarData -from pyomo.core.base.param import Param, _ParamData +from pyomo.core.base import Var, VarData +from pyomo.core.base.param import Param, ParamData from pyomo.opt import SolverFactory from pyomo.contrib.pyros.util import ObjectiveType, setup_pyros_logger from pyomo.contrib.pyros.uncertainty_sets import UncertaintySet @@ -62,7 +62,7 @@ def mutable_param_validator(param_obj): Parameters ---------- - param_obj : Param or _ParamData + param_obj : Param or ParamData Param-like object of interest. Raises @@ -98,7 +98,7 @@ class InputDataStandardizer(object): Pyomo component type, such as Component, Var or Param. cdatatype : type Corresponding Pyomo component data type, such as - _ComponentData, _VarData, or _ParamData. + ComponentData, VarData, or ParamData. ctype_validator : callable, optional Validator function for objects of type `ctype`. 
cdatatype_validator : callable, optional @@ -503,6 +503,21 @@ def pyros_config(): ), ), ) + CONFIG.declare( + 'symbolic_solver_labels', + ConfigValue( + default=False, + domain=bool, + description=( + """ + True to ensure the component names given to the + subordinate solvers for every subproblem reflect + the names of the corresponding Pyomo modeling components, + False otherwise. + """ + ), + ), + ) # ================================================ # === Required User Inputs @@ -511,7 +526,7 @@ def pyros_config(): "first_stage_variables", ConfigValue( default=[], - domain=InputDataStandardizer(Var, _VarData, allow_repeats=False), + domain=InputDataStandardizer(Var, VarData, allow_repeats=False), description="First-stage (or design) variables.", visibility=1, ), @@ -520,7 +535,7 @@ def pyros_config(): "second_stage_variables", ConfigValue( default=[], - domain=InputDataStandardizer(Var, _VarData, allow_repeats=False), + domain=InputDataStandardizer(Var, VarData, allow_repeats=False), description="Second-stage (or control) variables.", visibility=1, ), @@ -531,7 +546,7 @@ def pyros_config(): default=[], domain=InputDataStandardizer( ctype=Param, - cdatatype=_ParamData, + cdatatype=ParamData, ctype_validator=mutable_param_validator, allow_repeats=False, ), diff --git a/pyomo/contrib/pyros/master_problem_methods.py b/pyomo/contrib/pyros/master_problem_methods.py index 8b9e85b90e9..2af38c1d582 100644 --- a/pyomo/contrib/pyros/master_problem_methods.py +++ b/pyomo/contrib/pyros/master_problem_methods.py @@ -27,6 +27,7 @@ from pyomo.core.expr import value from pyomo.core.base.set_types import NonNegativeIntegers, NonNegativeReals from pyomo.contrib.pyros.util import ( + call_solver, selective_clone, ObjectiveType, pyrosTerminationCondition, @@ -239,31 +240,18 @@ def solve_master_feasibility_problem(model_data, config): else: solver = config.local_solver - timer = TicTocTimer() - orig_setting, custom_setting_present = adjust_solver_time_settings( - model_data.timing, solver, config - ) - model_data.timing.start_timer("main.master_feasibility") - timer.tic(msg=None) - try: - results = solver.solve(model, tee=config.tee, load_solutions=False) - except ApplicationError: - # account for possible external subsolver errors - # (such as segmentation faults, function evaluation - # errors, etc.) - config.progress_logger.error( + results = call_solver( + model=model, + solver=solver, + config=config, + timing_obj=model_data.timing, + timer_name="main.master_feasibility", + err_msg=( f"Optimizer {repr(solver)} encountered exception " "attempting to solve master feasibility problem in iteration " f"{model_data.iteration}." 
- ) - raise - else: - setattr(results.solver, TIC_TOC_SOLVE_TIME_ATTR, timer.toc(msg=None)) - model_data.timing.stop_timer("main.master_feasibility") - finally: - revert_solver_max_time_adjustment( - solver, orig_setting, custom_setting_present, config - ) + ), + ) feasible_terminations = { tc.optimal, @@ -482,28 +470,18 @@ def minimize_dr_vars(model_data, config): config.progress_logger.debug(f" Initial DR norm: {value(polishing_obj)}") # === Solve the polishing model - timer = TicTocTimer() - orig_setting, custom_setting_present = adjust_solver_time_settings( - model_data.timing, solver, config - ) - model_data.timing.start_timer("main.dr_polishing") - timer.tic(msg=None) - try: - results = solver.solve(polishing_model, tee=config.tee, load_solutions=False) - except ApplicationError: - config.progress_logger.error( + results = call_solver( + model=polishing_model, + solver=solver, + config=config, + timing_obj=model_data.timing, + timer_name="main.dr_polishing", + err_msg=( f"Optimizer {repr(solver)} encountered an exception " "attempting to solve decision rule polishing problem " f"in iteration {model_data.iteration}" - ) - raise - else: - setattr(results.solver, TIC_TOC_SOLVE_TIME_ATTR, timer.toc(msg=None)) - model_data.timing.stop_timer("main.dr_polishing") - finally: - revert_solver_max_time_adjustment( - solver, orig_setting, custom_setting_present, config - ) + ), + ) # interested in the time and termination status for debugging # purposes @@ -726,7 +704,6 @@ def solver_call_master(model_data, config, solver, solve_data): solve_mode = "global" if config.solve_master_globally else "local" config.progress_logger.debug("Solving master problem") - timer = TicTocTimer() for idx, opt in enumerate(solvers): if idx > 0: config.progress_logger.warning( @@ -734,35 +711,18 @@ def solver_call_master(model_data, config, solver, solve_data): f"(solver {idx + 1} of {len(solvers)}) for " f"master problem of iteration {model_data.iteration}." ) - orig_setting, custom_setting_present = adjust_solver_time_settings( - model_data.timing, opt, config - ) - model_data.timing.start_timer("main.master") - timer.tic(msg=None) - try: - results = opt.solve( - nlp_model, - tee=config.tee, - load_solutions=False, - symbolic_solver_labels=True, - ) - except ApplicationError: - # account for possible external subsolver errors - # (such as segmentation faults, function evaluation - # errors, etc.) 
- config.progress_logger.error( + results = call_solver( + model=nlp_model, + solver=opt, + config=config, + timing_obj=model_data.timing, + timer_name="main.master", + err_msg=( f"Optimizer {repr(opt)} ({idx + 1} of {len(solvers)}) " "encountered exception attempting to " f"solve master problem in iteration {model_data.iteration}" - ) - raise - else: - setattr(results.solver, TIC_TOC_SOLVE_TIME_ATTR, timer.toc(msg=None)) - model_data.timing.stop_timer("main.master") - finally: - revert_solver_max_time_adjustment( - solver, orig_setting, custom_setting_present, config - ) + ), + ) optimal_termination = check_optimal_termination(results) infeasible = results.solver.termination_condition == tc.infeasible diff --git a/pyomo/contrib/pyros/pyros.py b/pyomo/contrib/pyros/pyros.py index 6de42d7299e..582233c4a56 100644 --- a/pyomo/contrib/pyros/pyros.py +++ b/pyomo/contrib/pyros/pyros.py @@ -12,7 +12,6 @@ # pyros.py: Generalized Robust Cutting-Set Algorithm for Pyomo import logging from pyomo.common.config import document_kwargs_from_configdict -from pyomo.common.collections import Bunch from pyomo.core.base.block import Block from pyomo.core.expr import value from pyomo.core.base.var import Var @@ -20,7 +19,7 @@ from pyomo.contrib.pyros.util import time_code from pyomo.common.modeling import unique_component_name from pyomo.opt import SolverFactory -from pyomo.contrib.pyros.config import pyros_config +from pyomo.contrib.pyros.config import pyros_config, logger_domain from pyomo.contrib.pyros.util import ( recast_to_min_obj, add_decision_rule_constraints, @@ -44,7 +43,7 @@ from datetime import datetime -__version__ = "1.2.10" +__version__ = "1.2.11" default_pyros_solver_logger = setup_pyros_logger() @@ -330,32 +329,41 @@ def solve( Summary of PyROS termination outcome. """ - kwds.update( - dict( - first_stage_variables=first_stage_variables, - second_stage_variables=second_stage_variables, - uncertain_params=uncertain_params, - uncertainty_set=uncertainty_set, - local_solver=local_solver, - global_solver=global_solver, - ) - ) - config, state_vars = self._resolve_and_validate_pyros_args(model, **kwds) - - # === Create data containers model_data = ROSolveResults() - model_data.timing = Bunch() - - # === Start timer, run the algorithm model_data.timing = TimingData() with time_code( timing_data_obj=model_data.timing, code_block_name="main", is_main_timer=True, ): - # output intro and disclaimer - self._log_intro(logger=config.progress_logger, level=logging.INFO) - self._log_disclaimer(logger=config.progress_logger, level=logging.INFO) + kwds.update( + dict( + first_stage_variables=first_stage_variables, + second_stage_variables=second_stage_variables, + uncertain_params=uncertain_params, + uncertainty_set=uncertainty_set, + local_solver=local_solver, + global_solver=global_solver, + ) + ) + + # we want to log the intro and disclaimer in + # advance of assembling the config. 
+ # this helps clarify to the user that any + # messages logged during assembly of the config + # were, in fact, logged after PyROS was initiated + progress_logger = logger_domain( + kwds.get( + "progress_logger", + kwds.get("options", dict()).get( + "progress_logger", default_pyros_solver_logger + ), + ) + ) + self._log_intro(logger=progress_logger, level=logging.INFO) + self._log_disclaimer(logger=progress_logger, level=logging.INFO) + + config, state_vars = self._resolve_and_validate_pyros_args(model, **kwds) self._log_config( logger=config.progress_logger, config=config, diff --git a/pyomo/contrib/pyros/pyros_algorithm_methods.py b/pyomo/contrib/pyros/pyros_algorithm_methods.py index 5987db074e6..cfb57b08c7f 100644 --- a/pyomo/contrib/pyros/pyros_algorithm_methods.py +++ b/pyomo/contrib/pyros/pyros_algorithm_methods.py @@ -28,7 +28,7 @@ from pyomo.core.base import value from pyomo.core.expr import MonomialTermExpression from pyomo.common.collections import ComponentSet, ComponentMap -from pyomo.core.base.var import _VarData as VarData +from pyomo.core.base.var import VarData as VarData from itertools import chain from pyomo.common.dependencies import numpy as np diff --git a/pyomo/contrib/pyros/separation_problem_methods.py b/pyomo/contrib/pyros/separation_problem_methods.py index b5939ff5b19..18d0925bab0 100644 --- a/pyomo/contrib/pyros/separation_problem_methods.py +++ b/pyomo/contrib/pyros/separation_problem_methods.py @@ -18,7 +18,6 @@ from pyomo.core.base import Var, Param from pyomo.common.collections import ComponentSet, ComponentMap from pyomo.common.dependencies import numpy as np -from pyomo.contrib.pyros.util import ObjectiveType, get_time_from_solver from pyomo.contrib.pyros.solve_data import ( DiscreteSeparationSolveCallResults, SeparationSolveCallResults, @@ -37,9 +36,11 @@ from pyomo.contrib.pyros.util import ABS_CON_CHECK_FEAS_TOL from pyomo.common.timing import TicTocTimer from pyomo.contrib.pyros.util import ( - TIC_TOC_SOLVE_TIME_ATTR, adjust_solver_time_settings, + call_solver, + ObjectiveType, revert_solver_max_time_adjustment, + TIC_TOC_SOLVE_TIME_ATTR, ) import os from copy import deepcopy @@ -1070,6 +1071,7 @@ def solver_call_separation( separation_obj.activate() + solve_mode_adverb = "globally" if solve_globally else "locally" solve_call_results = SeparationSolveCallResults( solved_globally=solve_globally, time_out=False, @@ -1077,7 +1079,6 @@ def solver_call_separation( found_violation=False, subsolver_error=False, ) - timer = TicTocTimer() for idx, opt in enumerate(solvers): if idx > 0: config.progress_logger.warning( @@ -1086,37 +1087,19 @@ def solver_call_separation( f"separation of performance constraint {con_name_repr} " f"in iteration {model_data.iteration}." ) - orig_setting, custom_setting_present = adjust_solver_time_settings( - model_data.timing, opt, config - ) - model_data.timing.start_timer(f"main.{solve_mode}_separation") - timer.tic(msg=None) - try: - results = opt.solve( - nlp_model, - tee=config.tee, - load_solutions=False, - symbolic_solver_labels=True, - ) - except ApplicationError: - # account for possible external subsolver errors - # (such as segmentation faults, function evaluation - # errors, etc.) 
- adverb = "globally" if solve_globally else "locally" - config.progress_logger.error( + results = call_solver( + model=nlp_model, + solver=opt, + config=config, + timing_obj=model_data.timing, + timer_name=f"main.{solve_mode}_separation", + err_msg=( f"Optimizer {repr(opt)} ({idx + 1} of {len(solvers)}) " f"encountered exception attempting " - f"to {adverb} solve separation problem for constraint " + f"to {solve_mode_adverb} solve separation problem for constraint " f"{con_name_repr} in iteration {model_data.iteration}." - ) - raise - else: - setattr(results.solver, TIC_TOC_SOLVE_TIME_ATTR, timer.toc(msg=None)) - model_data.timing.stop_timer(f"main.{solve_mode}_separation") - finally: - revert_solver_max_time_adjustment( - opt, orig_setting, custom_setting_present, config - ) + ), + ) # record termination condition for this particular solver solver_status_dict[str(opt)] = results.solver.termination_condition diff --git a/pyomo/contrib/pyros/tests/test_config.py b/pyomo/contrib/pyros/tests/test_config.py index 0f52d04135d..166fbada4ff 100644 --- a/pyomo/contrib/pyros/tests/test_config.py +++ b/pyomo/contrib/pyros/tests/test_config.py @@ -5,10 +5,10 @@ import logging import unittest -from pyomo.core.base import ConcreteModel, Var, _VarData +from pyomo.core.base import ConcreteModel, Var, VarData from pyomo.common.log import LoggingIntercept from pyomo.common.errors import ApplicationError -from pyomo.core.base.param import Param, _ParamData +from pyomo.core.base.param import Param, ParamData from pyomo.contrib.pyros.config import ( InputDataStandardizer, mutable_param_validator, @@ -38,7 +38,7 @@ def test_single_component_data(self): mdl = ConcreteModel() mdl.v = Var([0, 1]) - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) standardizer_input = mdl.v[0] standardizer_output = standardizer_func(standardizer_input) @@ -74,7 +74,7 @@ def test_standardizer_indexed_component(self): mdl = ConcreteModel() mdl.v = Var([0, 1]) - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) standardizer_input = mdl.v standardizer_output = standardizer_func(standardizer_input) @@ -113,7 +113,7 @@ def test_standardizer_multiple_components(self): mdl.v = Var([0, 1]) mdl.x = Var(["a", "b"]) - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) standardizer_input = [mdl.v[0], mdl.x] standardizer_output = standardizer_func(standardizer_input) @@ -154,7 +154,7 @@ def test_standardizer_invalid_duplicates(self): mdl.v = Var([0, 1]) mdl.x = Var(["a", "b"]) - standardizer_func = InputDataStandardizer(Var, _VarData, allow_repeats=False) + standardizer_func = InputDataStandardizer(Var, VarData, allow_repeats=False) exc_str = r"Standardized.*list.*contains duplicate entries\." with self.assertRaisesRegex(ValueError, exc_str): @@ -165,7 +165,7 @@ def test_standardizer_invalid_type(self): Test standardizer raises exception as expected when input is of invalid type. 
""" - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) exc_str = r"Input object .*is not of valid component type.*" with self.assertRaisesRegex(TypeError, exc_str): @@ -178,7 +178,7 @@ def test_standardizer_iterable_with_invalid_type(self): """ mdl = ConcreteModel() mdl.v = Var([0, 1]) - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) exc_str = r"Input object .*entry of iterable.*is not of valid component type.*" with self.assertRaisesRegex(TypeError, exc_str): @@ -189,7 +189,7 @@ def test_standardizer_invalid_str_passed(self): Test standardizer raises exception as expected when input is of invalid type str. """ - standardizer_func = InputDataStandardizer(Var, _VarData) + standardizer_func = InputDataStandardizer(Var, VarData) exc_str = r"Input object .*is not of valid component type.*" with self.assertRaisesRegex(TypeError, exc_str): @@ -201,7 +201,7 @@ def test_standardizer_invalid_uninitialized_params(self): uninitialized entries passed. """ standardizer_func = InputDataStandardizer( - ctype=Param, cdatatype=_ParamData, ctype_validator=mutable_param_validator + ctype=Param, cdatatype=ParamData, ctype_validator=mutable_param_validator ) mdl = ConcreteModel() @@ -217,7 +217,7 @@ def test_standardizer_invalid_immutable_params(self): Param object(s) passed. """ standardizer_func = InputDataStandardizer( - ctype=Param, cdatatype=_ParamData, ctype_validator=mutable_param_validator + ctype=Param, cdatatype=ParamData, ctype_validator=mutable_param_validator ) mdl = ConcreteModel() @@ -237,7 +237,7 @@ def test_standardizer_valid_mutable_params(self): mdl.p2 = Param(["a", "b"], initialize=1, mutable=True) standardizer_func = InputDataStandardizer( - ctype=Param, cdatatype=_ParamData, ctype_validator=mutable_param_validator + ctype=Param, cdatatype=ParamData, ctype_validator=mutable_param_validator ) standardizer_input = [mdl.p1[0], mdl.p2] diff --git a/pyomo/contrib/pyros/tests/test_grcs.py b/pyomo/contrib/pyros/tests/test_grcs.py index c308f0d6990..f7efec4d6e7 100644 --- a/pyomo/contrib/pyros/tests/test_grcs.py +++ b/pyomo/contrib/pyros/tests/test_grcs.py @@ -19,7 +19,7 @@ from pyomo.common.collections import ComponentSet, ComponentMap from pyomo.common.config import ConfigBlock, ConfigValue from pyomo.core.base.set_types import NonNegativeIntegers -from pyomo.core.base.var import _VarData +from pyomo.core.base.var import VarData from pyomo.core.expr import ( identify_variables, identify_mutable_parameters, @@ -592,7 +592,7 @@ def test_dr_eqns_form_correct(self): param_product_multiplicand = term.args[0] dr_var_multiplicand = term.args[1] else: - self.assertIsInstance(term, _VarData) + self.assertIsInstance(term, VarData) param_product_multiplicand = 1 dr_var_multiplicand = term @@ -3795,6 +3795,7 @@ def test_solve_master(self): config.declare( "progress_logger", ConfigValue(default=logging.getLogger(__name__)) ) + config.declare("symbolic_solver_labels", ConfigValue(default=False)) with time_code(master_data.timing, "main", is_main_timer=True): master_soln = solve_master(master_data, config) @@ -4341,14 +4342,16 @@ def test_separation_terminate_time_limit(self): ) @unittest.skipUnless( - SolverFactory('gams').license_is_valid() - and SolverFactory('baron').license_is_valid(), - "Global NLP solver is not available and licensed.", + ipopt_available + and SolverFactory('gams').license_is_valid() + and SolverFactory('baron').license_is_valid() + and 
SolverFactory("scip").license_is_valid(), + "IPOPT not available or one of GAMS/BARON/SCIP not licensed", ) - def test_gams_successful_time_limit(self): + def test_pyros_subsolver_time_limit_adjustment(self): """ - Test PyROS time limit status returned in event - separation problem times out. + Check that PyROS does not ultimately alter state of + subordinate solver options due to time limit adjustments. """ m = ConcreteModel() m.x1 = Var(initialize=0, bounds=(0, None)) @@ -4367,20 +4370,26 @@ def test_gams_successful_time_limit(self): # Instantiate the PyROS solver pyros_solver = SolverFactory("pyros") - # Define subsolvers utilized in the algorithm - # two GAMS solvers, one of which has reslim set - # (overridden when invoked in PyROS) + # subordinate solvers to test. + # for testing, we pass each as the 'local' solver, + # and the BARON solver without custom options + # as the 'global' solver + baron_no_options = SolverFactory("baron") local_subsolvers = [ SolverFactory("gams:conopt"), SolverFactory("gams:conopt"), SolverFactory("ipopt"), + SolverFactory("ipopt", options={"max_cpu_time": 300}), + SolverFactory("scip"), + SolverFactory("scip", options={"limits/time": 300}), + baron_no_options, + SolverFactory("baron", options={"MaxTime": 300}), ] local_subsolvers[0].options["add_options"] = ["option reslim=100;"] - global_subsolver = SolverFactory("baron") - global_subsolver.options["MaxTime"] = 300 # Call the PyROS solver for idx, opt in enumerate(local_subsolvers): + original_solver_options = opt.options.copy() results = pyros_solver.solve( model=m, first_stage_variables=[m.x1, m.x2], @@ -4388,68 +4397,25 @@ def test_gams_successful_time_limit(self): uncertain_params=[m.u], uncertainty_set=interval, local_solver=opt, - global_solver=global_subsolver, + global_solver=baron_no_options, objective_focus=ObjectiveType.worst_case, solve_master_globally=True, time_limit=100, ) - self.assertEqual( results.pyros_termination_condition, pyrosTerminationCondition.robust_optimal, msg=( - f"Returned termination condition with local " - "subsolver {idx + 1} of 2 is not robust_optimal." + "Returned termination condition with local " + f"subsolver {idx + 1} of 2 is not robust_optimal." 
), ) - - # check first local subsolver settings - # remain unchanged after PyROS exit - self.assertEqual( - len(list(local_subsolvers[0].options["add_options"])), - 1, - msg=( - f"Local subsolver {local_subsolvers[0]} options 'add_options'" - "were changed by PyROS" - ), - ) - self.assertEqual( - local_subsolvers[0].options["add_options"][0], - "option reslim=100;", - msg=( - f"Local subsolver {local_subsolvers[0]} setting " - "'add_options' was modified " - "by PyROS, but changes were not properly undone" - ), - ) - - # check global subsolver settings unchanged - self.assertEqual( - len(list(global_subsolver.options.keys())), - 1, - msg=(f"Global subsolver {global_subsolver} options were changed by PyROS"), - ) - self.assertEqual( - global_subsolver.options["MaxTime"], - 300, - msg=( - f"Global subsolver {global_subsolver} setting " - "'MaxTime' was modified " - "by PyROS, but changes were not properly undone" - ), - ) - - # check other local subsolvers remain unchanged - for slvr, key in zip(local_subsolvers[1:], ["add_options", "max_cpu_time"]): - # no custom options were added to the `options` - # attribute of the optimizer, so any attribute - # of `options` should be `None` - self.assertIs( - getattr(slvr.options, key, None), - None, + self.assertEqual( + opt.options, + original_solver_options, msg=( - f"Local subsolver {slvr} setting '{key}' was added " - "by PyROS, but not reverted" + f"Options for subordinate solver {opt} were changed " + "by PyROS, and the changes wee not properly reverted." ), ) @@ -6206,6 +6172,7 @@ def test_log_config(self): " keepfiles=False\n" " tee=False\n" " load_solution=True\n" + " symbolic_solver_labels=False\n" " objective_focus=\n" " nominal_uncertain_param_vals=[0.5]\n" " decision_rule_order=0\n" diff --git a/pyomo/contrib/pyros/util.py b/pyomo/contrib/pyros/util.py index a3ab3464aa8..3b0187af7dd 100644 --- a/pyomo/contrib/pyros/util.py +++ b/pyomo/contrib/pyros/util.py @@ -16,7 +16,9 @@ import copy from enum import Enum, auto from pyomo.common.collections import ComponentSet, ComponentMap +from pyomo.common.errors import ApplicationError from pyomo.common.modeling import unique_component_name +from pyomo.common.timing import TicTocTimer from pyomo.core.base import ( Constraint, Var, @@ -230,15 +232,15 @@ def get_main_elapsed_time(timing_data_obj): def adjust_solver_time_settings(timing_data_obj, solver, config): """ - Adjust solver max time setting based on current PyROS elapsed - time. + Adjust maximum time allowed for subordinate solver, based + on total PyROS solver elapsed time up to this point. Parameters ---------- timing_data_obj : Bunch PyROS timekeeper. solver : solver type - Solver for which to adjust the max time setting. + Subordinate solver for which to adjust the max time setting. config : ConfigDict PyROS solver config. @@ -260,26 +262,37 @@ def adjust_solver_time_settings(timing_data_obj, solver, config): ---- (1) Adjustment only supported for GAMS, BARON, and IPOPT interfaces. This routine can be generalized to other solvers - after a generic interface to the time limit setting + after a generic Pyomo interface to the time limit setting is introduced. - (2) For IPOPT, and probably also BARON, the CPU time limit - rather than the wallclock time limit, is adjusted, as - no interface to wallclock limit available. - For this reason, extra 30s is added to time remaining - for subsolver time limit. 
- (The extra 30s is large enough to ensure solver - elapsed time is not beneath elapsed time - user time limit, - but not so large as to overshoot the user-specified time limit - by an inordinate margin.) + (2) For IPOPT and BARON, the CPU time limit, + rather than the wallclock time limit, may be adjusted, + as there may be no means by which to specify the wall time + limit explicitly. + (3) For GAMS, we adjust the time limit through the GAMS Reslim + option. However, this may be overridden by any user + specifications included in a GAMS optfile, which may be + difficult to track down. + (4) To ensure the time limit is specified to a strictly + positive value, the time limit is adjusted to a value of + at least 1 second. """ + # in case there is no time remaining: we set time limit + # to a minimum of 1s, as some solvers require a strictly + # positive time limit + time_limit_buffer = 1 + if config.time_limit is not None: time_remaining = config.time_limit - get_main_elapsed_time(timing_data_obj) if isinstance(solver, type(SolverFactory("gams", solver_io="shell"))): original_max_time_setting = solver.options["add_options"] custom_setting_present = "add_options" in solver.options - # adjust GAMS solver time - reslim_str = f"option reslim={max(30, 30 + time_remaining)};" + # note: our time limit will be overridden by any + # time limits specified by the user through a + # GAMS optfile, but tracking down the optfile + # and/or the GAMS subsolver specific option + # is more difficult + reslim_str = "option reslim=" f"{max(time_limit_buffer, time_remaining)};" if isinstance(solver.options["add_options"], list): solver.options["add_options"].append(reslim_str) else: @@ -289,7 +302,16 @@ def adjust_solver_time_settings(timing_data_obj, solver, config): if isinstance(solver, SolverFactory.get_class("baron")): options_key = "MaxTime" elif isinstance(solver, SolverFactory.get_class("ipopt")): - options_key = "max_cpu_time" + options_key = ( + # IPOPT 3.14.0+ added support for specifying + # wall time limit explicitly; this is preferred + # over CPU time limit + "max_wall_time" + if solver.version() >= (3, 14, 0, 0) + else "max_cpu_time" + ) + elif isinstance(solver, SolverFactory.get_class("scip")): + options_key = "limits/time" else: options_key = None @@ -297,8 +319,19 @@ def adjust_solver_time_settings(timing_data_obj, solver, config): custom_setting_present = options_key in solver.options original_max_time_setting = solver.options[options_key] - # ensure positive value assigned to avoid application error - solver.options[options_key] = max(30, 30 + time_remaining) + # account for elapsed time remaining and + # original time limit setting. 
+ # if no original time limit is set, then we assume + # there is no time limit, rather than tracking + # down the solver-specific default + orig_max_time = ( + float("inf") + if original_max_time_setting is None + else original_max_time_setting + ) + solver.options[options_key] = min( + max(time_limit_buffer, time_remaining), orig_max_time + ) else: custom_setting_present = False original_max_time_setting = None @@ -345,6 +378,8 @@ def revert_solver_max_time_adjustment( options_key = "MaxTime" elif isinstance(solver, SolverFactory.get_class("ipopt")): options_key = "max_cpu_time" + elif isinstance(solver, SolverFactory.get_class("scip")): + options_key = "limits/time" else: options_key = None @@ -359,12 +394,7 @@ def revert_solver_max_time_adjustment( if isinstance(solver, type(SolverFactory("gams", solver_io="shell"))): solver.options[options_key].pop() else: - # remove the max time specification introduced. - # All lines are needed here to completely remove the option - # from access through getattr and dictionary reference. delattr(solver.options, options_key) - if options_key in solver.options.keys(): - del solver.options[options_key] class PreformattedLogger(logging.Logger): @@ -832,7 +862,7 @@ def get_state_vars(blk, first_stage_variables, second_stage_variables): Get state variables of a modeling block. The state variables with respect to `blk` are the unfixed - `_VarData` objects participating in the active objective + `VarData` objects participating in the active objective or constraints descended from `blk` which are not first-stage variables or second-stage variables. @@ -847,7 +877,7 @@ def get_state_vars(blk, first_stage_variables, second_stage_variables): Yields ------ - _VarData + VarData State variable. """ dof_var_set = ComponentSet(first_stage_variables) | ComponentSet( @@ -954,7 +984,7 @@ def validate_variable_partitioning(model, config): Returns ------- - list of _VarData + list of VarData State variables of the model. Raises @@ -1731,6 +1761,80 @@ def process_termination_condition_master_problem(config, results): ) +def call_solver(model, solver, config, timing_obj, timer_name, err_msg): + """ + Solve a model with a given optimizer, keeping track of + wall time requirements. + + Parameters + ---------- + model : ConcreteModel + Model of interest. + solver : Pyomo solver type + Subordinate optimizer. + config : ConfigDict + PyROS solver settings. + timing_obj : TimingData + PyROS solver timing data object. + timer_name : str + Name of sub timer under the hierarchical timer contained in + ``timing_obj`` to start/stop for keeping track of solve + time requirements. + err_msg : str + Message to log through ``config.progress_logger.exception()`` + in event an ApplicationError is raised while attempting to + solve the model. + + Returns + ------- + SolverResults + Solve results. Note that ``results.solver`` contains + an additional attribute, named after + ``TIC_TOC_SOLVE_TIME_ATTR``, of which the value is set to the + recorded solver wall time. + + Raises + ------ + ApplicationError + If ApplicationError is raised by the solver. + In this case, `err_msg` is logged through + ``config.progress_logger.exception()`` before + the exception is raised. 
+ """ + tt_timer = TicTocTimer() + + orig_setting, custom_setting_present = adjust_solver_time_settings( + timing_obj, solver, config + ) + timing_obj.start_timer(timer_name) + tt_timer.tic(msg=None) + + try: + results = solver.solve( + model, + tee=config.tee, + load_solutions=False, + symbolic_solver_labels=config.symbolic_solver_labels, + ) + except ApplicationError: + # account for possible external subsolver errors + # (such as segmentation faults, function evaluation + # errors, etc.) + config.progress_logger.error(err_msg) + raise + else: + setattr( + results.solver, TIC_TOC_SOLVE_TIME_ATTR, tt_timer.toc(msg=None, delta=True) + ) + finally: + timing_obj.stop_timer(timer_name) + revert_solver_max_time_adjustment( + solver, orig_setting, custom_setting_present, config + ) + + return results + + class IterationLogRecord: """ PyROS solver iteration log record. diff --git a/pyomo/contrib/solver/base.py b/pyomo/contrib/solver/base.py index 8840265763e..98bf3836004 100644 --- a/pyomo/contrib/solver/base.py +++ b/pyomo/contrib/solver/base.py @@ -14,11 +14,11 @@ from typing import Sequence, Dict, Optional, Mapping, NoReturn, List, Tuple import os -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.param import _ParamData -from pyomo.core.base.block import _BlockData -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.var import VarData +from pyomo.core.base.param import ParamData +from pyomo.core.base.block import BlockData +from pyomo.core.base.objective import Objective, ObjectiveData from pyomo.common.config import document_kwargs_from_configdict, ConfigValue from pyomo.common.errors import ApplicationError from pyomo.common.deprecation import deprecation_warning @@ -59,11 +59,13 @@ class SolverBase(abc.ABC): def __init__(self, **kwds) -> None: # We allow the user and/or developer to name the solver something else, - # if they really desire. Otherwise it defaults to the class name (all lowercase) + # if they really desire. + # Otherwise it defaults to the name defined when the solver was registered + # in the SolverFactory or the class name (all lowercase), whichever is + # applicable if "name" in kwds: - self.name = kwds["name"] - kwds.pop('name') - else: + self.name = kwds.pop('name') + elif not hasattr(self, 'name'): self.name = type(self).__name__.lower() self.config = self.CONFIG(value=kwds) @@ -108,13 +110,13 @@ def __str__(self): @document_kwargs_from_configdict(CONFIG) @abc.abstractmethod - def solve(self, model: _BlockData, **kwargs) -> Results: + def solve(self, model: BlockData, **kwargs) -> Results: """ Solve a Pyomo model. 
Parameters ---------- - model: _BlockData + model: BlockData The Pyomo model to be solved **kwargs Additional keyword arguments (including solver_options - passthrough @@ -182,7 +184,7 @@ class PersistentSolverBase(SolverBase): @document_kwargs_from_configdict(PersistentSolverConfig()) @abc.abstractmethod - def solve(self, model: _BlockData, **kwargs) -> Results: + def solve(self, model: BlockData, **kwargs) -> Results: super().solve(model, kwargs) def is_persistent(self): @@ -194,9 +196,7 @@ def is_persistent(self): """ return True - def _load_vars( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> NoReturn: + def _load_vars(self, vars_to_load: Optional[Sequence[VarData]] = None) -> NoReturn: """ Load the solution of the primal variables into the value attribute of the variables. @@ -212,19 +212,19 @@ def _load_vars( @abc.abstractmethod def _get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Get mapping of variables to primals. Parameters ---------- - vars_to_load : Optional[Sequence[_GeneralVarData]], optional + vars_to_load : Optional[Sequence[VarData]], optional Which vars to be populated into the map. The default is None. Returns ------- - Mapping[_GeneralVarData, float] + Mapping[VarData, float] A map of variables to primals. """ raise NotImplementedError( @@ -232,8 +232,8 @@ def _get_primals( ) def _get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Declare sign convention in docstring here. 
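A minimal sketch of the public data-class names used in these annotations, assuming an installation where the renames in this patch are in place; iterating a model's component data yields the same VarData and ConstraintData objects the interfaces above are typed against:

import pyomo.environ as pyo
from pyomo.core.base.var import VarData
from pyomo.core.base.constraint import ConstraintData

m = pyo.ConcreteModel()
m.x = pyo.Var([1, 2])
m.c = pyo.Constraint(expr=m.x[1] + m.x[2] <= 1)

# component_data_objects() yields the public *Data classes referenced in the
# updated type hints above.
assert all(isinstance(v, VarData) for v in m.component_data_objects(pyo.Var))
assert all(
    isinstance(c, ConstraintData)
    for c in m.component_data_objects(pyo.Constraint)
)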
@@ -251,8 +251,8 @@ def _get_duals( raise NotImplementedError(f'{type(self)} does not support the get_duals method') def _get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Parameters ---------- @@ -276,61 +276,61 @@ def set_instance(self, model): """ @abc.abstractmethod - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): """ Set current objective for the model """ @abc.abstractmethod - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): """ Add variables to the model """ @abc.abstractmethod - def add_parameters(self, params: List[_ParamData]): + def add_parameters(self, params: List[ParamData]): """ Add parameters to the model """ @abc.abstractmethod - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): """ Add constraints to the model """ @abc.abstractmethod - def add_block(self, block: _BlockData): + def add_block(self, block: BlockData): """ Add a block to the model """ @abc.abstractmethod - def remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): """ Remove variables from the model """ @abc.abstractmethod - def remove_parameters(self, params: List[_ParamData]): + def remove_parameters(self, params: List[ParamData]): """ Remove parameters from the model """ @abc.abstractmethod - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): """ Remove constraints from the model """ @abc.abstractmethod - def remove_block(self, block: _BlockData): + def remove_block(self, block: BlockData): """ Remove a block from the model """ @abc.abstractmethod - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): """ Update variables on the model """ @@ -348,9 +348,19 @@ class LegacySolverWrapper: interface. Necessary for backwards compatibility. """ - def __init__(self, solver_io=None, **kwargs): - if solver_io is not None: + def __init__(self, **kwargs): + if 'solver_io' in kwargs: raise NotImplementedError('Still working on this') + # There is no reason for a user to be trying to mix both old + # and new options. That is silly. So we will yell at them. + self.options = kwargs.pop('options', None) + if 'solver_options' in kwargs: + if self.options is not None: + raise ValueError( + "Both 'options' and 'solver_options' were requested. " + "Please use one or the other, not both." + ) + self.options = kwargs.pop('solver_options') super().__init__(**kwargs) # @@ -376,6 +386,8 @@ def _map_config( keepfiles=NOTSET, solnfile=NOTSET, options=NOTSET, + solver_options=NOTSET, + writer_config=NOTSET, ): """Map between legacy and new interface configuration options""" self.config = self.config() @@ -393,8 +405,26 @@ def _map_config( self.config.time_limit = timelimit if report_timing is not NOTSET: self.config.report_timing = report_timing - if options is not NOTSET: + if self.options is not None: + self.config.solver_options.set_value(self.options) + if (options is not NOTSET) and (solver_options is not NOTSET): + # There is no reason for a user to be trying to mix both old + # and new options. That is silly. So we will yell at them. 
+ # Example that would raise an error: + # solver.solve(model, options={'foo' : 'bar'}, solver_options={'foo' : 'not_bar'}) + raise ValueError( + "Both 'options' and 'solver_options' were requested. " + "Please use one or the other, not both." + ) + elif options is not NOTSET: + # This block is trying to mimic the existing logic in the legacy + # interface that allows users to pass initialized options to + # the solver object and override them in the solve call. self.config.solver_options.set_value(options) + elif solver_options is not NOTSET: + self.config.solver_options.set_value(solver_options) + if writer_config is not NOTSET: + self.config.writer_config.set_value(writer_config) # This is a new flag in the interface. To preserve backwards compatibility, # its default is set to "False" if raise_exception_on_nonoptimal_result is not NOTSET: @@ -435,9 +465,14 @@ def _map_results(self, model, results): ] legacy_soln.status = legacy_solution_status_map[results.solution_status] legacy_results.solver.termination_message = str(results.termination_condition) - legacy_results.problem.number_of_constraints = model.nconstraints() - legacy_results.problem.number_of_variables = model.nvariables() - number_of_objectives = model.nobjectives() + legacy_results.problem.number_of_constraints = float('nan') + legacy_results.problem.number_of_variables = float('nan') + number_of_objectives = sum( + 1 + for _ in model.component_data_objects( + Objective, active=True, descend_into=True + ) + ) legacy_results.problem.number_of_objectives = number_of_objectives if number_of_objectives == 1: obj = get_objective(model) @@ -464,6 +499,12 @@ def _solution_handler( """Method to handle the preferred action for the solution""" symbol_map = SymbolMap() symbol_map.default_labeler = NumericLabeler('x') + if not hasattr(model, 'solutions'): + # This logic gets around Issue #2130 in which + # solutions is not an attribute on Blocks + from pyomo.core.base.PyomoModel import ModelSolutions + + setattr(model, 'solutions', ModelSolutions(model)) model.solutions.add_symbol_map(symbol_map) legacy_results._smap_id = id(symbol_map) delete_legacy_soln = True @@ -496,7 +537,7 @@ def _solution_handler( def solve( self, - model: _BlockData, + model: BlockData, tee: bool = False, load_solutions: bool = True, logfile: Optional[str] = None, @@ -508,7 +549,10 @@ def solve( options: Optional[Dict] = None, keepfiles: bool = False, symbolic_solver_labels: bool = False, + # These are for forward-compatibility raise_exception_on_nonoptimal_result: bool = False, + solver_options: Optional[Dict] = None, + writer_config: Optional[Dict] = None, ): """ Solve method: maps new solve method style to backwards compatible version. @@ -534,6 +578,8 @@ def solve( 'keepfiles', 'solnfile', 'options', + 'solver_options', + 'writer_config', ) loc = locals() filtered_args = {k: loc[k] for k in map_args if loc.get(k, None) is not None} @@ -559,7 +605,10 @@ def available(self, exception_flag=True): """ ans = super().available() if exception_flag and not ans: - raise ApplicationError(f'Solver {self.__class__} is not available ({ans}).') + raise ApplicationError( + f'Solver "{self.name}" is not available. ' + f'The returned status is: {ans}.' 
+ ) return bool(ans) def license_is_valid(self) -> bool: diff --git a/pyomo/contrib/solver/factory.py b/pyomo/contrib/solver/factory.py index 99fbcc3a6d0..d3ca1329af3 100644 --- a/pyomo/contrib/solver/factory.py +++ b/pyomo/contrib/solver/factory.py @@ -31,6 +31,8 @@ class LegacySolver(LegacySolverWrapper, cls): LegacySolver ) + # Preserve the preferred name, as registered in the Factory + cls.name = name return cls return decorator diff --git a/pyomo/contrib/solver/gurobi.py b/pyomo/contrib/solver/gurobi.py index d0ac0d80f45..10d8120c8b3 100644 --- a/pyomo/contrib/solver/gurobi.py +++ b/pyomo/contrib/solver/gurobi.py @@ -22,10 +22,10 @@ from pyomo.common.config import ConfigValue from pyomo.core.kernel.objective import minimize, maximize from pyomo.core.base import SymbolMap, NumericLabeler, TextLabeler -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.sos import _SOSConstraintData -from pyomo.core.base.param import _ParamData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.sos import SOSConstraintData +from pyomo.core.base.param import ParamData from pyomo.core.expr.numvalue import value, is_constant, is_fixed, native_numeric_types from pyomo.repn import generate_standard_repn from pyomo.core.expr.numeric_expr import NPV_MaxExpression, NPV_MinExpression @@ -438,7 +438,7 @@ def _process_domain_and_bounds( return lb, ub, vtype - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): var_names = list() vtypes = list() lbs = list() @@ -469,7 +469,7 @@ def _add_variables(self, variables: List[_GeneralVarData]): self._vars_added_since_update.update(variables) self._needs_updated = True - def _add_parameters(self, params: List[_ParamData]): + def _add_parameters(self, params: List[ParamData]): pass def _reinit(self): @@ -555,7 +555,7 @@ def _get_expr_from_pyomo_expr(self, expr): mutable_quadratic_coefficients, ) - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): for con in cons: conname = self._symbol_map.getSymbol(con, self._labeler) ( @@ -685,7 +685,7 @@ def _add_constraints(self, cons: List[_GeneralConstraintData]): self._constraints_added_since_update.update(cons) self._needs_updated = True - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: conname = self._symbol_map.getSymbol(con, self._labeler) level = con.level @@ -711,7 +711,7 @@ def _add_sos_constraints(self, cons: List[_SOSConstraintData]): self._constraints_added_since_update.update(cons) self._needs_updated = True - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): for con in cons: if con in self._constraints_added_since_update: self._update_gurobi_model() @@ -725,7 +725,7 @@ def _remove_constraints(self, cons: List[_GeneralConstraintData]): self._mutable_quadratic_helpers.pop(con, None) self._needs_updated = True - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: if con in self._constraints_added_since_update: self._update_gurobi_model() @@ -735,7 +735,7 @@ def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): del self._pyomo_sos_to_solver_sos_map[con] 
self._needs_updated = True - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): for var in variables: v_id = id(var) if var in self._vars_added_since_update: @@ -747,10 +747,10 @@ def _remove_variables(self, variables: List[_GeneralVarData]): self._mutable_bounds.pop(v_id, None) self._needs_updated = True - def _remove_parameters(self, params: List[_ParamData]): + def _remove_parameters(self, params: List[ParamData]): pass - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): for var in variables: var_id = id(var) if var_id not in self._pyomo_var_to_solver_var_map: @@ -1125,7 +1125,7 @@ def set_linear_constraint_attr(self, con, attr, val): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be modified. attr: str @@ -1151,7 +1151,7 @@ def set_var_attr(self, var, attr, val): Parameters ---------- - var: pyomo.core.base.var._GeneralVarData + var: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be modified. attr: str @@ -1186,7 +1186,7 @@ def get_var_attr(self, var, attr): Parameters ---------- - var: pyomo.core.base.var._GeneralVarData + var: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be retrieved. attr: str @@ -1202,7 +1202,7 @@ def get_linear_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. attr: str @@ -1218,7 +1218,7 @@ def get_sos_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.sos._SOSConstraintData + con: pyomo.core.base.sos.SOSConstraintData The pyomo SOS constraint for which the corresponding gurobi SOS constraint attribute should be retrieved. attr: str @@ -1234,7 +1234,7 @@ def get_quadratic_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. 
attr: str @@ -1355,7 +1355,7 @@ def cbCut(self, con): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The cut to add """ if not con.active: @@ -1440,7 +1440,7 @@ def cbLazy(self, con): """ Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The lazy constraint to add """ if not con.active: diff --git a/pyomo/contrib/solver/ipopt.py b/pyomo/contrib/solver/ipopt.py index 5f601b7a9f7..c88696f531b 100644 --- a/pyomo/contrib/solver/ipopt.py +++ b/pyomo/contrib/solver/ipopt.py @@ -25,7 +25,7 @@ ) from pyomo.common.tempfiles import TempfileManager from pyomo.common.timing import HierarchicalTimer -from pyomo.core.base.var import _GeneralVarData +from pyomo.core.base.var import VarData from pyomo.core.staleflag import StaleFlagManager from pyomo.repn.plugins.nl_writer import NLWriter, NLWriterInfo from pyomo.contrib.solver.base import SolverBase @@ -80,8 +80,8 @@ def __init__( class IpoptSolutionLoader(SolSolutionLoader): def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._nl_info is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. Please ' @@ -307,7 +307,12 @@ def solve(self, model, **kwds): raise RuntimeError( f"NL file with the same name {basename + '.nl'} already exists!" ) - with open(basename + '.nl', 'w') as nl_file, open( + # Note: the ASL has an issue where string constants written + # to the NL file (e.g. arguments in external functions) MUST + # be terminated with '\n' regardless of platform. We will + # disable universal newlines in the NL file to prevent + # Python from mapping those '\n' to '\r\n' on Windows. 
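A small, self-contained illustration of the newline behavior described in the comment above, using a hypothetical scratch file name; with the default newline=None, text mode would translate '\n' to '\r\n' on Windows, whereas newline='\n' leaves the terminators untouched:

# "scratch.nl" is a throwaway name; only the newline argument matters here.
with open("scratch.nl", "w", newline="\n") as f:
    f.write("external function string arg\n")

with open("scratch.nl", "rb") as f:
    data = f.read()

# The '\n' terminator survives as a bare b"\n" on every platform.
assert data.endswith(b"\n") and not data.endswith(b"\r\n")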
+ with open(basename + '.nl', 'w', newline='\n') as nl_file, open( basename + '.row', 'w' ) as row_file, open(basename + '.col', 'w') as col_file: timer.start('write_nl_file') diff --git a/pyomo/contrib/solver/persistent.py b/pyomo/contrib/solver/persistent.py index 4b1a7c58dcd..71322b7043e 100644 --- a/pyomo/contrib/solver/persistent.py +++ b/pyomo/contrib/solver/persistent.py @@ -12,11 +12,11 @@ import abc from typing import List -from pyomo.core.base.constraint import _GeneralConstraintData, Constraint -from pyomo.core.base.sos import _SOSConstraintData, SOSConstraint -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.param import _ParamData, Param -from pyomo.core.base.objective import _GeneralObjectiveData +from pyomo.core.base.constraint import ConstraintData, Constraint +from pyomo.core.base.sos import SOSConstraintData, SOSConstraint +from pyomo.core.base.var import VarData +from pyomo.core.base.param import ParamData, Param +from pyomo.core.base.objective import ObjectiveData from pyomo.common.collections import ComponentMap from pyomo.common.timing import HierarchicalTimer from pyomo.core.expr.numvalue import NumericConstant @@ -54,10 +54,10 @@ def set_instance(self, model): self.set_objective(None) @abc.abstractmethod - def _add_variables(self, variables: List[_GeneralVarData]): + def _add_variables(self, variables: List[VarData]): pass - def add_variables(self, variables: List[_GeneralVarData]): + def add_variables(self, variables: List[VarData]): for v in variables: if id(v) in self._referenced_variables: raise ValueError( @@ -75,19 +75,19 @@ def add_variables(self, variables: List[_GeneralVarData]): self._add_variables(variables) @abc.abstractmethod - def _add_parameters(self, params: List[_ParamData]): + def _add_parameters(self, params: List[ParamData]): pass - def add_parameters(self, params: List[_ParamData]): + def add_parameters(self, params: List[ParamData]): for p in params: self._params[id(p)] = p self._add_parameters(params) @abc.abstractmethod - def _add_constraints(self, cons: List[_GeneralConstraintData]): + def _add_constraints(self, cons: List[ConstraintData]): pass - def _check_for_new_vars(self, variables: List[_GeneralVarData]): + def _check_for_new_vars(self, variables: List[VarData]): new_vars = {} for v in variables: v_id = id(v) @@ -95,7 +95,7 @@ def _check_for_new_vars(self, variables: List[_GeneralVarData]): new_vars[v_id] = v self.add_variables(list(new_vars.values())) - def _check_to_remove_vars(self, variables: List[_GeneralVarData]): + def _check_to_remove_vars(self, variables: List[VarData]): vars_to_remove = {} for v in variables: v_id = id(v) @@ -104,7 +104,7 @@ def _check_to_remove_vars(self, variables: List[_GeneralVarData]): vars_to_remove[v_id] = v self.remove_variables(list(vars_to_remove.values())) - def add_constraints(self, cons: List[_GeneralConstraintData]): + def add_constraints(self, cons: List[ConstraintData]): all_fixed_vars = {} for con in cons: if con in self._named_expressions: @@ -130,10 +130,10 @@ def add_constraints(self, cons: List[_GeneralConstraintData]): v.fix() @abc.abstractmethod - def _add_sos_constraints(self, cons: List[_SOSConstraintData]): + def _add_sos_constraints(self, cons: List[SOSConstraintData]): pass - def add_sos_constraints(self, cons: List[_SOSConstraintData]): + def add_sos_constraints(self, cons: List[SOSConstraintData]): for con in cons: if con in self._vars_referenced_by_con: raise ValueError( @@ -149,10 +149,10 @@ def add_sos_constraints(self, cons: List[_SOSConstraintData]): 
self._add_sos_constraints(cons) @abc.abstractmethod - def _set_objective(self, obj: _GeneralObjectiveData): + def _set_objective(self, obj: ObjectiveData): pass - def set_objective(self, obj: _GeneralObjectiveData): + def set_objective(self, obj: ObjectiveData): if self._objective is not None: for v in self._vars_referenced_by_obj: self._referenced_variables[id(v)][2] = None @@ -209,10 +209,10 @@ def add_block(self, block): self.set_objective(obj) @abc.abstractmethod - def _remove_constraints(self, cons: List[_GeneralConstraintData]): + def _remove_constraints(self, cons: List[ConstraintData]): pass - def remove_constraints(self, cons: List[_GeneralConstraintData]): + def remove_constraints(self, cons: List[ConstraintData]): self._remove_constraints(cons) for con in cons: if con not in self._named_expressions: @@ -230,10 +230,10 @@ def remove_constraints(self, cons: List[_GeneralConstraintData]): del self._vars_referenced_by_con[con] @abc.abstractmethod - def _remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def _remove_sos_constraints(self, cons: List[SOSConstraintData]): pass - def remove_sos_constraints(self, cons: List[_SOSConstraintData]): + def remove_sos_constraints(self, cons: List[SOSConstraintData]): self._remove_sos_constraints(cons) for con in cons: if con not in self._vars_referenced_by_con: @@ -250,10 +250,10 @@ def remove_sos_constraints(self, cons: List[_SOSConstraintData]): del self._vars_referenced_by_con[con] @abc.abstractmethod - def _remove_variables(self, variables: List[_GeneralVarData]): + def _remove_variables(self, variables: List[VarData]): pass - def remove_variables(self, variables: List[_GeneralVarData]): + def remove_variables(self, variables: List[VarData]): self._remove_variables(variables) for v in variables: v_id = id(v) @@ -274,10 +274,10 @@ def remove_variables(self, variables: List[_GeneralVarData]): del self._vars[v_id] @abc.abstractmethod - def _remove_parameters(self, params: List[_ParamData]): + def _remove_parameters(self, params: List[ParamData]): pass - def remove_parameters(self, params: List[_ParamData]): + def remove_parameters(self, params: List[ParamData]): self._remove_parameters(params) for p in params: del self._params[id(p)] @@ -309,10 +309,10 @@ def remove_block(self, block): ) @abc.abstractmethod - def _update_variables(self, variables: List[_GeneralVarData]): + def _update_variables(self, variables: List[VarData]): pass - def update_variables(self, variables: List[_GeneralVarData]): + def update_variables(self, variables: List[VarData]): for v in variables: self._vars[id(v)] = ( v, @@ -384,12 +384,12 @@ def update(self, timer: HierarchicalTimer = None): for c in self._vars_referenced_by_con.keys(): if c not in current_cons_dict and c not in current_sos_dict: if (c.ctype is Constraint) or ( - c.ctype is None and isinstance(c, _GeneralConstraintData) + c.ctype is None and isinstance(c, ConstraintData) ): old_cons.append(c) else: assert (c.ctype is SOSConstraint) or ( - c.ctype is None and isinstance(c, _SOSConstraintData) + c.ctype is None and isinstance(c, SOSConstraintData) ) old_sos.append(c) self.remove_constraints(old_cons) diff --git a/pyomo/contrib/solver/solution.py b/pyomo/contrib/solver/solution.py index 32e84d2abca..a3e66475982 100644 --- a/pyomo/contrib/solver/solution.py +++ b/pyomo/contrib/solver/solution.py @@ -12,8 +12,8 @@ import abc from typing import Sequence, Dict, Optional, Mapping, NoReturn -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.var import 
_GeneralVarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.var import VarData from pyomo.core.expr import value from pyomo.common.collections import ComponentMap from pyomo.common.errors import DeveloperError @@ -30,9 +30,7 @@ class SolutionLoaderBase(abc.ABC): Intent of this class and its children is to load the solution back into the model. """ - def load_vars( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> NoReturn: + def load_vars(self, vars_to_load: Optional[Sequence[VarData]] = None) -> NoReturn: """ Load the solution of the primal variables into the value attribute of the variables. @@ -49,8 +47,8 @@ def load_vars( @abc.abstractmethod def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Returns a ComponentMap mapping variable to var value. @@ -67,8 +65,8 @@ def get_primals( """ def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: """ Returns a dictionary mapping constraint to dual value. @@ -86,8 +84,8 @@ def get_duals( raise NotImplementedError(f'{type(self)} does not support the get_duals method') def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: """ Returns a ComponentMap mapping variable to reduced cost. @@ -121,14 +119,14 @@ def get_primals(self, vars_to_load=None): return self._solver._get_primals(vars_to_load=vars_to_load) def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: self._assert_solution_still_valid() return self._solver._get_duals(cons_to_load=cons_to_load) def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: self._assert_solution_still_valid() return self._solver._get_reduced_costs(vars_to_load=vars_to_load) @@ -141,9 +139,7 @@ def __init__(self, sol_data: SolFileData, nl_info: NLWriterInfo) -> None: self._sol_data = sol_data self._nl_info = nl_info - def load_vars( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> NoReturn: + def load_vars(self, vars_to_load: Optional[Sequence[VarData]] = None) -> NoReturn: if self._nl_info is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. Please ' @@ -169,8 +165,8 @@ def load_vars( StaleFlagManager.mark_all_as_stale(delayed=True) def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._nl_info is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. 
Please ' @@ -205,8 +201,8 @@ def get_primals( return res def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: if self._nl_info is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. Please ' diff --git a/pyomo/contrib/solver/tests/unit/test_base.py b/pyomo/contrib/solver/tests/unit/test_base.py index 179d9823679..b52f96ba903 100644 --- a/pyomo/contrib/solver/tests/unit/test_base.py +++ b/pyomo/contrib/solver/tests/unit/test_base.py @@ -272,6 +272,99 @@ def test_map_config(self): with self.assertRaises(AttributeError): print(instance.config.keepfiles) + def test_solver_options_behavior(self): + # options can work in multiple ways (set from instantiation, set + # after instantiation, set during solve). + # Test case 1: Set at instantiation + solver = base.LegacySolverWrapper(options={'max_iter': 6}) + self.assertEqual(solver.options, {'max_iter': 6}) + + # Test case 2: Set later + solver = base.LegacySolverWrapper() + solver.options = {'max_iter': 4, 'foo': 'bar'} + self.assertEqual(solver.options, {'max_iter': 4, 'foo': 'bar'}) + + # Test case 3: pass some options to the mapping (aka, 'solve' command) + solver = base.LegacySolverWrapper() + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + solver._map_config(options={'max_iter': 4}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4}) + + # Test case 4: Set at instantiation and override during 'solve' call + solver = base.LegacySolverWrapper(options={'max_iter': 6}) + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + solver._map_config(options={'max_iter': 4}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 6}) + + # solver_options are also supported + # Test case 1: set at instantiation + solver = base.LegacySolverWrapper(solver_options={'max_iter': 6}) + self.assertEqual(solver.options, {'max_iter': 6}) + + # Test case 2: pass some solver_options to the mapping (aka, 'solve' command) + solver = base.LegacySolverWrapper() + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4}) + + # Test case 3: Set at instantiation and override during 'solve' call + solver = base.LegacySolverWrapper(solver_options={'max_iter': 6}) + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 6}) + + # users can mix... 
sort of + # Test case 1: Initialize with options, solve with solver_options + solver = base.LegacySolverWrapper(options={'max_iter': 6}) + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4}) + + # users CANNOT initialize both values at the same time, because how + # do we know what to do with it then? + # Test case 1: Class instance + with self.assertRaises(ValueError): + solver = base.LegacySolverWrapper( + options={'max_iter': 6}, solver_options={'max_iter': 4} + ) + # Test case 2: Passing to `solve` + solver = base.LegacySolverWrapper() + config = ConfigDict(implicit=True) + config.declare( + 'solver_options', + ConfigDict(implicit=True, description="Options to pass to the solver."), + ) + solver.config = config + with self.assertRaises(ValueError): + solver._map_config(solver_options={'max_iter': 4}, options={'max_iter': 6}) + def test_map_results(self): # Unclear how to test this pass diff --git a/pyomo/contrib/solver/tests/unit/test_results.py b/pyomo/contrib/solver/tests/unit/test_results.py index 74404aaba4c..a15c9b87253 100644 --- a/pyomo/contrib/solver/tests/unit/test_results.py +++ b/pyomo/contrib/solver/tests/unit/test_results.py @@ -15,8 +15,8 @@ from pyomo.common import unittest from pyomo.common.config import ConfigDict -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.var import _GeneralVarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.var import VarData from pyomo.common.collections import ComponentMap from pyomo.contrib.solver import results from pyomo.contrib.solver import solution @@ -51,8 +51,8 @@ def __init__( self._reduced_costs = reduced_costs def get_primals( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._primals is None: raise RuntimeError( 'Solution loader does not currently have a valid solution. Please ' @@ -67,8 +67,8 @@ def get_primals( return primals def get_duals( - self, cons_to_load: Optional[Sequence[_GeneralConstraintData]] = None - ) -> Dict[_GeneralConstraintData, float]: + self, cons_to_load: Optional[Sequence[ConstraintData]] = None + ) -> Dict[ConstraintData, float]: if self._duals is None: raise RuntimeError( 'Solution loader does not currently have valid duals. Please ' @@ -84,8 +84,8 @@ def get_duals( return duals def get_reduced_costs( - self, vars_to_load: Optional[Sequence[_GeneralVarData]] = None - ) -> Mapping[_GeneralVarData, float]: + self, vars_to_load: Optional[Sequence[VarData]] = None + ) -> Mapping[VarData, float]: if self._reduced_costs is None: raise RuntimeError( 'Solution loader does not currently have valid reduced costs. 
Please ' diff --git a/pyomo/contrib/trustregion/tests/test_interface.py b/pyomo/contrib/trustregion/tests/test_interface.py index 148caceddd1..0922ccf950b 100644 --- a/pyomo/contrib/trustregion/tests/test_interface.py +++ b/pyomo/contrib/trustregion/tests/test_interface.py @@ -33,7 +33,7 @@ cos, SolverFactory, ) -from pyomo.core.base.var import _GeneralVarData +from pyomo.core.base.var import VarData from pyomo.core.expr.numeric_expr import ExternalFunctionExpression from pyomo.core.expr.visitor import identify_variables from pyomo.contrib.trustregion.interface import TRFInterface @@ -158,7 +158,7 @@ def test_replaceExternalFunctionsWithVariables(self): self.assertIsInstance(k, ExternalFunctionExpression) self.assertIn(str(self.interface.model.x[0]), str(k)) self.assertIn(str(self.interface.model.x[1]), str(k)) - self.assertIsInstance(i, _GeneralVarData) + self.assertIsInstance(i, VarData) self.assertEqual(i, self.interface.data.ef_outputs[1]) for i, k in self.interface.data.basis_expressions.items(): self.assertEqual(k, 0) diff --git a/pyomo/contrib/viewer/model_browser.py b/pyomo/contrib/viewer/model_browser.py index 5887a577ba0..91dc946c55d 100644 --- a/pyomo/contrib/viewer/model_browser.py +++ b/pyomo/contrib/viewer/model_browser.py @@ -33,7 +33,7 @@ import pyomo.contrib.viewer.qt as myqt from pyomo.contrib.viewer.report import value_no_exception, get_residual -from pyomo.core.base.param import _ParamData +from pyomo.core.base.param import ParamData from pyomo.environ import ( Block, BooleanVar, @@ -243,7 +243,7 @@ def _get_expr_callback(self): return None def _get_value_callback(self): - if isinstance(self.data, _ParamData): + if isinstance(self.data, ParamData): v = value_no_exception(self.data, div0="divide_by_0") # Check the param value for numpy float and int, sometimes numpy # values can sneak in especially if you set parameters from data @@ -295,7 +295,7 @@ def _get_residual_callback(self): def _get_units_callback(self): if isinstance(self.data, (Var, Var._ComponentDataClass)): return str(units.get_units(self.data)) - if isinstance(self.data, (Param, _ParamData)): + if isinstance(self.data, (Param, ParamData)): return str(units.get_units(self.data)) return self._cache_units @@ -320,7 +320,7 @@ def _set_value_callback(self, val): o.value = val except: return - elif isinstance(self.data, _ParamData): + elif isinstance(self.data, ParamData): if not self.data.parent_component().mutable: return try: diff --git a/pyomo/contrib/viewer/report.py b/pyomo/contrib/viewer/report.py index f83a53c608d..a28e0082212 100644 --- a/pyomo/contrib/viewer/report.py +++ b/pyomo/contrib/viewer/report.py @@ -50,7 +50,7 @@ def get_residual(ui_data, c): values of the constraint body. This function uses the cached values and will not trigger recalculation. If variable values have changed, this may not yield accurate results. - c(_ConstraintData): a constraint or constraint data + c(ConstraintData): a constraint or constraint data Returns: (float) residual """ @@ -149,7 +149,7 @@ def degrees_of_freedom(blk): Return the degrees of freedom. 
Args: - blk (Block or _BlockData): Block to count degrees of freedom in + blk (Block or BlockData): Block to count degrees of freedom in Returns: (int): Number of degrees of freedom """ diff --git a/pyomo/core/__init__.py b/pyomo/core/__init__.py index bce79faacc5..f0d168d98f9 100644 --- a/pyomo/core/__init__.py +++ b/pyomo/core/__init__.py @@ -101,7 +101,7 @@ BooleanValue, native_logical_values, ) -from pyomo.core.kernel.objective import minimize, maximize +from pyomo.core.base import minimize, maximize from pyomo.core.base.config import PyomoOptions from pyomo.core.base.expression import Expression diff --git a/pyomo/core/base/__init__.py b/pyomo/core/base/__init__.py index 4bbd0c9dc44..2b21725d82f 100644 --- a/pyomo/core/base/__init__.py +++ b/pyomo/core/base/__init__.py @@ -12,6 +12,7 @@ # TODO: this import is for historical backwards compatibility and should # probably be removed from pyomo.common.collections import ComponentMap +from pyomo.common.enums import minimize, maximize from pyomo.core.expr.symbol_map import SymbolMap from pyomo.core.expr.numvalue import ( @@ -33,10 +34,11 @@ BooleanValue, native_logical_values, ) -from pyomo.core.kernel.objective import minimize, maximize -from pyomo.core.base.config import PyomoOptions -from pyomo.core.base.expression import Expression, _ExpressionData +from pyomo.core.base.component import name, Component, ModelComponentFactory +from pyomo.core.base.componentuid import ComponentUID +from pyomo.core.base.config import PyomoOptions +from pyomo.core.base.enums import SortComponents, TraversalStrategy from pyomo.core.base.label import ( CuidLabeler, CounterLabeler, @@ -47,56 +49,73 @@ NameLabeler, ShortNameLabeler, ) +from pyomo.core.base.misc import display +from pyomo.core.base.reference import Reference +from pyomo.core.base.symbol_map import symbol_map_from_instance +from pyomo.core.base.transformation import ( + Transformation, + TransformationFactory, + ReverseTransformationToken, +) + +from pyomo.core.base.PyomoModel import ( + global_option, + ModelSolution, + ModelSolutions, + Model, + ConcreteModel, + AbstractModel, +) # # Components # -from pyomo.core.base.component import name, Component, ModelComponentFactory -from pyomo.core.base.componentuid import ComponentUID from pyomo.core.base.action import BuildAction -from pyomo.core.base.check import BuildCheck -from pyomo.core.base.set import Set, SetOf, simple_set_rule, RangeSet -from pyomo.core.base.param import Param -from pyomo.core.base.var import Var, _VarData, _GeneralVarData, ScalarVar, VarList +from pyomo.core.base.block import ( + Block, + BlockData, + ScalarBlock, + active_components, + components, + active_components_data, + components_data, +) from pyomo.core.base.boolean_var import ( BooleanVar, - _BooleanVarData, - _GeneralBooleanVarData, + BooleanVarData, BooleanVarList, ScalarBooleanVar, ) +from pyomo.core.base.check import BuildCheck +from pyomo.core.base.connector import Connector, ConnectorData from pyomo.core.base.constraint import ( simple_constraint_rule, simple_constraintlist_rule, ConstraintList, Constraint, - _ConstraintData, + ConstraintData, ) +from pyomo.core.base.expression import Expression, NamedExpressionData, ExpressionData +from pyomo.core.base.external import ExternalFunction from pyomo.core.base.logical_constraint import ( LogicalConstraint, LogicalConstraintList, - _LogicalConstraintData, + LogicalConstraintData, ) from pyomo.core.base.objective import ( simple_objective_rule, simple_objectivelist_rule, Objective, ObjectiveList, - _ObjectiveData, -) 
-from pyomo.core.base.connector import Connector -from pyomo.core.base.sos import SOSConstraint -from pyomo.core.base.piecewise import Piecewise -from pyomo.core.base.suffix import ( - active_export_suffix_generator, - active_import_suffix_generator, - Suffix, + ObjectiveData, ) -from pyomo.core.base.external import ExternalFunction -from pyomo.core.base.symbol_map import symbol_map_from_instance -from pyomo.core.base.reference import Reference - +from pyomo.core.base.param import Param, ParamData +from pyomo.core.base.piecewise import Piecewise, PiecewiseData from pyomo.core.base.set import ( + Set, + SetData, + SetOf, + RangeSet, Reals, PositiveReals, NonPositiveReals, @@ -116,34 +135,21 @@ PercentFraction, RealInterval, IntegerInterval, + simple_set_rule, ) -from pyomo.core.base.misc import display -from pyomo.core.base.block import ( - Block, - ScalarBlock, - active_components, - components, - active_components_data, - components_data, -) -from pyomo.core.base.enums import SortComponents, TraversalStrategy -from pyomo.core.base.PyomoModel import ( - global_option, - ModelSolution, - ModelSolutions, - Model, - ConcreteModel, - AbstractModel, -) -from pyomo.core.base.transformation import ( - Transformation, - TransformationFactory, - ReverseTransformationToken, +from pyomo.core.base.sos import SOSConstraint, SOSConstraintData +from pyomo.core.base.suffix import ( + active_export_suffix_generator, + active_import_suffix_generator, + Suffix, ) +from pyomo.core.base.var import Var, VarData, ScalarVar, VarList from pyomo.core.base.instance2dat import instance2dat +# # These APIs are deprecated and should be removed in the near future +# from pyomo.core.base.set import set_options, RealSet, IntegerSet, BooleanSet from pyomo.common.deprecation import relocated_module_attribute @@ -155,4 +161,25 @@ relocated_module_attribute( 'SimpleBooleanVar', 'pyomo.core.base.boolean_var.SimpleBooleanVar', version='6.0' ) +# Historically, only a subset of "private" component data classes were imported here +relocated_module_attribute( + f'_GeneralVarData', f'pyomo.core.base.VarData', version='6.7.2.dev0' +) +relocated_module_attribute( + f'_GeneralBooleanVarData', f'pyomo.core.base.BooleanVarData', version='6.7.2.dev0' +) +relocated_module_attribute( + f'_ExpressionData', f'pyomo.core.base.NamedExpressionData', version='6.7.2.dev0' +) +for _cdata in ( + 'ConstraintData', + 'LogicalConstraintData', + 'VarData', + 'BooleanVarData', + 'ObjectiveData', +): + relocated_module_attribute( + f'_{_cdata}', f'pyomo.core.base.{_cdata}', version='6.7.2.dev0' + ) +del _cdata del relocated_module_attribute diff --git a/pyomo/core/base/block.py b/pyomo/core/base/block.py index 2918ef78b00..3eb18dde7a9 100644 --- a/pyomo/core/base/block.py +++ b/pyomo/core/base/block.py @@ -160,13 +160,13 @@ def __init__(self): self.seen_data = set() def unique(self, comp, items, are_values): - """Returns generator that filters duplicate _ComponentData objects from items + """Returns generator that filters duplicate ComponentData objects from items Parameters ---------- comp: ComponentBase The Component (indexed or scalar) that contains all - _ComponentData returned by the `items` generator. `comp` may + ComponentData returned by the `items` generator. `comp` may be an IndexedComponent generated by :py:func:`Reference` (and hence may not own the component datas in `items`) @@ -175,8 +175,8 @@ def unique(self, comp, items, are_values): `comp` Component. 
are_values: bool - If `True`, `items` yields _ComponentData objects, otherwise, - `items` yields `(index, _ComponentData)` tuples. + If `True`, `items` yields ComponentData objects, otherwise, + `items` yields `(index, ComponentData)` tuples. """ if comp.is_reference(): @@ -254,7 +254,7 @@ class _BlockConstruction(object): class PseudoMap(AutoSlots.Mixin): """ This class presents a "mock" dict interface to the internal - _BlockData data structures. We return this object to the + BlockData data structures. We return this object to the user to preserve the historical "{ctype : {name : obj}}" interface without actually regenerating that dict-of-dicts data structure. @@ -487,7 +487,7 @@ def iteritems(self): return self.items() -class _BlockData(ActiveComponentData): +class BlockData(ActiveComponentData): """ This class holds the fundamental block data. """ @@ -537,9 +537,9 @@ def __init__(self, component): # _ctypes: { ctype -> [1st idx, last idx, count] } # _decl: { name -> idx } # _decl_order: list( tuples( obj, next_type_idx ) ) - super(_BlockData, self).__setattr__('_ctypes', {}) - super(_BlockData, self).__setattr__('_decl', {}) - super(_BlockData, self).__setattr__('_decl_order', []) + super(BlockData, self).__setattr__('_ctypes', {}) + super(BlockData, self).__setattr__('_decl', {}) + super(BlockData, self).__setattr__('_decl_order', []) self._private_data = None def __getattr__(self, val) -> Union[Component, IndexedComponent, Any]: @@ -574,7 +574,7 @@ def __setattr__(self, name: str, val: Union[Component, IndexedComponent, Any]): # Other Python objects are added with the standard __setattr__ # method. # - super(_BlockData, self).__setattr__(name, val) + super(BlockData, self).__setattr__(name, val) # # Case 2. The attribute exists and it is a component in the # list of declarations in this block. We will use the @@ -628,11 +628,11 @@ def __setattr__(self, name: str, val: Union[Component, IndexedComponent, Any]): # else: # - # NB: This is important: the _BlockData is either a scalar + # NB: This is important: the BlockData is either a scalar # Block (where _parent and _component are defined) or a # single block within an Indexed Block (where only # _component is defined). Regardless, the - # _BlockData.__init__() method declares these methods and + # BlockData.__init__() method declares these methods and # sets them either to None or a weakref. Thus, we will # never have a problem converting these objects from # weakrefs into Blocks and back (when pickling); the @@ -647,23 +647,23 @@ def __setattr__(self, name: str, val: Union[Component, IndexedComponent, Any]): # return True, this shouldn't be too inefficient. # if name == '_parent': - if val is not None and not isinstance(val(), _BlockData): + if val is not None and not isinstance(val(), BlockData): raise ValueError( "Cannot set the '_parent' attribute of Block '%s' " "to a non-Block object (with type=%s); Did you " "try to create a model component named '_parent'?" % (self.name, type(val)) ) - super(_BlockData, self).__setattr__(name, val) + super(BlockData, self).__setattr__(name, val) elif name == '_component': - if val is not None and not isinstance(val(), _BlockData): + if val is not None and not isinstance(val(), BlockData): raise ValueError( "Cannot set the '_component' attribute of Block '%s' " "to a non-Block object (with type=%s); Did you " "try to create a model component named '_component'?" 
% (self.name, type(val)) ) - super(_BlockData, self).__setattr__(name, val) + super(BlockData, self).__setattr__(name, val) # # At this point, we should only be seeing non-component data # the user is hanging on the blocks (uncommon) or the @@ -680,7 +680,7 @@ def __setattr__(self, name: str, val: Union[Component, IndexedComponent, Any]): delattr(self, name) self.add_component(name, val) else: - super(_BlockData, self).__setattr__(name, val) + super(BlockData, self).__setattr__(name, val) def __delattr__(self, name): """ @@ -703,7 +703,7 @@ def __delattr__(self, name): # Other Python objects are removed with the standard __detattr__ # method. # - super(_BlockData, self).__delattr__(name) + super(BlockData, self).__delattr__(name) def _compact_decl_storage(self): idxMap = {} @@ -775,11 +775,11 @@ def transfer_attributes_from(self, src): Parameters ---------- - src: _BlockData or dict + src: BlockData or dict The Block or mapping that contains the new attributes to assign to this block. """ - if isinstance(src, _BlockData): + if isinstance(src, BlockData): # There is a special case where assigning a parent block to # this block creates a circular hierarchy if src is self: @@ -788,7 +788,7 @@ def transfer_attributes_from(self, src): while p_block is not None: if p_block is src: raise ValueError( - "_BlockData.transfer_attributes_from(): Cannot set a " + "BlockData.transfer_attributes_from(): Cannot set a " "sub-block (%s) to a parent block (%s): creates a " "circular hierarchy" % (self, src) ) @@ -804,7 +804,7 @@ def transfer_attributes_from(self, src): del_src_comp = lambda x: None else: raise ValueError( - "_BlockData.transfer_attributes_from(): expected a " + "BlockData.transfer_attributes_from(): expected a " "Block or dict; received %s" % (type(src).__name__,) ) @@ -878,7 +878,7 @@ def collect_ctypes(self, active=None, descend_into=True): def model(self): # - # Special case: the "Model" is always the top-level _BlockData, + # Special case: the "Model" is always the top-level BlockData, # so if this is the top-level block, it must be the model # # Also note the interesting and intentional characteristic for @@ -1035,7 +1035,7 @@ def add_component(self, name, val): # is inappropriate here. The correct way to add the attribute # is to delegate the work to the next class up the MRO. # - super(_BlockData, self).__setattr__(name, val) + super(BlockData, self).__setattr__(name, val) # # Update the ctype linked lists # @@ -1106,7 +1106,7 @@ def add_component(self, name, val): # This is tricky: If we are in the middle of # constructing an indexed block, the block component # already has _constructed=True. Now, if the - # _BlockData.__init__() defines any local variables + # BlockData.__init__() defines any local variables # (like pyomo.gdp.Disjunct's indicator_var), name(True) # will fail: this block data exists and has a parent(), # but it has not yet been added to the parent's _data @@ -1194,7 +1194,7 @@ def del_component(self, name_or_object): # Note: 'del self.__dict__[name]' is inappropriate here. The # correct way to add the attribute is to delegate the work to # the next class up the MRO. 
- super(_BlockData, self).__delattr__(name) + super(BlockData, self).__delattr__(name) def reclassify_component_type( self, name_or_object, new_ctype, preserve_declaration_order=True @@ -1399,7 +1399,7 @@ def _component_data_iteritems(self, ctype, active, sort, dedup): Generator that returns a nested 2-tuple of - ((component name, index value), _ComponentData) + ((component name, index value), ComponentData) for every component data in the block matching the specified ctype(s). @@ -1416,7 +1416,7 @@ def _component_data_iteritems(self, ctype, active, sort, dedup): Iterate over the components in a specified sorted order dedup: _DeduplicateInfo - Deduplicator to prevent returning the same _ComponentData twice + Deduplicator to prevent returning the same ComponentData twice """ for name, comp in PseudoMap(self, ctype, active, sort).items(): # NOTE: Suffix has a dict interface (something other derived @@ -1452,7 +1452,7 @@ def _component_data_iteritems(self, ctype, active, sort, dedup): yield from dedup.unique(comp, _items, False) def _component_data_itervalues(self, ctype, active, sort, dedup): - """Generator that returns the _ComponentData for every component data + """Generator that returns the ComponentData for every component data in the block. Parameters @@ -1467,7 +1467,7 @@ def _component_data_itervalues(self, ctype, active, sort, dedup): Iterate over the components in a specified sorted order dedup: _DeduplicateInfo - Deduplicator to prevent returning the same _ComponentData twice + Deduplicator to prevent returning the same ComponentData twice """ for comp in PseudoMap(self, ctype, active, sort).values(): # NOTE: Suffix has a dict interface (something other derived @@ -1573,7 +1573,7 @@ def component_data_iterindex( generator recursively descends into sub-blocks. The tuple is - ((component name, index value), _ComponentData) + ((component name, index value), ComponentData) """ dedup = _DeduplicateInfo() @@ -1994,6 +1994,11 @@ def private_data(self, scope=None): return self._private_data[scope] +class _BlockData(metaclass=RenamedClass): + __renamed__new_class__ = BlockData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register( "A component that contains one or more model components." ) @@ -2007,7 +2012,7 @@ class Block(ActiveIndexedComponent): is deferred. """ - _ComponentDataClass = _BlockData + _ComponentDataClass = BlockData _private_data_initializers = defaultdict(lambda: dict) @overload @@ -2100,7 +2105,7 @@ def _getitem_when_not_present(self, idx): # components declared by the rule have the opportunity # to be initialized with data from # _BlockConstruction.data as they are transferred over. 
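When a Block rule returns a different BlockData, its contents are pulled onto the block being constructed via transfer_attributes_from(); a minimal sketch of that mechanism in isolation, with component names chosen purely for illustration:

import pyomo.environ as pyo

m = pyo.ConcreteModel()
m.b = pyo.Block()

src = pyo.ConcreteModel()  # any constructed BlockData (or a plain dict) can be a source
src.x = pyo.Var()
src.c = pyo.Constraint(expr=src.x >= 1)

# Components declared on src are moved onto m.b in declaration order.
m.b.transfer_attributes_from(src)
assert hasattr(m.b, "x") and hasattr(m.b, "c")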
- if obj is not _block and isinstance(obj, _BlockData): + if obj is not _block and isinstance(obj, BlockData): _block.transfer_attributes_from(obj) finally: if data is not None and _block is not self: @@ -2221,7 +2226,7 @@ def display(self, filename=None, ostream=None, prefix=""): ostream = sys.stdout for key in sorted(self): - _BlockData.display(self[key], filename, ostream, prefix) + BlockData.display(self[key], filename, ostream, prefix) @staticmethod def register_private_data_initializer(initializer, scope=None): @@ -2241,9 +2246,9 @@ def register_private_data_initializer(initializer, scope=None): Block._private_data_initializers[scope] = initializer -class ScalarBlock(_BlockData, Block): +class ScalarBlock(BlockData, Block): def __init__(self, *args, **kwds): - _BlockData.__init__(self, component=self) + BlockData.__init__(self, component=self) Block.__init__(self, *args, **kwds) # Initialize the data dict so that (abstract) attribute # assignment will work. Note that we do not trigger @@ -2266,7 +2271,7 @@ def __init__(self, *args, **kwds): Block.__init__(self, *args, **kwds) @overload - def __getitem__(self, index) -> _BlockData: ... + def __getitem__(self, index) -> BlockData: ... __getitem__ = IndexedComponent.__getitem__ # type: ignore @@ -2325,7 +2330,7 @@ def components_data(block, ctype, sort=None, sort_by_keys=False, sort_by_names=F # Create a Block and record all the default attributes, methods, etc. # These will be assumed to be the set of illegal component names. # -_BlockData._Block_reserved_words = set(dir(Block())) +BlockData._Block_reserved_words = set(dir(Block())) class _IndexedCustomBlockMeta(type): @@ -2376,7 +2381,7 @@ def declare_custom_block(name, new_ctype=None): """Decorator to declare components for a custom block data class >>> @declare_custom_block(name=FooBlock) - ... class FooBlockData(_BlockData): + ... class FooBlockData(BlockData): ... # custom block data class ... pass """ diff --git a/pyomo/core/base/boolean_var.py b/pyomo/core/base/boolean_var.py index 246dcea6214..67c06bdacce 100644 --- a/pyomo/core/base/boolean_var.py +++ b/pyomo/core/base/boolean_var.py @@ -68,27 +68,65 @@ def __setstate__(self, state): self._boolvar = weakref_ref(state) -class _BooleanVarData(ComponentData, BooleanValue): - """ - This class defines the data for a single variable. - - Constructor Arguments: - component The BooleanVar object that owns this data. - Public Class Attributes: - fixed If True, then this variable is treated as a - fixed constant in the model. - stale A Boolean indicating whether the value of this variable is - legitimate. This value is true if the value should - be considered legitimate for purposes of reporting or - other interrogation. - value The numeric value of this variable. +def _associated_binary_mapper(encode, val): + if val is None: + return None + if encode: + if val.__class__ is not _DeprecatedImplicitAssociatedBinaryVariable: + return val() + else: + if val.__class__ is not _DeprecatedImplicitAssociatedBinaryVariable: + return weakref_ref(val) + return val + + +class BooleanVarData(ComponentData, BooleanValue): + """This class defines the data for a single Boolean variable. + + Parameters + ---------- + component: Component + The BooleanVar object that owns this data. + + Attributes + ---------- + domain: SetData + The domain of this variable. + + fixed: bool + If True, then this variable is treated as a fixed constant in + the model. 
+ + stale: bool + A Boolean indicating whether the value of this variable is + Consistent with the most recent solve. `True` indicates that + this variable's value was set prior to the most recent solve and + was not updated by the results returned by the solve. + + value: bool + The value of this variable. """ - __slots__ = () + __slots__ = ('_value', 'fixed', '_stale', '_associated_binary') + __autoslot_mappers__ = { + '_associated_binary': _associated_binary_mapper, + '_stale': StaleFlagManager.stale_mapper, + } def __init__(self, component=None): + # + # These lines represent in-lining of the + # following constructors: + # - BooleanVarData + # - ComponentData + # - BooleanValue self._component = weakref_ref(component) if (component is not None) else None self._index = NOTSET + self._value = None + self.fixed = False + self._stale = 0 # True + + self._associated_binary = None def is_fixed(self): """Returns True if this variable is fixed, otherwise returns False.""" @@ -132,113 +170,6 @@ def __call__(self, exception=True): """Compute the value of this variable.""" return self.value - @property - def value(self): - """Return the value for this variable.""" - raise NotImplementedError - - @property - def domain(self): - """Return the domain for this variable.""" - raise NotImplementedError - - @property - def fixed(self): - """Return the fixed indicator for this variable.""" - raise NotImplementedError - - @property - def stale(self): - """Return the stale indicator for this variable.""" - raise NotImplementedError - - def fix(self, value=NOTSET, skip_validation=False): - """Fix the value of this variable (treat as nonvariable) - - This sets the `fixed` indicator to True. If ``value`` is - provided, the value (and the ``skip_validation`` flag) are first - passed to :py:meth:`set_value()`. - - """ - self.fixed = True - if value is not NOTSET: - self.set_value(value, skip_validation) - - def unfix(self): - """Unfix this variable (treat as variable) - - This sets the `fixed` indicator to False. - - """ - self.fixed = False - - def free(self): - """Alias for :py:meth:`unfix`""" - return self.unfix() - - -def _associated_binary_mapper(encode, val): - if val is None: - return None - if encode: - if val.__class__ is not _DeprecatedImplicitAssociatedBinaryVariable: - return val() - else: - if val.__class__ is not _DeprecatedImplicitAssociatedBinaryVariable: - return weakref_ref(val) - return val - - -class _GeneralBooleanVarData(_BooleanVarData): - """ - This class defines the data for a single Boolean variable. - - Constructor Arguments: - component The BooleanVar object that owns this data. - - Public Class Attributes: - domain The domain of this variable. - fixed If True, then this variable is treated as a - fixed constant in the model. - stale A Boolean indicating whether the value of this variable is - legitimiate. This value is true if the value should - be considered legitimate for purposes of reporting or - other interrogation. - value The numeric value of this variable. - - The domain attribute is a property because it is - too widely accessed directly to enforce explicit getter/setter - methods and we need to deter directly modifying or accessing - these attributes in certain cases. 
- """ - - __slots__ = ('_value', 'fixed', '_stale', '_associated_binary') - __autoslot_mappers__ = { - '_associated_binary': _associated_binary_mapper, - '_stale': StaleFlagManager.stale_mapper, - } - - def __init__(self, component=None): - # - # These lines represent in-lining of the - # following constructors: - # - _BooleanVarData - # - ComponentData - # - BooleanValue - self._component = weakref_ref(component) if (component is not None) else None - self._index = NOTSET - self._value = None - self.fixed = False - self._stale = 0 # True - - self._associated_binary = None - - # - # Abstract Interface - # - - # value is an attribute - @property def value(self): """Return (or set) the value for this variable.""" @@ -265,14 +196,14 @@ def stale(self, val): self._stale = StaleFlagManager.get_flag(0) def get_associated_binary(self): - """Get the binary _VarData associated with this - _GeneralBooleanVarData""" + """Get the binary VarData associated with this + BooleanVarData""" return ( self._associated_binary() if self._associated_binary is not None else None ) def associate_binary_var(self, binary_var): - """Associate a binary _VarData to this _GeneralBooleanVarData""" + """Associate a binary VarData to this BooleanVarData""" if ( self._associated_binary is not None and type(self._associated_binary) @@ -294,6 +225,40 @@ def associate_binary_var(self, binary_var): if binary_var is not None: self._associated_binary = weakref_ref(binary_var) + def fix(self, value=NOTSET, skip_validation=False): + """Fix the value of this variable (treat as nonvariable) + + This sets the `fixed` indicator to True. If ``value`` is + provided, the value (and the ``skip_validation`` flag) are first + passed to :py:meth:`set_value()`. + + """ + self.fixed = True + if value is not NOTSET: + self.set_value(value, skip_validation) + + def unfix(self): + """Unfix this variable (treat as variable) + + This sets the `fixed` indicator to False. + + """ + self.fixed = False + + def free(self): + """Alias for :py:meth:`unfix`""" + return self.unfix() + + +class _BooleanVarData(metaclass=RenamedClass): + __renamed__new_class__ = BooleanVarData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralBooleanVarData(metaclass=RenamedClass): + __renamed__new_class__ = BooleanVarData + __renamed__version__ = '6.7.2.dev0' + @ModelComponentFactory.register("Logical decision variables.") class BooleanVar(IndexedComponent): @@ -309,7 +274,7 @@ class BooleanVar(IndexedComponent): to True. 
""" - _ComponentDataClass = _GeneralBooleanVarData + _ComponentDataClass = BooleanVarData def __new__(cls, *args, **kwds): if cls != BooleanVar: @@ -390,7 +355,7 @@ def construct(self, data=None): _set.construct() # - # Construct _BooleanVarData objects for all index values + # Construct BooleanVarData objects for all index values # if not self.is_indexed(): self._data[None] = self @@ -501,11 +466,11 @@ def _pprint(self): ) -class ScalarBooleanVar(_GeneralBooleanVarData, BooleanVar): +class ScalarBooleanVar(BooleanVarData, BooleanVar): """A single variable.""" def __init__(self, *args, **kwd): - _GeneralBooleanVarData.__init__(self, component=self) + BooleanVarData.__init__(self, component=self) BooleanVar.__init__(self, *args, **kwd) self._index = UnindexedComponent_index @@ -521,7 +486,7 @@ def __init__(self, *args, **kwd): def value(self): """Return the value for this variable.""" if self._constructed: - return _GeneralBooleanVarData.value.fget(self) + return BooleanVarData.value.fget(self) raise ValueError( "Accessing the value of variable '%s' " "before the Var has been constructed (there " @@ -532,7 +497,7 @@ def value(self): def value(self, val): """Set the value for this variable.""" if self._constructed: - return _GeneralBooleanVarData.value.fset(self, val) + return BooleanVarData.value.fset(self, val) raise ValueError( "Setting the value of variable '%s' " "before the Var has been constructed (there " @@ -541,7 +506,7 @@ def value(self, val): @property def domain(self): - return _GeneralBooleanVarData.domain.fget(self) + return BooleanVarData.domain.fget(self) def fix(self, value=NOTSET, skip_validation=False): """ @@ -549,7 +514,7 @@ def fix(self, value=NOTSET, skip_validation=False): indicating the variable should be fixed at its current value. 
""" if self._constructed: - return _GeneralBooleanVarData.fix(self, value, skip_validation) + return BooleanVarData.fix(self, value, skip_validation) raise ValueError( "Fixing variable '%s' " "before the Var has been constructed (there " @@ -559,7 +524,7 @@ def fix(self, value=NOTSET, skip_validation=False): def unfix(self): """Sets the fixed indicator to False.""" if self._constructed: - return _GeneralBooleanVarData.unfix(self) + return BooleanVarData.unfix(self) raise ValueError( "Freeing variable '%s' " "before the Var has been constructed (there " diff --git a/pyomo/core/base/component.py b/pyomo/core/base/component.py index 22c2bc4b804..d06b85dcdd4 100644 --- a/pyomo/core/base/component.py +++ b/pyomo/core/base/component.py @@ -20,6 +20,7 @@ from pyomo.common.autoslots import AutoSlots, fast_deepcopy from pyomo.common.collections import OrderedDict from pyomo.common.deprecation import ( + RenamedClass, deprecated, deprecation_warning, relocated_module_attribute, @@ -79,7 +80,7 @@ class CloneError(pyomo.common.errors.PyomoException): pass -class _ComponentBase(PyomoObject): +class ComponentBase(PyomoObject): """A base class for Component and ComponentData This class defines some fundamental methods and properties that are @@ -368,7 +369,7 @@ def pprint(self, ostream=None, verbose=False, prefix=""): @property def name(self): - """Get the fully qualifed component name.""" + """Get the fully qualified component name.""" return self.getname(fully_qualified=True) # Adding a setter here to help users adapt to the new @@ -474,7 +475,12 @@ def _pprint_base_impl( ostream.write(_data) -class Component(_ComponentBase): +class _ComponentBase(metaclass=RenamedClass): + __renamed__new_class__ = ComponentBase + __renamed__version__ = '6.7.2.dev0' + + +class Component(ComponentBase): """ This is the base class for all Pyomo modeling components. @@ -664,7 +670,7 @@ def getname(self, fully_qualified=False, name_buffer=None, relative_to=None): @property def name(self): - """Get the fully qualifed component name.""" + """Get the fully qualified component name.""" return self.getname(fully_qualified=True) # Allow setting a component's name if it is not owned by a parent @@ -779,7 +785,7 @@ def deactivate(self): self._active = False -class ComponentData(_ComponentBase): +class ComponentData(ComponentBase): """ This is the base class for the component data used in Pyomo modeling components. Subclasses of ComponentData are @@ -802,11 +808,11 @@ class ComponentData(_ComponentBase): __autoslot_mappers__ = {'_component': AutoSlots.weakref_mapper} # NOTE: This constructor is in-lined in the constructors for the following - # classes: _BooleanVarData, _ConnectorData, _ConstraintData, - # _GeneralExpressionData, _LogicalConstraintData, - # _GeneralLogicalConstraintData, _GeneralObjectiveData, - # _ParamData,_GeneralVarData, _GeneralBooleanVarData, _DisjunctionData, - # _ArcData, _PortData, _LinearConstraintData, and + # classes: BooleanVarData, ConnectorData, ConstraintData, + # ExpressionData, LogicalConstraintData, + # LogicalConstraintData, ObjectiveData, + # ParamData,VarData, BooleanVarData, DisjunctionData, + # ArcData, PortData, _LinearConstraintData, and # _LinearMatrixConstraintData. Changes made here need to be made in those # constructors as well! 
def __init__(self, component): diff --git a/pyomo/core/base/connector.py b/pyomo/core/base/connector.py index 435a2c2fccb..e383b52fc11 100644 --- a/pyomo/core/base/connector.py +++ b/pyomo/core/base/connector.py @@ -28,7 +28,7 @@ logger = logging.getLogger('pyomo.core') -class _ConnectorData(ComponentData, NumericValue): +class ConnectorData(ComponentData, NumericValue): """Holds the actual connector information""" __slots__ = ('vars', 'aggregators') @@ -105,6 +105,11 @@ def _iter_vars(self): yield v +class _ConnectorData(metaclass=RenamedClass): + __renamed__new_class__ = ConnectorData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register( "A bundle of variables that can be manipulated together." ) @@ -157,7 +162,7 @@ def __init__(self, *args, **kwd): # IndexedComponent # def _getitem_when_not_present(self, idx): - _conval = self._data[idx] = _ConnectorData(component=self) + _conval = self._data[idx] = ConnectorData(component=self) return _conval def construct(self, data=None): @@ -170,7 +175,7 @@ def construct(self, data=None): timer = ConstructionTimer(self) self._constructed = True # - # Construct _ConnectorData objects for all index values + # Construct ConnectorData objects for all index values # if self.is_indexed(): self._initialize_members(self._index_set) @@ -258,9 +263,9 @@ def _line_generator(k, v): ) -class ScalarConnector(Connector, _ConnectorData): +class ScalarConnector(Connector, ConnectorData): def __init__(self, *args, **kwd): - _ConnectorData.__init__(self, component=self) + ConnectorData.__init__(self, component=self) Connector.__init__(self, *args, **kwd) self._index = UnindexedComponent_index diff --git a/pyomo/core/base/constraint.py b/pyomo/core/base/constraint.py index fde1160e563..eb4af76fdc1 100644 --- a/pyomo/core/base/constraint.py +++ b/pyomo/core/base/constraint.py @@ -125,17 +125,13 @@ def C_rule(model, i, j): return rule_wrapper(rule, result_map, map_types=map_types) -# -# This class is a pure interface -# - - -class _ConstraintData(ActiveComponentData): +class ConstraintData(ActiveComponentData): """ - This class defines the data for a single constraint. + This class defines the data for a single algebraic constraint. Constructor arguments: component The Constraint object that owns this data. + expr The Pyomo expression stored in this constraint. 
Public class attributes: active A boolean that is true if this constraint is @@ -155,164 +151,17 @@ class _ConstraintData(ActiveComponentData): _active A boolean that indicates whether this data is active """ - __slots__ = () + __slots__ = ('_body', '_lower', '_upper', '_expr') # Set to true when a constraint class stores its expression # in linear canonical form _linear_canonical_form = False - def __init__(self, component=None): - # - # These lines represent in-lining of the - # following constructors: - # - _ConstraintData, - # - ActiveComponentData - # - ComponentData - self._component = weakref_ref(component) if (component is not None) else None - self._index = NOTSET - self._active = True - - # - # Interface - # - - def __call__(self, exception=True): - """Compute the value of the body of this constraint.""" - return value(self.body, exception=exception) - - def has_lb(self): - """Returns :const:`False` when the lower bound is - :const:`None` or negative infinity""" - return self.lb is not None - - def has_ub(self): - """Returns :const:`False` when the upper bound is - :const:`None` or positive infinity""" - return self.ub is not None - - def lslack(self): - """ - Returns the value of f(x)-L for constraints of the form: - L <= f(x) (<= U) - (U >=) f(x) >= L - """ - lb = self.lb - if lb is None: - return _inf - else: - return value(self.body) - lb - - def uslack(self): - """ - Returns the value of U-f(x) for constraints of the form: - (L <=) f(x) <= U - U >= f(x) (>= L) - """ - ub = self.ub - if ub is None: - return _inf - else: - return ub - value(self.body) - - def slack(self): - """ - Returns the smaller of lslack and uslack values - """ - lb = self.lb - ub = self.ub - body = value(self.body) - if lb is None: - return ub - body - elif ub is None: - return body - lb - return min(ub - body, body - lb) - - # - # Abstract Interface - # - - @property - def body(self): - """Access the body of a constraint expression.""" - raise NotImplementedError - - @property - def lower(self): - """Access the lower bound of a constraint expression.""" - raise NotImplementedError - - @property - def upper(self): - """Access the upper bound of a constraint expression.""" - raise NotImplementedError - - @property - def lb(self): - """Access the value of the lower bound of a constraint expression.""" - raise NotImplementedError - - @property - def ub(self): - """Access the value of the upper bound of a constraint expression.""" - raise NotImplementedError - - @property - def equality(self): - """A boolean indicating whether this is an equality constraint.""" - raise NotImplementedError - - @property - def strict_lower(self): - """True if this constraint has a strict lower bound.""" - raise NotImplementedError - - @property - def strict_upper(self): - """True if this constraint has a strict upper bound.""" - raise NotImplementedError - - def set_value(self, expr): - """Set the expression on this constraint.""" - raise NotImplementedError - - def get_value(self): - """Get the expression on this constraint.""" - raise NotImplementedError - - -class _GeneralConstraintData(_ConstraintData): - """ - This class defines the data for a single general constraint. - - Constructor arguments: - component The Constraint object that owns this data. - expr The Pyomo expression stored in this constraint. - - Public class attributes: - active A boolean that is true if this constraint is - active in the model. 
- body The Pyomo expression for this constraint - lower The Pyomo expression for the lower bound - upper The Pyomo expression for the upper bound - equality A boolean that indicates whether this is an - equality constraint - strict_lower A boolean that indicates whether this - constraint uses a strict lower bound - strict_upper A boolean that indicates whether this - constraint uses a strict upper bound - - Private class attributes: - _component The objective component. - _active A boolean that indicates whether this data is active - """ - - __slots__ = ('_body', '_lower', '_upper', '_expr') - def __init__(self, expr=None, component=None): # # These lines represent in-lining of the # following constructors: - # - _ConstraintData, + # - ConstraintData, # - ActiveComponentData # - ComponentData self._component = weakref_ref(component) if (component is not None) else None @@ -325,9 +174,9 @@ def __init__(self, expr=None, component=None): if expr is not None: self.set_value(expr) - # - # Abstract Interface - # + def __call__(self, exception=True): + """Compute the value of the body of this constraint.""" + return value(self.body, exception=exception) @property def body(self): @@ -451,6 +300,16 @@ def strict_upper(self): """True if this constraint has a strict upper bound.""" return False + def has_lb(self): + """Returns :const:`False` when the lower bound is + :const:`None` or negative infinity""" + return self.lb is not None + + def has_ub(self): + """Returns :const:`False` when the upper bound is + :const:`None` or positive infinity""" + return self.ub is not None + @property def expr(self): """Return the expression associated with this constraint.""" @@ -678,6 +537,53 @@ def set_value(self, expr): "upper bound (%s)." % (self.name, self._upper) ) + def lslack(self): + """ + Returns the value of f(x)-L for constraints of the form: + L <= f(x) (<= U) + (U >=) f(x) >= L + """ + lb = self.lb + if lb is None: + return _inf + else: + return value(self.body) - lb + + def uslack(self): + """ + Returns the value of U-f(x) for constraints of the form: + (L <=) f(x) <= U + U >= f(x) (>= L) + """ + ub = self.ub + if ub is None: + return _inf + else: + return ub - value(self.body) + + def slack(self): + """ + Returns the smaller of lslack and uslack values + """ + lb = self.lb + ub = self.ub + body = value(self.body) + if lb is None: + return ub - body + elif ub is None: + return body - lb + return min(ub - body, body - lb) + + +class _ConstraintData(metaclass=RenamedClass): + __renamed__new_class__ = ConstraintData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralConstraintData(metaclass=RenamedClass): + __renamed__new_class__ = ConstraintData + __renamed__version__ = '6.7.2.dev0' + @ModelComponentFactory.register("General constraint expressions.") class Constraint(ActiveIndexedComponent): @@ -721,7 +627,7 @@ class Constraint(ActiveIndexedComponent): The class type for the derived subclass """ - _ComponentDataClass = _GeneralConstraintData + _ComponentDataClass = ConstraintData class Infeasible(object): pass @@ -879,14 +785,14 @@ def display(self, prefix="", ostream=None): ) -class ScalarConstraint(_GeneralConstraintData, Constraint): +class ScalarConstraint(ConstraintData, Constraint): """ ScalarConstraint is the implementation representing a single, non-indexed constraint. 
""" def __init__(self, *args, **kwds): - _GeneralConstraintData.__init__(self, component=self, expr=None) + ConstraintData.__init__(self, component=self, expr=None) Constraint.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -897,7 +803,7 @@ def __init__(self, *args, **kwds): # currently in place). So during initialization only, we will # treat them as "indexed" objects where things like # Constraint.Skip are managed. But after that they will behave - # like _ConstraintData objects where set_value does not handle + # like ConstraintData objects where set_value does not handle # Constraint.Skip but expects a valid expression or None. # @property @@ -910,7 +816,7 @@ def body(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.body.fget(self) + return ConstraintData.body.fget(self) @property def lower(self): @@ -922,7 +828,7 @@ def lower(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.lower.fget(self) + return ConstraintData.lower.fget(self) @property def upper(self): @@ -934,7 +840,7 @@ def upper(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.upper.fget(self) + return ConstraintData.upper.fget(self) @property def equality(self): @@ -946,7 +852,7 @@ def equality(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.equality.fget(self) + return ConstraintData.equality.fget(self) @property def strict_lower(self): @@ -958,7 +864,7 @@ def strict_lower(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.strict_lower.fget(self) + return ConstraintData.strict_lower.fget(self) @property def strict_upper(self): @@ -970,7 +876,7 @@ def strict_upper(self): "an expression. There is currently " "nothing to access." % (self.name) ) - return _GeneralConstraintData.strict_upper.fget(self) + return ConstraintData.strict_upper.fget(self) def clear(self): self._data = {} @@ -1035,7 +941,7 @@ def add(self, index, expr): return self.__setitem__(index, expr) @overload - def __getitem__(self, index) -> _GeneralConstraintData: ... + def __getitem__(self, index) -> ConstraintData: ... __getitem__ = IndexedComponent.__getitem__ # type: ignore diff --git a/pyomo/core/base/expression.py b/pyomo/core/base/expression.py index 3ce998b62a4..013c388e6e5 100644 --- a/pyomo/core/base/expression.py +++ b/pyomo/core/base/expression.py @@ -36,24 +36,24 @@ logger = logging.getLogger('pyomo.core') -class _ExpressionData(numeric_expr.NumericValue): - """ - An object that defines a named expression. +class NamedExpressionData(numeric_expr.NumericValue): + """An object that defines a generic "named expression". + + This is the base class for both :py:class:`ExpressionData` and + :py:class:`ObjectiveData`. Public Class Attributes expr The expression owned by this data. 
+ """ + # Note: derived classes are expected to declare the _args_ slot __slots__ = () EXPRESSION_SYSTEM = EXPR.ExpressionType.NUMERIC PRECEDENCE = 0 ASSOCIATIVITY = EXPR.OperatorAssociativity.NON_ASSOCIATIVE - # - # Interface - # - def __call__(self, exception=True): """Compute the value of this expression.""" (arg,) = self._args_ @@ -62,6 +62,18 @@ def __call__(self, exception=True): return arg return arg(exception=exception) + def create_node_with_local_data(self, values): + """ + Construct a simple expression after constructing the + contained expression. + + This class provides a consistent interface for constructing a + node, which is used in tree visitor scripts. + """ + obj = self.__class__() + obj._args_ = values + return obj + def is_named_expression_type(self): """A boolean indicating whether this in a named expression.""" return True @@ -110,9 +122,10 @@ def _compute_polynomial_degree(self, result): def _is_fixed(self, values): return values[0] - # - # Abstract Interface - # + # NamedExpressionData should never return False because + # they can store subexpressions that contain variables + def is_potentially_variable(self): + return True @property def expr(self): @@ -125,58 +138,6 @@ def expr(self): def expr(self, value): self.set_value(value) - def set_value(self, expr): - """Set the expression on this expression.""" - raise NotImplementedError - - def is_constant(self): - """A boolean indicating whether this expression is constant.""" - raise NotImplementedError - - def is_fixed(self): - """A boolean indicating whether this expression is fixed.""" - raise NotImplementedError - - # _ExpressionData should never return False because - # they can store subexpressions that contain variables - def is_potentially_variable(self): - return True - - -class _GeneralExpressionDataImpl(_ExpressionData): - """ - An object that defines an expression that is never cloned - - Constructor Arguments - expr The Pyomo expression stored in this expression. - component The Expression object that owns this data. - - Public Class Attributes - expr The expression owned by this data. - """ - - __slots__ = () - - def __init__(self, expr=None): - self._args_ = (expr,) - - def create_node_with_local_data(self, values): - """ - Construct a simple expression after constructing the - contained expression. - - This class provides a consistent interface for constructing a - node, which is used in tree visitor scripts. 
- """ - obj = ScalarExpression() - obj.construct() - obj._args_ = values - return obj - - # - # Abstract Interface - # - def set_value(self, expr): """Set the expression on this expression.""" if expr is None or expr.__class__ in native_numeric_types: @@ -235,7 +196,17 @@ def __ipow__(self, other): return numeric_expr._pow_dispatcher[e.__class__, other.__class__](e, other) -class _GeneralExpressionData(_GeneralExpressionDataImpl, ComponentData): +class _ExpressionData(metaclass=RenamedClass): + __renamed__new_class__ = NamedExpressionData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralExpressionDataImpl(metaclass=RenamedClass): + __renamed__new_class__ = NamedExpressionData + __renamed__version__ = '6.7.2.dev0' + + +class ExpressionData(NamedExpressionData, ComponentData): """ An object that defines an expression that is never cloned @@ -253,12 +224,16 @@ class _GeneralExpressionData(_GeneralExpressionDataImpl, ComponentData): __slots__ = ('_args_',) def __init__(self, expr=None, component=None): - _GeneralExpressionDataImpl.__init__(self, expr) - # Inlining ComponentData.__init__ + self._args_ = (expr,) self._component = weakref_ref(component) if (component is not None) else None self._index = NOTSET +class _GeneralExpressionData(metaclass=RenamedClass): + __renamed__new_class__ = ExpressionData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register( "Named expressions that can be used in other expressions." ) @@ -275,7 +250,7 @@ class Expression(IndexedComponent): doc Text describing this component. """ - _ComponentDataClass = _GeneralExpressionData + _ComponentDataClass = ExpressionData # This seems like a copy-paste error, and should be renamed/removed NoConstraint = IndexedComponent.Skip @@ -402,9 +377,9 @@ def construct(self, data=None): timer.report() -class ScalarExpression(_GeneralExpressionData, Expression): +class ScalarExpression(ExpressionData, Expression): def __init__(self, *args, **kwds): - _GeneralExpressionData.__init__(self, expr=None, component=self) + ExpressionData.__init__(self, expr=None, component=self) Expression.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -427,7 +402,7 @@ def __call__(self, exception=True): def expr(self): """Return expression on this expression.""" if self._constructed: - return _GeneralExpressionData.expr.fget(self) + return ExpressionData.expr.fget(self) raise ValueError( "Accessing the expression of Expression '%s' " "before the Expression has been constructed (there " @@ -445,7 +420,7 @@ def clear(self): def set_value(self, expr): """Set the expression on this expression.""" if self._constructed: - return _GeneralExpressionData.set_value(self, expr) + return ExpressionData.set_value(self, expr) raise ValueError( "Setting the expression of Expression '%s' " "before the Expression has been constructed (there " @@ -455,7 +430,7 @@ def set_value(self, expr): def is_constant(self): """A boolean indicating whether this expression is constant.""" if self._constructed: - return _GeneralExpressionData.is_constant(self) + return ExpressionData.is_constant(self) raise ValueError( "Accessing the is_constant flag of Expression '%s' " "before the Expression has been constructed (there " @@ -465,7 +440,7 @@ def is_constant(self): def is_fixed(self): """A boolean indicating whether this expression is fixed.""" if self._constructed: - return _GeneralExpressionData.is_fixed(self) + return ExpressionData.is_fixed(self) raise ValueError( "Accessing the is_fixed flag of Expression '%s' " "before the 
Expression has been constructed (there " @@ -509,6 +484,6 @@ def add(self, index, expr): """Add an expression with a given index.""" if (type(expr) is tuple) and (expr == Expression.Skip): return None - cdata = _GeneralExpressionData(expr, component=self) + cdata = ExpressionData(expr, component=self) self._data[index] = cdata return cdata diff --git a/pyomo/core/base/indexed_component.py b/pyomo/core/base/indexed_component.py index e1be613d666..37a62e5c4d7 100644 --- a/pyomo/core/base/indexed_component.py +++ b/pyomo/core/base/indexed_component.py @@ -731,7 +731,7 @@ def __delitem__(self, index): # this supports "del m.x[:,1]" through a simple recursive call if index.__class__ is IndexedComponent_slice: - # Assert that this slice ws just generated + # Assert that this slice was just generated assert len(index._call_stack) == 1 # Make a copy of the slicer items *before* we start # iterating over it (since we will be removing items!). diff --git a/pyomo/core/base/logical_constraint.py b/pyomo/core/base/logical_constraint.py index f32d727931a..9584078307d 100644 --- a/pyomo/core/base/logical_constraint.py +++ b/pyomo/core/base/logical_constraint.py @@ -42,64 +42,7 @@ """ -class _LogicalConstraintData(ActiveComponentData): - """ - This class defines the data for a single logical constraint. - - It functions as a pure interface. - - Constructor arguments: - component The LogicalConstraint object that owns this data. - - Public class attributes: - active A boolean that is true if this statement is - active in the model. - body The Pyomo logical expression for this statement - - Private class attributes: - _component The statement component. - _active A boolean that indicates whether this data is active - """ - - __slots__ = () - - def __init__(self, component=None): - # - # These lines represent in-lining of the - # following constructors: - # - ActiveComponentData - # - ComponentData - self._component = weakref_ref(component) if (component is not None) else None - self._index = NOTSET - self._active = True - - # - # Interface - # - def __call__(self, exception=True): - """Compute the value of the body of this logical constraint.""" - if self.body is None: - return None - return self.body(exception=exception) - - # - # Abstract Interface - # - @property - def expr(self): - """Get the expression on this logical constraint.""" - raise NotImplementedError - - def set_value(self, expr): - """Set the expression on this logical constraint.""" - raise NotImplementedError - - def get_value(self): - """Get the expression on this logical constraint.""" - raise NotImplementedError - - -class _GeneralLogicalConstraintData(_LogicalConstraintData): +class LogicalConstraintData(ActiveComponentData): """ This class defines the data for a single general logical constraint. 
@@ -123,7 +66,7 @@ def __init__(self, expr=None, component=None): # # These lines represent in-lining of the # following constructors: - # - _LogicalConstraintData, + # - LogicalConstraintData, # - ActiveComponentData # - ComponentData self._component = weakref_ref(component) if (component is not None) else None @@ -134,6 +77,12 @@ def __init__(self, expr=None, component=None): if expr is not None: self.set_value(expr) + def __call__(self, exception=True): + """Compute the value of the body of this logical constraint.""" + if self.body is None: + return None + return self.body(exception=exception) + # # Abstract Interface # @@ -173,6 +122,16 @@ def get_value(self): return self._expr +class _LogicalConstraintData(metaclass=RenamedClass): + __renamed__new_class__ = LogicalConstraintData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralLogicalConstraintData(metaclass=RenamedClass): + __renamed__new_class__ = LogicalConstraintData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("General logical constraints.") class LogicalConstraint(ActiveIndexedComponent): """ @@ -215,7 +174,7 @@ class LogicalConstraint(ActiveIndexedComponent): The class type for the derived subclass """ - _ComponentDataClass = _GeneralLogicalConstraintData + _ComponentDataClass = LogicalConstraintData class Infeasible(object): pass @@ -373,7 +332,7 @@ def display(self, prefix="", ostream=None): # # Checks flags like Constraint.Skip, etc. before actually creating a - # constraint object. Returns the _ConstraintData object when it should be + # constraint object. Returns the ConstraintData object when it should be # added to the _data dict; otherwise, None is returned or an exception # is raised. # @@ -409,14 +368,14 @@ def _check_skip_add(self, index, expr): return expr -class ScalarLogicalConstraint(_GeneralLogicalConstraintData, LogicalConstraint): +class ScalarLogicalConstraint(LogicalConstraintData, LogicalConstraint): """ ScalarLogicalConstraint is the implementation representing a single, non-indexed logical constraint. """ def __init__(self, *args, **kwds): - _GeneralLogicalConstraintData.__init__(self, component=self, expr=None) + LogicalConstraintData.__init__(self, component=self, expr=None) LogicalConstraint.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -436,7 +395,7 @@ def body(self): "an expression. There is currently " "nothing to access." % self.name ) - return _GeneralLogicalConstraintData.body.fget(self) + return LogicalConstraintData.body.fget(self) raise ValueError( "Accessing the body of logical constraint '%s' " "before the LogicalConstraint has been constructed (there " @@ -450,7 +409,7 @@ def body(self): # currently in place). So during initialization only, we will # treat them as "indexed" objects where things like # True are managed. But after that they will behave - # like _LogicalConstraintData objects where set_value expects + # like LogicalConstraintData objects where set_value expects # a valid expression or None. 
# diff --git a/pyomo/core/base/matrix_constraint.py b/pyomo/core/base/matrix_constraint.py index adc9742302e..8dac7c3d24b 100644 --- a/pyomo/core/base/matrix_constraint.py +++ b/pyomo/core/base/matrix_constraint.py @@ -19,7 +19,7 @@ from pyomo.core.expr.numvalue import value from pyomo.core.expr.numeric_expr import LinearExpression from pyomo.core.base.component import ModelComponentFactory -from pyomo.core.base.constraint import IndexedConstraint, _ConstraintData +from pyomo.core.base.constraint import IndexedConstraint, ConstraintData from pyomo.repn.standard_repn import StandardRepn from collections.abc import Mapping @@ -28,7 +28,7 @@ logger = logging.getLogger('pyomo.core') -class _MatrixConstraintData(_ConstraintData): +class _MatrixConstraintData(ConstraintData): """ This class defines the data for a single linear constraint derived from a canonical form Ax=b constraint. @@ -104,7 +104,7 @@ def __init__(self, index, component_ref): # # These lines represent in-lining of the # following constructors: - # - _ConstraintData, + # - ConstraintData, # - ActiveComponentData # - ComponentData self._component = component_ref @@ -209,7 +209,7 @@ def index(self): return self._index # - # Abstract Interface (_ConstraintData) + # Abstract Interface (ConstraintData) # @property diff --git a/pyomo/core/base/objective.py b/pyomo/core/base/objective.py index fcc63755f2b..e388d25aab4 100644 --- a/pyomo/core/base/objective.py +++ b/pyomo/core/base/objective.py @@ -15,6 +15,7 @@ from pyomo.common.pyomo_typing import overload from pyomo.common.deprecation import RenamedClass +from pyomo.common.enums import ObjectiveSense, minimize, maximize from pyomo.common.log import is_debug_set from pyomo.common.modeling import NOTSET from pyomo.common.formatting import tabular_writer @@ -28,14 +29,13 @@ UnindexedComponent_set, rule_wrapper, ) -from pyomo.core.base.expression import _ExpressionData, _GeneralExpressionDataImpl +from pyomo.core.base.expression import NamedExpressionData from pyomo.core.base.set import Set from pyomo.core.base.initializer import ( Initializer, IndexedCallInitializer, CountedCallInitializer, ) -from pyomo.core.base import minimize, maximize logger = logging.getLogger('pyomo.core') @@ -81,47 +81,7 @@ def O_rule(model, i, j): return rule_wrapper(rule, {None: ObjectiveList.End}) -# -# This class is a pure interface -# - - -class _ObjectiveData(_ExpressionData): - """ - This class defines the data for a single objective. - - Public class attributes: - expr The Pyomo expression for this objective - sense The direction for this objective. - """ - - __slots__ = () - - # - # Interface - # - - def is_minimizing(self): - """Return True if this is a minimization objective.""" - return self.sense == minimize - - # - # Abstract Interface - # - - @property - def sense(self): - """Access sense (direction) of this objective.""" - raise NotImplementedError - - def set_sense(self, sense): - """Set the sense (direction) of this objective.""" - raise NotImplementedError - - -class _GeneralObjectiveData( - _GeneralExpressionDataImpl, _ObjectiveData, ActiveComponentData -): +class ObjectiveData(NamedExpressionData, ActiveComponentData): """ This class defines the data for a single objective. 
@@ -144,22 +104,20 @@ class _GeneralObjectiveData( _active A boolean that indicates whether this data is active """ - __slots__ = ("_sense", "_args_") + __slots__ = ("_args_", "_sense") def __init__(self, expr=None, sense=minimize, component=None): - _GeneralExpressionDataImpl.__init__(self, expr) + # Inlining NamedExpressionData.__init__ + self._args_ = (expr,) # Inlining ActiveComponentData.__init__ self._component = weakref_ref(component) if (component is not None) else None self._index = NOTSET self._active = True - self._sense = sense + self._sense = ObjectiveSense(sense) - if (self._sense != minimize) and (self._sense != maximize): - raise ValueError( - "Objective sense must be set to one of " - "'minimize' (%s) or 'maximize' (%s). Invalid " - "value: %s'" % (minimize, maximize, sense) - ) + def is_minimizing(self): + """Return True if this is a minimization objective.""" + return self.sense == minimize def set_value(self, expr): if expr is None: @@ -182,14 +140,17 @@ def sense(self, sense): def set_sense(self, sense): """Set the sense (direction) of this objective.""" - if sense in {minimize, maximize}: - self._sense = sense - else: - raise ValueError( - "Objective sense must be set to one of " - "'minimize' (%s) or 'maximize' (%s). Invalid " - "value: %s'" % (minimize, maximize, sense) - ) + self._sense = ObjectiveSense(sense) + + +class _ObjectiveData(metaclass=RenamedClass): + __renamed__new_class__ = ObjectiveData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralObjectiveData(metaclass=RenamedClass): + __renamed__new_class__ = ObjectiveData + __renamed__version__ = '6.7.2.dev0' @ModelComponentFactory.register("Expressions that are minimized or maximized.") @@ -240,7 +201,7 @@ class Objective(ActiveIndexedComponent): The class type for the derived subclass """ - _ComponentDataClass = _GeneralObjectiveData + _ComponentDataClass = ObjectiveData NoObjective = ActiveIndexedComponent.Skip def __new__(cls, *args, **kwds): @@ -353,11 +314,7 @@ def _pprint(self): ], self._data.items(), ("Active", "Sense", "Expression"), - lambda k, v: [ - v.active, - ("minimize" if (v.sense == minimize) else "maximize"), - v.expr, - ], + lambda k, v: [v.active, v.sense, v.expr], ) def display(self, prefix="", ostream=None): @@ -389,14 +346,14 @@ def display(self, prefix="", ostream=None): ) -class ScalarObjective(_GeneralObjectiveData, Objective): +class ScalarObjective(ObjectiveData, Objective): """ ScalarObjective is the implementation representing a single, non-indexed objective. """ def __init__(self, *args, **kwd): - _GeneralObjectiveData.__init__(self, expr=None, component=self) + ObjectiveData.__init__(self, expr=None, component=self) Objective.__init__(self, *args, **kwd) self._index = UnindexedComponent_index @@ -432,7 +389,7 @@ def expr(self): "a sense or expression (there is currently " "no value to return)." % (self.name) ) - return _GeneralObjectiveData.expr.fget(self) + return ObjectiveData.expr.fget(self) raise ValueError( "Accessing the expression of objective '%s' " "before the Objective has been constructed (there " @@ -455,7 +412,7 @@ def sense(self): "a sense or expression (there is currently " "no value to return)." % (self.name) ) - return _GeneralObjectiveData.sense.fget(self) + return ObjectiveData.sense.fget(self) raise ValueError( "Accessing the sense of objective '%s' " "before the Objective has been constructed (there " @@ -474,7 +431,7 @@ def sense(self, sense): # currently in place). 
So during initialization only, we will # treat them as "indexed" objects where things like # Objective.Skip are managed. But after that they will behave - # like _ObjectiveData objects where set_value does not handle + # like ObjectiveData objects where set_value does not handle # Objective.Skip but expects a valid expression or None # @@ -498,7 +455,7 @@ def set_sense(self, sense): if self._constructed: if len(self._data) == 0: self._data[None] = self - return _GeneralObjectiveData.set_sense(self, sense) + return ObjectiveData.set_sense(self, sense) raise ValueError( "Setting the sense of objective '%s' " "before the Objective has been constructed (there " diff --git a/pyomo/core/base/param.py b/pyomo/core/base/param.py index 5fcaf92b25a..9af6a37de45 100644 --- a/pyomo/core/base/param.py +++ b/pyomo/core/base/param.py @@ -118,7 +118,7 @@ def _parent(self, val): pass -class _ParamData(ComponentData, NumericValue): +class ParamData(ComponentData, NumericValue): """ This class defines the data for a mutable parameter. @@ -252,6 +252,11 @@ def _compute_polynomial_degree(self, result): return 0 +class _ParamData(metaclass=RenamedClass): + __renamed__new_class__ = ParamData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register( "Parameter data that is used to define a model instance." ) @@ -285,7 +290,7 @@ class Param(IndexedComponent, IndexedComponent_NDArrayMixin): """ DefaultMutable = False - _ComponentDataClass = _ParamData + _ComponentDataClass = ParamData class NoValue(object): """A dummy type that is pickle-safe that we can use as the default @@ -523,14 +528,14 @@ def store_values(self, new_values, check=True): # instead of incurring the penalty of checking. for index, new_value in new_values.items(): if index not in self._data: - self._data[index] = _ParamData(self) + self._data[index] = ParamData(self) self._data[index]._value = new_value else: # For scalars, we will choose an approach based on # how "dense" the Param is if not self._data: # empty for index in self._index_set: - p = self._data[index] = _ParamData(self) + p = self._data[index] = ParamData(self) p._value = new_values elif len(self._data) == len(self._index_set): for index in self._index_set: @@ -538,7 +543,7 @@ def store_values(self, new_values, check=True): else: for index in self._index_set: if index not in self._data: - self._data[index] = _ParamData(self) + self._data[index] = ParamData(self) self._data[index]._value = new_values else: # @@ -601,9 +606,9 @@ def _getitem_when_not_present(self, index): # a default value, as long as *solving* a model without # reasonable values produces an informative error. 
if self._mutable: - # Note: _ParamData defaults to Param.NoValue + # Note: ParamData defaults to Param.NoValue if self.is_indexed(): - ans = self._data[index] = _ParamData(self) + ans = self._data[index] = ParamData(self) else: ans = self._data[index] = self ans._index = index @@ -698,8 +703,8 @@ def _setitem_impl(self, index, obj, value): return obj else: old_value, self._data[index] = self._data[index], value - # Because we do not have a _ParamData, we cannot rely on the - # validation that occurs in _ParamData.set_value() + # Because we do not have a ParamData, we cannot rely on the + # validation that occurs in ParamData.set_value() try: self._validate_value(index, value) return value @@ -736,14 +741,14 @@ def _setitem_when_not_present(self, index, value, _check_domain=True): self._index = UnindexedComponent_index return self elif self._mutable: - obj = self._data[index] = _ParamData(self) + obj = self._data[index] = ParamData(self) obj.set_value(value, index) obj._index = index return obj else: self._data[index] = value - # Because we do not have a _ParamData, we cannot rely on the - # validation that occurs in _ParamData.set_value() + # Because we do not have a ParamData, we cannot rely on the + # validation that occurs in ParamData.set_value() self._validate_value(index, value, _check_domain) return value except: @@ -901,9 +906,9 @@ def _pprint(self): return (headers, self.sparse_iteritems(), ("Value",), dataGen) -class ScalarParam(_ParamData, Param): +class ScalarParam(ParamData, Param): def __init__(self, *args, **kwds): - _ParamData.__init__(self, component=self) + ParamData.__init__(self, component=self) Param.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -996,7 +1001,7 @@ def _create_objects_for_deepcopy(self, memo, component_list): # between potentially variable GetItemExpression objects and # "constant" GetItemExpression objects. That will need to wait for # the expression rework [JDS; Nov 22]. - def __getitem__(self, args) -> _ParamData: + def __getitem__(self, args) -> ParamData: try: return super().__getitem__(args) except: diff --git a/pyomo/core/base/piecewise.py b/pyomo/core/base/piecewise.py index 7817a61b2f2..efe500dbfb1 100644 --- a/pyomo/core/base/piecewise.py +++ b/pyomo/core/base/piecewise.py @@ -40,14 +40,14 @@ import enum from pyomo.common.log import is_debug_set -from pyomo.common.deprecation import deprecation_warning +from pyomo.common.deprecation import RenamedClass, deprecation_warning from pyomo.common.numeric_types import value from pyomo.common.timing import ConstructionTimer -from pyomo.core.base.block import Block, _BlockData +from pyomo.core.base.block import Block, BlockData from pyomo.core.base.component import ModelComponentFactory from pyomo.core.base.constraint import Constraint, ConstraintList from pyomo.core.base.sos import SOSConstraint -from pyomo.core.base.var import Var, _VarData, IndexedVar +from pyomo.core.base.var import Var, VarData, IndexedVar from pyomo.core.base.set_types import PositiveReals, NonNegativeReals, Binary from pyomo.core.base.util import flatten_tuple @@ -214,14 +214,14 @@ def _characterize_function(name, tol, f_rule, model, points, *index): return 0, values, False -class _PiecewiseData(_BlockData): +class PiecewiseData(BlockData): """ This class defines the base class for all linearization and piecewise constraint generators.. 
""" def __init__(self, parent): - _BlockData.__init__(self, parent) + BlockData.__init__(self, parent) self._constructed = True self._bound_type = None self._domain_pts = None @@ -272,6 +272,11 @@ def __call__(self, x): ) +class _PiecewiseData(metaclass=RenamedClass): + __renamed__new_class__ = PiecewiseData + __renamed__version__ = '6.7.2.dev0' + + class _SimpleSinglePiecewise(object): """ Called when the piecewise points list has only two points @@ -1125,7 +1130,7 @@ def f(model,j,x): not be modified. """ - _ComponentDataClass = _PiecewiseData + _ComponentDataClass = PiecewiseData def __new__(cls, *args, **kwds): if cls != Piecewise: @@ -1235,7 +1240,7 @@ def __init__(self, *args, **kwds): # Check that the variables args are actually Pyomo Vars if not ( - isinstance(self._domain_var, _VarData) + isinstance(self._domain_var, VarData) or isinstance(self._domain_var, IndexedVar) ): msg = ( @@ -1244,7 +1249,7 @@ def __init__(self, *args, **kwds): ) raise TypeError(msg % (repr(self._domain_var),)) if not ( - isinstance(self._range_var, _VarData) + isinstance(self._range_var, VarData) or isinstance(self._range_var, IndexedVar) ): msg = ( @@ -1354,22 +1359,22 @@ def add(self, index, _is_indexed=None): _self_yvar = None _self_domain_pts_index = None if not _is_indexed: - # allows one to mix Var and _VarData as input to + # allows one to mix Var and VarData as input to # non-indexed Piecewise, index would be None in this case - # so for Var elements Var[None] is Var, but _VarData[None] would fail + # so for Var elements Var[None] is Var, but VarData[None] would fail _self_xvar = self._domain_var _self_yvar = self._range_var _self_domain_pts_index = self._domain_points[index] else: - # The following allows one to specify a Var or _VarData + # The following allows one to specify a Var or VarData # object even with an indexed Piecewise component. # The most common situation will most likely be a VarArray, # so we try this first. 
- if not isinstance(self._domain_var, _VarData): + if not isinstance(self._domain_var, VarData): _self_xvar = self._domain_var[index] else: _self_xvar = self._domain_var - if not isinstance(self._range_var, _VarData): + if not isinstance(self._range_var, VarData): _self_yvar = self._range_var[index] else: _self_yvar = self._range_var @@ -1541,7 +1546,7 @@ def add(self, index, _is_indexed=None): raise ValueError(msg % (self.name, index, self._pw_rep)) if _is_indexed: - comp = _PiecewiseData(self) + comp = PiecewiseData(self) else: comp = self self._data[index] = comp @@ -1551,9 +1556,9 @@ def add(self, index, _is_indexed=None): comp.build_constraints(func, _self_xvar, _self_yvar) -class SimplePiecewise(_PiecewiseData, Piecewise): +class SimplePiecewise(PiecewiseData, Piecewise): def __init__(self, *args, **kwds): - _PiecewiseData.__init__(self, self) + PiecewiseData.__init__(self, self) Piecewise.__init__(self, *args, **kwds) diff --git a/pyomo/core/base/reference.py b/pyomo/core/base/reference.py index 2279db067a6..558ced64f1b 100644 --- a/pyomo/core/base/reference.py +++ b/pyomo/core/base/reference.py @@ -18,7 +18,7 @@ Sequence, ) from pyomo.common.modeling import NOTSET -from pyomo.core.base.set import DeclareGlobalSet, Set, SetOf, OrderedSetOf, _SetDataBase +from pyomo.core.base.set import DeclareGlobalSet, Set, SetOf, OrderedSetOf, SetData from pyomo.core.base.component import Component, ComponentData from pyomo.core.base.global_set import UnindexedComponent_set from pyomo.core.base.enums import SortComponents @@ -579,7 +579,7 @@ def Reference(reference, ctype=NOTSET): :py:class:`IndexedComponent`. If the indices associated with wildcards in the component slice all - refer to the same :py:class:`Set` objects for all data identifed by + refer to the same :py:class:`Set` objects for all data identified by the slice, then the resulting indexed component will be indexed by the product of those sets. However, if all data do not share common set objects, or only a subset of indices in a multidimentional set @@ -774,10 +774,10 @@ def Reference(reference, ctype=NOTSET): # is that within the subsets list, and set is a wildcard set. index = wildcards[0][1] # index is the first wildcard set. - if not isinstance(index, _SetDataBase): + if not isinstance(index, SetData): index = SetOf(index) for lvl, idx in wildcards[1:]: - if not isinstance(idx, _SetDataBase): + if not isinstance(idx, SetData): idx = SetOf(idx) index = index * idx # index is now either a single Set, or a SetProduct of the diff --git a/pyomo/core/base/set.py b/pyomo/core/base/set.py index b3277ab3260..b9a2fe72e1d 100644 --- a/pyomo/core/base/set.py +++ b/pyomo/core/base/set.py @@ -50,7 +50,7 @@ RangeDifferenceError, ) from pyomo.core.base.component import ( - _ComponentBase, + ComponentBase, Component, ComponentData, ModelComponentFactory, @@ -84,10 +84,7 @@ All Sets implement one of the following APIs: -0. `class _SetDataBase(ComponentData)` - *(pure virtual interface)* - -1. `class _SetData(_SetDataBase)` +1. `class SetData(ComponentData)` *(base class for all AML Sets)* 2. `class _FiniteSetMixin(object)` @@ -102,7 +99,7 @@ bounded continuous ranges as well as unbounded discrete ranges). As there are an infinite number of values, iteration is *not* supported. The base class also implements all Python set operations. -Note that `_SetData` does *not* implement `len()`, as Python requires +Note that `SetData` does *not* implement `len()`, as Python requires `len()` to return a positive integer. 
Finite sets add iteration and support for `len()`. In addition, they @@ -128,7 +125,7 @@ def process_setarg(arg): - if isinstance(arg, _SetDataBase): + if isinstance(arg, SetData): if ( getattr(arg, '_parent', None) is not None or getattr(arg, '_anonymous_sets', None) is GlobalSetBase @@ -140,7 +137,7 @@ def process_setarg(arg): _anonymous.update(arg._anonymous_sets) return arg, _anonymous - elif isinstance(arg, _ComponentBase): + elif isinstance(arg, ComponentBase): if isinstance(arg, IndexedComponent) and arg.is_indexed(): raise TypeError( "Cannot apply a Set operator to an " @@ -512,16 +509,8 @@ class _NotFound(object): pass -# A trivial class that we can use to test if an object is a "legitimate" -# set (either ScalarSet, or a member of an IndexedSet) -class _SetDataBase(ComponentData): - """The base for all objects that can be used as a component indexing set.""" - - __slots__ = () - - -class _SetData(_SetDataBase): - """The base for all Pyomo AML objects that can be used as a component +class SetData(ComponentData): + """The base for all Pyomo objects that can be used as a component indexing set. Derived versions of this class can be used as the Index for any @@ -534,13 +523,13 @@ def __contains__(self, value): ans = self.get(value, _NotFound) except TypeError: # In Python 3.x, Sets are unhashable - if isinstance(value, _SetData): + if isinstance(value, SetData): ans = _NotFound else: raise if ans is _NotFound: - if isinstance(value, _SetData): + if isinstance(value, SetData): deprecation_warning( "Testing for set subsets with 'a in b' is deprecated. " "Use 'a.issubset(b)'.", @@ -894,7 +883,7 @@ def _get_continuous_interval(self): @property @deprecated("The 'virtual' attribute is no longer supported", version='5.7') def virtual(self): - return isinstance(self, (_AnySet, SetOperator, _InfiniteRangeSetData)) + return isinstance(self, (_AnySet, SetOperator, InfiniteRangeSetData)) @virtual.setter def virtual(self, value): @@ -1188,6 +1177,16 @@ def __gt__(self, other): return self >= other and not self == other +class _SetData(metaclass=RenamedClass): + __renamed__new_class__ = SetData + __renamed__version__ = '6.7.2.dev0' + + +class _SetDataBase(metaclass=RenamedClass): + __renamed__new_class__ = SetData + __renamed__version__ = '6.7.2.dev0' + + class _FiniteSetMixin(object): __slots__ = () @@ -1294,14 +1293,14 @@ def ranges(self): yield NonNumericRange(i) -class _FiniteSetData(_FiniteSetMixin, _SetData): +class FiniteSetData(_FiniteSetMixin, SetData): """A general unordered iterable Set""" __slots__ = ('_values', '_domain', '_validate', '_filter', '_dimen') def __init__(self, component): - _SetData.__init__(self, component=component) - # Derived classes (like _OrderedSetData) may want to change the + SetData.__init__(self, component=component) + # Derived classes (like OrderedSetData) may want to change the # storage if not hasattr(self, '_values'): self._values = set() @@ -1470,6 +1469,11 @@ def pop(self): return self._values.pop() +class _FiniteSetData(metaclass=RenamedClass): + __renamed__new_class__ = FiniteSetData + __renamed__version__ = '6.7.2.dev0' + + class _ScalarOrderedSetMixin(object): # This mixin is required because scalar ordered sets implement # __getitem__() as an alias of at() @@ -1630,16 +1634,16 @@ def _to_0_based_index(self, item): ) -class _OrderedSetData(_OrderedSetMixin, _FiniteSetData): +class OrderedSetData(_OrderedSetMixin, FiniteSetData): """ This class defines the base class for an ordered set of concrete data. 
In older Pyomo terms, this defines a "concrete" ordered set - that is, a set that "owns" the list of set members. While this class actually implements a set ordered by insertion order, we make the "official" - _InsertionOrderSetData an empty derivative class, so that + InsertionOrderSetData an empty derivative class, so that - issubclass(_SortedSetData, _InsertionOrderSetData) == False + issubclass(SortedSetData, InsertionOrderSetData) == False Constructor Arguments: component The Set object that owns this data. @@ -1652,7 +1656,7 @@ class _OrderedSetData(_OrderedSetMixin, _FiniteSetData): def __init__(self, component): self._values = {} self._ordered_values = [] - _FiniteSetData.__init__(self, component=component) + FiniteSetData.__init__(self, component=component) def _iter_impl(self): """ @@ -1730,7 +1734,12 @@ def ord(self, item): raise ValueError("%s.ord(x): x not in %s" % (self.name, self.name)) -class _InsertionOrderSetData(_OrderedSetData): +class _OrderedSetData(metaclass=RenamedClass): + __renamed__new_class__ = OrderedSetData + __renamed__version__ = '6.7.2.dev0' + + +class InsertionOrderSetData(OrderedSetData): """ This class defines the data for a ordered set where the items are ordered in insertion order (similar to Python's OrderedSet. @@ -1751,7 +1760,7 @@ def set_value(self, val): "This WILL potentially lead to nondeterministic behavior " "in Pyomo" % (type(val).__name__,) ) - super(_InsertionOrderSetData, self).set_value(val) + super(InsertionOrderSetData, self).set_value(val) def update(self, values): if type(values) in Set._UnorderedInitializers: @@ -1761,7 +1770,12 @@ def update(self, values): "This WILL potentially lead to nondeterministic behavior " "in Pyomo" % (type(values).__name__,) ) - super(_InsertionOrderSetData, self).update(values) + super(InsertionOrderSetData, self).update(values) + + +class _InsertionOrderSetData(metaclass=RenamedClass): + __renamed__new_class__ = InsertionOrderSetData + __renamed__version__ = '6.7.2.dev0' class _SortedSetMixin(object): @@ -1776,7 +1790,7 @@ def sorted_iter(self): return iter(self) -class _SortedSetData(_SortedSetMixin, _OrderedSetData): +class SortedSetData(_SortedSetMixin, OrderedSetData): """ This class defines the data for a sorted set. @@ -1791,7 +1805,7 @@ class _SortedSetData(_SortedSetMixin, _OrderedSetData): def __init__(self, component): # An empty set is sorted... 
self._is_sorted = True - _OrderedSetData.__init__(self, component=component) + OrderedSetData.__init__(self, component=component) def _iter_impl(self): """ @@ -1799,12 +1813,12 @@ def _iter_impl(self): """ if not self._is_sorted: self._sort() - return super(_SortedSetData, self)._iter_impl() + return super(SortedSetData, self)._iter_impl() def __reversed__(self): if not self._is_sorted: self._sort() - return super(_SortedSetData, self).__reversed__() + return super(SortedSetData, self).__reversed__() def _add_impl(self, value): # Note that the sorted status has no bearing on insertion, @@ -1818,7 +1832,7 @@ def _add_impl(self, value): # def discard(self, val): def clear(self): - super(_SortedSetData, self).clear() + super(SortedSetData, self).clear() self._is_sorted = True def at(self, index): @@ -1830,7 +1844,7 @@ def at(self, index): """ if not self._is_sorted: self._sort() - return super(_SortedSetData, self).at(index) + return super(SortedSetData, self).at(index) def ord(self, item): """ @@ -1842,7 +1856,7 @@ def ord(self, item): """ if not self._is_sorted: self._sort() - return super(_SortedSetData, self).ord(item) + return super(SortedSetData, self).ord(item) def sorted_data(self): return self.data() @@ -1855,6 +1869,11 @@ def _sort(self): self._is_sorted = True +class _SortedSetData(metaclass=RenamedClass): + __renamed__new_class__ = SortedSetData + __renamed__version__ = '6.7.2.dev0' + + ############################################################################ _SET_API = (('__contains__', 'test membership in'), 'get', 'ranges', 'bounds') @@ -1971,7 +1990,7 @@ class SortedOrder(object): _UnorderedInitializers = {set} @overload - def __new__(cls: Type[Set], *args, **kwds) -> Union[_SetData, IndexedSet]: ... + def __new__(cls: Type[Set], *args, **kwds) -> Union[SetData, IndexedSet]: ... @overload def __new__(cls: Type[OrderedScalarSet], *args, **kwds) -> OrderedScalarSet: ... @@ -1985,7 +2004,7 @@ def __new__(cls, *args, **kwds): # Many things are easier by forcing it to be consistent across # the set (namely, the _ComponentDataClass is constant). # However, it is a bit off that 'ordered' it the only arg NOT - # processed by Initializer. We can mock up a _SortedSetData + # processed by Initializer. We can mock up a SortedSetData # sort function that preserves Insertion Order (lambda x: x), but # the unsorted is harder (it would effectively be insertion # order, but ordered() may not be deterministic based on how the @@ -2030,11 +2049,11 @@ def __new__(cls, *args, **kwds): else: newObj = super(Set, cls).__new__(IndexedSet) if ordered is Set.InsertionOrder: - newObj._ComponentDataClass = _InsertionOrderSetData + newObj._ComponentDataClass = InsertionOrderSetData elif ordered is Set.SortedOrder: - newObj._ComponentDataClass = _SortedSetData + newObj._ComponentDataClass = SortedSetData else: - newObj._ComponentDataClass = _FiniteSetData + newObj._ComponentDataClass = FiniteSetData return newObj @overload @@ -2178,7 +2197,7 @@ def _getitem_when_not_present(self, index): """Returns the default component data value.""" # Because we allow sets within an IndexedSet to have different # dimen, we have moved the tuplization logic from PyomoModel - # into Set (because we cannot know the dimen of a _SetData until + # into Set (because we cannot know the dimen of a SetData until # we are actually constructing that index). This also means # that we need to potentially communicate the dimen to the # (wrapped) value initializer. 
So, we will get the dimen first, @@ -2338,7 +2357,7 @@ def _pprint(self): # else: # return '{' + str(ans)[1:-1] + "}" - # TBD: In the current design, we force all _SetData within an + # TBD: In the current design, we force all SetData within an # indexed Set to have the same isordered value, so we will only # print it once in the header. Is this a good design? try: @@ -2358,7 +2377,7 @@ def _pprint(self): _ordered = "Sorted" else: _ordered = "{user}" - elif issubclass(_refClass, _InsertionOrderSetData): + elif issubclass(_refClass, InsertionOrderSetData): _ordered = "Insertion" return ( [ @@ -2383,14 +2402,14 @@ def data(self): return {k: v.data() for k, v in self.items()} @overload - def __getitem__(self, index) -> _SetData: ... + def __getitem__(self, index) -> SetData: ... __getitem__ = IndexedComponent.__getitem__ # type: ignore -class FiniteScalarSet(_FiniteSetData, Set): +class FiniteScalarSet(FiniteSetData, Set): def __init__(self, **kwds): - _FiniteSetData.__init__(self, component=self) + FiniteSetData.__init__(self, component=self) Set.__init__(self, **kwds) self._index = UnindexedComponent_index @@ -2400,13 +2419,13 @@ class FiniteSimpleSet(metaclass=RenamedClass): __renamed__version__ = '6.0' -class OrderedScalarSet(_ScalarOrderedSetMixin, _InsertionOrderSetData, Set): +class OrderedScalarSet(_ScalarOrderedSetMixin, InsertionOrderSetData, Set): def __init__(self, **kwds): # In case someone inherits from us, we will provide a rational # default for the "ordered" flag kwds.setdefault('ordered', Set.InsertionOrder) - _InsertionOrderSetData.__init__(self, component=self) + InsertionOrderSetData.__init__(self, component=self) Set.__init__(self, **kwds) @@ -2415,13 +2434,13 @@ class OrderedSimpleSet(metaclass=RenamedClass): __renamed__version__ = '6.0' -class SortedScalarSet(_ScalarOrderedSetMixin, _SortedSetData, Set): +class SortedScalarSet(_ScalarOrderedSetMixin, SortedSetData, Set): def __init__(self, **kwds): # In case someone inherits from us, we will provide a rational # default for the "ordered" flag kwds.setdefault('ordered', Set.SortedOrder) - _SortedSetData.__init__(self, component=self) + SortedSetData.__init__(self, component=self) Set.__init__(self, **kwds) self._index = UnindexedComponent_index @@ -2464,14 +2483,14 @@ class AbstractSortedSimpleSet(metaclass=RenamedClass): ############################################################################ -class SetOf(_SetData, Component): +class SetOf(SetData, Component): """""" def __new__(cls, *args, **kwds): if cls is not SetOf: return super(SetOf, cls).__new__(cls) (reference,) = args - if isinstance(reference, (_SetData, GlobalSetBase)): + if isinstance(reference, (SetData, GlobalSetBase)): if reference.isfinite(): if reference.isordered(): return super(SetOf, cls).__new__(OrderedSetOf) @@ -2485,7 +2504,7 @@ def __new__(cls, *args, **kwds): return super(SetOf, cls).__new__(FiniteSetOf) def __init__(self, reference, **kwds): - _SetData.__init__(self, component=self) + SetData.__init__(self, component=self) kwds.setdefault('ctype', SetOf) Component.__init__(self, **kwds) self._ref = reference @@ -2508,7 +2527,7 @@ def construct(self, data=None): @property def dimen(self): - if isinstance(self._ref, _SetData): + if isinstance(self._ref, SetData): return self._ref.dimen _iter = iter(self) try: @@ -2603,7 +2622,7 @@ def ord(self, item): ############################################################################ -class _InfiniteRangeSetData(_SetData): +class InfiniteRangeSetData(SetData): """Data class for a infinite set. 
This Set implements an interface to an *infinite set* defined by one @@ -2615,7 +2634,7 @@ class _InfiniteRangeSetData(_SetData): __slots__ = ('_ranges',) def __init__(self, component): - _SetData.__init__(self, component=component) + SetData.__init__(self, component=component) self._ranges = None def get(self, value, default=None): @@ -2648,8 +2667,13 @@ def ranges(self): return iter(self._ranges) -class _FiniteRangeSetData( - _SortedSetMixin, _OrderedSetMixin, _FiniteSetMixin, _InfiniteRangeSetData +class _InfiniteRangeSetData(metaclass=RenamedClass): + __renamed__new_class__ = InfiniteRangeSetData + __renamed__version__ = '6.7.2.dev0' + + +class FiniteRangeSetData( + _SortedSetMixin, _OrderedSetMixin, _FiniteSetMixin, InfiniteRangeSetData ): __slots__ = () @@ -2672,7 +2696,7 @@ def _iter_impl(self): # iterate over it nIters = len(self._ranges) - 1 if not nIters: - yield from _FiniteRangeSetData._range_gen(self._ranges[0]) + yield from FiniteRangeSetData._range_gen(self._ranges[0]) return # The trick here is that we need to remove any duplicates from @@ -2683,7 +2707,7 @@ def _iter_impl(self): for r in self._ranges: # Note: there should always be at least 1 member in each # NumericRange - i = _FiniteRangeSetData._range_gen(r) + i = FiniteRangeSetData._range_gen(r) iters.append([next(i), i]) iters.sort(reverse=True, key=lambda x: x[0]) @@ -2749,11 +2773,16 @@ def ord(self, item): ) # We must redefine ranges(), bounds(), and domain so that we get the - # _InfiniteRangeSetData version and not the one from + # InfiniteRangeSetData version and not the one from # _FiniteSetMixin. - bounds = _InfiniteRangeSetData.bounds - ranges = _InfiniteRangeSetData.ranges - domain = _InfiniteRangeSetData.domain + bounds = InfiniteRangeSetData.bounds + ranges = InfiniteRangeSetData.ranges + domain = InfiniteRangeSetData.domain + + +class _FiniteRangeSetData(metaclass=RenamedClass): + __renamed__new_class__ = FiniteRangeSetData + __renamed__version__ = '6.7.2.dev0' @ModelComponentFactory.register( @@ -3120,7 +3149,7 @@ def construct(self, data=None): old_ranges.reverse() while old_ranges: r = old_ranges.pop() - for i, val in enumerate(_FiniteRangeSetData._range_gen(r)): + for i, val in enumerate(FiniteRangeSetData._range_gen(r)): if not _filter(_block, val): split_r = r.range_difference((NumericRange(val, val, 0),)) if len(split_r) == 2: @@ -3218,9 +3247,9 @@ def _pprint(self): ) -class InfiniteScalarRangeSet(_InfiniteRangeSetData, RangeSet): +class InfiniteScalarRangeSet(InfiniteRangeSetData, RangeSet): def __init__(self, *args, **kwds): - _InfiniteRangeSetData.__init__(self, component=self) + InfiniteRangeSetData.__init__(self, component=self) RangeSet.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -3233,9 +3262,9 @@ class InfiniteSimpleRangeSet(metaclass=RenamedClass): __renamed__version__ = '6.0' -class FiniteScalarRangeSet(_ScalarOrderedSetMixin, _FiniteRangeSetData, RangeSet): +class FiniteScalarRangeSet(_ScalarOrderedSetMixin, FiniteRangeSetData, RangeSet): def __init__(self, *args, **kwds): - _FiniteRangeSetData.__init__(self, component=self) + FiniteRangeSetData.__init__(self, component=self) RangeSet.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -3273,11 +3302,11 @@ class AbstractFiniteSimpleRangeSet(metaclass=RenamedClass): ############################################################################ -class SetOperator(_SetData, Set): +class SetOperator(SetData, Set): __slots__ = ('_sets',) def __init__(self, *args, **kwds): - 
_SetData.__init__(self, component=self) + SetData.__init__(self, component=self) Set.__init__(self, **kwds) self._sets, _anonymous = zip(*(process_setarg(_set) for _set in args)) _anonymous = tuple(filter(None, _anonymous)) @@ -3461,7 +3490,7 @@ def _domain(self, val): def _checkArgs(*sets): ans = [] for s in sets: - if isinstance(s, _SetDataBase): + if isinstance(s, SetData): ans.append((s.isordered(), s.isfinite())) elif type(s) in {tuple, list}: ans.append((True, True)) @@ -4217,9 +4246,9 @@ def ord(self, item): ############################################################################ -class _AnySet(_SetData, Set): +class _AnySet(SetData, Set): def __init__(self, **kwds): - _SetData.__init__(self, component=self) + SetData.__init__(self, component=self) # There is a chicken-and-egg game here: the SetInitializer uses # Any as part of the processing of the domain/within/bounds # domain restrictions. However, Any has not been declared when @@ -4273,9 +4302,9 @@ def get(self, val, default=None): return super(_AnyWithNoneSet, self).get(val, default) -class _EmptySet(_FiniteSetMixin, _SetData, Set): +class _EmptySet(_FiniteSetMixin, SetData, Set): def __init__(self, **kwds): - _SetData.__init__(self, component=self) + SetData.__init__(self, component=self) Set.__init__(self, **kwds) self.construct() diff --git a/pyomo/core/base/sets.py b/pyomo/core/base/sets.py index ca693cf7d8b..72d49479dd3 100644 --- a/pyomo/core/base/sets.py +++ b/pyomo/core/base/sets.py @@ -18,7 +18,7 @@ set_options, simple_set_rule, _SetDataBase, - _SetData, + SetData, Set, SetOf, IndexedSet, diff --git a/pyomo/core/base/sos.py b/pyomo/core/base/sos.py index 6b8586c9b49..4a8afb05d71 100644 --- a/pyomo/core/base/sos.py +++ b/pyomo/core/base/sos.py @@ -28,7 +28,7 @@ logger = logging.getLogger('pyomo.core') -class _SOSConstraintData(ActiveComponentData): +class SOSConstraintData(ActiveComponentData): """ This class defines the data for a single special ordered set. @@ -101,6 +101,11 @@ def set_items(self, variables, weights): self._weights.append(w) +class _SOSConstraintData(metaclass=RenamedClass): + __renamed__new_class__ = SOSConstraintData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("SOS constraint expressions.") class SOSConstraint(ActiveIndexedComponent): """ @@ -512,10 +517,10 @@ def add(self, index, variables, weights=None): Add a component data for the specified index. 
""" if index is None: - # because ScalarSOSConstraint already makes an _SOSConstraintData instance + # because ScalarSOSConstraint already makes an SOSConstraintData instance soscondata = self else: - soscondata = _SOSConstraintData(self) + soscondata = SOSConstraintData(self) self._data[index] = soscondata soscondata._index = index @@ -549,9 +554,9 @@ def pprint(self, ostream=None, verbose=False, prefix=""): ostream.write("\t\t" + str(weight) + ' : ' + var.name + '\n') -class ScalarSOSConstraint(SOSConstraint, _SOSConstraintData): +class ScalarSOSConstraint(SOSConstraint, SOSConstraintData): def __init__(self, *args, **kwd): - _SOSConstraintData.__init__(self, self) + SOSConstraintData.__init__(self, self) SOSConstraint.__init__(self, *args, **kwd) self._index = UnindexedComponent_index diff --git a/pyomo/core/base/var.py b/pyomo/core/base/var.py index 856a2dc0237..8870fc5b09c 100644 --- a/pyomo/core/base/var.py +++ b/pyomo/core/base/var.py @@ -85,241 +85,11 @@ 'value', 'stale', 'fixed', + ('__call__', "access property 'value' on"), ) -class _VarData(ComponentData, NumericValue): - """This class defines the abstract interface for a single variable. - - Note that this "abstract" class is not intended to be directly - instantiated. - - """ - - __slots__ = () - - # - # Interface - # - - def has_lb(self): - """Returns :const:`False` when the lower bound is - :const:`None` or negative infinity""" - return self.lb is not None - - def has_ub(self): - """Returns :const:`False` when the upper bound is - :const:`None` or positive infinity""" - return self.ub is not None - - # TODO: deprecate this? Properties are generally preferred over "set*()" - def setlb(self, val): - """ - Set the lower bound for this variable after validating that - the value is fixed (or None). - """ - self.lower = val - - # TODO: deprecate this? Properties are generally preferred over "set*()" - def setub(self, val): - """ - Set the upper bound for this variable after validating that - the value is fixed (or None). - """ - self.upper = val - - @property - def bounds(self): - """Returns (or set) the tuple (lower bound, upper bound). - - This returns the current (numeric) values of the lower and upper - bounds as a tuple. 
If there is no bound, returns None (and not - +/-inf) - - """ - return self.lb, self.ub - - @bounds.setter - def bounds(self, val): - self.lower, self.upper = val - - @property - def lb(self): - """Return (or set) the numeric value of the variable lower bound.""" - lb = value(self.lower) - return None if lb == _ninf else lb - - @lb.setter - def lb(self, val): - self.lower = val - - @property - def ub(self): - """Return (or set) the numeric value of the variable upper bound.""" - ub = value(self.upper) - return None if ub == _inf else ub - - @ub.setter - def ub(self, val): - self.upper = val - - def is_integer(self): - """Returns True when the domain is a contiguous integer range.""" - _id = id(self.domain) - if _id in _known_global_real_domains: - return not _known_global_real_domains[_id] - _interval = self.domain.get_interval() - if _interval is None: - return False - # Note: it is not sufficient to just check the step: the - # starting / ending points must be integers (or not specified) - start, stop, step = _interval - return ( - step == 1 - and (start is None or int(start) == start) - and (stop is None or int(stop) == stop) - ) - - def is_binary(self): - """Returns True when the domain is restricted to Binary values.""" - domain = self.domain - if domain is Binary: - return True - if id(domain) in _known_global_real_domains: - return False - return domain.get_interval() == (0, 1, 1) - - def is_continuous(self): - """Returns True when the domain is a continuous real range""" - _id = id(self.domain) - if _id in _known_global_real_domains: - return _known_global_real_domains[_id] - _interval = self.domain.get_interval() - return _interval is not None and _interval[2] == 0 - - def is_fixed(self): - """Returns True if this variable is fixed, otherwise returns False.""" - return self.fixed - - def is_constant(self): - """Returns False because this is not a constant in an expression.""" - return False - - def is_variable_type(self): - """Returns True because this is a variable.""" - return True - - def is_potentially_variable(self): - """Returns True because this is a variable.""" - return True - - def _compute_polynomial_degree(self, result): - """ - If the variable is fixed, it represents a constant - is a polynomial with degree 0. Otherwise, it has - degree 1. This method is used in expressions to - compute polynomial degree. - """ - if self.fixed: - return 0 - return 1 - - def clear(self): - self.value = None - - def __call__(self, exception=True): - """Compute the value of this variable.""" - return self.value - - # - # Abstract Interface - # - - def set_value(self, val, skip_validation=False): - """Set the current variable value.""" - raise NotImplementedError - - @property - def value(self): - """Return (or set) the value for this variable.""" - raise NotImplementedError - - @property - def domain(self): - """Return (or set) the domain for this variable.""" - raise NotImplementedError - - @property - def lower(self): - """Return (or set) an expression for the variable lower bound.""" - raise NotImplementedError - - @property - def upper(self): - """Return (or set) an expression for the variable upper bound.""" - raise NotImplementedError - - @property - def fixed(self): - """Return (or set) the fixed indicator for this variable. - - Alias for :meth:`is_fixed` / :meth:`fix` / :meth:`unfix`. - - """ - raise NotImplementedError - - @property - def stale(self): - """The stale status for this variable. 
- - Variables are "stale" if their current value was not updated as - part of the most recent model update. A "model update" can be - one of several things: a solver invocation, loading a previous - solution, or manually updating a non-stale :class:`Var` value. - - Returns - ------- - bool - - Notes - ----- - Fixed :class:`Var` objects will be stale after invoking a solver - (as their value was not updated by the solver). - - Updating a stale :class:`Var` value will not cause other - variable values to be come stale. However, updating the first - non-stale :class:`Var` value after a solve or solution load - *will* cause all other variables to be marked as stale - - """ - raise NotImplementedError - - def fix(self, value=NOTSET, skip_validation=False): - """Fix the value of this variable (treat as nonvariable) - - This sets the :attr:`fixed` indicator to True. If ``value`` is - provided, the value (and the ``skip_validation`` flag) are first - passed to :meth:`set_value()`. - - """ - self.fixed = True - if value is not NOTSET: - self.set_value(value, skip_validation) - - def unfix(self): - """Unfix this variable (treat as variable in solver interfaces) - - This sets the :attr:`fixed` indicator to False. - - """ - self.fixed = False - - def free(self): - """Alias for :meth:`unfix`""" - return self.unfix() - - -class _GeneralVarData(_VarData): +class VarData(ComponentData, NumericValue): """This class defines the data for a single variable.""" __slots__ = ('_value', '_lb', '_ub', '_domain', '_fixed', '_stale') @@ -329,7 +99,7 @@ def __init__(self, component=None): # # These lines represent in-lining of the # following constructors: - # - _VarData + # - VarData # - ComponentData # - NumericValue self._component = weakref_ref(component) if (component is not None) else None @@ -360,10 +130,6 @@ def copy(cls, src): self._index = src._index return self - # - # Abstract Interface - # - def set_value(self, val, skip_validation=False): """Set the current variable value. @@ -424,14 +190,20 @@ def set_value(self, val, skip_validation=False): @property def value(self): + """Return (or set) the value for this variable.""" return self._value @value.setter def value(self, val): self.set_value(val) + def __call__(self, exception=True): + """Compute the value of this variable.""" + return self._value + @property def domain(self): + """Return (or set) the domain for this variable.""" return self._domain @domain.setter @@ -448,9 +220,42 @@ def domain(self, domain): ) raise - @_VarData.bounds.getter + def has_lb(self): + """Returns :const:`False` when the lower bound is + :const:`None` or negative infinity""" + return self.lb is not None + + def has_ub(self): + """Returns :const:`False` when the upper bound is + :const:`None` or positive infinity""" + return self.ub is not None + + # TODO: deprecate this? Properties are generally preferred over "set*()" + def setlb(self, val): + """ + Set the lower bound for this variable after validating that + the value is fixed (or None). + """ + self.lower = val + + # TODO: deprecate this? Properties are generally preferred over "set*()" + def setub(self, val): + """ + Set the upper bound for this variable after validating that + the value is fixed (or None). + """ + self.upper = val + + @property def bounds(self): - # Custom implementation of _VarData.bounds to avoid unnecessary + """Returns (or set) the tuple (lower bound, upper bound). + + This returns the current (numeric) values of the lower and upper + bounds as a tuple. 
If there is no bound, returns None (and not + +/-inf) + + """ + # Custom implementation of lb / ub to avoid unnecessary # expression generation and duplicate calls to domain.bounds() domain_lb, domain_ub = self.domain.bounds() # lb is the tighter of the domain and bounds @@ -491,10 +296,14 @@ def bounds(self): ub = min(ub, domain_ub) return lb, ub - @_VarData.lb.getter + @bounds.setter + def bounds(self, val): + self.lower, self.upper = val + + @property def lb(self): - # Custom implementation of _VarData.lb to avoid unnecessary - # expression generation + """Return (or set) the numeric value of the variable lower bound.""" + # Note: Implementation avoids unnecessary expression generation domain_lb, domain_ub = self.domain.bounds() # lb is the tighter of the domain and bounds lb = self._lb @@ -516,10 +325,14 @@ def lb(self): lb = max(lb, domain_lb) return lb - @_VarData.ub.getter + @lb.setter + def lb(self, val): + self.lower = val + + @property def ub(self): - # Custom implementation of _VarData.ub to avoid unnecessary - # expression generation + """Return (or set) the numeric value of the variable upper bound.""" + # Note: implementation avoids unnecessary expression generation domain_lb, domain_ub = self.domain.bounds() # ub is the tighter of the domain and bounds ub = self._ub @@ -541,6 +354,10 @@ def ub(self): ub = min(ub, domain_ub) return ub + @ub.setter + def ub(self, val): + self.upper = val + @property def lower(self): """Return (or set) an expression for the variable lower bound. @@ -597,8 +414,37 @@ def get_units(self): # component if not scalar return self.parent_component()._units + def fix(self, value=NOTSET, skip_validation=False): + """Fix the value of this variable (treat as nonvariable) + + This sets the :attr:`fixed` indicator to True. If ``value`` is + provided, the value (and the ``skip_validation`` flag) are first + passed to :meth:`set_value()`. + + """ + self.fixed = True + if value is not NOTSET: + self.set_value(value, skip_validation) + + def unfix(self): + """Unfix this variable (treat as variable in solver interfaces) + + This sets the :attr:`fixed` indicator to False. + + """ + self.fixed = False + + def free(self): + """Alias for :meth:`unfix`""" + return self.unfix() + @property def fixed(self): + """Return (or set) the fixed indicator for this variable. + + Alias for :meth:`is_fixed` / :meth:`fix` / :meth:`unfix`. + + """ return self._fixed @fixed.setter @@ -607,6 +453,28 @@ def fixed(self, val): @property def stale(self): + """The stale status for this variable. + + Variables are "stale" if their current value was not updated as + part of the most recent model update. A "model update" can be + one of several things: a solver invocation, loading a previous + solution, or manually updating a non-stale :class:`Var` value. + + Returns + ------- + bool + + Notes + ----- + Fixed :class:`Var` objects will be stale after invoking a solver + (as their value was not updated by the solver). + + Updating a stale :class:`Var` value will not cause other + variable values to be come stale. 
However, updating the first + non-stale :class:`Var` value after a solve or solution load + *will* cause all other variables to be marked as stale + + """ return StaleFlagManager.is_stale(self._stale) @stale.setter @@ -616,11 +484,70 @@ def stale(self, val): else: self._stale = StaleFlagManager.get_flag(0) - # Note: override the base class definition to avoid a call through a - # property + def is_integer(self): + """Returns True when the domain is a contiguous integer range.""" + _id = id(self.domain) + if _id in _known_global_real_domains: + return not _known_global_real_domains[_id] + _interval = self.domain.get_interval() + if _interval is None: + return False + # Note: it is not sufficient to just check the step: the + # starting / ending points must be integers (or not specified) + start, stop, step = _interval + return ( + step == 1 + and (start is None or int(start) == start) + and (stop is None or int(stop) == stop) + ) + + def is_binary(self): + """Returns True when the domain is restricted to Binary values.""" + domain = self.domain + if domain is Binary: + return True + if id(domain) in _known_global_real_domains: + return False + return domain.get_interval() == (0, 1, 1) + + def is_continuous(self): + """Returns True when the domain is a continuous real range""" + _id = id(self.domain) + if _id in _known_global_real_domains: + return _known_global_real_domains[_id] + _interval = self.domain.get_interval() + return _interval is not None and _interval[2] == 0 + def is_fixed(self): + """Returns True if this variable is fixed, otherwise returns False.""" return self._fixed + def is_constant(self): + """Returns False because this is not a constant in an expression.""" + return False + + def is_variable_type(self): + """Returns True because this is a variable.""" + return True + + def is_potentially_variable(self): + """Returns True because this is a variable.""" + return True + + def clear(self): + self.value = None + + def _compute_polynomial_degree(self, result): + """ + If the variable is fixed, it represents a constant + is a polynomial with degree 0. Otherwise, it has + degree 1. This method is used in expressions to + compute polynomial degree. + """ + if self._fixed: + return 0 + return 1 + def _process_bound(self, val, bound_type): if type(val) in native_numeric_types or val is None: # TODO: warn/error: check if this Var has units: assigning @@ -643,6 +570,16 @@ def _process_bound(self, val, bound_type): return val +class _VarData(metaclass=RenamedClass): + __renamed__new_class__ = VarData + __renamed__version__ = '6.7.2.dev0' + + +class _GeneralVarData(metaclass=RenamedClass): + __renamed__new_class__ = VarData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("Decision variables.") class Var(IndexedComponent, IndexedComponent_NDArrayMixin): """A numeric variable, which may be defined over an index. @@ -668,7 +605,7 @@ class Var(IndexedComponent, IndexedComponent_NDArrayMixin): doc (str, optional): Text describing this component. """ - _ComponentDataClass = _GeneralVarData + _ComponentDataClass = VarData @overload def __new__(cls: Type[Var], *args, **kwargs) -> Union[ScalarVar, IndexedVar]: ... 
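
Editor's note on the `_VarData`/`_GeneralVarData` consolidation shown in the hunks above: a minimal usage sketch, assuming a Pyomo build that includes this rename (6.7.2 or later). New code imports the public `VarData` name; the old underscore names still resolve through the `RenamedClass` shims declared in this file, but are deprecated.

```python
# Minimal sketch, assuming Pyomo >= 6.7.2 with this rename applied.
from pyomo.environ import ConcreteModel, Var
from pyomo.core.base.var import VarData

m = ConcreteModel()
m.x = Var(bounds=(0, 10))

# ScalarVar now derives directly from the public VarData class.
assert isinstance(m.x, VarData)

# The bounds property returns (lb, ub) as plain numbers (None if unbounded).
print(m.x.bounds)            # -> (0, 10)
m.x.fix(3)                   # fix() sets the fixed flag and, optionally, the value
print(m.x.value, m.x.fixed)  # -> 3 True
```
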
@@ -775,7 +712,7 @@ def add(self, index): def construct(self, data=None): """ - Construct the _VarData objects for this variable + Construct the VarData objects for this variable """ if self._constructed: return @@ -834,7 +771,7 @@ def construct(self, data=None): # initializers that are constant, we can avoid # re-calling (and re-validating) the inputs in certain # cases. To support this, we will create the first - # _VarData and then use it as a template to initialize + # VarData and then use it as a template to initialize # (constant portions of) every VarData so as to not # repeat all the domain/bounds validation. try: @@ -952,11 +889,11 @@ def _pprint(self): ) -class ScalarVar(_GeneralVarData, Var): +class ScalarVar(VarData, Var): """A single variable.""" def __init__(self, *args, **kwd): - _GeneralVarData.__init__(self, component=self) + VarData.__init__(self, component=self) Var.__init__(self, *args, **kwd) self._index = UnindexedComponent_index @@ -1003,7 +940,7 @@ def fix(self, value=NOTSET, skip_validation=False): def unfix(self): """Unfix all variables in this :class:`IndexedVar` (treat as variable) - This sets the :attr:`_VarData.fixed` indicator to False for + This sets the :attr:`VarData.fixed` indicator to False for every variable in this :class:`IndexedVar`. """ @@ -1057,7 +994,7 @@ def domain(self, domain): # between potentially variable GetItemExpression objects and # "constant" GetItemExpression objects. That will need to wait for # the expression rework [JDS; Nov 22]. - def __getitem__(self, args) -> _GeneralVarData: + def __getitem__(self, args) -> VarData: try: return super().__getitem__(args) except RuntimeError: diff --git a/pyomo/core/beta/dict_objects.py b/pyomo/core/beta/dict_objects.py index a8298b08e63..eedb3c45bf3 100644 --- a/pyomo/core/beta/dict_objects.py +++ b/pyomo/core/beta/dict_objects.py @@ -14,10 +14,10 @@ from pyomo.common.log import is_debug_set from pyomo.core.base.set_types import Any -from pyomo.core.base.var import IndexedVar, _VarData -from pyomo.core.base.constraint import IndexedConstraint, _ConstraintData -from pyomo.core.base.objective import IndexedObjective, _ObjectiveData -from pyomo.core.base.expression import IndexedExpression, _ExpressionData +from pyomo.core.base.var import IndexedVar, VarData +from pyomo.core.base.constraint import IndexedConstraint, ConstraintData +from pyomo.core.base.objective import IndexedObjective, ObjectiveData +from pyomo.core.base.expression import IndexedExpression, ExpressionData from collections.abc import MutableMapping from collections.abc import Mapping @@ -184,7 +184,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentDict needs to # go last in order to handle any initialization # iterable as an argument - ComponentDict.__init__(self, _VarData, *args, **kwds) + ComponentDict.__init__(self, VarData, *args, **kwds) class ConstraintDict(ComponentDict, IndexedConstraint): @@ -193,7 +193,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentDict needs to # go last in order to handle any initialization # iterable as an argument - ComponentDict.__init__(self, _ConstraintData, *args, **kwds) + ComponentDict.__init__(self, ConstraintData, *args, **kwds) class ObjectiveDict(ComponentDict, IndexedObjective): @@ -202,7 +202,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentDict needs to # go last in order to handle any initialization # iterable as an argument - ComponentDict.__init__(self, _ObjectiveData, *args, **kwds) + ComponentDict.__init__(self, ObjectiveData, 
*args, **kwds) class ExpressionDict(ComponentDict, IndexedExpression): @@ -211,4 +211,4 @@ def __init__(self, *args, **kwds): # Constructor for ComponentDict needs to # go last in order to handle any initialization # iterable as an argument - ComponentDict.__init__(self, _ExpressionData, *args, **kwds) + ComponentDict.__init__(self, ExpressionData, *args, **kwds) diff --git a/pyomo/core/beta/list_objects.py b/pyomo/core/beta/list_objects.py index f53997fed17..005bfc38a1f 100644 --- a/pyomo/core/beta/list_objects.py +++ b/pyomo/core/beta/list_objects.py @@ -14,10 +14,10 @@ from pyomo.common.log import is_debug_set from pyomo.core.base.set_types import Any -from pyomo.core.base.var import IndexedVar, _VarData -from pyomo.core.base.constraint import IndexedConstraint, _ConstraintData -from pyomo.core.base.objective import IndexedObjective, _ObjectiveData -from pyomo.core.base.expression import IndexedExpression, _ExpressionData +from pyomo.core.base.var import IndexedVar, VarData +from pyomo.core.base.constraint import IndexedConstraint, ConstraintData +from pyomo.core.base.objective import IndexedObjective, ObjectiveData +from pyomo.core.base.expression import IndexedExpression, ExpressionData from collections.abc import MutableSequence @@ -232,7 +232,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentList needs to # go last in order to handle any initialization # iterable as an argument - ComponentList.__init__(self, _VarData, *args, **kwds) + ComponentList.__init__(self, VarData, *args, **kwds) class XConstraintList(ComponentList, IndexedConstraint): @@ -241,7 +241,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentList needs to # go last in order to handle any initialization # iterable as an argument - ComponentList.__init__(self, _ConstraintData, *args, **kwds) + ComponentList.__init__(self, ConstraintData, *args, **kwds) class XObjectiveList(ComponentList, IndexedObjective): @@ -250,7 +250,7 @@ def __init__(self, *args, **kwds): # Constructor for ComponentList needs to # go last in order to handle any initialization # iterable as an argument - ComponentList.__init__(self, _ObjectiveData, *args, **kwds) + ComponentList.__init__(self, ObjectiveData, *args, **kwds) class XExpressionList(ComponentList, IndexedExpression): @@ -259,4 +259,4 @@ def __init__(self, *args, **kwds): # Constructor for ComponentList needs to # go last in order to handle any initialization # iterable as an argument - ComponentList.__init__(self, _ExpressionData, *args, **kwds) + ComponentList.__init__(self, ExpressionData, *args, **kwds) diff --git a/pyomo/core/expr/base.py b/pyomo/core/expr/base.py index f506956e478..6e2066afcc5 100644 --- a/pyomo/core/expr/base.py +++ b/pyomo/core/expr/base.py @@ -360,7 +360,7 @@ def size(self): """ return visitor.sizeof_expression(self) - def _apply_operation(self, result): # pragma: no cover + def _apply_operation(self, result): """ Compute the values of this node given the values of its children. 
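
Editor's note: the renames in the preceding files (the set/sos/var data classes and the beta dict/list containers) all rely on the same backward-compatibility device. Below is a minimal, self-contained sketch of that `RenamedClass` pattern; the `PublicData`/`_PrivateData` names are hypothetical stand-ins, not Pyomo classes, and the warning behavior is described as expected rather than guaranteed.

```python
from pyomo.common.deprecation import RenamedClass

class PublicData:
    """New public class name (hypothetical example)."""
    def __init__(self, value=0):
        self.value = value

class _PrivateData(metaclass=RenamedClass):
    # Old underscore name kept as a shim: referencing or instantiating it
    # is expected to redirect to PublicData and emit a deprecation warning.
    __renamed__new_class__ = PublicData
    __renamed__version__ = '6.7.2.dev0'
```
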
diff --git a/pyomo/core/expr/calculus/derivatives.py b/pyomo/core/expr/calculus/derivatives.py index ecfdce02fd4..69fe4969938 100644 --- a/pyomo/core/expr/calculus/derivatives.py +++ b/pyomo/core/expr/calculus/derivatives.py @@ -39,11 +39,11 @@ def differentiate(expr, wrt=None, wrt_list=None, mode=Modes.reverse_numeric): ---------- expr: pyomo.core.expr.numeric_expr.NumericExpression The expression to differentiate - wrt: pyomo.core.base.var._GeneralVarData + wrt: pyomo.core.base.var.VarData If specified, this function will return the derivative with - respect to wrt. wrt is normally a _GeneralVarData, but could - also be a _ParamData. wrt and wrt_list cannot both be specified. - wrt_list: list of pyomo.core.base.var._GeneralVarData + respect to wrt. wrt is normally a VarData, but could + also be a ParamData. wrt and wrt_list cannot both be specified. + wrt_list: list of pyomo.core.base.var.VarData If specified, this function will return the derivative with respect to each element in wrt_list. A list will be returned where the values are the derivatives with respect to the diff --git a/pyomo/core/expr/numeric_expr.py b/pyomo/core/expr/numeric_expr.py index 25d83ca20f4..21896c63219 100644 --- a/pyomo/core/expr/numeric_expr.py +++ b/pyomo/core/expr/numeric_expr.py @@ -722,7 +722,7 @@ def args(self): @deprecated( 'The implicit recasting of a "not potentially variable" ' 'expression node to a potentially variable one is no ' - 'longer supported (this violates that immutability ' + 'longer supported (this violates the immutability ' 'promise for Pyomo5 expression trees).', version='6.4.3', ) @@ -1238,7 +1238,7 @@ class LinearExpression(SumExpression): - not potentially variable (e.g., native types, Params, or NPV expressions) - :py:class:`MonomialTermExpression` - - :py:class:`_VarData` + - :py:class:`VarData` Args: args (tuple): Children nodes @@ -2288,8 +2288,11 @@ def _iadd_mutablenpvsum_mutable(a, b): def _iadd_mutablenpvsum_native(a, b): if not b: return a - a._args_.append(b) - a._nargs += 1 + if a._args_ and a._args_[-1].__class__ in native_numeric_types: + a._args_[-1] += b + else: + a._args_.append(b) + a._nargs += 1 return a @@ -2301,9 +2304,7 @@ def _iadd_mutablenpvsum_npv(a, b): def _iadd_mutablenpvsum_param(a, b): if b.is_constant(): - b = b.value - if not b: - return a + return _iadd_mutablesum_native(a, b.value) a._args_.append(b) a._nargs += 1 return a @@ -2384,8 +2385,11 @@ def _iadd_mutablelinear_mutable(a, b): def _iadd_mutablelinear_native(a, b): if not b: return a - a._args_.append(b) - a._nargs += 1 + if a._args_ and a._args_[-1].__class__ in native_numeric_types: + a._args_[-1] += b + else: + a._args_.append(b) + a._nargs += 1 return a @@ -2397,9 +2401,7 @@ def _iadd_mutablelinear_npv(a, b): def _iadd_mutablelinear_param(a, b): if b.is_constant(): - b = b.value - if not b: - return a + return _iadd_mutablesum_native(a, b.value) a._args_.append(b) a._nargs += 1 return a @@ -2483,8 +2485,11 @@ def _iadd_mutablesum_mutable(a, b): def _iadd_mutablesum_native(a, b): if not b: return a - a._args_.append(b) - a._nargs += 1 + if a._args_ and a._args_[-1].__class__ in native_numeric_types: + a._args_[-1] += b + else: + a._args_.append(b) + a._nargs += 1 return a @@ -2496,9 +2501,7 @@ def _iadd_mutablesum_npv(a, b): def _iadd_mutablesum_param(a, b): if b.is_constant(): - b = b.value - if not b: - return a + return _iadd_mutablesum_native(a, b.value) a._args_.append(b) a._nargs += 1 return a diff --git a/pyomo/core/expr/template_expr.py b/pyomo/core/expr/template_expr.py index 
f65a1f2b9b0..d30046e9d82 100644 --- a/pyomo/core/expr/template_expr.py +++ b/pyomo/core/expr/template_expr.py @@ -19,11 +19,12 @@ from pyomo.core.expr.base import ExpressionBase, ExpressionArgs_Mixin, NPV_Mixin from pyomo.core.expr.logical_expr import BooleanExpression from pyomo.core.expr.numeric_expr import ( + ARG_TYPE, NumericExpression, - SumExpression, Numeric_NPV_Mixin, + SumExpression, + mutable_expression, register_arg_type, - ARG_TYPE, _balanced_parens, ) from pyomo.core.expr.numvalue import ( @@ -116,18 +117,10 @@ def _to_string(self, values, verbose, smap): return "%s[%s]" % (values[0], ','.join(values[1:])) def _resolve_template(self, args): - return args[0].__getitem__(tuple(args[1:])) + return args[0].__getitem__(args[1:]) def _apply_operation(self, result): - args = tuple( - ( - arg - if arg.__class__ in native_types or not arg.is_numeric_type() - else value(arg) - ) - for arg in result[1:] - ) - return result[0].__getitem__(tuple(result[1:])) + return result[0].__getitem__(result[1:]) class Numeric_GetItemExpression(GetItemExpression, NumericExpression): @@ -258,8 +251,8 @@ def nargs(self): return 2 def _apply_operation(self, result): - assert len(result) == 2 - return getattr(result[0], result[1]) + obj, attr = result + return getattr(obj, attr) def _to_string(self, values, verbose, smap): assert len(values) == 2 @@ -273,7 +266,7 @@ def _to_string(self, values, verbose, smap): return "%s.%s" % (values[0], attr) def _resolve_template(self, args): - return getattr(*tuple(args)) + return getattr(*args) class Numeric_GetAttrExpression(GetAttrExpression, NumericExpression): @@ -521,7 +514,15 @@ def _to_string(self, values, verbose, smap): return 'SUM(%s %s)' % (val, iterStr) def _resolve_template(self, args): - return SumExpression(args) + with mutable_expression() as e: + for arg in args: + e += arg + if e.nargs() > 1: + return e + elif not e.nargs(): + return 0 + else: + return e.arg(0) class IndexTemplate(NumericValue): diff --git a/pyomo/core/kernel/objective.py b/pyomo/core/kernel/objective.py index 9aa8e3315ef..ac6f22d07d3 100644 --- a/pyomo/core/kernel/objective.py +++ b/pyomo/core/kernel/objective.py @@ -9,15 +9,12 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ +from pyomo.common.enums import ObjectiveSense, minimize, maximize from pyomo.core.expr.numvalue import as_numeric from pyomo.core.kernel.base import _abstract_readwrite_property from pyomo.core.kernel.container_utils import define_simple_containers from pyomo.core.kernel.expression import IExpression -# Constants used to define the optimization sense -minimize = 1 -maximize = -1 - class IObjective(IExpression): """ @@ -84,14 +81,7 @@ def sense(self): @sense.setter def sense(self, sense): """Set the sense (direction) of this objective.""" - if (sense == minimize) or (sense == maximize): - self._sense = sense - else: - raise ValueError( - "Objective sense must be set to one of: " - "[minimize (%s), maximize (%s)]. 
Invalid " - "value: %s'" % (minimize, maximize, sense) - ) + self._sense = ObjectiveSense(sense) # inserts class definitions for simple _tuple, _list, and diff --git a/pyomo/core/plugins/transform/add_slack_vars.py b/pyomo/core/plugins/transform/add_slack_vars.py index 6b5096d315c..39903384729 100644 --- a/pyomo/core/plugins/transform/add_slack_vars.py +++ b/pyomo/core/plugins/transform/add_slack_vars.py @@ -23,7 +23,6 @@ from pyomo.core.plugins.transform.hierarchy import NonIsomorphicTransformation from pyomo.common.config import ConfigBlock, ConfigValue from pyomo.core.base import ComponentUID -from pyomo.core.base.constraint import _ConstraintData from pyomo.common.deprecation import deprecation_warning @@ -42,7 +41,7 @@ def target_list(x): # [ESJ 07/15/2020] We have to just pass it through because we need the # instance in order to be able to do anything about it... return [x] - elif isinstance(x, (Constraint, _ConstraintData)): + elif getattr(x, 'ctype', None) is Constraint: return [x] elif hasattr(x, '__iter__'): ans = [] @@ -53,7 +52,7 @@ def target_list(x): deprecation_msg = None # same as above... ans.append(i) - elif isinstance(i, (Constraint, _ConstraintData)): + elif getattr(i, 'ctype', None) is Constraint: ans.append(i) else: raise ValueError( diff --git a/pyomo/core/plugins/transform/eliminate_fixed_vars.py b/pyomo/core/plugins/transform/eliminate_fixed_vars.py index 9312035b8c8..934228afd7c 100644 --- a/pyomo/core/plugins/transform/eliminate_fixed_vars.py +++ b/pyomo/core/plugins/transform/eliminate_fixed_vars.py @@ -11,7 +11,7 @@ from pyomo.core.expr import ExpressionBase, as_numeric from pyomo.core import Constraint, Objective, TransformationFactory -from pyomo.core.base.var import Var, _VarData +from pyomo.core.base.var import Var, VarData from pyomo.core.util import sequence from pyomo.core.plugins.transform.hierarchy import IsomorphicTransformation @@ -77,7 +77,7 @@ def _fix_vars(self, expr, model): if isinstance(expr._args[i], ExpressionBase): _args.append(self._fix_vars(expr._args[i], model)) elif ( - isinstance(expr._args[i], Var) or isinstance(expr._args[i], _VarData) + isinstance(expr._args[i], Var) or isinstance(expr._args[i], VarData) ) and expr._args[i].fixed: if expr._args[i].value != 0.0: _args.append(as_numeric(expr._args[i].value)) diff --git a/pyomo/core/plugins/transform/equality_transform.py b/pyomo/core/plugins/transform/equality_transform.py index a1a1b72f146..99291c2227c 100644 --- a/pyomo/core/plugins/transform/equality_transform.py +++ b/pyomo/core/plugins/transform/equality_transform.py @@ -66,7 +66,7 @@ def _create_using(self, model, **kwds): con = equality.__getattribute__(con_name) # - # Get all _ConstraintData objects + # Get all ConstraintData objects # # We need to get the keys ahead of time because we are modifying # con._data on-the-fly. 
@@ -104,7 +104,7 @@ def _create_using(self, model, **kwds): con.add(ub_name, new_expr) # Since we explicitly `continue` for equality constraints, we - # can safely remove the old _ConstraintData object + # can safely remove the old ConstraintData object del con._data[ndx] return equality.create() diff --git a/pyomo/core/plugins/transform/expand_connectors.py b/pyomo/core/plugins/transform/expand_connectors.py index 8c02f3e5698..82ec546e593 100644 --- a/pyomo/core/plugins/transform/expand_connectors.py +++ b/pyomo/core/plugins/transform/expand_connectors.py @@ -25,7 +25,7 @@ Var, SortComponents, ) -from pyomo.core.base.connector import _ConnectorData, ScalarConnector +from pyomo.core.base.connector import ConnectorData, ScalarConnector @TransformationFactory.register( @@ -69,7 +69,7 @@ def _apply_to(self, instance, **kwds): # The set of connectors found in the current constraint found = ComponentSet() - connector_types = set([ScalarConnector, _ConnectorData]) + connector_types = set([ScalarConnector, ConnectorData]) for constraint in instance.component_data_objects( Constraint, sort=SortComponents.deterministic ): diff --git a/pyomo/core/plugins/transform/logical_to_linear.py b/pyomo/core/plugins/transform/logical_to_linear.py index 7aa541a5fdd..da69ca113bd 100644 --- a/pyomo/core/plugins/transform/logical_to_linear.py +++ b/pyomo/core/plugins/transform/logical_to_linear.py @@ -29,7 +29,7 @@ BooleanVarList, SortComponents, ) -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base.boolean_var import _DeprecatedImplicitAssociatedBinaryVariable from pyomo.core.expr.cnf_walker import to_cnf from pyomo.core.expr import ( @@ -100,7 +100,7 @@ def _apply_to(self, model, **kwds): # the GDP will be solved, and it would be wrong to assume that a GDP # will *necessarily* be solved as an algebraic model. The star # example of not doing so being GDPopt.) - if t.ctype is Block or isinstance(t, _BlockData): + if t.ctype is Block or isinstance(t, BlockData): self._transform_block(t, model, new_var_lists, transBlocks) elif t.ctype is LogicalConstraint: if t.is_indexed(): @@ -285,7 +285,7 @@ class CnfToLinearVisitor(StreamBasedExpressionVisitor): """Convert CNF logical constraint to linear constraints. Expected expression node types: AndExpression, OrExpression, NotExpression, - AtLeastExpression, AtMostExpression, ExactlyExpression, _BooleanVarData + AtLeastExpression, AtMostExpression, ExactlyExpression, BooleanVarData """ @@ -372,7 +372,7 @@ def beforeChild(self, node, child, child_idx): if child.is_expression_type(): return True, None - # Only thing left should be _BooleanVarData + # Only thing left should be BooleanVarData # # TODO: After the expr_multiple_dispatch is merged, this should # be switched to using as_numeric. diff --git a/pyomo/core/plugins/transform/model.py b/pyomo/core/plugins/transform/model.py index db8376afd29..7ee268a4292 100644 --- a/pyomo/core/plugins/transform/model.py +++ b/pyomo/core/plugins/transform/model.py @@ -55,8 +55,8 @@ def to_standard_form(self): # N.B. 
Structure hierarchy: # # active_components: {class: {attr_name: object}} - # object -> Constraint: ._data: {ndx: _ConstraintData} - # _ConstraintData: .lower, .body, .upper + # object -> Constraint: ._data: {ndx: ConstraintData} + # ConstraintData: .lower, .body, .upper # # So, altogether, we access a lower bound via # diff --git a/pyomo/core/plugins/transform/radix_linearization.py b/pyomo/core/plugins/transform/radix_linearization.py index c67e556d60c..92270655f31 100644 --- a/pyomo/core/plugins/transform/radix_linearization.py +++ b/pyomo/core/plugins/transform/radix_linearization.py @@ -21,7 +21,7 @@ Block, RangeSet, ) -from pyomo.core.base.var import _VarData +from pyomo.core.base.var import VarData import logging @@ -268,8 +268,8 @@ def _collect_bilinear(self, expr, bilin, quad): self._collect_bilinear(e, bilin, quad) # No need to check denominator, as this is poly_degree==2 return - if not isinstance(expr._numerator[0], _VarData) or not isinstance( - expr._numerator[1], _VarData + if not isinstance(expr._numerator[0], VarData) or not isinstance( + expr._numerator[1], VarData ): raise RuntimeError("Cannot yet handle complex subexpressions") if expr._numerator[0] is expr._numerator[1]: diff --git a/pyomo/core/plugins/transform/scaling.py b/pyomo/core/plugins/transform/scaling.py index ad894b31fde..11d4ac8c493 100644 --- a/pyomo/core/plugins/transform/scaling.py +++ b/pyomo/core/plugins/transform/scaling.py @@ -10,16 +10,7 @@ # ___________________________________________________________________________ from pyomo.common.collections import ComponentMap -from pyomo.core.base import ( - Block, - Var, - Constraint, - Objective, - _ConstraintData, - _ObjectiveData, - Suffix, - value, -) +from pyomo.core.base import Block, Var, Constraint, Objective, Suffix, value from pyomo.core.plugins.transform.hierarchy import Transformation from pyomo.core.base import TransformationFactory from pyomo.core.base.suffix import SuffixFinder @@ -197,7 +188,7 @@ def _apply_to(self, model, rename=True): already_scaled.add(id(c)) # perform the constraint/objective scaling and variable sub scaling_factor = component_scaling_factor_map[c] - if isinstance(c, _ConstraintData): + if c.ctype is Constraint: body = scaling_factor * replace_expressions( expr=c.body, substitution_map=variable_substitution_dict, @@ -226,7 +217,7 @@ def _apply_to(self, model, rename=True): else: c.set_value((lower, body, upper)) - elif isinstance(c, _ObjectiveData): + elif c.ctype is Objective: c.expr = scaling_factor * replace_expressions( expr=c.expr, substitution_map=variable_substitution_dict, diff --git a/pyomo/core/tests/transform/test_add_slacks.py b/pyomo/core/tests/transform/test_add_slacks.py index a74a9b75c4f..b395237b8e4 100644 --- a/pyomo/core/tests/transform/test_add_slacks.py +++ b/pyomo/core/tests/transform/test_add_slacks.py @@ -330,7 +330,7 @@ def test_error_for_non_constraint_noniterable_target(self): self.assertRaisesRegex( ValueError, "Expected Constraint or list of Constraints.\n\tReceived " - "", + "", TransformationFactory('core.add_slack_variables').apply_to, m, targets=m.indexedVar[1], diff --git a/pyomo/core/tests/unit/test_block.py b/pyomo/core/tests/unit/test_block.py index 71e80d90a73..660f65f1944 100644 --- a/pyomo/core/tests/unit/test_block.py +++ b/pyomo/core/tests/unit/test_block.py @@ -54,7 +54,7 @@ from pyomo.core.base.block import ( ScalarBlock, SubclassOf, - _BlockData, + BlockData, declare_custom_block, ) import pyomo.core.expr as EXPR @@ -851,7 +851,7 @@ class DerivedBlock(ScalarBlock): 
_Block_reserved_words = None DerivedBlock._Block_reserved_words = ( - set(['a', 'b', 'c']) | _BlockData._Block_reserved_words + set(['a', 'b', 'c']) | BlockData._Block_reserved_words ) m = ConcreteModel() @@ -965,7 +965,7 @@ def __init__(self, *args, **kwds): b.c.d.e = Block() with self.assertRaisesRegex( ValueError, - r'_BlockData.transfer_attributes_from\(\): ' + r'BlockData.transfer_attributes_from\(\): ' r'Cannot set a sub-block \(c.d.e\) to a parent block \(c\):', ): b.c.d.e.transfer_attributes_from(b.c) @@ -974,7 +974,7 @@ def __init__(self, *args, **kwds): b = Block(concrete=True) with self.assertRaisesRegex( ValueError, - r'_BlockData.transfer_attributes_from\(\): expected a Block ' + r'BlockData.transfer_attributes_from\(\): expected a Block ' 'or dict; received str', ): b.transfer_attributes_from('foo') @@ -2977,7 +2977,7 @@ def test_write_exceptions(self): def test_override_pprint(self): @declare_custom_block('TempBlock') - class TempBlockData(_BlockData): + class TempBlockData(BlockData): def pprint(self, ostream=None, verbose=False, prefix=""): ostream.write('Testing pprint of a custom block.') @@ -3052,9 +3052,9 @@ def test_derived_block_construction(self): class ConcreteBlock(Block): pass - class ScalarConcreteBlock(_BlockData, ConcreteBlock): + class ScalarConcreteBlock(BlockData, ConcreteBlock): def __init__(self, *args, **kwds): - _BlockData.__init__(self, component=self) + BlockData.__init__(self, component=self) ConcreteBlock.__init__(self, *args, **kwds) _buf = [] diff --git a/pyomo/core/tests/unit/test_component.py b/pyomo/core/tests/unit/test_component.py index 175c4c47d46..b12db9af047 100644 --- a/pyomo/core/tests/unit/test_component.py +++ b/pyomo/core/tests/unit/test_component.py @@ -66,19 +66,17 @@ def test_getname(self): ) m.b[2]._component = None - self.assertEqual( - m.b[2].getname(fully_qualified=True), "[Unattached _BlockData]" - ) + self.assertEqual(m.b[2].getname(fully_qualified=True), "[Unattached BlockData]") # I think that getname() should do this: # self.assertEqual(m.b[2].c[2,4].getname(fully_qualified=True), - # "[Unattached _BlockData].c[2,4]") + # "[Unattached BlockData].c[2,4]") # but it doesn't match current behavior. I will file a PEP to # propose changing the behavior later and proceed to test # current behavior. 
self.assertEqual(m.b[2].c[2, 4].getname(fully_qualified=True), "c[2,4]") self.assertEqual( - m.b[2].getname(fully_qualified=False), "[Unattached _BlockData]" + m.b[2].getname(fully_qualified=False), "[Unattached BlockData]" ) self.assertEqual(m.b[2].c[2, 4].getname(fully_qualified=False), "c[2,4]") diff --git a/pyomo/core/tests/unit/test_con.py b/pyomo/core/tests/unit/test_con.py index 6ed19c1bcfd..15f190e281e 100644 --- a/pyomo/core/tests/unit/test_con.py +++ b/pyomo/core/tests/unit/test_con.py @@ -44,7 +44,7 @@ InequalityExpression, RangedExpression, ) -from pyomo.core.base.constraint import _GeneralConstraintData +from pyomo.core.base.constraint import ConstraintData class TestConstraintCreation(unittest.TestCase): @@ -1074,7 +1074,7 @@ def test_setitem(self): m.c[2] = m.x**2 <= 4 self.assertEqual(len(m.c), 1) self.assertEqual(list(m.c.keys()), [2]) - self.assertIsInstance(m.c[2], _GeneralConstraintData) + self.assertIsInstance(m.c[2], ConstraintData) self.assertEqual(m.c[2].upper, 4) m.c[3] = Constraint.Skip @@ -1388,7 +1388,7 @@ def test_empty_singleton(self): # Even though we construct a ScalarConstraint, # if it is not initialized that means it is "empty" # and we should encounter errors when trying to access the - # _ConstraintData interface methods until we assign + # ConstraintData interface methods until we assign # something to the constraint. # self.assertEqual(a._constructed, True) diff --git a/pyomo/core/tests/unit/test_dict_objects.py b/pyomo/core/tests/unit/test_dict_objects.py index 8260f1ae320..ef9f330bfff 100644 --- a/pyomo/core/tests/unit/test_dict_objects.py +++ b/pyomo/core/tests/unit/test_dict_objects.py @@ -17,10 +17,10 @@ ObjectiveDict, ExpressionDict, ) -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.objective import _GeneralObjectiveData -from pyomo.core.base.expression import _GeneralExpressionData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.objective import ObjectiveData +from pyomo.core.base.expression import ExpressionData class _TestComponentDictBase(object): @@ -348,10 +348,10 @@ def test_active(self): class TestVarDict(_TestComponentDictBase, unittest.TestCase): - # Note: the updated _GeneralVarData class only takes an optional + # Note: the updated VarData class only takes an optional # parent argument (you no longer pass the domain in) _ctype = VarDict - _cdatatype = lambda self, arg: _GeneralVarData() + _cdatatype = lambda self, arg: VarData() def setUp(self): _TestComponentDictBase.setUp(self) @@ -360,7 +360,7 @@ def setUp(self): class TestExpressionDict(_TestComponentDictBase, unittest.TestCase): _ctype = ExpressionDict - _cdatatype = _GeneralExpressionData + _cdatatype = ExpressionData def setUp(self): _TestComponentDictBase.setUp(self) @@ -375,7 +375,7 @@ def setUp(self): class TestConstraintDict(_TestActiveComponentDictBase, unittest.TestCase): _ctype = ConstraintDict - _cdatatype = _GeneralConstraintData + _cdatatype = ConstraintData def setUp(self): _TestComponentDictBase.setUp(self) @@ -384,7 +384,7 @@ def setUp(self): class TestObjectiveDict(_TestActiveComponentDictBase, unittest.TestCase): _ctype = ObjectiveDict - _cdatatype = _GeneralObjectiveData + _cdatatype = ObjectiveData def setUp(self): _TestComponentDictBase.setUp(self) diff --git a/pyomo/core/tests/unit/test_expression.py b/pyomo/core/tests/unit/test_expression.py index 678df4c01a8..eb16f7c6142 100644 --- 
a/pyomo/core/tests/unit/test_expression.py +++ b/pyomo/core/tests/unit/test_expression.py @@ -29,7 +29,7 @@ value, sum_product, ) -from pyomo.core.base.expression import _GeneralExpressionData +from pyomo.core.base.expression import ExpressionData from pyomo.core.expr.compare import compare_expressions, assertExpressionsEqual from pyomo.common.tee import capture_output @@ -515,10 +515,10 @@ def test_implicit_definition(self): model.E = Expression(model.idx) self.assertEqual(len(model.E), 3) expr = model.E[1] - self.assertIs(type(expr), _GeneralExpressionData) + self.assertIs(type(expr), ExpressionData) model.E[1] = None self.assertIs(expr, model.E[1]) - self.assertIs(type(expr), _GeneralExpressionData) + self.assertIs(type(expr), ExpressionData) self.assertIs(expr.expr, None) model.E[1] = 5 self.assertIs(expr, model.E[1]) @@ -537,7 +537,7 @@ def test_explicit_skip_definition(self): model.E[1] = None expr = model.E[1] - self.assertIs(type(expr), _GeneralExpressionData) + self.assertIs(type(expr), ExpressionData) self.assertIs(expr.expr, None) model.E[1] = 5 self.assertIs(expr, model.E[1]) diff --git a/pyomo/core/tests/unit/test_indexed_slice.py b/pyomo/core/tests/unit/test_indexed_slice.py index babd3f3c46a..40aaad9fec9 100644 --- a/pyomo/core/tests/unit/test_indexed_slice.py +++ b/pyomo/core/tests/unit/test_indexed_slice.py @@ -17,7 +17,7 @@ import pyomo.common.unittest as unittest from pyomo.environ import Var, Block, ConcreteModel, RangeSet, Set, Any -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base.indexed_component_slice import IndexedComponent_slice from pyomo.core.base.set import normalize_index @@ -64,7 +64,7 @@ def tearDown(self): self.m = None def test_simple_getitem(self): - self.assertIsInstance(self.m.b[1, 4], _BlockData) + self.assertIsInstance(self.m.b[1, 4], BlockData) def test_simple_getslice(self): _slicer = self.m.b[:, 4] diff --git a/pyomo/core/tests/unit/test_list_objects.py b/pyomo/core/tests/unit/test_list_objects.py index 3eb2e279964..671a8429e06 100644 --- a/pyomo/core/tests/unit/test_list_objects.py +++ b/pyomo/core/tests/unit/test_list_objects.py @@ -17,10 +17,10 @@ XObjectiveList, XExpressionList, ) -from pyomo.core.base.var import _GeneralVarData -from pyomo.core.base.constraint import _GeneralConstraintData -from pyomo.core.base.objective import _GeneralObjectiveData -from pyomo.core.base.expression import _GeneralExpressionData +from pyomo.core.base.var import VarData +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.objective import ObjectiveData +from pyomo.core.base.expression import ExpressionData class _TestComponentListBase(object): @@ -365,10 +365,10 @@ def test_active(self): class TestVarList(_TestComponentListBase, unittest.TestCase): - # Note: the updated _GeneralVarData class only takes an optional + # Note: the updated VarData class only takes an optional # parent argument (you no longer pass the domain in) _ctype = XVarList - _cdatatype = lambda self, arg: _GeneralVarData() + _cdatatype = lambda self, arg: VarData() def setUp(self): _TestComponentListBase.setUp(self) @@ -377,7 +377,7 @@ def setUp(self): class TestExpressionList(_TestComponentListBase, unittest.TestCase): _ctype = XExpressionList - _cdatatype = _GeneralExpressionData + _cdatatype = ExpressionData def setUp(self): _TestComponentListBase.setUp(self) @@ -392,7 +392,7 @@ def setUp(self): class TestConstraintList(_TestActiveComponentListBase, unittest.TestCase): _ctype = XConstraintList - _cdatatype 
= _GeneralConstraintData + _cdatatype = ConstraintData def setUp(self): _TestComponentListBase.setUp(self) @@ -401,7 +401,7 @@ def setUp(self): class TestObjectiveList(_TestActiveComponentListBase, unittest.TestCase): _ctype = XObjectiveList - _cdatatype = _GeneralObjectiveData + _cdatatype = ObjectiveData def setUp(self): _TestComponentListBase.setUp(self) diff --git a/pyomo/core/tests/unit/test_numeric_expr.py b/pyomo/core/tests/unit/test_numeric_expr.py index 968b3acb6a4..efb01e6d6ce 100644 --- a/pyomo/core/tests/unit/test_numeric_expr.py +++ b/pyomo/core/tests/unit/test_numeric_expr.py @@ -112,7 +112,7 @@ from pyomo.core.base.label import NumericLabeler from pyomo.core.expr.template_expr import IndexTemplate from pyomo.core.expr import expr_common -from pyomo.core.base.var import _GeneralVarData +from pyomo.core.base.var import VarData from pyomo.repn import generate_standard_repn from pyomo.core.expr.numvalue import NumericValue @@ -294,7 +294,7 @@ def value_check(self, exp, val): class TestExpression_EvaluateVarData(TestExpression_EvaluateNumericValue): def create(self, val, domain): - tmp = _GeneralVarData() + tmp = VarData() tmp.domain = domain tmp.value = val return tmp diff --git a/pyomo/core/tests/unit/test_numeric_expr_dispatcher.py b/pyomo/core/tests/unit/test_numeric_expr_dispatcher.py index 37833d7e8a4..bb7a291e67d 100644 --- a/pyomo/core/tests/unit/test_numeric_expr_dispatcher.py +++ b/pyomo/core/tests/unit/test_numeric_expr_dispatcher.py @@ -6490,11 +6490,11 @@ def test_mutable_nvp_iadd(self): (mutable_npv, self.invalid, NotImplemented), (mutable_npv, self.asbinary, _MutableLinearExpression([10, self.bin])), (mutable_npv, self.zero, _MutableNPVSumExpression([10])), - (mutable_npv, self.one, _MutableNPVSumExpression([10, 1])), + (mutable_npv, self.one, _MutableNPVSumExpression([11])), # 4: - (mutable_npv, self.native, _MutableNPVSumExpression([10, 5])), + (mutable_npv, self.native, _MutableNPVSumExpression([15])), (mutable_npv, self.npv, _MutableNPVSumExpression([10, self.npv])), - (mutable_npv, self.param, _MutableNPVSumExpression([10, 6])), + (mutable_npv, self.param, _MutableNPVSumExpression([16])), ( mutable_npv, self.param_mut, @@ -6534,7 +6534,7 @@ def test_mutable_nvp_iadd(self): _MutableSumExpression([10] + self.mutable_l2.args), ), (mutable_npv, self.param0, _MutableNPVSumExpression([10])), - (mutable_npv, self.param1, _MutableNPVSumExpression([10, 1])), + (mutable_npv, self.param1, _MutableNPVSumExpression([11])), # 20: (mutable_npv, self.mutable_l3, _MutableNPVSumExpression([10, self.npv])), ] diff --git a/pyomo/core/tests/unit/test_numeric_expr_zerofilter.py b/pyomo/core/tests/unit/test_numeric_expr_zerofilter.py index 8e75ccc3feb..19968640a21 100644 --- a/pyomo/core/tests/unit/test_numeric_expr_zerofilter.py +++ b/pyomo/core/tests/unit/test_numeric_expr_zerofilter.py @@ -6020,11 +6020,11 @@ def test_mutable_nvp_iadd(self): (mutable_npv, self.invalid, NotImplemented), (mutable_npv, self.asbinary, _MutableLinearExpression([10, self.bin])), (mutable_npv, self.zero, _MutableNPVSumExpression([10])), - (mutable_npv, self.one, _MutableNPVSumExpression([10, 1])), + (mutable_npv, self.one, _MutableNPVSumExpression([11])), # 4: - (mutable_npv, self.native, _MutableNPVSumExpression([10, 5])), + (mutable_npv, self.native, _MutableNPVSumExpression([15])), (mutable_npv, self.npv, _MutableNPVSumExpression([10, self.npv])), - (mutable_npv, self.param, _MutableNPVSumExpression([10, 6])), + (mutable_npv, self.param, _MutableNPVSumExpression([16])), ( mutable_npv, self.param_mut, 
@@ -6064,7 +6064,7 @@ def test_mutable_nvp_iadd(self): _MutableSumExpression([10] + self.mutable_l2.args), ), (mutable_npv, self.param0, _MutableNPVSumExpression([10])), - (mutable_npv, self.param1, _MutableNPVSumExpression([10, 1])), + (mutable_npv, self.param1, _MutableNPVSumExpression([11])), # 20: (mutable_npv, self.mutable_l3, _MutableNPVSumExpression([10, self.npv])), ] diff --git a/pyomo/core/tests/unit/test_obj.py b/pyomo/core/tests/unit/test_obj.py index 3c8a05f7058..dc2e320e63b 100644 --- a/pyomo/core/tests/unit/test_obj.py +++ b/pyomo/core/tests/unit/test_obj.py @@ -78,7 +78,7 @@ def test_empty_singleton(self): # Even though we construct a ScalarObjective, # if it is not initialized that means it is "empty" # and we should encounter errors when trying to access the - # _ObjectiveData interface methods until we assign + # ObjectiveData interface methods until we assign # something to the objective. # self.assertEqual(a._constructed, True) diff --git a/pyomo/core/tests/unit/test_param.py b/pyomo/core/tests/unit/test_param.py index 9bc0c4b2ad2..f22674b6bf7 100644 --- a/pyomo/core/tests/unit/test_param.py +++ b/pyomo/core/tests/unit/test_param.py @@ -65,8 +65,8 @@ from pyomo.common.errors import PyomoException from pyomo.common.log import LoggingIntercept from pyomo.common.tempfiles import TempfileManager -from pyomo.core.base.param import _ParamData -from pyomo.core.base.set import _SetData +from pyomo.core.base.param import ParamData +from pyomo.core.base.set import SetData from pyomo.core.base.units_container import units, pint_available, UnitsError from io import StringIO @@ -181,7 +181,7 @@ def test_setitem_preexisting(self): idx = sorted(keys)[0] self.assertEqual(value(self.instance.A[idx]), self.data[idx]) if self.instance.A.mutable: - self.assertTrue(isinstance(self.instance.A[idx], _ParamData)) + self.assertTrue(isinstance(self.instance.A[idx], ParamData)) else: self.assertEqual(type(self.instance.A[idx]), float) @@ -190,7 +190,7 @@ def test_setitem_preexisting(self): if not self.instance.A.mutable: self.fail("Expected setitem[%s] to fail for immutable Params" % (idx,)) self.assertEqual(value(self.instance.A[idx]), 4.3) - self.assertTrue(isinstance(self.instance.A[idx], _ParamData)) + self.assertTrue(isinstance(self.instance.A[idx], ParamData)) except TypeError: # immutable Params should raise a TypeError exception if self.instance.A.mutable: @@ -249,7 +249,7 @@ def test_setitem_default_override(self): self.assertEqual(value(self.instance.A[idx]), self.instance.A._default_val) if self.instance.A.mutable: - self.assertIsInstance(self.instance.A[idx], _ParamData) + self.assertIsInstance(self.instance.A[idx], ParamData) else: self.assertEqual( type(self.instance.A[idx]), type(value(self.instance.A._default_val)) @@ -260,7 +260,7 @@ def test_setitem_default_override(self): if not self.instance.A.mutable: self.fail("Expected setitem[%s] to fail for immutable Params" % (idx,)) self.assertEqual(self.instance.A[idx].value, 4.3) - self.assertIsInstance(self.instance.A[idx], _ParamData) + self.assertIsInstance(self.instance.A[idx], ParamData) except TypeError: # immutable Params should raise a TypeError exception if self.instance.A.mutable: @@ -1487,7 +1487,7 @@ def test_domain_set_initializer(self): m.I = Set(initialize=[1, 2, 3]) param_vals = {1: 1, 2: 1, 3: -1} m.p = Param(m.I, initialize=param_vals, domain={-1, 1}) - self.assertIsInstance(m.p.domain, _SetData) + self.assertIsInstance(m.p.domain, SetData) @unittest.skipUnless(pint_available, "units test requires pint module") def 
test_set_value_units(self): diff --git a/pyomo/core/tests/unit/test_piecewise.py b/pyomo/core/tests/unit/test_piecewise.py index af82ef7c06d..7b8e01e6a45 100644 --- a/pyomo/core/tests/unit/test_piecewise.py +++ b/pyomo/core/tests/unit/test_piecewise.py @@ -104,7 +104,7 @@ def test_indexed_with_nonindexed_vars(self): model.con3 = Piecewise(*args, **keywords) # test that nonindexed Piecewise can handle - # _VarData (e.g model.x[1] + # VarData (e.g model.x[1] def test_nonindexed_with_indexed_vars(self): model = ConcreteModel() model.range = Var([1]) diff --git a/pyomo/core/tests/unit/test_reference.py b/pyomo/core/tests/unit/test_reference.py index cfd9b99f945..7370881612f 100644 --- a/pyomo/core/tests/unit/test_reference.py +++ b/pyomo/core/tests/unit/test_reference.py @@ -800,8 +800,8 @@ def test_reference_indexedcomponent_pprint(self): buf.getvalue(), """r : Size=2, Index={1, 2}, ReferenceTo=x Key : Object - 1 : - 2 : + 1 : + 2 : """, ) m.s = Reference(m.x[:, ...], ctype=IndexedComponent) @@ -811,8 +811,8 @@ def test_reference_indexedcomponent_pprint(self): buf.getvalue(), """s : Size=2, Index={1, 2}, ReferenceTo=x[:, ...] Key : Object - 1 : - 2 : + 1 : + 2 : """, ) @@ -1357,8 +1357,8 @@ def test_pprint_nonfinite_sets_ctypeNone(self): 1 IndexedComponent Declarations ref : Size=2, Index=NonNegativeIntegers, ReferenceTo=v Key : Object - 3 : - 5 : + 3 : + 5 : 2 Declarations: v ref """.strip(), diff --git a/pyomo/core/tests/unit/test_set.py b/pyomo/core/tests/unit/test_set.py index 4bbac6ecaa0..f62589a6873 100644 --- a/pyomo/core/tests/unit/test_set.py +++ b/pyomo/core/tests/unit/test_set.py @@ -60,8 +60,8 @@ FiniteSetOf, InfiniteSetOf, RangeSet, - _FiniteRangeSetData, - _InfiniteRangeSetData, + FiniteRangeSetData, + InfiniteRangeSetData, FiniteScalarRangeSet, InfiniteScalarRangeSet, AbstractFiniteScalarRangeSet, @@ -81,10 +81,10 @@ SetProduct_InfiniteSet, SetProduct_FiniteSet, SetProduct_OrderedSet, - _SetData, - _FiniteSetData, - _InsertionOrderSetData, - _SortedSetData, + SetData, + FiniteSetData, + InsertionOrderSetData, + SortedSetData, _FiniteSetMixin, _OrderedSetMixin, SetInitializer, @@ -1285,19 +1285,19 @@ def test_is_functions(self): self.assertTrue(i.isdiscrete()) self.assertTrue(i.isfinite()) self.assertTrue(i.isordered()) - self.assertIsInstance(i, _FiniteRangeSetData) + self.assertIsInstance(i, FiniteRangeSetData) i = RangeSet(1, 3) self.assertTrue(i.isdiscrete()) self.assertTrue(i.isfinite()) self.assertTrue(i.isordered()) - self.assertIsInstance(i, _FiniteRangeSetData) + self.assertIsInstance(i, FiniteRangeSetData) i = RangeSet(1, 3, 0) self.assertFalse(i.isdiscrete()) self.assertFalse(i.isfinite()) self.assertFalse(i.isordered()) - self.assertIsInstance(i, _InfiniteRangeSetData) + self.assertIsInstance(i, InfiniteRangeSetData) def test_pprint(self): m = ConcreteModel() @@ -4137,9 +4137,9 @@ def test_indexed_set(self): self.assertFalse(m.I[1].isordered()) self.assertFalse(m.I[2].isordered()) self.assertFalse(m.I[3].isordered()) - self.assertIs(type(m.I[1]), _FiniteSetData) - self.assertIs(type(m.I[2]), _FiniteSetData) - self.assertIs(type(m.I[3]), _FiniteSetData) + self.assertIs(type(m.I[1]), FiniteSetData) + self.assertIs(type(m.I[2]), FiniteSetData) + self.assertIs(type(m.I[3]), FiniteSetData) self.assertEqual(m.I.data(), {1: (1,), 2: (2,), 3: (4,)}) # Explicit (constant) construction @@ -4155,9 +4155,9 @@ def test_indexed_set(self): self.assertTrue(m.I[1].isordered()) self.assertTrue(m.I[2].isordered()) self.assertTrue(m.I[3].isordered()) - self.assertIs(type(m.I[1]), 
_InsertionOrderSetData) - self.assertIs(type(m.I[2]), _InsertionOrderSetData) - self.assertIs(type(m.I[3]), _InsertionOrderSetData) + self.assertIs(type(m.I[1]), InsertionOrderSetData) + self.assertIs(type(m.I[2]), InsertionOrderSetData) + self.assertIs(type(m.I[3]), InsertionOrderSetData) self.assertEqual(m.I.data(), {1: (4, 2, 5), 2: (4, 2, 5), 3: (4, 2, 5)}) # Explicit (constant) construction @@ -4173,9 +4173,9 @@ def test_indexed_set(self): self.assertTrue(m.I[1].isordered()) self.assertTrue(m.I[2].isordered()) self.assertTrue(m.I[3].isordered()) - self.assertIs(type(m.I[1]), _SortedSetData) - self.assertIs(type(m.I[2]), _SortedSetData) - self.assertIs(type(m.I[3]), _SortedSetData) + self.assertIs(type(m.I[1]), SortedSetData) + self.assertIs(type(m.I[2]), SortedSetData) + self.assertIs(type(m.I[3]), SortedSetData) self.assertEqual(m.I.data(), {1: (2, 4, 5), 2: (2, 4, 5), 3: (2, 4, 5)}) # Explicit (procedural) construction @@ -4300,7 +4300,7 @@ def _l_tri(model, i, j): # This tests a filter that matches the dimentionality of the # component. construct() needs to recognize that the filter is # returning a constant in construct() and re-assign it to be the - # _filter for each _SetData + # _filter for each SetData def _lt_3(model, i): self.assertIs(model, m) return i < 3 @@ -5297,15 +5297,15 @@ def test_no_normalize_index(self): class TestAbstractSetAPI(unittest.TestCase): - def test_SetData(self): + def testSetData(self): # This tests an anstract non-finite set API m = ConcreteModel() m.I = Set(initialize=[1]) - s = _SetData(m.I) + s = SetData(m.I) # - # _SetData API + # SetData API # with self.assertRaises(DeveloperError): @@ -5395,7 +5395,7 @@ def test_SetData(self): def test_FiniteMixin(self): # This tests an anstract finite set API - class FiniteMixin(_FiniteSetMixin, _SetData): + class FiniteMixin(_FiniteSetMixin, SetData): pass m = ConcreteModel() @@ -5403,7 +5403,7 @@ class FiniteMixin(_FiniteSetMixin, _SetData): s = FiniteMixin(m.I) # - # _SetData API + # SetData API # with self.assertRaises(DeveloperError): @@ -5520,7 +5520,7 @@ class FiniteMixin(_FiniteSetMixin, _SetData): def test_OrderedMixin(self): # This tests an anstract ordered set API - class OrderedMixin(_OrderedSetMixin, _FiniteSetMixin, _SetData): + class OrderedMixin(_OrderedSetMixin, _FiniteSetMixin, SetData): pass m = ConcreteModel() @@ -5528,7 +5528,7 @@ class OrderedMixin(_OrderedSetMixin, _FiniteSetMixin, _SetData): s = OrderedMixin(m.I) # - # _SetData API + # SetData API # with self.assertRaises(DeveloperError): diff --git a/pyomo/core/tests/unit/test_template_expr.py b/pyomo/core/tests/unit/test_template_expr.py index 4f255e3567a..80f5d90b60e 100644 --- a/pyomo/core/tests/unit/test_template_expr.py +++ b/pyomo/core/tests/unit/test_template_expr.py @@ -127,7 +127,7 @@ def test_template_scalar_with_set(self): # Note that structural expressions do not implement polynomial_degree with self.assertRaisesRegex( AttributeError, - "'_InsertionOrderSetData' object has " "no attribute 'polynomial_degree'", + "'InsertionOrderSetData' object has " "no attribute 'polynomial_degree'", ): e.polynomial_degree() self.assertEqual(str(e), "s[{I}]") @@ -490,14 +490,14 @@ def c(m): self.assertEqual( str(resolve_template(template)), 'x[1,1,10] + ' - '(x[2,1,10] + x[2,1,20]) + ' - '(x[3,1,10] + x[3,1,20] + x[3,1,30]) + ' - '(x[1,2,10]) + ' - '(x[2,2,10] + x[2,2,20]) + ' - '(x[3,2,10] + x[3,2,20] + x[3,2,30]) + ' - '(x[1,3,10]) + ' - '(x[2,3,10] + x[2,3,20]) + ' - '(x[3,3,10] + x[3,3,20] + x[3,3,30]) <= 0', + 'x[2,1,10] + x[2,1,20] + 
' + 'x[3,1,10] + x[3,1,20] + x[3,1,30] + ' + 'x[1,2,10] + ' + 'x[2,2,10] + x[2,2,20] + ' + 'x[3,2,10] + x[3,2,20] + x[3,2,30] + ' + 'x[1,3,10] + ' + 'x[2,3,10] + x[2,3,20] + ' + 'x[3,3,10] + x[3,3,20] + x[3,3,30] <= 0', ) def test_multidim_nested_sum_rule(self): @@ -566,14 +566,14 @@ def c(m): self.assertEqual( str(resolve_template(template)), 'x[1,1,10] + ' - '(x[2,1,10] + x[2,1,20]) + ' - '(x[3,1,10] + x[3,1,20] + x[3,1,30]) + ' - '(x[1,2,10]) + ' - '(x[2,2,10] + x[2,2,20]) + ' - '(x[3,2,10] + x[3,2,20] + x[3,2,30]) + ' - '(x[1,3,10]) + ' - '(x[2,3,10] + x[2,3,20]) + ' - '(x[3,3,10] + x[3,3,20] + x[3,3,30]) <= 0', + 'x[2,1,10] + x[2,1,20] + ' + 'x[3,1,10] + x[3,1,20] + x[3,1,30] + ' + 'x[1,2,10] + ' + 'x[2,2,10] + x[2,2,20] + ' + 'x[3,2,10] + x[3,2,20] + x[3,2,30] + ' + 'x[1,3,10] + ' + 'x[2,3,10] + x[2,3,20] + ' + 'x[3,3,10] + x[3,3,20] + x[3,3,30] <= 0', ) def test_multidim_nested_getattr_sum_rule(self): @@ -609,14 +609,14 @@ def c(m): self.assertEqual( str(resolve_template(template)), 'x[1,1,10] + ' - '(x[2,1,10] + x[2,1,20]) + ' - '(x[3,1,10] + x[3,1,20] + x[3,1,30]) + ' - '(x[1,2,10]) + ' - '(x[2,2,10] + x[2,2,20]) + ' - '(x[3,2,10] + x[3,2,20] + x[3,2,30]) + ' - '(x[1,3,10]) + ' - '(x[2,3,10] + x[2,3,20]) + ' - '(x[3,3,10] + x[3,3,20] + x[3,3,30]) <= 0', + 'x[2,1,10] + x[2,1,20] + ' + 'x[3,1,10] + x[3,1,20] + x[3,1,30] + ' + 'x[1,2,10] + ' + 'x[2,2,10] + x[2,2,20] + ' + 'x[3,2,10] + x[3,2,20] + x[3,2,30] + ' + 'x[1,3,10] + ' + 'x[2,3,10] + x[2,3,20] + ' + 'x[3,3,10] + x[3,3,20] + x[3,3,30] <= 0', ) def test_eval_getattr(self): diff --git a/pyomo/core/tests/unit/test_var_set_bounds.py b/pyomo/core/tests/unit/test_var_set_bounds.py index bae89556ce3..1686ba4f1c6 100644 --- a/pyomo/core/tests/unit/test_var_set_bounds.py +++ b/pyomo/core/tests/unit/test_var_set_bounds.py @@ -36,7 +36,7 @@ # GAH: These tests been temporarily disabled. It is no longer the job of Var # to validate its domain at the time of construction. It only needs to # ensure that whatever object is passed as its domain is suitable for -# interacting with the _VarData interface (e.g., has a bounds method) +# interacting with the VarData interface (e.g., has a bounds method) # The plan is to start adding functionality to the solver interfaces # that will support custom domains. 
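The test_set.py hunks above track the renaming of the set data classes to their new public names (SetData, FiniteSetData, InsertionOrderSetData, SortedSetData, FiniteRangeSetData, InfiniteRangeSetData). A minimal sketch of what user-facing code looks like against a Pyomo build that includes this patch; the model and component names here are illustrative only and not part of the patch:

from pyomo.environ import ConcreteModel, RangeSet, Set
from pyomo.core.base.set import (
    FiniteRangeSetData,
    FiniteSetData,
    InsertionOrderSetData,
)

m = ConcreteModel()
m.I = Set(initialize=[4, 2, 5])  # default ordering is insertion order
m.R = RangeSet(1, 3)

assert isinstance(m.I, InsertionOrderSetData)
assert isinstance(m.I, FiniteSetData)  # InsertionOrderSetData derives from FiniteSetData
assert isinstance(m.R, FiniteRangeSetData)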
diff --git a/pyomo/core/tests/unit/test_visitor.py b/pyomo/core/tests/unit/test_visitor.py index 12fb98d1d19..ac61a3a24c7 100644 --- a/pyomo/core/tests/unit/test_visitor.py +++ b/pyomo/core/tests/unit/test_visitor.py @@ -72,7 +72,7 @@ RECURSION_LIMIT, get_stack_depth, ) -from pyomo.core.base.param import _ParamData, ScalarParam +from pyomo.core.base.param import ParamData, ScalarParam from pyomo.core.expr.template_expr import IndexTemplate from pyomo.common.collections import ComponentSet from pyomo.common.errors import TemplateExpressionError @@ -685,7 +685,7 @@ def __init__(self, model): self.model = model def visiting_potential_leaf(self, node): - if node.__class__ in (_ParamData, ScalarParam): + if node.__class__ in (ParamData, ScalarParam): if id(node) in self.substitute: return True, self.substitute[id(node)] self.substitute[id(node)] = 2 * self.model.w.add() diff --git a/pyomo/core/util.py b/pyomo/core/util.py index f337b487cef..4b6cc8f3320 100644 --- a/pyomo/core/util.py +++ b/pyomo/core/util.py @@ -18,7 +18,7 @@ from pyomo.core.expr.numeric_expr import mutable_expression, NPV_SumExpression from pyomo.core.base.var import Var from pyomo.core.base.expression import Expression -from pyomo.core.base.component import _ComponentBase +from pyomo.core.base.component import ComponentBase import logging logger = logging.getLogger(__name__) @@ -238,12 +238,12 @@ def sequence(*args): def target_list(x): - if isinstance(x, _ComponentBase): + if isinstance(x, ComponentBase): return [x] elif hasattr(x, '__iter__'): ans = [] for i in x: - if isinstance(i, _ComponentBase): + if isinstance(i, ComponentBase): ans.append(i) else: raise ValueError( diff --git a/pyomo/dae/flatten.py b/pyomo/dae/flatten.py index febaf7c10c9..3d90cc443c1 100644 --- a/pyomo/dae/flatten.py +++ b/pyomo/dae/flatten.py @@ -259,7 +259,7 @@ def generate_sliced_components( Parameters ---------- - b: _BlockData + b: BlockData Block whose components will be sliced index_stack: list @@ -267,7 +267,7 @@ def generate_sliced_components( component, that have been sliced. This is necessary to return the sets that have been sliced. - slice_: IndexedComponent_slice or _BlockData + slice_: IndexedComponent_slice or BlockData Slice generated so far. This function will yield extensions to this slice at the current level of the block hierarchy. 
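The generate_sliced_components docstring above takes a BlockData and an IndexedComponent_slice. A small illustrative sketch (assuming a build with this patch; the block layout is invented for illustration) of the distinction between indexing a Block down to a single BlockData and creating a slice:

from pyomo.environ import Block, ConcreteModel, Var
from pyomo.core.base.block import BlockData
from pyomo.core.base.indexed_component_slice import IndexedComponent_slice

m = ConcreteModel()

def b_rule(b, i, j):
    b.x = Var()

m.b = Block([1, 2], [4, 5], rule=b_rule)

assert isinstance(m.b[1, 4], BlockData)               # one concrete data object
assert isinstance(m.b[:, 4], IndexedComponent_slice)  # a slice over the first index set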
@@ -443,7 +443,7 @@ def flatten_components_along_sets(m, sets, ctype, indices=None, active=None): Parameters ---------- - m: _BlockData + m: BlockData Block whose components (and their sub-components) will be partitioned @@ -546,7 +546,7 @@ def flatten_dae_components(model, time, ctype, indices=None, active=None): Parameters ---------- - model: _BlockData + model: BlockData Block whose components are partitioned time: Set diff --git a/pyomo/dae/integral.py b/pyomo/dae/integral.py index 41114296a93..8c9512d98dd 100644 --- a/pyomo/dae/integral.py +++ b/pyomo/dae/integral.py @@ -14,7 +14,7 @@ from pyomo.core.base.indexed_component import rule_wrapper from pyomo.core.base.expression import ( Expression, - _GeneralExpressionData, + ExpressionData, ScalarExpression, IndexedExpression, ) @@ -151,7 +151,7 @@ class ScalarIntegral(ScalarExpression, Integral): """ def __init__(self, *args, **kwds): - _GeneralExpressionData.__init__(self, None, component=self) + ExpressionData.__init__(self, None, component=self) Integral.__init__(self, *args, **kwds) def clear(self): diff --git a/pyomo/dae/misc.py b/pyomo/dae/misc.py index 3e09a055577..dcb73f60c9e 100644 --- a/pyomo/dae/misc.py +++ b/pyomo/dae/misc.py @@ -263,7 +263,7 @@ def _update_var(v): # Note: This is not required it is handled by the _default method on # Var (which is now a IndexedComponent). However, it # would be much slower to rely on that method to generate new - # _VarData for a large number of new indices. + # VarData for a large number of new indices. new_indices = set(v.index_set()) - set(v._data.keys()) for index in new_indices: v.add(index) diff --git a/pyomo/gdp/__init__.py b/pyomo/gdp/__init__.py index a18bc03084a..d204369cdba 100644 --- a/pyomo/gdp/__init__.py +++ b/pyomo/gdp/__init__.py @@ -9,7 +9,13 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ -from pyomo.gdp.disjunct import GDP_Error, Disjunct, Disjunction +from pyomo.gdp.disjunct import ( + GDP_Error, + Disjunct, + DisjunctData, + Disjunction, + DisjunctionData, +) # Do not import these files: importing them registers the transformation # plugins with the pyomo script so that they get automatically invoked. 
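With the pyomo/gdp/__init__.py change above, the renamed GDP data classes are re-exported from pyomo.gdp, and (as the disjunct.py diff below shows) the old underscored names are kept as RenamedClass aliases deprecated as of 6.7.2.dev0. A minimal post-patch usage sketch; the model contents are illustrative only:

from pyomo.environ import ConcreteModel, Constraint, Var
from pyomo.gdp import Disjunct, DisjunctData, Disjunction, DisjunctionData

m = ConcreteModel()
m.x = Var(bounds=(0, 10))
m.d = Disjunct([1, 2])
m.d[1].c = Constraint(expr=m.x <= 2)
m.d[2].c = Constraint(expr=m.x >= 8)
m.disjunction = Disjunction(expr=[m.d[1], m.d[2]])

assert isinstance(m.d[1], DisjunctData)
assert isinstance(m.disjunction, DisjunctionData)

# The old private name still imports, but only as a deprecated alias:
from pyomo.gdp.disjunct import _DisjunctData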
diff --git a/pyomo/gdp/disjunct.py b/pyomo/gdp/disjunct.py index d6e5fcfec57..658ead27783 100644 --- a/pyomo/gdp/disjunct.py +++ b/pyomo/gdp/disjunct.py @@ -41,7 +41,7 @@ ComponentData, ) from pyomo.core.base.global_set import UnindexedComponent_index -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base.misc import apply_indexed_rule from pyomo.core.base.indexed_component import ActiveIndexedComponent from pyomo.core.expr.expr_common import ExpressionType @@ -412,7 +412,7 @@ def process(arg): return (_Initializer.deferred_value, arg) -class _DisjunctData(_BlockData): +class DisjunctData(BlockData): __autoslot_mappers__ = {'_transformation_block': AutoSlots.weakref_mapper} _Block_reserved_words = set() @@ -424,7 +424,7 @@ def transformation_block(self): ) def __init__(self, component): - _BlockData.__init__(self, component) + BlockData.__init__(self, component) with self._declare_reserved_components(): self.indicator_var = AutoLinkedBooleanVar() self.binary_indicator_var = AutoLinkedBinaryVar(self.indicator_var) @@ -434,23 +434,28 @@ def __init__(self, component): self._transformation_block = None def activate(self): - super(_DisjunctData, self).activate() + super(DisjunctData, self).activate() self.indicator_var.unfix() def deactivate(self): - super(_DisjunctData, self).deactivate() + super(DisjunctData, self).deactivate() self.indicator_var.fix(False) def _deactivate_without_fixing_indicator(self): - super(_DisjunctData, self).deactivate() + super(DisjunctData, self).deactivate() def _activate_without_unfixing_indicator(self): - super(_DisjunctData, self).activate() + super(DisjunctData, self).activate() + + +class _DisjunctData(metaclass=RenamedClass): + __renamed__new_class__ = DisjunctData + __renamed__version__ = '6.7.2.dev0' @ModelComponentFactory.register("Disjunctive blocks.") class Disjunct(Block): - _ComponentDataClass = _DisjunctData + _ComponentDataClass = DisjunctData def __new__(cls, *args, **kwds): if cls != Disjunct: @@ -475,7 +480,7 @@ def __init__(self, *args, **kwargs): # def _deactivate_without_fixing_indicator(self): # # Ideally, this would be a super call from this class. However, # # doing that would trigger a call to deactivate() on all the - # # _DisjunctData objects (exactly what we want to avoid!) + # # DisjunctData objects (exactly what we want to avoid!) # # # # For the time being, we will do something bad and directly call # # the base class method from where we would otherwise want to @@ -484,7 +489,7 @@ def __init__(self, *args, **kwargs): def _activate_without_unfixing_indicator(self): # Ideally, this would be a super call from this class. However, # doing that would trigger a call to deactivate() on all the - # _DisjunctData objects (exactly what we want to avoid!) + # DisjunctData objects (exactly what we want to avoid!) # # For the time being, we will do something bad and directly call # the base class method from where we would otherwise want to @@ -495,15 +500,15 @@ def _activate_without_unfixing_indicator(self): component_data._activate_without_unfixing_indicator() -class ScalarDisjunct(_DisjunctData, Disjunct): +class ScalarDisjunct(DisjunctData, Disjunct): def __init__(self, *args, **kwds): ## FIXME: This is a HACK to get around a chicken-and-egg issue - ## where _BlockData creates the indicator_var *before* + ## where BlockData creates the indicator_var *before* ## Block.__init__ declares the _defer_construction flag. 
self._defer_construction = True self._suppress_ctypes = set() - _DisjunctData.__init__(self, self) + DisjunctData.__init__(self, self) Disjunct.__init__(self, *args, **kwds) self._data[None] = self self._index = UnindexedComponent_index @@ -524,10 +529,10 @@ def active(self): return any(d.active for d in self._data.values()) -_DisjunctData._Block_reserved_words = set(dir(Disjunct())) +DisjunctData._Block_reserved_words = set(dir(Disjunct())) -class _DisjunctionData(ActiveComponentData): +class DisjunctionData(ActiveComponentData): __slots__ = ('disjuncts', 'xor', '_algebraic_constraint', '_transformation_map') __autoslot_mappers__ = {'_algebraic_constraint': AutoSlots.weakref_mapper} _NoArgument = (0,) @@ -542,7 +547,7 @@ def __init__(self, component=None): # # These lines represent in-lining of the # following constructors: - # - _ConstraintData, + # - ConstraintData, # - ActiveComponentData # - ComponentData self._component = weakref_ref(component) if (component is not None) else None @@ -620,9 +625,14 @@ def set_value(self, expr): self.disjuncts.append(disjunct) +class _DisjunctionData(metaclass=RenamedClass): + __renamed__new_class__ = DisjunctionData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("Disjunction expressions.") class Disjunction(ActiveIndexedComponent): - _ComponentDataClass = _DisjunctionData + _ComponentDataClass = DisjunctionData def __new__(cls, *args, **kwds): if cls != Disjunction: @@ -763,9 +773,9 @@ def _pprint(self): ) -class ScalarDisjunction(_DisjunctionData, Disjunction): +class ScalarDisjunction(DisjunctionData, Disjunction): def __init__(self, *args, **kwds): - _DisjunctionData.__init__(self, component=self) + DisjunctionData.__init__(self, component=self) Disjunction.__init__(self, *args, **kwds) self._index = UnindexedComponent_index @@ -776,7 +786,7 @@ def __init__(self, *args, **kwds): # currently in place). So during initialization only, we will # treat them as "indexed" objects where things like # Constraint.Skip are managed. But after that they will behave - # like _DisjunctionData objects where set_value does not handle + # like DisjunctionData objects where set_value does not handle # Disjunction.Skip but expects a valid expression or None. 
# diff --git a/pyomo/gdp/plugins/bigm.py b/pyomo/gdp/plugins/bigm.py index 3f450dbbd4f..d715d913db8 100644 --- a/pyomo/gdp/plugins/bigm.py +++ b/pyomo/gdp/plugins/bigm.py @@ -213,21 +213,15 @@ def _apply_to_impl(self, instance, **kwds): bigM = self._config.bigM for t in preprocessed_targets: if t.ctype is Disjunction: - self._transform_disjunctionData( - t, - t.index(), - bigM, - parent_disjunct=gdp_tree.parent(t), - root_disjunct=gdp_tree.root_disjunct(t), - ) + self._transform_disjunctionData(t, t.index(), bigM, gdp_tree) # issue warnings about anything that was in the bigM args dict that we # didn't use _warn_for_unused_bigM_args(bigM, self.used_args, logger) - def _transform_disjunctionData( - self, obj, index, bigM, parent_disjunct=None, root_disjunct=None - ): + def _transform_disjunctionData(self, obj, index, bigM, gdp_tree): + parent_disjunct = gdp_tree.parent(obj) + root_disjunct = gdp_tree.root_disjunct(obj) (transBlock, xorConstraint) = self._setup_transform_disjunctionData( obj, root_disjunct ) @@ -236,7 +230,7 @@ def _transform_disjunctionData( or_expr = 0 for disjunct in obj.disjuncts: or_expr += disjunct.binary_indicator_var - self._transform_disjunct(disjunct, bigM, transBlock) + self._transform_disjunct(disjunct, bigM, transBlock, gdp_tree) if obj.xor: xorConstraint[index] = or_expr == 1 @@ -249,7 +243,7 @@ def _transform_disjunctionData( # and deactivate for the writers obj.deactivate() - def _transform_disjunct(self, obj, bigM, transBlock): + def _transform_disjunct(self, obj, bigM, transBlock, gdp_tree): # We're not using the preprocessed list here, so this could be # inactive. We've already done the error checking in preprocessing, so # we just skip it here. @@ -261,6 +255,12 @@ def _transform_disjunct(self, obj, bigM, transBlock): relaxationBlock = self._get_disjunct_transformation_block(obj, transBlock) + indicator_expression = 0 + node = obj + while node is not None: + indicator_expression += 1 - node.binary_indicator_var + node = gdp_tree.parent_disjunct(node) + # This is crazy, but if the disjunction has been previously # relaxed, the disjunct *could* be deactivated. This is a big # deal for Hull, as it uses the component_objects / @@ -270,13 +270,21 @@ def _transform_disjunct(self, obj, bigM, transBlock): # comparing the two relaxations. # # Transform each component within this disjunct - self._transform_block_components(obj, obj, bigM, arg_list, suffix_list) + self._transform_block_components( + obj, obj, bigM, arg_list, suffix_list, indicator_expression + ) # deactivate disjunct to keep the writers happy obj._deactivate_without_fixing_indicator() def _transform_constraint( - self, obj, disjunct, bigMargs, arg_list, disjunct_suffix_list + self, + obj, + disjunct, + bigMargs, + arg_list, + disjunct_suffix_list, + indicator_expression, ): # add constraint to the transformation block, we'll transform it there. 
transBlock = disjunct._transformation_block() @@ -348,7 +356,13 @@ def _transform_constraint( bigm_src[c] = (lower, upper) self._add_constraint_expressions( - c, i, M, disjunct.binary_indicator_var, newConstraint, constraint_map + c, + i, + M, + disjunct.binary_indicator_var, + newConstraint, + constraint_map, + indicator_expression=indicator_expression, ) # deactivate because we relaxed diff --git a/pyomo/gdp/plugins/bigm_mixin.py b/pyomo/gdp/plugins/bigm_mixin.py index 510b36b5102..1c3fcb2c64a 100644 --- a/pyomo/gdp/plugins/bigm_mixin.py +++ b/pyomo/gdp/plugins/bigm_mixin.py @@ -232,7 +232,14 @@ def _estimate_M(self, expr, constraint): return tuple(M) def _add_constraint_expressions( - self, c, i, M, indicator_var, newConstraint, constraint_map + self, + c, + i, + M, + indicator_var, + newConstraint, + constraint_map, + indicator_expression=None, ): # Since we are both combining components from multiple blocks and using # local names, we need to make sure that the first index for @@ -244,6 +251,8 @@ def _add_constraint_expressions( # over the constraint indices, but I don't think it matters a lot.) unique = len(newConstraint) name = c.local_name + "_%s" % unique + if indicator_expression is None: + indicator_expression = 1 - indicator_var if c.lower is not None: if M[0] is None: @@ -251,7 +260,7 @@ def _add_constraint_expressions( "Cannot relax disjunctive constraint '%s' " "because M is not defined." % name ) - M_expr = M[0] * (1 - indicator_var) + M_expr = M[0] * indicator_expression newConstraint.add((name, i, 'lb'), c.lower <= c.body - M_expr) constraint_map.transformed_constraints[c].append( newConstraint[name, i, 'lb'] @@ -263,7 +272,7 @@ def _add_constraint_expressions( "Cannot relax disjunctive constraint '%s' " "because M is not defined." % name ) - M_expr = M[1] * (1 - indicator_var) + M_expr = M[1] * indicator_expression newConstraint.add((name, i, 'ub'), c.body - M_expr <= c.upper) constraint_map.transformed_constraints[c].append( newConstraint[name, i, 'ub'] diff --git a/pyomo/gdp/plugins/fix_disjuncts.py b/pyomo/gdp/plugins/fix_disjuncts.py index 44a9d91d513..172363caab7 100644 --- a/pyomo/gdp/plugins/fix_disjuncts.py +++ b/pyomo/gdp/plugins/fix_disjuncts.py @@ -52,7 +52,7 @@ class GDP_Disjunct_Fixer(Transformation): This reclassifies all disjuncts in the passed model instance as ctype Block and deactivates the constraints and disjunctions within inactive disjuncts. - In addition, it transforms relvant LogicalConstraints and BooleanVars so + In addition, it transforms relevant LogicalConstraints and BooleanVars so that the resulting model is a (MI)(N)LP (where it is only mixed-integer if the model contains integer-domain Vars or BooleanVars which were not indicator_vars of Disjuncs. diff --git a/pyomo/gdp/plugins/gdp_var_mover.py b/pyomo/gdp/plugins/gdp_var_mover.py index 5402b576368..7b1df0bb68f 100644 --- a/pyomo/gdp/plugins/gdp_var_mover.py +++ b/pyomo/gdp/plugins/gdp_var_mover.py @@ -115,7 +115,7 @@ def _apply_to(self, instance, **kwds): disjunct_component, Block ) # HACK: activate the block, but do not activate the - # _BlockData objects + # BlockData objects super(ActiveIndexedComponent, disjunct_component).activate() # Deactivate all constraints. 
Note that we only need to diff --git a/pyomo/gdp/plugins/hull.py b/pyomo/gdp/plugins/hull.py index 5b9d2ad08a9..854366c0cf0 100644 --- a/pyomo/gdp/plugins/hull.py +++ b/pyomo/gdp/plugins/hull.py @@ -42,7 +42,7 @@ Binary, ) from pyomo.gdp import Disjunct, Disjunction, GDP_Error -from pyomo.gdp.disjunct import _DisjunctData +from pyomo.gdp.disjunct import DisjunctData from pyomo.gdp.plugins.gdp_to_mip_transformation import GDP_to_MIP_Transformation from pyomo.gdp.transformed_disjunct import _TransformedDisjunct from pyomo.gdp.util import ( @@ -750,20 +750,20 @@ def _transform_constraint( if obj.is_indexed(): newConstraint.add((name, i, 'eq'), newConsExpr) - # map the _ConstraintDatas (we mapped the container above) + # map the ConstraintDatas (we mapped the container above) constraint_map.transformed_constraints[c].append( newConstraint[name, i, 'eq'] ) constraint_map.src_constraint[newConstraint[name, i, 'eq']] = c else: newConstraint.add((name, 'eq'), newConsExpr) - # map to the _ConstraintData (And yes, for + # map to the ConstraintData (And yes, for # ScalarConstraints, this is overwriting the map to the # container we made above, and that is what I want to # happen. ScalarConstraints will map to lists. For # IndexedConstraints, we can map the container to the # container, but more importantly, we are mapping the - # _ConstraintDatas to each other above) + # ConstraintDatas to each other above) constraint_map.transformed_constraints[c].append( newConstraint[name, 'eq'] ) diff --git a/pyomo/gdp/tests/common_tests.py b/pyomo/gdp/tests/common_tests.py index 5d0d6f6c21b..50bc8b05f86 100644 --- a/pyomo/gdp/tests/common_tests.py +++ b/pyomo/gdp/tests/common_tests.py @@ -30,7 +30,7 @@ from pyomo.gdp import Disjunct, Disjunction, GDP_Error from pyomo.core.expr.compare import assertExpressionsEqual from pyomo.core.base import constraint, ComponentUID -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.repn import generate_standard_repn import pyomo.core.expr as EXPR import pyomo.gdp.tests.models as models @@ -952,9 +952,7 @@ def check_disjunction_data_target(self, transformation): transBlock = m.component("_pyomo_gdp_%s_reformulation" % transformation) self.assertIsInstance(transBlock, Block) self.assertIsInstance(transBlock.component("disjunction_xor"), Constraint) - self.assertIsInstance( - transBlock.disjunction_xor[2], constraint._GeneralConstraintData - ) + self.assertIsInstance(transBlock.disjunction_xor[2], constraint.ConstraintData) self.assertIsInstance(transBlock.component("relaxedDisjuncts"), Block) self.assertEqual(len(transBlock.relaxedDisjuncts), 3) @@ -963,7 +961,7 @@ def check_disjunction_data_target(self, transformation): m, targets=[m.disjunction[1]] ) self.assertIsInstance( - m.disjunction[1].algebraic_constraint, constraint._GeneralConstraintData + m.disjunction[1].algebraic_constraint, constraint.ConstraintData ) transBlock = m.component("_pyomo_gdp_%s_reformulation_4" % transformation) self.assertIsInstance(transBlock, Block) @@ -1704,10 +1702,10 @@ def check_all_components_transformed(self, m): # makeNestedDisjunctions_NestedDisjuncts model. 
self.assertIsInstance(m.disj.algebraic_constraint, Constraint) self.assertIsInstance(m.d1.disj2.algebraic_constraint, Constraint) - self.assertIsInstance(m.d1.transformation_block, _BlockData) - self.assertIsInstance(m.d2.transformation_block, _BlockData) - self.assertIsInstance(m.d1.d3.transformation_block, _BlockData) - self.assertIsInstance(m.d1.d4.transformation_block, _BlockData) + self.assertIsInstance(m.d1.transformation_block, BlockData) + self.assertIsInstance(m.d2.transformation_block, BlockData) + self.assertIsInstance(m.d1.d3.transformation_block, BlockData) + self.assertIsInstance(m.d1.d4.transformation_block, BlockData) def check_transformation_blocks_nestedDisjunctions(self, m, transformation): diff --git a/pyomo/gdp/tests/models.py b/pyomo/gdp/tests/models.py index 0b84641899c..2995cacb450 100644 --- a/pyomo/gdp/tests/models.py +++ b/pyomo/gdp/tests/models.py @@ -840,7 +840,7 @@ def makeAnyIndexedDisjunctionOfDisjunctDatas(): build from DisjunctDatas. Identical mathematically to makeDisjunctionOfDisjunctDatas. - Used to test that the right things happen for a case where soemone + Used to test that the right things happen for a case where someone implements an algorithm which iteratively generates disjuncts and retransforms""" m = ConcreteModel() diff --git a/pyomo/gdp/tests/test_bigm.py b/pyomo/gdp/tests/test_bigm.py index c6ac49f6d36..c27d7cbe0cb 100644 --- a/pyomo/gdp/tests/test_bigm.py +++ b/pyomo/gdp/tests/test_bigm.py @@ -19,15 +19,18 @@ Set, Constraint, ComponentMap, + LogicalConstraint, + Objective, SolverFactory, Suffix, + TerminationCondition, ConcreteModel, Var, Any, value, ) from pyomo.gdp import Disjunct, Disjunction, GDP_Error -from pyomo.core.base import constraint, _ConstraintData +from pyomo.core.base import constraint, ConstraintData from pyomo.core.expr.compare import ( assertExpressionsEqual, assertExpressionsStructurallyEqual, @@ -653,14 +656,14 @@ def test_disjunct_and_constraint_maps(self): if src[0]: # equality self.assertEqual(len(transformed), 2) - self.assertIsInstance(transformed[0], _ConstraintData) - self.assertIsInstance(transformed[1], _ConstraintData) + self.assertIsInstance(transformed[0], ConstraintData) + self.assertIsInstance(transformed[1], ConstraintData) self.assertIs(bigm.get_src_constraint(transformed[0]), srcDisjunct.c) self.assertIs(bigm.get_src_constraint(transformed[1]), srcDisjunct.c) else: # >= self.assertEqual(len(transformed), 1) - self.assertIsInstance(transformed[0], _ConstraintData) + self.assertIsInstance(transformed[0], ConstraintData) # check reverse map from the container self.assertIs(bigm.get_src_constraint(transformed[0]), srcDisjunct.c) @@ -1323,8 +1326,8 @@ def test_do_not_transform_deactivated_constraintDatas(self): self.assertEqual(len(cons_list), 2) lb = cons_list[0] ub = cons_list[1] - self.assertIsInstance(lb, constraint._GeneralConstraintData) - self.assertIsInstance(ub, constraint._GeneralConstraintData) + self.assertIsInstance(lb, constraint.ConstraintData) + self.assertIsInstance(ub, constraint.ConstraintData) def checkMs( self, m, disj1c1lb, disj1c1ub, disj1c2lb, disj1c2ub, disj2c1ub, disj2c2ub @@ -1879,12 +1882,11 @@ def test_m_value_mappings(self): # many of the transformed constraints look like this, so can call this # function to test them. 
def check_bigM_constraint(self, cons, variable, M, indicator_var): - repn = generate_standard_repn(cons.body) - self.assertTrue(repn.is_linear()) - self.assertEqual(repn.constant, -M) - self.assertEqual(len(repn.linear_vars), 2) - ct.check_linear_coef(self, repn, variable, 1) - ct.check_linear_coef(self, repn, indicator_var, M) + assertExpressionsEqual( + self, + cons.body, + variable - float(M) * (1 - indicator_var.get_associated_binary()), + ) def check_inner_xor_constraint(self, inner_disjunction, outer_disjunct, bigm): inner_xor = inner_disjunction.algebraic_constraint @@ -1949,6 +1951,10 @@ def test_transformed_constraints(self): .binary_indicator_var, ) ), + 1, + EXPR.MonomialTermExpression( + (-1, m.disjunct[1].binary_indicator_var) + ), ] ), ) @@ -1958,37 +1964,69 @@ def test_transformed_constraints(self): ] ), ) - self.assertIsNone(cons1ub.lower) - self.assertEqual(cons1ub.upper, 0) - self.check_bigM_constraint( - cons1ub, m.z, 10, m.disjunct[1].innerdisjunct[0].indicator_var + assertExpressionsEqual( + self, + cons1ub.expr, + m.z + - 10.0 + * ( + 1 + - m.disjunct[1].innerdisjunct[0].binary_indicator_var + + 1 + - m.disjunct[1].binary_indicator_var + ) + <= 0.0, ) cons2 = bigm.get_transformed_constraints(m.disjunct[1].innerdisjunct[1].c) self.assertEqual(len(cons2), 1) cons2lb = cons2[0] - self.assertEqual(cons2lb.lower, 5) - self.assertIsNone(cons2lb.upper) - self.check_bigM_constraint( - cons2lb, m.z, -5, m.disjunct[1].innerdisjunct[1].indicator_var + assertExpressionsEqual( + self, + cons2lb.expr, + 5.0 + <= m.z + - (-5.0) + * ( + 1 + - m.disjunct[1].innerdisjunct[1].binary_indicator_var + + 1 + - m.disjunct[1].binary_indicator_var + ), ) cons3 = bigm.get_transformed_constraints(m.simpledisjunct.innerdisjunct0.c) self.assertEqual(len(cons3), 1) cons3ub = cons3[0] - self.assertEqual(cons3ub.upper, 2) - self.assertIsNone(cons3ub.lower) - self.check_bigM_constraint( - cons3ub, m.x, 7, m.simpledisjunct.innerdisjunct0.indicator_var + assertExpressionsEqual( + self, + cons3ub.expr, + m.x + - 7.0 + * ( + 1 + - m.simpledisjunct.innerdisjunct0.binary_indicator_var + + 1 + - m.simpledisjunct.binary_indicator_var + ) + <= 2.0, ) cons4 = bigm.get_transformed_constraints(m.simpledisjunct.innerdisjunct1.c) self.assertEqual(len(cons4), 1) cons4lb = cons4[0] - self.assertEqual(cons4lb.lower, 4) - self.assertIsNone(cons4lb.upper) - self.check_bigM_constraint( - cons4lb, m.x, -13, m.simpledisjunct.innerdisjunct1.indicator_var + assertExpressionsEqual( + self, + cons4lb.expr, + m.x + - (-13.0) + * ( + 1 + - m.simpledisjunct.innerdisjunct1.binary_indicator_var + + 1 + - m.simpledisjunct.binary_indicator_var + ) + >= 4.0, ) # Here we check that the xor constraint from @@ -2088,35 +2126,6 @@ def innerIndexed(d, i): m._pyomo_gdp_bigm_reformulation.relaxedDisjuncts, ) - def check_first_disjunct_constraint(self, disj1c, x, ind_var): - self.assertEqual(len(disj1c), 1) - cons = disj1c[0] - self.assertIsNone(cons.lower) - self.assertEqual(cons.upper, 1) - repn = generate_standard_repn(cons.body) - self.assertTrue(repn.is_quadratic()) - self.assertEqual(len(repn.linear_vars), 1) - self.assertEqual(len(repn.quadratic_vars), 4) - ct.check_linear_coef(self, repn, ind_var, 143) - self.assertEqual(repn.constant, -143) - for i in range(1, 5): - ct.check_squared_term_coef(self, repn, x[i], 1) - - def check_second_disjunct_constraint(self, disj2c, x, ind_var): - self.assertEqual(len(disj2c), 1) - cons = disj2c[0] - self.assertIsNone(cons.lower) - self.assertEqual(cons.upper, 1) - repn = 
generate_standard_repn(cons.body) - self.assertTrue(repn.is_quadratic()) - self.assertEqual(len(repn.linear_vars), 5) - self.assertEqual(len(repn.quadratic_vars), 4) - self.assertEqual(repn.constant, -63) # M = 99, so this is 36 - 99 - ct.check_linear_coef(self, repn, ind_var, 99) - for i in range(1, 5): - ct.check_squared_term_coef(self, repn, x[i], 1) - ct.check_linear_coef(self, repn, x[i], -6) - def simplify_cons(self, cons, leq): visitor = LinearRepnVisitor({}, {}, {}, None) repn = visitor.walk_expression(cons.body) @@ -2142,30 +2151,76 @@ def check_hierarchical_nested_model(self, m, bigm): # outer disjunction constraints disj1c = bigm.get_transformed_constraints(m.disj1.c) - self.check_first_disjunct_constraint(disj1c, m.x, m.disj1.binary_indicator_var) + self.assertEqual(len(disj1c), 1) + cons = disj1c[0] + assertExpressionsEqual( + self, + cons.expr, + m.x[1] ** 2 + + m.x[2] ** 2 + + m.x[3] ** 2 + + m.x[4] ** 2 + - 143.0 * (1 - m.disj1.binary_indicator_var) + <= 1.0, + ) disj2c = bigm.get_transformed_constraints(m.disjunct_block.disj2.c) - self.check_second_disjunct_constraint( - disj2c, m.x, m.disjunct_block.disj2.binary_indicator_var + self.assertEqual(len(disj2c), 1) + cons = disj2c[0] + assertExpressionsEqual( + self, + cons.expr, + (3 - m.x[1]) ** 2 + + (3 - m.x[2]) ** 2 + + (3 - m.x[3]) ** 2 + + (3 - m.x[4]) ** 2 + - 99.0 * (1 - m.disjunct_block.disj2.binary_indicator_var) + <= 1.0, ) # inner disjunction constraints innerd1c = bigm.get_transformed_constraints( m.disjunct_block.disj2.disjunction_disjuncts[0].constraint[1] ) - self.check_first_disjunct_constraint( - innerd1c, - m.x, - m.disjunct_block.disj2.disjunction_disjuncts[0].binary_indicator_var, + self.assertEqual(len(innerd1c), 1) + cons = innerd1c[0] + assertExpressionsEqual( + self, + cons.expr, + m.x[1] ** 2 + + m.x[2] ** 2 + + m.x[3] ** 2 + + m.x[4] ** 2 + - 143.0 + * ( + 1 + - m.disjunct_block.disj2.disjunction_disjuncts[0].binary_indicator_var + + 1 + - m.disjunct_block.disj2.binary_indicator_var + ) + <= 1.0, ) innerd2c = bigm.get_transformed_constraints( m.disjunct_block.disj2.disjunction_disjuncts[1].constraint[1] ) - self.check_second_disjunct_constraint( - innerd2c, - m.x, - m.disjunct_block.disj2.disjunction_disjuncts[1].binary_indicator_var, + self.assertEqual(len(innerd2c), 1) + cons = innerd2c[0] + assertExpressionsEqual( + self, + cons.expr, + (3 - m.x[1]) ** 2 + + (3 - m.x[2]) ** 2 + + (3 - m.x[3]) ** 2 + + (3 - m.x[4]) ** 2 + - 99.0 + * ( + 1 + - m.disjunct_block.disj2.disjunction_disjuncts[1].binary_indicator_var + + 1 + - m.disjunct_block.disj2.binary_indicator_var + ) + <= 1.0, ) def test_hierarchical_badly_ordered_targets(self): @@ -2193,10 +2248,50 @@ def test_decl_order_opposite_instantiation_order(self): def test_do_not_assume_nested_indicators_local(self): ct.check_do_not_assume_nested_indicators_local(self, 'gdp.bigm') + @unittest.skipUnless(gurobi_available, "Gurobi is not available") + def test_constraints_not_enforced_when_an_ancestor_indicator_is_False(self): + m = ConcreteModel() + m.x = Var(bounds=(0, 30)) + + m.left = Disjunct() + m.left.left = Disjunct() + m.left.left.c = Constraint(expr=m.x >= 10) + m.left.right = Disjunct() + m.left.right.c = Constraint(expr=m.x >= 9) + m.left.disjunction = Disjunction(expr=[m.left.left, m.left.right]) + m.right = Disjunct() + m.right.left = Disjunct() + m.right.left.c = Constraint(expr=m.x >= 11) + m.right.right = Disjunct() + m.right.right.c = Constraint(expr=m.x >= 8) + m.right.disjunction = Disjunction(expr=[m.right.left, m.right.right]) + 
m.disjunction = Disjunction(expr=[m.left, m.right]) + + m.equiv_left = LogicalConstraint( + expr=m.left.left.indicator_var.equivalent_to(m.right.left.indicator_var) + ) + m.equiv_right = LogicalConstraint( + expr=m.left.right.indicator_var.equivalent_to(m.right.right.indicator_var) + ) + + m.obj = Objective(expr=m.x) + + TransformationFactory('gdp.bigm').apply_to(m) + results = SolverFactory('gurobi').solve(m) + self.assertEqual( + results.solver.termination_condition, TerminationCondition.optimal + ) + self.assertTrue(value(m.right.indicator_var)) + self.assertFalse(value(m.left.indicator_var)) + self.assertTrue(value(m.right.right.indicator_var)) + self.assertFalse(value(m.right.left.indicator_var)) + self.assertTrue(value(m.left.right.indicator_var)) + self.assertAlmostEqual(value(m.x), 8) + class IndexedDisjunction(unittest.TestCase): # this tests that if the targets are a subset of the - # _DisjunctDatas in an IndexedDisjunction that the xor constraint + # DisjunctDatas in an IndexedDisjunction that the xor constraint # created on the parent block will still be indexed as expected. def test_xor_constraint(self): ct.check_indexed_xor_constraints_with_targets(self, 'bigm') diff --git a/pyomo/gdp/tests/test_hull.py b/pyomo/gdp/tests/test_hull.py index 55edf244731..07876a9d213 100644 --- a/pyomo/gdp/tests/test_hull.py +++ b/pyomo/gdp/tests/test_hull.py @@ -1252,12 +1252,10 @@ def check_second_iteration(self, model): orig = model.component("_pyomo_gdp_hull_reformulation") self.assertIsInstance( - model.disjunctionList[1].algebraic_constraint, - constraint._GeneralConstraintData, + model.disjunctionList[1].algebraic_constraint, constraint.ConstraintData ) self.assertIsInstance( - model.disjunctionList[0].algebraic_constraint, - constraint._GeneralConstraintData, + model.disjunctionList[0].algebraic_constraint, constraint.ConstraintData ) self.assertFalse(model.disjunctionList[1].active) self.assertFalse(model.disjunctionList[0].active) diff --git a/pyomo/gdp/tests/test_util.py b/pyomo/gdp/tests/test_util.py index fd555fc2f59..fa8e953f9f7 100644 --- a/pyomo/gdp/tests/test_util.py +++ b/pyomo/gdp/tests/test_util.py @@ -13,7 +13,7 @@ from pyomo.core import ConcreteModel, Var, Expression, Block, RangeSet, Any import pyomo.core.expr as EXPR -from pyomo.core.base.expression import _ExpressionData +from pyomo.core.base.expression import NamedExpressionData from pyomo.gdp.util import ( clone_without_expression_components, is_child_of, @@ -40,7 +40,7 @@ def test_clone_without_expression_components(self): test = clone_without_expression_components(base, {}) self.assertIsNot(base, test) self.assertEqual(base(), test()) - self.assertIsInstance(base, _ExpressionData) + self.assertIsInstance(base, NamedExpressionData) self.assertIsInstance(test, EXPR.SumExpression) test = clone_without_expression_components(base, {id(m.x): m.y}) self.assertEqual(3**2 + 3 - 1, test()) @@ -51,7 +51,7 @@ def test_clone_without_expression_components(self): self.assertEqual(base(), test()) self.assertIsInstance(base, EXPR.SumExpression) self.assertIsInstance(test, EXPR.SumExpression) - self.assertIsInstance(base.arg(0), _ExpressionData) + self.assertIsInstance(base.arg(0), NamedExpressionData) self.assertIsInstance(test.arg(0), EXPR.SumExpression) test = clone_without_expression_components(base, {id(m.x): m.y}) self.assertEqual(3**2 + 3 - 1 + 3, test()) diff --git a/pyomo/gdp/transformed_disjunct.py b/pyomo/gdp/transformed_disjunct.py index 6cf60abf414..287d5ed1652 100644 --- a/pyomo/gdp/transformed_disjunct.py +++ 
b/pyomo/gdp/transformed_disjunct.py @@ -10,11 +10,11 @@ # ___________________________________________________________________________ from pyomo.common.autoslots import AutoSlots -from pyomo.core.base.block import _BlockData, IndexedBlock +from pyomo.core.base.block import BlockData, IndexedBlock from pyomo.core.base.global_set import UnindexedComponent_index, UnindexedComponent_set -class _TransformedDisjunctData(_BlockData): +class _TransformedDisjunctData(BlockData): __slots__ = ('_src_disjunct',) __autoslot_mappers__ = {'_src_disjunct': AutoSlots.weakref_mapper} @@ -23,7 +23,7 @@ def src_disjunct(self): return None if self._src_disjunct is None else self._src_disjunct() def __init__(self, component): - _BlockData.__init__(self, component) + BlockData.__init__(self, component) # pointer to the Disjunct whose transformation block this is. self._src_disjunct = None diff --git a/pyomo/gdp/util.py b/pyomo/gdp/util.py index fe11975954d..2fe8e9e1dee 100644 --- a/pyomo/gdp/util.py +++ b/pyomo/gdp/util.py @@ -10,10 +10,9 @@ # ___________________________________________________________________________ from pyomo.gdp import GDP_Error, Disjunction -from pyomo.gdp.disjunct import _DisjunctData, Disjunct +from pyomo.gdp.disjunct import DisjunctData, Disjunct import pyomo.core.expr as EXPR -from pyomo.core.base.component import _ComponentBase from pyomo.core import ( Block, Suffix, @@ -22,7 +21,7 @@ LogicalConstraint, value, ) -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.common.collections import ComponentMap, ComponentSet, OrderedSet from pyomo.opt import TerminationCondition, SolverStatus @@ -144,13 +143,13 @@ def parent(self, u): Arg: u : A node in the tree """ + if u in self._parent: + return self._parent[u] if u not in self._vertices: raise ValueError( "'%s' is not a vertex in the GDP tree. Cannot " "retrieve its parent." % u ) - if u in self._parent: - return self._parent[u] else: return None @@ -330,7 +329,7 @@ def get_gdp_tree(targets, instance, knownBlocks=None): "Target '%s' is not a component on instance " "'%s'!" % (t.name, instance.name) ) - if t.ctype is Block or isinstance(t, _BlockData): + if t.ctype is Block or isinstance(t, BlockData): _blocks = t.values() if t.is_indexed() else (t,) for block in _blocks: if not block.active: @@ -387,7 +386,7 @@ def is_child_of(parent, child, knownBlocks=None): if knownBlocks is None: knownBlocks = {} tmp = set() - node = child if isinstance(child, (Block, _BlockData)) else child.parent_block() + node = child if isinstance(child, (Block, BlockData)) else child.parent_block() while True: known = knownBlocks.get(node) if known: @@ -452,7 +451,7 @@ def get_src_disjunct(transBlock): Parameters ---------- - transBlock: _BlockData which is in the relaxedDisjuncts IndexedBlock + transBlock: BlockData which is in the relaxedDisjuncts IndexedBlock on a transformation block. 
""" if ( @@ -493,7 +492,7 @@ def get_src_constraint(transformedConstraint): def _find_parent_disjunct(constraint): # traverse up until we find the disjunct this constraint lives on parent_disjunct = constraint.parent_block() - while not isinstance(parent_disjunct, _DisjunctData): + while not isinstance(parent_disjunct, DisjunctData): if parent_disjunct is None: raise GDP_Error( "Constraint '%s' is not on a disjunct and so was not " @@ -525,17 +524,17 @@ def get_transformed_constraints(srcConstraint): Parameters ---------- - srcConstraint: ScalarConstraint or _ConstraintData, which must be in + srcConstraint: ScalarConstraint or ConstraintData, which must be in the subtree of a transformed Disjunct """ if srcConstraint.is_indexed(): raise GDP_Error( "Argument to get_transformed_constraint should be " - "a ScalarConstraint or _ConstraintData. (If you " + "a ScalarConstraint or ConstraintData. (If you " "want the container for all transformed constraints " "from an IndexedDisjunction, this is the parent " "component of a transformed constraint originating " - "from any of its _ComponentDatas.)" + "from any of its ComponentDatas.)" ) transBlock = _get_constraint_transBlock(srcConstraint) transformed_constraints = transBlock.private_data( diff --git a/pyomo/mpec/complementarity.py b/pyomo/mpec/complementarity.py index 79f76a9fc34..aa8db922145 100644 --- a/pyomo/mpec/complementarity.py +++ b/pyomo/mpec/complementarity.py @@ -19,7 +19,7 @@ from pyomo.core import Constraint, Var, Block, Set from pyomo.core.base.component import ModelComponentFactory from pyomo.core.base.global_set import UnindexedComponent_index -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.base.disable_methods import disable_methods from pyomo.core.base.initializer import ( Initializer, @@ -43,7 +43,7 @@ def complements(a, b): return ComplementarityTuple(a, b) -class _ComplementarityData(_BlockData): +class ComplementarityData(BlockData): def _canonical_expression(self, e): # Note: as the complimentarity component maintains references to # the original expression (e), it is NOT safe or valid to bypass @@ -179,9 +179,14 @@ def set_value(self, cc): ) +class _ComplementarityData(metaclass=RenamedClass): + __renamed__new_class__ = ComplementarityData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("Complementarity conditions.") class Complementarity(Block): - _ComponentDataClass = _ComplementarityData + _ComponentDataClass = ComplementarityData def __new__(cls, *args, **kwds): if cls != Complementarity: @@ -298,9 +303,9 @@ def _conditional_block_printer(ostream, idx, data): ) -class ScalarComplementarity(_ComplementarityData, Complementarity): +class ScalarComplementarity(ComplementarityData, Complementarity): def __init__(self, *args, **kwds): - _ComplementarityData.__init__(self, self) + ComplementarityData.__init__(self, self) Complementarity.__init__(self, *args, **kwds) self._data[None] = self self._index = UnindexedComponent_index diff --git a/pyomo/network/arc.py b/pyomo/network/arc.py index 42b7c6ea075..5e68f181a38 100644 --- a/pyomo/network/arc.py +++ b/pyomo/network/arc.py @@ -52,7 +52,7 @@ def _iterable_to_dict(vals, directed, name): return vals -class _ArcData(ActiveComponentData): +class ArcData(ActiveComponentData): """ This class defines the data for a single Arc @@ -246,6 +246,11 @@ def _validate_ports(self, source, destination, ports): ) +class _ArcData(metaclass=RenamedClass): + __renamed__new_class__ = ArcData + 
__renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register("Component used for connecting two Ports.") class Arc(ActiveIndexedComponent): """ @@ -267,7 +272,7 @@ class Arc(ActiveIndexedComponent): or a two-member iterable of ports """ - _ComponentDataClass = _ArcData + _ComponentDataClass = ArcData def __new__(cls, *args, **kwds): if cls != Arc: @@ -373,9 +378,9 @@ def _pprint(self): ) -class ScalarArc(_ArcData, Arc): +class ScalarArc(ArcData, Arc): def __init__(self, *args, **kwds): - _ArcData.__init__(self, self) + ArcData.__init__(self, self) Arc.__init__(self, *args, **kwds) self.index = UnindexedComponent_index diff --git a/pyomo/network/foqus_graph.py b/pyomo/network/foqus_graph.py index e4cf3b92014..7c6c05256d9 100644 --- a/pyomo/network/foqus_graph.py +++ b/pyomo/network/foqus_graph.py @@ -358,9 +358,9 @@ def scc_calculation_order(self, sccNodes, ie, oe): done = False for i in range(len(sccNodes)): for j in range(len(sccNodes)): - for ine in ie[i]: - for oute in oe[j]: - if ine == oute: + for in_e in ie[i]: + for out_e in oe[j]: + if in_e == out_e: adj[j].append(i) adjR[i].append(j) done = True diff --git a/pyomo/network/port.py b/pyomo/network/port.py index 26822d4fee9..ee5c915d8db 100644 --- a/pyomo/network/port.py +++ b/pyomo/network/port.py @@ -36,7 +36,7 @@ logger = logging.getLogger('pyomo.network') -class _PortData(ComponentData): +class PortData(ComponentData): """ This class defines the data for a single Port @@ -285,6 +285,11 @@ def get_split_fraction(self, arc): return res +class _PortData(metaclass=RenamedClass): + __renamed__new_class__ = PortData + __renamed__version__ = '6.7.2.dev0' + + @ModelComponentFactory.register( "A bundle of variables that can be connected to other ports." ) @@ -339,7 +344,7 @@ def __init__(self, *args, **kwd): # IndexedComponent that support implicit definition def _getitem_when_not_present(self, idx): """Returns the default component data value.""" - tmp = self._data[idx] = _PortData(component=self) + tmp = self._data[idx] = PortData(component=self) tmp._index = idx return tmp @@ -357,7 +362,7 @@ def construct(self, data=None): for _set in self._anonymous_sets: _set.construct() - # Construct _PortData objects for all index values + # Construct PortData objects for all index values if self.is_indexed(): self._initialize_members(self._index_set) else: @@ -763,9 +768,9 @@ def _create_evar(member, name, eblock, index_set): return evar -class ScalarPort(Port, _PortData): +class ScalarPort(Port, PortData): def __init__(self, *args, **kwd): - _PortData.__init__(self, component=self) + PortData.__init__(self, component=self) Port.__init__(self, *args, **kwd) self._index = UnindexedComponent_index diff --git a/pyomo/opt/base/solvers.py b/pyomo/opt/base/solvers.py index f1f9d653a8a..c0698165603 100644 --- a/pyomo/opt/base/solvers.py +++ b/pyomo/opt/base/solvers.py @@ -536,15 +536,15 @@ def solve(self, *args, **kwds): # If the inputs are models, then validate that they have been # constructed! Collect suffix names to try and import from solution. 
# - from pyomo.core.base.block import _BlockData + from pyomo.core.base.block import BlockData import pyomo.core.base.suffix from pyomo.core.kernel.block import IBlock import pyomo.core.kernel.suffix _model = None for arg in args: - if isinstance(arg, (_BlockData, IBlock)): - if isinstance(arg, _BlockData): + if isinstance(arg, (BlockData, IBlock)): + if isinstance(arg, BlockData): if not arg.is_constructed(): raise RuntimeError( "Attempting to solve model=%s with unconstructed " @@ -553,7 +553,7 @@ def solve(self, *args, **kwds): _model = arg # import suffixes must be on the top-level model - if isinstance(arg, _BlockData): + if isinstance(arg, BlockData): model_suffixes = list( name for ( diff --git a/pyomo/opt/results/problem.py b/pyomo/opt/results/problem.py index 98f749f3aeb..a8eca1e3b41 100644 --- a/pyomo/opt/results/problem.py +++ b/pyomo/opt/results/problem.py @@ -12,19 +12,16 @@ import enum from pyomo.opt.results.container import MapContainer +from pyomo.common.enums import ExtendedEnumType, ObjectiveSense -class ProblemSense(str, enum.Enum): - unknown = 'unknown' - minimize = 'minimize' - maximize = 'maximize' - # Overloading __str__ is needed to match the behavior of the old - # pyutilib.enum class (removed June 2020). There are spots in the - # code base that expect the string representation for items in the - # enum to not include the class name. New uses of enum shouldn't - # need to do this. +class ProblemSense(enum.IntEnum, metaclass=ExtendedEnumType): + __base_enum__ = ObjectiveSense + + unknown = 0 + def __str__(self): - return self.value + return self.name class ProblemInformation(MapContainer): diff --git a/pyomo/repn/beta/matrix.py b/pyomo/repn/beta/matrix.py index 916b0daf755..0201c46eb18 100644 --- a/pyomo/repn/beta/matrix.py +++ b/pyomo/repn/beta/matrix.py @@ -24,7 +24,7 @@ Constraint, IndexedConstraint, ScalarConstraint, - _ConstraintData, + ConstraintData, ) from pyomo.core.expr.numvalue import native_numeric_types from pyomo.repn import generate_standard_repn @@ -247,7 +247,7 @@ def _get_bound(exp): constraint_containers_removed += 1 for constraint, index in constraint_data_to_remove: # Note that this del is not needed: assigning Constraint.Skip - # above removes the _ConstraintData from the _data dict. + # above removes the ConstraintData from the _data dict. # del constraint[index] constraints_removed += 1 for block, constraint in constraint_containers_to_remove: @@ -348,12 +348,12 @@ def _get_bound(exp): ) -# class _LinearConstraintData(_ConstraintData,LinearCanonicalRepn): +# class _LinearConstraintData(ConstraintData,LinearCanonicalRepn): # # This change breaks this class, but it's unclear whether this # is being used... # -class _LinearConstraintData(_ConstraintData): +class _LinearConstraintData(ConstraintData): """ This class defines the data for a single linear constraint in canonical form. 
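With ExtendedEnumType, the ProblemSense enum above is meant to behave as a superset of ObjectiveSense: minimize and maximize come from the base enum, and only the extra unknown member is defined locally. That is what lets the solver plugins later in this diff assign the minimize/maximize constants from pyomo.common.enums directly to results.problem.sense. A short sketch of the intended behavior (assuming ExtendedEnumType forwards the base enum's members, which is what those plugin hunks rely on):

    from pyomo.common.enums import minimize, maximize
    from pyomo.opt.results import ProblemSense

    # Old comparisons against ProblemSense members still hold, because the
    # shared members are IntEnum values drawn from ObjectiveSense.
    assert ProblemSense.minimize == minimize
    assert ProblemSense.maximize == maximize
    # __str__ still returns the bare member name, matching the old behavior.
    assert str(ProblemSense.unknown) == 'unknown'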
@@ -393,7 +393,7 @@ def __init__(self, index, component=None): # # These lines represent in-lining of the # following constructors: - # - _ConstraintData, + # - ConstraintData, # - ActiveComponentData # - ComponentData self._component = weakref_ref(component) if (component is not None) else None @@ -442,7 +442,7 @@ def __init__(self, index, component=None): # These lines represent in-lining of the # following constructors: # - _LinearConstraintData - # - _ConstraintData, + # - ConstraintData, # - ActiveComponentData # - ComponentData self._component = weakref_ref(component) if (component is not None) else None @@ -584,7 +584,7 @@ def constant(self): return sum(terms) # - # Abstract Interface (_ConstraintData) + # Abstract Interface (ConstraintData) # @property diff --git a/pyomo/repn/linear.py b/pyomo/repn/linear.py index d601ccbcd7c..6d084067511 100644 --- a/pyomo/repn/linear.py +++ b/pyomo/repn/linear.py @@ -200,9 +200,8 @@ def _handle_negation_ANY(visitor, node, arg): _exit_node_handlers[NegationExpression] = { + None: _handle_negation_ANY, (_CONSTANT,): _handle_negation_constant, - (_LINEAR,): _handle_negation_ANY, - (_GENERAL,): _handle_negation_ANY, } # @@ -211,20 +210,18 @@ def _handle_negation_ANY(visitor, node, arg): def _handle_product_constant_constant(visitor, node, arg1, arg2): - _, arg1 = arg1 - _, arg2 = arg2 - ans = arg1 * arg2 + ans = arg1[1] * arg2[1] if ans != ans: - if not arg1 or not arg2: + if not arg1[1] or not arg2[1]: deprecation_warning( - f"Encountered {str(arg1)}*{str(arg2)} in expression tree. " + f"Encountered {str(arg1[1])}*{str(arg2[1])} in expression tree. " "Mapping the NaN result to 0 for compatibility " "with the lp_v1 writer. In the future, this NaN " "will be preserved/emitted to comply with IEEE-754.", version='6.6.0', ) - return _, 0 - return _, arg1 * arg2 + return _CONSTANT, 0 + return _CONSTANT, ans def _handle_product_constant_ANY(visitor, node, arg1, arg2): @@ -276,15 +273,12 @@ def _handle_product_nonlinear(visitor, node, arg1, arg2): _exit_node_handlers[ProductExpression] = { + None: _handle_product_nonlinear, (_CONSTANT, _CONSTANT): _handle_product_constant_constant, (_CONSTANT, _LINEAR): _handle_product_constant_ANY, (_CONSTANT, _GENERAL): _handle_product_constant_ANY, (_LINEAR, _CONSTANT): _handle_product_ANY_constant, - (_LINEAR, _LINEAR): _handle_product_nonlinear, - (_LINEAR, _GENERAL): _handle_product_nonlinear, (_GENERAL, _CONSTANT): _handle_product_ANY_constant, - (_GENERAL, _LINEAR): _handle_product_nonlinear, - (_GENERAL, _GENERAL): _handle_product_nonlinear, } _exit_node_handlers[MonomialTermExpression] = _exit_node_handlers[ProductExpression] @@ -298,7 +292,7 @@ def _handle_division_constant_constant(visitor, node, arg1, arg2): def _handle_division_ANY_constant(visitor, node, arg1, arg2): - arg1[1].multiplier /= arg2[1] + arg1[1].multiplier = apply_node_operation(node, (arg1[1].multiplier, arg2[1])) return arg1 @@ -309,15 +303,10 @@ def _handle_division_nonlinear(visitor, node, arg1, arg2): _exit_node_handlers[DivisionExpression] = { + None: _handle_division_nonlinear, (_CONSTANT, _CONSTANT): _handle_division_constant_constant, - (_CONSTANT, _LINEAR): _handle_division_nonlinear, - (_CONSTANT, _GENERAL): _handle_division_nonlinear, (_LINEAR, _CONSTANT): _handle_division_ANY_constant, - (_LINEAR, _LINEAR): _handle_division_nonlinear, - (_LINEAR, _GENERAL): _handle_division_nonlinear, (_GENERAL, _CONSTANT): _handle_division_ANY_constant, - (_GENERAL, _LINEAR): _handle_division_nonlinear, - (_GENERAL, _GENERAL): 
_handle_division_nonlinear, } # @@ -325,8 +314,7 @@ def _handle_division_nonlinear(visitor, node, arg1, arg2): # -def _handle_pow_constant_constant(visitor, node, *args): - arg1, arg2 = args +def _handle_pow_constant_constant(visitor, node, arg1, arg2): ans = apply_node_operation(node, (arg1[1], arg2[1])) if ans.__class__ in native_complex_types: ans = complex_number_error(ans, visitor, node) @@ -358,15 +346,10 @@ def _handle_pow_nonlinear(visitor, node, arg1, arg2): _exit_node_handlers[PowExpression] = { + None: _handle_pow_nonlinear, (_CONSTANT, _CONSTANT): _handle_pow_constant_constant, - (_CONSTANT, _LINEAR): _handle_pow_nonlinear, - (_CONSTANT, _GENERAL): _handle_pow_nonlinear, (_LINEAR, _CONSTANT): _handle_pow_ANY_constant, - (_LINEAR, _LINEAR): _handle_pow_nonlinear, - (_LINEAR, _GENERAL): _handle_pow_nonlinear, (_GENERAL, _CONSTANT): _handle_pow_ANY_constant, - (_GENERAL, _LINEAR): _handle_pow_nonlinear, - (_GENERAL, _GENERAL): _handle_pow_nonlinear, } # @@ -389,9 +372,8 @@ def _handle_unary_nonlinear(visitor, node, arg): _exit_node_handlers[UnaryFunctionExpression] = { + None: _handle_unary_nonlinear, (_CONSTANT,): _handle_unary_constant, - (_LINEAR,): _handle_unary_nonlinear, - (_GENERAL,): _handle_unary_nonlinear, } _exit_node_handlers[AbsExpression] = _exit_node_handlers[UnaryFunctionExpression] @@ -414,9 +396,8 @@ def _handle_named_ANY(visitor, node, arg1): _exit_node_handlers[Expression] = { + None: _handle_named_ANY, (_CONSTANT,): _handle_named_constant, - (_LINEAR,): _handle_named_ANY, - (_GENERAL,): _handle_named_ANY, } # @@ -449,12 +430,7 @@ def _handle_expr_if_nonlinear(visitor, node, arg1, arg2, arg3): return _GENERAL, ans -_exit_node_handlers[Expr_ifExpression] = { - (i, j, k): _handle_expr_if_nonlinear - for i in (_LINEAR, _GENERAL) - for j in (_CONSTANT, _LINEAR, _GENERAL) - for k in (_CONSTANT, _LINEAR, _GENERAL) -} +_exit_node_handlers[Expr_ifExpression] = {None: _handle_expr_if_nonlinear} for j in (_CONSTANT, _LINEAR, _GENERAL): for k in (_CONSTANT, _LINEAR, _GENERAL): _exit_node_handlers[Expr_ifExpression][_CONSTANT, j, k] = _handle_expr_if_const @@ -487,11 +463,9 @@ def _handle_equality_general(visitor, node, arg1, arg2): _exit_node_handlers[EqualityExpression] = { - (i, j): _handle_equality_general - for i in (_CONSTANT, _LINEAR, _GENERAL) - for j in (_CONSTANT, _LINEAR, _GENERAL) + None: _handle_equality_general, + (_CONSTANT, _CONSTANT): _handle_equality_const, } -_exit_node_handlers[EqualityExpression][_CONSTANT, _CONSTANT] = _handle_equality_const def _handle_inequality_const(visitor, node, arg1, arg2): @@ -517,13 +491,9 @@ def _handle_inequality_general(visitor, node, arg1, arg2): _exit_node_handlers[InequalityExpression] = { - (i, j): _handle_inequality_general - for i in (_CONSTANT, _LINEAR, _GENERAL) - for j in (_CONSTANT, _LINEAR, _GENERAL) + None: _handle_inequality_general, + (_CONSTANT, _CONSTANT): _handle_inequality_const, } -_exit_node_handlers[InequalityExpression][ - _CONSTANT, _CONSTANT -] = _handle_inequality_const def _handle_ranged_const(visitor, node, arg1, arg2, arg3): @@ -554,14 +524,9 @@ def _handle_ranged_general(visitor, node, arg1, arg2, arg3): _exit_node_handlers[RangedExpression] = { - (i, j, k): _handle_ranged_general - for i in (_CONSTANT, _LINEAR, _GENERAL) - for j in (_CONSTANT, _LINEAR, _GENERAL) - for k in (_CONSTANT, _LINEAR, _GENERAL) + None: _handle_ranged_general, + (_CONSTANT, _CONSTANT, _CONSTANT): _handle_ranged_const, } -_exit_node_handlers[RangedExpression][ - _CONSTANT, _CONSTANT, _CONSTANT -] = _handle_ranged_const 
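The handler tables above no longer enumerate every combination of argument types; each node class now carries a single None entry that serves as its catch-all handler, with specific argument-type tuples listed only where they override it. The next hunk updates _initialize_exit_node_dispatcher to register the None entry under the bare node class, and the pyomo/repn/util.py hunk later in this diff makes ExitNodeDispatcher fall back to that bare-class entry when no (class, arg types) key matches. A compact sketch of the convention, with string stand-ins for the real classes and handler functions:

    example_exit_handlers = {
        'NegationExpression': {
            None: 'handle_any_argument',                # class-wide default
            ('CONSTANT',): 'handle_constant_argument',  # specific override
        }
    }

    def initialize_exit_node_dispatcher(exit_handlers):
        # mirrors the updated _initialize_exit_node_dispatcher shown below
        exit_dispatcher = {}
        for cls, handlers in exit_handlers.items():
            for args, fcn in handlers.items():
                if args is None:
                    exit_dispatcher[cls] = fcn            # keyed by class alone
                else:
                    exit_dispatcher[(cls, *args)] = fcn   # keyed by (class, *arg types)
        return exit_dispatcher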
class LinearBeforeChildDispatcher(BeforeChildDispatcher): @@ -754,7 +719,10 @@ def _initialize_exit_node_dispatcher(exit_handlers): exit_dispatcher = {} for cls, handlers in exit_handlers.items(): for args, fcn in handlers.items(): - exit_dispatcher[(cls, *args)] = fcn + if args is None: + exit_dispatcher[cls] = fcn + else: + exit_dispatcher[(cls, *args)] = fcn return exit_dispatcher diff --git a/pyomo/repn/plugins/ampl/ampl_.py b/pyomo/repn/plugins/ampl/ampl_.py index f422a085a3c..cc99e9cfdae 100644 --- a/pyomo/repn/plugins/ampl/ampl_.py +++ b/pyomo/repn/plugins/ampl/ampl_.py @@ -33,7 +33,7 @@ from pyomo.core.base import ( SymbolMap, NameLabeler, - _ExpressionData, + NamedExpressionData, SortComponents, var, param, @@ -168,11 +168,11 @@ def _build_op_template(): _op_template[EXPR.EqualityExpression] = "o24{C}\n" _op_comment[EXPR.EqualityExpression] = "\t#eq" - _op_template[var._VarData] = "v%d{C}\n" - _op_comment[var._VarData] = "\t#%s" + _op_template[var.VarData] = "v%d{C}\n" + _op_comment[var.VarData] = "\t#%s" - _op_template[param._ParamData] = "n%r{C}\n" - _op_comment[param._ParamData] = "" + _op_template[param.ParamData] = "n%r{C}\n" + _op_comment[param.ParamData] = "" _op_template[NumericConstant] = "n%r{C}\n" _op_comment[NumericConstant] = "" @@ -724,7 +724,7 @@ def _print_nonlinear_terms_NL(self, exp): self._print_nonlinear_terms_NL(exp.arg(0)) self._print_nonlinear_terms_NL(exp.arg(1)) - elif isinstance(exp, (_ExpressionData, IIdentityExpression)): + elif isinstance(exp, (NamedExpressionData, IIdentityExpression)): self._print_nonlinear_terms_NL(exp.expr) else: @@ -733,24 +733,24 @@ def _print_nonlinear_terms_NL(self, exp): % (exp_type) ) - elif isinstance(exp, (var._VarData, IVariable)) and (not exp.is_fixed()): + elif isinstance(exp, (var.VarData, IVariable)) and (not exp.is_fixed()): # (self._output_fixed_variable_bounds or if not self._symbolic_solver_labels: OUTPUT.write( - self._op_string[var._VarData] + self._op_string[var.VarData] % (self.ampl_var_id[self._varID_map[id(exp)]]) ) else: OUTPUT.write( - self._op_string[var._VarData] + self._op_string[var.VarData] % ( self.ampl_var_id[self._varID_map[id(exp)]], self._name_labeler(exp), ) ) - elif isinstance(exp, param._ParamData): - OUTPUT.write(self._op_string[param._ParamData] % (value(exp))) + elif isinstance(exp, param.ParamData): + OUTPUT.write(self._op_string[param.ParamData] % (value(exp))) elif isinstance(exp, NumericConstant) or exp.is_fixed(): OUTPUT.write(self._op_string[NumericConstant] % (value(exp))) diff --git a/pyomo/repn/plugins/cpxlp.py b/pyomo/repn/plugins/cpxlp.py index 46e6b6d5265..45f4279f8fe 100644 --- a/pyomo/repn/plugins/cpxlp.py +++ b/pyomo/repn/plugins/cpxlp.py @@ -60,7 +60,7 @@ def __init__(self): # The LP writer tracks which variables are # referenced in constraints, so that a user does not end up with a # zillion "unreferenced variables" warning messages. - # This dictionary maps id(_VarData) -> _VarData. + # This dictionary maps id(VarData) -> VarData. 
self._referenced_variable_ids = {} # Per ticket #4319, we are using %.17g, which mocks the @@ -374,7 +374,7 @@ def _print_expr_canonical( def printSOS(self, symbol_map, labeler, variable_symbol_map, soscondata, output): """ - Prints the SOS constraint associated with the _SOSConstraintData object + Prints the SOS constraint associated with the SOSConstraintData object """ sos_template_string = self.sos_template_string diff --git a/pyomo/repn/plugins/mps.py b/pyomo/repn/plugins/mps.py index ba26783eea1..e1a0d2187fc 100644 --- a/pyomo/repn/plugins/mps.py +++ b/pyomo/repn/plugins/mps.py @@ -62,7 +62,7 @@ def __init__(self, int_marker=False): # referenced in constraints, so that one doesn't end up with a # zillion "unreferenced variables" warning messages. stored at # the object level to avoid additional method arguments. - # dictionary of id(_VarData)->_VarData. + # dictionary of id(VarData)->VarData. self._referenced_variable_ids = {} # Keven Hunter made a nice point about using %.16g in his attachment diff --git a/pyomo/repn/plugins/nl_writer.py b/pyomo/repn/plugins/nl_writer.py index ee5b65149ae..644fd26987b 100644 --- a/pyomo/repn/plugins/nl_writer.py +++ b/pyomo/repn/plugins/nl_writer.py @@ -69,15 +69,11 @@ minimize, ) from pyomo.core.base.component import ActiveComponent -from pyomo.core.base.constraint import _ConstraintData -from pyomo.core.base.expression import ScalarExpression, _GeneralExpressionData -from pyomo.core.base.objective import ( - ScalarObjective, - _GeneralObjectiveData, - _ObjectiveData, -) +from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.expression import ScalarExpression, ExpressionData +from pyomo.core.base.objective import ScalarObjective, ObjectiveData from pyomo.core.base.suffix import SuffixFinder -from pyomo.core.base.var import _VarData +from pyomo.core.base.var import VarData import pyomo.core.kernel as kernel from pyomo.core.pyomoobject import PyomoObject from pyomo.opt import WriterFactory @@ -113,6 +109,7 @@ TOL = 1e-8 inf = float('inf') minus_inf = -inf +allowable_binary_var_bounds = {(0, 0), (0, 1), (1, 1)} _CONSTANT = ExprType.CONSTANT _MONOMIAL = ExprType.MONOMIAL @@ -129,17 +126,17 @@ class NLWriterInfo(object): Attributes ---------- - variables: List[_VarData] + variables: List[VarData] The list of (unfixed) Pyomo model variables in the order written to the NL file - constraints: List[_ConstraintData] + constraints: List[ConstraintData] The list of (active) Pyomo model constraints in the order written to the NL file - objectives: List[_ObjectiveData] + objectives: List[ObjectiveData] The list of (active) Pyomo model objectives in the order written to the NL file @@ -162,10 +159,10 @@ class NLWriterInfo(object): file in the same order as the :py:attr:`variables` and generated .col file. - eliminated_vars: List[Tuple[_VarData, NumericExpression]] + eliminated_vars: List[Tuple[VarData, NumericExpression]] The list of variables in the model that were eliminated by the - presolve. Each entry is a 2-tuple of (:py:class:`_VarData`, + presolve. Each entry is a 2-tuple of (:py:class:`VarData`, :py:class`NumericExpression`|`float`). 
The list is in the necessary order for correct evaluation (i.e., all variables appearing in the expression must either have been sent to the @@ -441,6 +438,7 @@ def store(self, obj, val): self.values[obj] = val def compile(self, column_order, row_order, obj_order, model_id): + var_con_obj = {Var, Constraint, Objective} missing_component_data = ComponentSet() unknown_data = ComponentSet() queue = [self.values.items()] @@ -466,18 +464,20 @@ def compile(self, column_order, row_order, obj_order, model_id): self.obj[obj_order[_id]] = val elif _id == model_id: self.prob[0] = val - elif isinstance(obj, (_VarData, _ConstraintData, _ObjectiveData)): - missing_component_data.add(obj) - elif isinstance(obj, (Var, Constraint, Objective)): - # Expand this indexed component to store the - # individual ComponentDatas, but ONLY if the - # component data is not in the original dictionary - # of values that we extracted from the Suffixes - queue.append( - product( - filterfalse(self.values.__contains__, obj.values()), (val,) + elif getattr(obj, 'ctype', None) in var_con_obj: + if obj.is_indexed(): + # Expand this indexed component to store the + # individual ComponentDatas, but ONLY if the + # component data is not in the original dictionary + # of values that we extracted from the Suffixes + queue.append( + product( + filterfalse(self.values.__contains__, obj.values()), + (val,), + ) ) - ) + else: + missing_component_data.add(obj) else: unknown_data.add(obj) if missing_component_data: @@ -882,7 +882,12 @@ def write(self, model): elif v.is_binary(): binary_vars.add(_id) elif v.is_integer(): - integer_vars.add(_id) + # Note: integer variables whose bounds are in {0, 1} + # should be classified as binary + if var_bounds[_id] in allowable_binary_var_bounds: + binary_vars.add(_id) + else: + integer_vars.add(_id) else: raise ValueError( f"Variable '{v.name}' has a domain that is not Real, " @@ -1277,8 +1282,8 @@ def write(self, model): len(linear_binary_vars), len(linear_integer_vars), len(both_vars_nonlinear.intersection(discrete_vars)), - len(con_vars_nonlinear.intersection(discrete_vars)), - len(obj_vars_nonlinear.intersection(discrete_vars)), + len(con_only_nonlinear_vars.intersection(discrete_vars)), + len(obj_only_nonlinear_vars.intersection(discrete_vars)), ) ) # diff --git a/pyomo/repn/plugins/standard_form.py b/pyomo/repn/plugins/standard_form.py index 239cd845930..315b2160d37 100644 --- a/pyomo/repn/plugins/standard_form.py +++ b/pyomo/repn/plugins/standard_form.py @@ -76,25 +76,25 @@ class LinearStandardFormInfo(object): The constraint right-hand sides. - rows : List[Tuple[_ConstraintData, int]] + rows : List[Tuple[ConstraintData, int]] The list of Pyomo constraint objects corresponding to the rows in `A`. Each element in the list is a 2-tuple of - (_ConstraintData, row_multiplier). The `row_multiplier` will be + (ConstraintData, row_multiplier). The `row_multiplier` will be +/- 1 indicating if the row was multiplied by -1 (corresponding to a constraint lower bound) or +1 (upper bound). - columns : List[_VarData] + columns : List[VarData] The list of Pyomo variable objects corresponding to columns in the `A` and `c` matrices. - eliminated_vars: List[Tuple[_VarData, NumericExpression]] + eliminated_vars: List[Tuple[VarData, NumericExpression]] The list of variables from the original model that do not appear in the standard form (usually because they were replaced by nonnegative variables). Each entry is a 2-tuple of - (:py:class:`_VarData`, :py:class`NumericExpression`|`float`). 
+ (:py:class:`VarData`, :py:class`NumericExpression`|`float`). The list is in the necessary order for correct evaluation (i.e., all variables appearing in the expression must either have appeared in the standard form, or appear *earlier* in this list. @@ -139,6 +139,15 @@ class LinearStandardFormCompiler(object): description='Add slack variables and return `min cTx s.t. Ax == b`', ), ) + CONFIG.declare( + 'mixed_form', + ConfigValue( + default=False, + domain=bool, + description='Return A in mixed form (the comparison operator is a ' + 'mix of <=, ==, and >=)', + ), + ) CONFIG.declare( 'show_section_timing', ConfigValue( @@ -332,6 +341,9 @@ def write(self, model): # Tabulate constraints # slack_form = self.config.slack_form + mixed_form = self.config.mixed_form + if slack_form and mixed_form: + raise ValueError("cannot specify both slack_form and mixed_form") rows = [] rhs = [] con_data = [] @@ -372,7 +384,30 @@ def write(self, model): f"model contains a trivially infeasible constraint, '{con.name}'" ) - if slack_form: + if mixed_form: + N = len(repn.linear) + _data = np.fromiter(repn.linear.values(), float, N) + _index = np.fromiter(map(var_order.__getitem__, repn.linear), float, N) + if ub == lb: + rows.append(RowEntry(con, 0)) + rhs.append(ub - offset) + con_data.append(_data) + con_index.append(_index) + con_index_ptr.append(con_index_ptr[-1] + N) + else: + if ub is not None: + rows.append(RowEntry(con, 1)) + rhs.append(ub - offset) + con_data.append(_data) + con_index.append(_index) + con_index_ptr.append(con_index_ptr[-1] + N) + if lb is not None: + rows.append(RowEntry(con, -1)) + rhs.append(lb - offset) + con_data.append(_data) + con_index.append(_index) + con_index_ptr.append(con_index_ptr[-1] + N) + elif slack_form: _data = list(repn.linear.values()) _index = list(map(var_order.__getitem__, repn.linear)) if lb == ub: # TODO: add tolerance? @@ -437,24 +472,22 @@ def write(self, model): # at the index pointer list (an O(num_var) operation). c_ip = c.indptr A_ip = A.indptr - active_var_idx = list( - filter( - lambda i: A_ip[i] != A_ip[i + 1] or c_ip[i] != c_ip[i + 1], - range(len(columns)), - ) - ) - nCol = len(active_var_idx) + active_var_mask = (A_ip[1:] > A_ip[:-1]) | (c_ip[1:] > c_ip[:-1]) + + # Masks on NumPy arrays are very fast. Build the reduced A + # indptr and then check if we actually have to manipulate the + # columns + augmented_mask = np.concatenate((active_var_mask, [True])) + reduced_A_indptr = A.indptr[augmented_mask] + nCol = len(reduced_A_indptr) - 1 if nCol != len(columns): - # Note that the indptr can't just use range() because a var - # may only appear in the objectives or the constraints. 
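For reference, the mixed_form option declared above keeps each constraint in its natural orientation rather than normalizing everything to <=: an upper bound produces a row with multiplier +1, a lower bound a row with multiplier -1, and an equality a single row with multiplier 0 (combining it with slack_form raises a ValueError, as shown earlier in this hunk). A small usage sketch with made-up model names; the new test_alternative_forms case later in this diff checks the exact matrices:

    import pyomo.environ as pyo
    from pyomo.repn.plugins.standard_form import LinearStandardFormCompiler

    m = pyo.ConcreteModel()
    m.x = pyo.Var()
    m.y = pyo.Var()
    m.c = pyo.Constraint(expr=m.x + 2 * m.y <= 5)
    m.d = pyo.Constraint(expr=m.x - m.y == 1)

    repn = LinearStandardFormCompiler().write(m, mixed_form=True)
    # repn.rows pairs each ConstraintData with its row multiplier,
    # effectively [(m.c, 1), (m.d, 0)] here, and repn.A / repn.rhs keep
    # the constraints' original signs.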
- columns = list(map(columns.__getitem__, active_var_idx)) - active_var_idx.append(c.indptr[-1]) + columns = [v for k, v in zip(active_var_mask, columns) if k] c = scipy.sparse.csc_array( - (c.data, c.indices, c.indptr.take(active_var_idx)), [c.shape[0], nCol] + (c.data, c.indices, c.indptr[augmented_mask]), [c.shape[0], nCol] ) - active_var_idx[-1] = A.indptr[-1] + # active_var_idx[-1] = len(columns) A = scipy.sparse.csc_array( - (A.data, A.indices, A.indptr.take(active_var_idx)), [A.shape[0], nCol] + (A.data, A.indices, reduced_A_indptr), [A.shape[0], nCol] ) if self.config.nonnegative_vars: diff --git a/pyomo/repn/quadratic.py b/pyomo/repn/quadratic.py index 0ddfda829ed..f6e0a43623d 100644 --- a/pyomo/repn/quadratic.py +++ b/pyomo/repn/quadratic.py @@ -277,18 +277,11 @@ def _handle_product_nonlinear(visitor, node, arg1, arg2): _exit_node_handlers[ProductExpression].update( { + None: _handle_product_nonlinear, (_CONSTANT, _QUADRATIC): linear._handle_product_constant_ANY, - (_LINEAR, _QUADRATIC): _handle_product_nonlinear, - (_QUADRATIC, _QUADRATIC): _handle_product_nonlinear, - (_GENERAL, _QUADRATIC): _handle_product_nonlinear, (_QUADRATIC, _CONSTANT): linear._handle_product_ANY_constant, - (_QUADRATIC, _LINEAR): _handle_product_nonlinear, - (_QUADRATIC, _GENERAL): _handle_product_nonlinear, # Replace handler from the linear walker (_LINEAR, _LINEAR): _handle_product_linear_linear, - (_GENERAL, _GENERAL): _handle_product_nonlinear, - (_GENERAL, _LINEAR): _handle_product_nonlinear, - (_LINEAR, _GENERAL): _handle_product_nonlinear, } ) @@ -296,15 +289,7 @@ def _handle_product_nonlinear(visitor, node, arg1, arg2): # DIVISION # _exit_node_handlers[DivisionExpression].update( - { - (_CONSTANT, _QUADRATIC): linear._handle_division_nonlinear, - (_LINEAR, _QUADRATIC): linear._handle_division_nonlinear, - (_QUADRATIC, _QUADRATIC): linear._handle_division_nonlinear, - (_GENERAL, _QUADRATIC): linear._handle_division_nonlinear, - (_QUADRATIC, _CONSTANT): linear._handle_division_ANY_constant, - (_QUADRATIC, _LINEAR): linear._handle_division_nonlinear, - (_QUADRATIC, _GENERAL): linear._handle_division_nonlinear, - } + {(_QUADRATIC, _CONSTANT): linear._handle_division_ANY_constant} ) @@ -312,84 +297,42 @@ def _handle_product_nonlinear(visitor, node, arg1, arg2): # EXPONENTIATION # _exit_node_handlers[PowExpression].update( - { - (_CONSTANT, _QUADRATIC): linear._handle_pow_nonlinear, - (_LINEAR, _QUADRATIC): linear._handle_pow_nonlinear, - (_QUADRATIC, _QUADRATIC): linear._handle_pow_nonlinear, - (_GENERAL, _QUADRATIC): linear._handle_pow_nonlinear, - (_QUADRATIC, _CONSTANT): linear._handle_pow_ANY_constant, - (_QUADRATIC, _LINEAR): linear._handle_pow_nonlinear, - (_QUADRATIC, _GENERAL): linear._handle_pow_nonlinear, - } + {(_QUADRATIC, _CONSTANT): linear._handle_pow_ANY_constant} ) # # ABS and UNARY handlers # -_exit_node_handlers[AbsExpression][(_QUADRATIC,)] = linear._handle_unary_nonlinear -_exit_node_handlers[UnaryFunctionExpression][ - (_QUADRATIC,) -] = linear._handle_unary_nonlinear +# (no changes needed) # # NAMED EXPRESSION handlers # -_exit_node_handlers[Expression][(_QUADRATIC,)] = linear._handle_named_ANY +# (no changes needed) # # EXPR_IF handlers # # Note: it is easier to just recreate the entire data structure, rather # than update it -_exit_node_handlers[Expr_ifExpression] = { - (i, j, k): linear._handle_expr_if_nonlinear - for i in (_LINEAR, _QUADRATIC, _GENERAL) - for j in (_CONSTANT, _LINEAR, _QUADRATIC, _GENERAL) - for k in (_CONSTANT, _LINEAR, _QUADRATIC, _GENERAL) -} -for j in 
(_CONSTANT, _LINEAR, _QUADRATIC, _GENERAL): - for k in (_CONSTANT, _LINEAR, _QUADRATIC, _GENERAL): - _exit_node_handlers[Expr_ifExpression][ - _CONSTANT, j, k - ] = linear._handle_expr_if_const - -# -# RELATIONAL handlers -# -_exit_node_handlers[EqualityExpression].update( +_exit_node_handlers[Expr_ifExpression].update( { - (_CONSTANT, _QUADRATIC): linear._handle_equality_general, - (_LINEAR, _QUADRATIC): linear._handle_equality_general, - (_QUADRATIC, _QUADRATIC): linear._handle_equality_general, - (_GENERAL, _QUADRATIC): linear._handle_equality_general, - (_QUADRATIC, _CONSTANT): linear._handle_equality_general, - (_QUADRATIC, _LINEAR): linear._handle_equality_general, - (_QUADRATIC, _GENERAL): linear._handle_equality_general, + (_CONSTANT, i, _QUADRATIC): linear._handle_expr_if_const + for i in (_CONSTANT, _LINEAR, _QUADRATIC, _GENERAL) } ) -_exit_node_handlers[InequalityExpression].update( +_exit_node_handlers[Expr_ifExpression].update( { - (_CONSTANT, _QUADRATIC): linear._handle_inequality_general, - (_LINEAR, _QUADRATIC): linear._handle_inequality_general, - (_QUADRATIC, _QUADRATIC): linear._handle_inequality_general, - (_GENERAL, _QUADRATIC): linear._handle_inequality_general, - (_QUADRATIC, _CONSTANT): linear._handle_inequality_general, - (_QUADRATIC, _LINEAR): linear._handle_inequality_general, - (_QUADRATIC, _GENERAL): linear._handle_inequality_general, - } -) -_exit_node_handlers[RangedExpression].update( - { - (_CONSTANT, _QUADRATIC): linear._handle_ranged_general, - (_LINEAR, _QUADRATIC): linear._handle_ranged_general, - (_QUADRATIC, _QUADRATIC): linear._handle_ranged_general, - (_GENERAL, _QUADRATIC): linear._handle_ranged_general, - (_QUADRATIC, _CONSTANT): linear._handle_ranged_general, - (_QUADRATIC, _LINEAR): linear._handle_ranged_general, - (_QUADRATIC, _GENERAL): linear._handle_ranged_general, + (_CONSTANT, _QUADRATIC, i): linear._handle_expr_if_const + for i in (_CONSTANT, _LINEAR, _GENERAL) } ) +# +# RELATIONAL handlers +# +# (no changes needed) + class QuadraticRepnVisitor(linear.LinearRepnVisitor): Result = QuadraticRepn diff --git a/pyomo/repn/standard_repn.py b/pyomo/repn/standard_repn.py index 8600a8a50f6..b767ab727af 100644 --- a/pyomo/repn/standard_repn.py +++ b/pyomo/repn/standard_repn.py @@ -19,11 +19,15 @@ import pyomo.core.expr as EXPR from pyomo.core.expr.numvalue import NumericConstant -from pyomo.core.base.objective import _GeneralObjectiveData, ScalarObjective -from pyomo.core.base import _ExpressionData, Expression -from pyomo.core.base.expression import ScalarExpression, _GeneralExpressionData -from pyomo.core.base.var import ScalarVar, Var, _GeneralVarData, value -from pyomo.core.base.param import ScalarParam, _ParamData +from pyomo.core.base.objective import ObjectiveData, ScalarObjective +from pyomo.core.base import Expression +from pyomo.core.base.expression import ( + ScalarExpression, + NamedExpressionData, + ExpressionData, +) +from pyomo.core.base.var import ScalarVar, Var, VarData, value +from pyomo.core.base.param import ScalarParam, ParamData from pyomo.core.kernel.expression import expression, noclone from pyomo.core.kernel.variable import IVariable, variable from pyomo.core.kernel.objective import objective @@ -1136,25 +1140,25 @@ def _collect_external_fn(exp, multiplier, idMap, compute_values, verbose, quadra EXPR.RangedExpression: _collect_comparison, EXPR.EqualityExpression: _collect_comparison, EXPR.ExternalFunctionExpression: _collect_external_fn, - # _ConnectorData : _collect_linear_connector, + # ConnectorData : 
_collect_linear_connector, # ScalarConnector : _collect_linear_connector, - _ParamData: _collect_const, + ParamData: _collect_const, ScalarParam: _collect_const, # param.Param : _collect_linear_const, # parameter : _collect_linear_const, NumericConstant: _collect_const, - _GeneralVarData: _collect_var, + VarData: _collect_var, ScalarVar: _collect_var, Var: _collect_var, variable: _collect_var, IVariable: _collect_var, - _GeneralExpressionData: _collect_identity, + ExpressionData: _collect_identity, ScalarExpression: _collect_identity, expression: _collect_identity, noclone: _collect_identity, - _ExpressionData: _collect_identity, + NamedExpressionData: _collect_identity, Expression: _collect_identity, - _GeneralObjectiveData: _collect_identity, + ObjectiveData: _collect_identity, ScalarObjective: _collect_identity, objective: _collect_identity, } @@ -1536,24 +1540,24 @@ def _linear_collect_pow(exp, multiplier, idMap, compute_values, verbose, coef): #EXPR.EqualityExpression : _linear_collect_comparison, #EXPR.ExternalFunctionExpression : _linear_collect_external_fn, ##EXPR.LinearSumExpression : _collect_linear_sum, - ##_ConnectorData : _collect_linear_connector, + ##ConnectorData : _collect_linear_connector, ##ScalarConnector : _collect_linear_connector, - ##param._ParamData : _collect_linear_const, + ##param.ParamData : _collect_linear_const, ##param.ScalarParam : _collect_linear_const, ##param.Param : _collect_linear_const, ##parameter : _collect_linear_const, - _GeneralVarData : _linear_collect_var, + VarData : _linear_collect_var, ScalarVar : _linear_collect_var, Var : _linear_collect_var, variable : _linear_collect_var, IVariable : _linear_collect_var, - _GeneralExpressionData : _linear_collect_identity, + ExpressionData : _linear_collect_identity, ScalarExpression : _linear_collect_identity, expression : _linear_collect_identity, noclone : _linear_collect_identity, - _ExpressionData : _linear_collect_identity, + NamedExpressionData : _linear_collect_identity, Expression : _linear_collect_identity, - _GeneralObjectiveData : _linear_collect_identity, + ObjectiveData : _linear_collect_identity, ScalarObjective : _linear_collect_identity, objective : _linear_collect_identity, } diff --git a/pyomo/repn/tests/ampl/test_nlv2.py b/pyomo/repn/tests/ampl/test_nlv2.py index be72025edcd..27d129ca886 100644 --- a/pyomo/repn/tests/ampl/test_nlv2.py +++ b/pyomo/repn/tests/ampl/test_nlv2.py @@ -42,6 +42,8 @@ Suffix, Constraint, Expression, + Binary, + Integers, ) import pyomo.environ as pyo @@ -1266,7 +1268,7 @@ def test_nonfloat_constants(self): 0 0 #network constraints: nonlinear, linear 0 0 0 #nonlinear vars in constraints, objectives, both 0 0 0 1 #linear network variables; functions; arith, flags - 0 4 0 0 0 #discrete variables: binary, integer, nonlinear (b,c,o) + 4 0 0 0 0 #discrete variables: binary, integer, nonlinear (b,c,o) 4 4 #nonzeros in Jacobian, obj. gradient 6 4 #max name lengths: constraints, variables 0 0 0 0 0 #common exprs: b,c,o,c1,o1 @@ -2165,6 +2167,143 @@ def test_named_expressions(self): 0 0 1 0 2 0 +""", + OUT.getvalue(), + ) + ) + + def test_discrete_var_tabulation(self): + # This tests an error reported in #3235 + # + # Among other issues, this verifies that nonlinear discrete + # variables are tabulated correctly (header line 7), and that + # integer variables with bounds in {0, 1} are mapped to binary + # variables. 
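# (Note, not part of the patch: the mapping exercised below comes from the
# nl_writer change earlier in this diff, where an Integers variable whose
# bounds fall in allowable_binary_var_bounds = {(0, 0), (0, 1), (1, 1)} is
# counted as binary in header line 7. That is why m.x2, declared Integers
# with bounds (0, 1), lands in the binary count while m.x3, with bounds
# (0, None), stays integer.)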
+ m = ConcreteModel() + m.p1 = Var(bounds=(0.85, 1.15)) + m.p2 = Var(bounds=(0.68, 0.92)) + m.c1 = Var(bounds=(-0.0, 0.7)) + m.c2 = Var(bounds=(-0.0, 0.7)) + m.t1 = Var(within=Binary, bounds=(0, 1)) + m.t2 = Var(within=Binary, bounds=(0, 1)) + m.t3 = Var(within=Binary, bounds=(0, 1)) + m.t4 = Var(within=Binary, bounds=(0, 1)) + m.t5 = Var(within=Integers, bounds=(0, None)) + m.t6 = Var(within=Integers, bounds=(0, None)) + m.x1 = Var(within=Binary) + m.x2 = Var(within=Integers, bounds=(0, 1)) + m.x3 = Var(within=Integers, bounds=(0, None)) + m.const = Constraint( + expr=( + (0.7 - (m.c1 * m.t1 + m.c2 * m.t2)) + <= (m.p1 * m.t1 + m.p2 * m.t2 + m.p1 * m.t4 + m.t6 * m.t5) + ) + ) + m.OBJ = Objective( + expr=(m.p1 * m.t1 + m.p2 * m.t2 + m.p2 * m.t3 + m.x1 + m.x2 + m.x3) + ) + + OUT = io.StringIO() + nl_writer.NLWriter().write(m, OUT, symbolic_solver_labels=True) + + self.assertEqual( + *nl_diff( + """g3 1 1 0 # problem unknown + 13 1 1 0 0 #vars, constraints, objectives, ranges, eqns + 1 1 0 0 0 0 #nonlinear constrs, objs; ccons: lin, nonlin, nd, nzlb + 0 0 #network constraints: nonlinear, linear + 9 10 4 #nonlinear vars in constraints, objectives, both + 0 0 0 1 #linear network variables; functions; arith, flags + 2 1 2 3 1 #discrete variables: binary, integer, nonlinear (b,c,o) + 9 8 #nonzeros in Jacobian, obj. gradient + 5 2 #max name lengths: constraints, variables + 0 0 0 0 0 #common exprs: b,c,o,c1,o1 +C0 #const +o0 #+ +o16 #- +o0 #+ +o2 #* +v4 #c1 +v2 #t1 +o2 #* +v5 #c2 +v3 #t2 +o16 #- +o54 #sumlist +4 #(n) +o2 #* +v0 #p1 +v2 #t1 +o2 #* +v1 #p2 +v3 #t2 +o2 #* +v0 #p1 +v6 #t4 +o2 #* +v7 #t6 +v8 #t5 +O0 0 #OBJ +o54 #sumlist +3 #(n) +o2 #* +v0 #p1 +v2 #t1 +o2 #* +v1 #p2 +v3 #t2 +o2 #* +v1 #p2 +v9 #t3 +x0 #initial guess +r #1 ranges (rhs's) +1 -0.7 #const +b #13 bounds (on variables) +0 0.85 1.15 #p1 +0 0.68 0.92 #p2 +0 0 1 #t1 +0 0 1 #t2 +0 -0.0 0.7 #c1 +0 -0.0 0.7 #c2 +0 0 1 #t4 +2 0 #t6 +2 0 #t5 +0 0 1 #t3 +0 0 1 #x1 +0 0 1 #x2 +2 0 #x3 +k12 #intermediate Jacobian column lengths +1 +2 +3 +4 +5 +6 +7 +8 +9 +9 +9 +9 +J0 9 #const +0 0 +1 0 +2 0 +3 0 +4 0 +5 0 +6 0 +7 0 +8 0 +G0 8 #OBJ +0 0 +1 0 +2 0 +3 0 +9 0 +10 1 +11 1 +12 1 """, OUT.getvalue(), ) diff --git a/pyomo/repn/tests/test_linear.py b/pyomo/repn/tests/test_linear.py index 0fd428fd8ee..861fecc7888 100644 --- a/pyomo/repn/tests/test_linear.py +++ b/pyomo/repn/tests/test_linear.py @@ -1436,6 +1436,22 @@ def test_errors_propagate_nan(self): m.z = Var() m.y.fix(1) + expr = (m.x + 1) / m.p + cfg = VisitorConfig() + with LoggingIntercept() as LOG: + repn = LinearRepnVisitor(*cfg).walk_expression(expr) + self.assertEqual( + LOG.getvalue(), + "Exception encountered evaluating expression 'div(1, 0)'\n" + "\tmessage: division by zero\n" + "\texpression: (x + 1)/p\n", + ) + self.assertEqual(repn.multiplier, 1) + self.assertEqual(str(repn.constant), 'InvalidNumber(nan)') + self.assertEqual(len(repn.linear), 1) + self.assertEqual(str(repn.linear[id(m.x)]), 'InvalidNumber(nan)') + self.assertEqual(repn.nonlinear, None) + expr = m.y + m.x + m.z + ((3 * m.x) / m.p) / m.y cfg = VisitorConfig() with LoggingIntercept() as LOG: diff --git a/pyomo/repn/tests/test_standard_form.py b/pyomo/repn/tests/test_standard_form.py index e24195edfde..4c66ae87c41 100644 --- a/pyomo/repn/tests/test_standard_form.py +++ b/pyomo/repn/tests/test_standard_form.py @@ -42,6 +42,23 @@ def test_linear_model(self): self.assertTrue(np.all(repn.c == np.array([0, 0, 0]))) self.assertTrue(np.all(repn.A == np.array([[-1, -2, 0], [0, 1, 4]]))) self.assertTrue(np.all(repn.rhs == 
np.array([-3, 5]))) + self.assertEqual(repn.rows, [(m.c, -1), (m.d, 1)]) + self.assertEqual(repn.columns, [m.x, m.y[1], m.y[3]]) + + def test_almost_dense_linear_model(self): + m = pyo.ConcreteModel() + m.x = pyo.Var() + m.y = pyo.Var([1, 2, 3]) + m.c = pyo.Constraint(expr=m.x + 2 * m.y[1] + 4 * m.y[3] >= 10) + m.d = pyo.Constraint(expr=5 * m.x + 6 * m.y[1] + 8 * m.y[3] <= 20) + + repn = LinearStandardFormCompiler().write(m) + + self.assertTrue(np.all(repn.c == np.array([0, 0, 0]))) + self.assertTrue(np.all(repn.A == np.array([[-1, -2, -4], [5, 6, 8]]))) + self.assertTrue(np.all(repn.rhs == np.array([-10, 20]))) + self.assertEqual(repn.rows, [(m.c, -1), (m.d, 1)]) + self.assertEqual(repn.columns, [m.x, m.y[1], m.y[3]]) def test_linear_model_row_col_order(self): m = pyo.ConcreteModel() @@ -57,6 +74,8 @@ def test_linear_model_row_col_order(self): self.assertTrue(np.all(repn.c == np.array([0, 0, 0]))) self.assertTrue(np.all(repn.A == np.array([[4, 0, 1], [0, -1, -2]]))) self.assertTrue(np.all(repn.rhs == np.array([5, -3]))) + self.assertEqual(repn.rows, [(m.d, 1), (m.c, -1)]) + self.assertEqual(repn.columns, [m.y[3], m.x, m.y[1]]) def test_suffix_warning(self): m = pyo.ConcreteModel() @@ -222,6 +241,28 @@ def test_alternative_forms(self): ) self._verify_solution(soln, repn, True) + repn = LinearStandardFormCompiler().write( + m, mixed_form=True, column_order=col_order + ) + + self.assertEqual( + repn.rows, [(m.c, -1), (m.d, 1), (m.e, 1), (m.e, -1), (m.f, 0)] + ) + self.assertEqual(list(map(str, repn.x)), ['x', 'y[0]', 'y[1]', 'y[3]']) + self.assertEqual( + list(v.bounds for v in repn.x), [(None, None), (0, 10), (-5, 10), (-5, -2)] + ) + ref = np.array( + [[1, 0, 2, 0], [0, 0, 1, 4], [0, 1, 6, 0], [0, 1, 6, 0], [1, 1, 0, 0]] + ) + self.assertTrue(np.all(repn.A == ref)) + self.assertTrue(np.all(repn.b == np.array([3, 5, 6, -3, 8]))) + self.assertTrue(np.all(repn.c == np.array([[-1, 0, -5, 0], [1, 0, 0, 15]]))) + # Note that the mixed_form solution is a mix of inequality and + # equality constraints, so we cannot (easily) reuse the + # _verify_solutions helper (as in the above cases): + # self._verify_solution(soln, repn, False) + repn = LinearStandardFormCompiler().write( m, slack_form=True, nonnegative_vars=True, column_order=col_order ) diff --git a/pyomo/repn/tests/test_util.py b/pyomo/repn/tests/test_util.py index b5e4cc4facf..e0fea0fb45c 100644 --- a/pyomo/repn/tests/test_util.py +++ b/pyomo/repn/tests/test_util.py @@ -718,16 +718,14 @@ class UnknownExpression(NumericExpression): DeveloperError, r".*Unexpected expression node type 'UnknownExpression'" ): end[node.__class__](None, node, *node.args) - self.assertEqual(len(end), 9) - self.assertIn(UnknownExpression, end) + self.assertEqual(len(end), 8) node = UnknownExpression((6, 7)) with self.assertRaisesRegex( DeveloperError, r".*Unexpected expression node type 'UnknownExpression'" ): end[node.__class__, 6, 7](None, node, *node.args) - self.assertEqual(len(end), 10) - self.assertIn((UnknownExpression, 6, 7), end) + self.assertEqual(len(end), 8) def test_BeforeChildDispatcher_registration(self): class BeforeChildDispatcherTester(BeforeChildDispatcher): diff --git a/pyomo/repn/util.py b/pyomo/repn/util.py index 49cca32eaf9..8d902d0f99a 100644 --- a/pyomo/repn/util.py +++ b/pyomo/repn/util.py @@ -40,7 +40,7 @@ SortComponents, ) from pyomo.core.base.component import ActiveComponent -from pyomo.core.base.expression import _ExpressionData +from pyomo.core.base.expression import NamedExpressionData from pyomo.core.expr.numvalue import is_fixed, 
value import pyomo.core.expr as EXPR import pyomo.core.kernel as kernel @@ -55,7 +55,7 @@ EXPR.NPV_SumExpression, } _named_subexpression_types = ( - _ExpressionData, + NamedExpressionData, kernel.expression.expression, kernel.objective.objective, ) @@ -400,7 +400,15 @@ def __init__(self, *args, **kwargs): def __missing__(self, key): if type(key) is tuple: - node_class = key[0] + # Only lookup/cache argument-specific handlers for unary, + # binary and ternary operators + if len(key) <= 3: + node_class = key[0] + node_args = key[1:] + else: + node_class = key = key[0] + if node_class in self: + return self[node_class] else: node_class = key bases = node_class.__mro__ @@ -412,30 +420,31 @@ def __missing__(self, key): bases = [Expression] fcn = None for base_type in bases: - if isinstance(key, tuple): - base_key = (base_type,) + key[1:] - # Only cache handlers for unary, binary and ternary operators - cache = len(key) <= 4 - else: - base_key = base_type - cache = True - if base_key in self: - fcn = self[base_key] - elif base_type in self: + if key is not node_class: + if (base_type,) + node_args in self: + fcn = self[(base_type,) + node_args] + break + if base_type in self: fcn = self[base_type] - elif any((k[0] if type(k) is tuple else k) is base_type for k in self): - raise DeveloperError( - f"Base expression key '{base_key}' not found when inserting " - f"dispatcher for node '{node_class.__name__}' while walking " - "expression tree." - ) + break if fcn is None: - fcn = self.unexpected_expression_type - if cache: - self[key] = fcn + partial_matches = set( + k[0] for k in self if type(k) is tuple and issubclass(node_class, k[0]) + ) + for base_type in node_class.__mro__: + if node_class is not key: + key = (base_type,) + node_args + if base_type in partial_matches: + raise DeveloperError( + f"Base expression key '{key}' not found when inserting " + f"dispatcher for node '{node_class.__name__}' while walking " + "expression tree." + ) + return self.unexpected_expression_type + self[key] = fcn return fcn - def unexpected_expression_type(self, visitor, node, *arg): + def unexpected_expression_type(self, visitor, node, *args): raise DeveloperError( f"Unexpected expression node type '{type(node).__name__}' " f"found while walking expression tree in {type(visitor).__name__}." @@ -486,7 +495,7 @@ def categorize_valid_components( Parameters ---------- - model: _BlockData + model: BlockData The model tree to walk active: True or None @@ -507,7 +516,7 @@ def categorize_valid_components( Returns ------- - component_map: Dict[type, List[_BlockData]] + component_map: Dict[type, List[BlockData]] A dict mapping component type to a list of block data objects that contain declared component of that type. diff --git a/pyomo/solvers/amplfunc_merge.py b/pyomo/solvers/amplfunc_merge.py new file mode 100644 index 00000000000..e49fd20e20f --- /dev/null +++ b/pyomo/solvers/amplfunc_merge.py @@ -0,0 +1,32 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + + +def amplfunc_string_merge(amplfunc, pyomo_amplfunc): + """Merge two AMPLFUNC variable strings eliminating duplicate lines""" + # Assume that the strings amplfunc and pyomo_amplfunc don't contain duplicates + # Assume that the path separator is correct for the OS so we don't need to + # worry about comparing Unix and Windows paths. + amplfunc_lines = amplfunc.split("\n") + existing = set(amplfunc_lines) + for line in pyomo_amplfunc.split("\n"): + # Skip lines we already have + if line not in existing: + amplfunc_lines.append(line) + # Remove empty lines which could happen if one or both of the strings is + # empty or there are two new lines in a row for whatever reason. + amplfunc_lines = [s for s in amplfunc_lines if s != ""] + return "\n".join(amplfunc_lines) + + +def amplfunc_merge(env): + """Merge AMPLFUNC and PYOMO_AMPLFUNC in an environment var dict""" + return amplfunc_string_merge(env.get("AMPLFUNC", ""), env.get("PYOMO_AMPLFUNC", "")) diff --git a/pyomo/solvers/plugins/solvers/ASL.py b/pyomo/solvers/plugins/solvers/ASL.py index ae7ad82c870..bb8174a013e 100644 --- a/pyomo/solvers/plugins/solvers/ASL.py +++ b/pyomo/solvers/plugins/solvers/ASL.py @@ -23,6 +23,7 @@ from pyomo.opt.solver import SystemCallSolver from pyomo.core.kernel.block import IBlock from pyomo.solvers.mockmip import MockMIP +from pyomo.solvers.amplfunc_merge import amplfunc_merge from pyomo.core import TransformationFactory import logging @@ -158,11 +159,9 @@ def create_command_line(self, executable, problem_files): # Pyomo/Pyomo) with any user-specified external function # libraries # - if 'PYOMO_AMPLFUNC' in env: - if 'AMPLFUNC' in env: - env['AMPLFUNC'] += "\n" + env['PYOMO_AMPLFUNC'] - else: - env['AMPLFUNC'] = env['PYOMO_AMPLFUNC'] + amplfunc = amplfunc_merge(env) + if amplfunc: + env['AMPLFUNC'] = amplfunc cmd = [executable, problem_files[0], '-AMPL'] if self._timer: diff --git a/pyomo/solvers/plugins/solvers/CBCplugin.py b/pyomo/solvers/plugins/solvers/CBCplugin.py index eb6c2c2e1bd..f22fb117c8b 100644 --- a/pyomo/solvers/plugins/solvers/CBCplugin.py +++ b/pyomo/solvers/plugins/solvers/CBCplugin.py @@ -16,6 +16,7 @@ import subprocess from pyomo.common import Executable +from pyomo.common.enums import maximize, minimize from pyomo.common.errors import ApplicationError from pyomo.common.collections import Bunch from pyomo.common.tempfiles import TempfileManager @@ -29,7 +30,6 @@ SolverStatus, TerminationCondition, SolutionStatus, - ProblemSense, Solution, ) from pyomo.opt.solver import SystemCallSolver @@ -443,7 +443,7 @@ def process_logfile(self): # # Parse logfile lines # - results.problem.sense = ProblemSense.minimize + results.problem.sense = minimize results.problem.name = None optim_value = float('inf') lower_bound = None @@ -578,7 +578,7 @@ def process_logfile(self): 'CoinLpIO::readLp(): Maximization problem reformulated as minimization' in ' '.join(tokens) ): - results.problem.sense = ProblemSense.maximize + results.problem.sense = maximize # https://projects.coin-or.org/Cbc/browser/trunk/Cbc/src/CbcSolver.cpp?rev=2497#L3047 elif n_tokens > 3 and tokens[:2] == ('Result', '-'): if tokens[2:4] in [('Run', 'abandoned'), ('User', 'ctrl-c')]: @@ -752,9 +752,9 @@ def process_logfile(self): "maxIterations parameter." 
) soln.gap = gap - if results.problem.sense == ProblemSense.minimize: + if results.problem.sense == minimize: upper_bound = optim_value - elif results.problem.sense == ProblemSense.maximize: + elif results.problem.sense == maximize: _ver = self.version() if _ver and _ver[:3] < (2, 10, 2): optim_value *= -1 @@ -824,7 +824,7 @@ def process_soln_file(self, results): INPUT = [] _ver = self.version() - invert_objective_sense = results.problem.sense == ProblemSense.maximize and ( + invert_objective_sense = results.problem.sense == maximize and ( _ver and _ver[:3] < (2, 10, 2) ) diff --git a/pyomo/solvers/plugins/solvers/CPLEX.py b/pyomo/solvers/plugins/solvers/CPLEX.py index 9f876b2d0f8..3a08257c87c 100644 --- a/pyomo/solvers/plugins/solvers/CPLEX.py +++ b/pyomo/solvers/plugins/solvers/CPLEX.py @@ -17,6 +17,7 @@ import subprocess from pyomo.common import Executable +from pyomo.common.enums import maximize, minimize from pyomo.common.errors import ApplicationError from pyomo.common.tempfiles import TempfileManager @@ -28,7 +29,6 @@ SolverStatus, TerminationCondition, SolutionStatus, - ProblemSense, Solution, ) from pyomo.opt.solver import ILMLicensedSystemCallSolver @@ -547,9 +547,9 @@ def process_logfile(self): ): # CPLEX 11.2 and subsequent has two Nonzeros sections. results.problem.number_of_nonzeros = int(tokens[2]) elif len(tokens) >= 5 and tokens[4] == "MINIMIZE": - results.problem.sense = ProblemSense.minimize + results.problem.sense = minimize elif len(tokens) >= 5 and tokens[4] == "MAXIMIZE": - results.problem.sense = ProblemSense.maximize + results.problem.sense = maximize elif ( len(tokens) >= 4 and tokens[0] == "Solution" @@ -859,9 +859,9 @@ def process_soln_file(self, results): else: sense = tokens[0].lower() if sense in ['max', 'maximize']: - results.problem.sense = ProblemSense.maximize + results.problem.sense = maximize if sense in ['min', 'minimize']: - results.problem.sense = ProblemSense.minimize + results.problem.sense = minimize break tINPUT.close() @@ -952,7 +952,7 @@ def process_soln_file(self, results): ) if primal_feasible == 1: soln.status = SolutionStatus.feasible - if results.problem.sense == ProblemSense.minimize: + if results.problem.sense == minimize: results.problem.upper_bound = soln.objective[ '__default_objective__' ]['Value'] @@ -964,7 +964,7 @@ def process_soln_file(self, results): soln.status = SolutionStatus.infeasible if self._best_bound is not None: - if results.problem.sense == ProblemSense.minimize: + if results.problem.sense == minimize: results.problem.lower_bound = self._best_bound else: results.problem.upper_bound = self._best_bound diff --git a/pyomo/solvers/plugins/solvers/GAMS.py b/pyomo/solvers/plugins/solvers/GAMS.py index be3499a2f6b..035bd0b7603 100644 --- a/pyomo/solvers/plugins/solvers/GAMS.py +++ b/pyomo/solvers/plugins/solvers/GAMS.py @@ -36,7 +36,6 @@ Solution, SolutionStatus, TerminationCondition, - ProblemSense, ) from pyomo.common.dependencies import attempt_import @@ -198,8 +197,8 @@ def _get_version(self): return _extract_version('') from gams import GamsWorkspace - ws = GamsWorkspace() - version = tuple(int(i) for i in ws._version.split('.')[:4]) + workspace = GamsWorkspace() + version = tuple(int(i) for i in workspace._version.split('.')[:4]) while len(version) < 4: version += (0,) return version @@ -209,8 +208,8 @@ def _run_simple_model(self, n): try: from gams import GamsWorkspace, DebugLevel - ws = GamsWorkspace(debug=DebugLevel.Off, working_directory=tmpdir) - t1 = ws.add_job_from_string(self._simple_model(n)) + workspace = 
GamsWorkspace(debug=DebugLevel.Off, working_directory=tmpdir) + t1 = workspace.add_job_from_string(self._simple_model(n)) t1.run() return True except: @@ -330,12 +329,12 @@ def solve(self, *args, **kwds): if tmpdir is not None and os.path.exists(tmpdir): newdir = False - ws = GamsWorkspace( + workspace = GamsWorkspace( debug=DebugLevel.KeepFiles if keepfiles else DebugLevel.Off, working_directory=tmpdir, ) - t1 = ws.add_job_from_string(output_file.getvalue()) + t1 = workspace.add_job_from_string(output_file.getvalue()) try: with OutputStream(tee=tee, logfile=logfile) as output_stream: @@ -349,7 +348,9 @@ def solve(self, *args, **kwds): # Always name working directory or delete files, # regardless of any errors. if keepfiles: - print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory) + print( + "\nGAMS WORKING DIRECTORY: %s\n" % workspace.working_directory + ) elif tmpdir is not None: # Garbage collect all references to t1.out_db # So that .gdx file can be deleted @@ -359,7 +360,7 @@ def solve(self, *args, **kwds): except: # Catch other errors and remove files first if keepfiles: - print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory) + print("\nGAMS WORKING DIRECTORY: %s\n" % workspace.working_directory) elif tmpdir is not None: # Garbage collect all references to t1.out_db # So that .gdx file can be deleted @@ -398,7 +399,9 @@ def solve(self, *args, **kwds): extract_rc = 'rc' in model_suffixes results = SolverResults() - results.problem.name = os.path.join(ws.working_directory, t1.name + '.gms') + results.problem.name = os.path.join( + workspace.working_directory, t1.name + '.gms' + ) results.problem.lower_bound = t1.out_db["OBJEST"].find_record().value results.problem.upper_bound = t1.out_db["OBJEST"].find_record().value results.problem.number_of_variables = t1.out_db["NUMVAR"].find_record().value @@ -418,11 +421,10 @@ def solve(self, *args, **kwds): assert len(obj) == 1, 'Only one objective is allowed.' obj = obj[0] objctvval = t1.out_db["OBJVAL"].find_record().value + results.problem.sense = obj.sense if obj.is_minimizing(): - results.problem.sense = ProblemSense.minimize results.problem.upper_bound = objctvval else: - results.problem.sense = ProblemSense.maximize results.problem.lower_bound = objctvval results.solver.name = "GAMS " + str(self.version()) @@ -587,7 +589,7 @@ def solve(self, *args, **kwds): results.solution.insert(soln) if keepfiles: - print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory) + print("\nGAMS WORKING DIRECTORY: %s\n" % workspace.working_directory) elif tmpdir is not None: # Garbage collect all references to t1.out_db # So that .gdx file can be deleted @@ -980,11 +982,10 @@ def solve(self, *args, **kwds): assert len(obj) == 1, 'Only one objective is allowed.' 
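(The hunk below mirrors the earlier GAMS change: results.problem.sense is assigned directly from obj.sense, the objective's minimize/maximize value, which ProblemSense accepts now that it shares those members with ObjectiveSense.)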
obj = obj[0] objctvval = stat_vars["OBJVAL"] + results.problem.sense = obj.sense if obj.is_minimizing(): - results.problem.sense = ProblemSense.minimize results.problem.upper_bound = objctvval else: - results.problem.sense = ProblemSense.maximize results.problem.lower_bound = objctvval results.solver.name = "GAMS " + str(self.version()) diff --git a/pyomo/solvers/plugins/solvers/GLPK.py b/pyomo/solvers/plugins/solvers/GLPK.py index e6d8576489d..c8d5bc14237 100644 --- a/pyomo/solvers/plugins/solvers/GLPK.py +++ b/pyomo/solvers/plugins/solvers/GLPK.py @@ -19,6 +19,7 @@ from pyomo.common import Executable from pyomo.common.collections import Bunch +from pyomo.common.enums import maximize, minimize from pyomo.common.errors import ApplicationError from pyomo.opt import ( SolverFactory, @@ -28,7 +29,6 @@ SolverResults, TerminationCondition, SolutionStatus, - ProblemSense, ) from pyomo.opt.base.solvers import _extract_version from pyomo.opt.solver import SystemCallSolver @@ -308,10 +308,8 @@ def process_soln_file(self, results): ): raise ValueError - self.is_integer = 'mip' == ptype and True or False - prob.sense = ( - 'min' == psense and ProblemSense.minimize or ProblemSense.maximize - ) + self.is_integer = 'mip' == ptype + prob.sense = minimize if 'min' == psense else maximize prob.number_of_constraints = prows prob.number_of_nonzeros = pnonz prob.number_of_variables = pcols diff --git a/pyomo/solvers/plugins/solvers/GUROBI.py b/pyomo/solvers/plugins/solvers/GUROBI.py index c8b0912970e..3a3a4d52322 100644 --- a/pyomo/solvers/plugins/solvers/GUROBI.py +++ b/pyomo/solvers/plugins/solvers/GUROBI.py @@ -18,6 +18,7 @@ from pyomo.common import Executable from pyomo.common.collections import Bunch +from pyomo.common.enums import maximize, minimize from pyomo.common.fileutils import this_file_dir from pyomo.common.tee import capture_output from pyomo.common.tempfiles import TempfileManager @@ -28,7 +29,6 @@ SolverStatus, TerminationCondition, SolutionStatus, - ProblemSense, Solution, ) from pyomo.opt.solver import ILMLicensedSystemCallSolver @@ -472,7 +472,7 @@ def process_soln_file(self, results): soln.objective['__default_objective__'] = { 'Value': float(tokens[1]) } - if results.problem.sense == ProblemSense.minimize: + if results.problem.sense == minimize: results.problem.upper_bound = float(tokens[1]) else: results.problem.lower_bound = float(tokens[1]) @@ -514,9 +514,9 @@ def process_soln_file(self, results): elif section == 1: if tokens[0] == 'sense': if tokens[1] == 'minimize': - results.problem.sense = ProblemSense.minimize + results.problem.sense = minimize elif tokens[1] == 'maximize': - results.problem.sense = ProblemSense.maximize + results.problem.sense = maximize else: try: val = eval(tokens[1]) diff --git a/pyomo/solvers/plugins/solvers/IPOPT.py b/pyomo/solvers/plugins/solvers/IPOPT.py index 4ebbbc07d3b..21045cb7b4f 100644 --- a/pyomo/solvers/plugins/solvers/IPOPT.py +++ b/pyomo/solvers/plugins/solvers/IPOPT.py @@ -21,6 +21,8 @@ from pyomo.opt.results import SolverStatus, SolverResults, TerminationCondition from pyomo.opt.solver import SystemCallSolver +from pyomo.solvers.amplfunc_merge import amplfunc_merge + import logging logger = logging.getLogger('pyomo.solvers') @@ -119,11 +121,9 @@ def create_command_line(self, executable, problem_files): # Pyomo/Pyomo) with any user-specified external function # libraries # - if 'PYOMO_AMPLFUNC' in env: - if 'AMPLFUNC' in env: - env['AMPLFUNC'] += "\n" + env['PYOMO_AMPLFUNC'] - else: - env['AMPLFUNC'] = env['PYOMO_AMPLFUNC'] + amplfunc = 
amplfunc_merge(env) + if amplfunc: + env['AMPLFUNC'] = amplfunc cmd = [executable, problem_files[0], '-AMPL'] if self._timer: diff --git a/pyomo/solvers/plugins/solvers/SCIPAMPL.py b/pyomo/solvers/plugins/solvers/SCIPAMPL.py index fd69954b428..98dad4ca5fd 100644 --- a/pyomo/solvers/plugins/solvers/SCIPAMPL.py +++ b/pyomo/solvers/plugins/solvers/SCIPAMPL.py @@ -20,12 +20,7 @@ from pyomo.opt.base import ProblemFormat, ResultsFormat from pyomo.opt.base.solvers import _extract_version, SolverFactory -from pyomo.opt.results import ( - SolverStatus, - TerminationCondition, - SolutionStatus, - ProblemSense, -) +from pyomo.opt.results import SolverStatus, TerminationCondition, SolutionStatus from pyomo.opt.solver import SystemCallSolver import logging @@ -374,9 +369,11 @@ def _postsolve(self): if len(results.solution) > 0: results.solution(0).status = SolutionStatus.optimal try: - if results.problem.sense == ProblemSense.minimize: + if results.solver.primal_bound < results.solver.dual_bound: results.problem.lower_bound = results.solver.primal_bound + results.problem.upper_bound = results.solver.dual_bound else: + results.problem.lower_bound = results.solver.dual_bound results.problem.upper_bound = results.solver.primal_bound except AttributeError: """ @@ -455,7 +452,7 @@ def read_scip_log(filename: str): solver_status = scip_lines[0][colon_position + 2 : scip_lines[0].index('\n')] solving_time = float( - scip_lines[1][colon_position + 2 : scip_lines[1].index('\n')] + scip_lines[1][colon_position + 2 : scip_lines[1].index('\n')].split(' ')[0] ) try: diff --git a/pyomo/solvers/plugins/solvers/cplex_persistent.py b/pyomo/solvers/plugins/solvers/cplex_persistent.py index fd396a8c87f..754dadc09e2 100644 --- a/pyomo/solvers/plugins/solvers/cplex_persistent.py +++ b/pyomo/solvers/plugins/solvers/cplex_persistent.py @@ -82,7 +82,7 @@ def update_var(self, var): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) """ # see PR #366 for discussion about handling indexed @@ -130,7 +130,7 @@ def _add_column(self, var, obj_coef, constraints, coefficients): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) obj_coef: float constraints: list of solver constraints coefficients: list of coefficients to put on var in the associated constraint diff --git a/pyomo/solvers/plugins/solvers/direct_or_persistent_solver.py b/pyomo/solvers/plugins/solvers/direct_or_persistent_solver.py index c131b8ad10a..de38a0372d0 100644 --- a/pyomo/solvers/plugins/solvers/direct_or_persistent_solver.py +++ b/pyomo/solvers/plugins/solvers/direct_or_persistent_solver.py @@ -10,7 +10,7 @@ # ___________________________________________________________________________ from pyomo.core.base.PyomoModel import Model -from pyomo.core.base.block import Block, _BlockData +from pyomo.core.base.block import Block, BlockData from pyomo.core.kernel.block import IBlock from pyomo.opt.base.solvers import OptSolver from pyomo.core.base import SymbolMap, NumericLabeler, TextLabeler @@ -177,7 +177,7 @@ def _postsolve(self): """ This method should be implemented by subclasses.""" def _set_instance(self, model, kwds={}): - if not isinstance(model, (Model, IBlock, Block, _BlockData)): + if not isinstance(model, (Model, IBlock, Block, BlockData)): msg = ( "The problem instance supplied to the {0} plugin " "'_presolve' method must be a Model or a Block".format(type(self)) diff --git a/pyomo/solvers/plugins/solvers/direct_solver.py 
b/pyomo/solvers/plugins/solvers/direct_solver.py index 3eab658391c..609a81b2018 100644 --- a/pyomo/solvers/plugins/solvers/direct_solver.py +++ b/pyomo/solvers/plugins/solvers/direct_solver.py @@ -15,7 +15,7 @@ from pyomo.solvers.plugins.solvers.direct_or_persistent_solver import ( DirectOrPersistentSolver, ) -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.kernel.block import IBlock from pyomo.core.base.suffix import active_import_suffix_generator from pyomo.core.kernel.suffix import import_suffix_generator @@ -79,8 +79,8 @@ def solve(self, *args, **kwds): # _model = None for arg in args: - if isinstance(arg, (_BlockData, IBlock)): - if isinstance(arg, _BlockData): + if isinstance(arg, (BlockData, IBlock)): + if isinstance(arg, BlockData): if not arg.is_constructed(): raise RuntimeError( "Attempting to solve model=%s with unconstructed " @@ -89,7 +89,7 @@ def solve(self, *args, **kwds): _model = arg # import suffixes must be on the top-level model - if isinstance(arg, _BlockData): + if isinstance(arg, BlockData): model_suffixes = list( name for (name, comp) in active_import_suffix_generator(arg) ) diff --git a/pyomo/solvers/plugins/solvers/gurobi_direct.py b/pyomo/solvers/plugins/solvers/gurobi_direct.py index 1d88eced629..ed66a4e0e7b 100644 --- a/pyomo/solvers/plugins/solvers/gurobi_direct.py +++ b/pyomo/solvers/plugins/solvers/gurobi_direct.py @@ -493,9 +493,8 @@ def _add_constraint(self, con): if not con.active: return None - if is_fixed(con.body): - if self._skip_trivial_constraints: - return None + if self._skip_trivial_constraints and is_fixed(con.body): + return None conname = self._symbol_map.getSymbol(con, self._labeler) diff --git a/pyomo/solvers/plugins/solvers/gurobi_persistent.py b/pyomo/solvers/plugins/solvers/gurobi_persistent.py index 4522a2151c3..94a2ac6b734 100644 --- a/pyomo/solvers/plugins/solvers/gurobi_persistent.py +++ b/pyomo/solvers/plugins/solvers/gurobi_persistent.py @@ -111,7 +111,7 @@ def update_var(self, var): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) """ # see PR #366 for discussion about handling indexed @@ -157,7 +157,7 @@ def set_linear_constraint_attr(self, con, attr, val): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be modified. attr: str @@ -192,7 +192,7 @@ def set_var_attr(self, var, attr, val): Parameters ---------- - con: pyomo.core.base.var._GeneralVarData + con: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be modified. attr: str @@ -342,7 +342,7 @@ def get_var_attr(self, var, attr): Parameters ---------- - var: pyomo.core.base.var._GeneralVarData + var: pyomo.core.base.var.VarData The pyomo var for which the corresponding gurobi var attribute should be retrieved. attr: str @@ -384,7 +384,7 @@ def get_linear_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. 
attr: str @@ -413,7 +413,7 @@ def get_sos_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.sos._SOSConstraintData + con: pyomo.core.base.sos.SOSConstraintData The pyomo SOS constraint for which the corresponding gurobi SOS constraint attribute should be retrieved. attr: str @@ -431,7 +431,7 @@ def get_quadratic_constraint_attr(self, con, attr): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The pyomo constraint for which the corresponding gurobi constraint attribute should be retrieved. attr: str @@ -569,7 +569,7 @@ def cbCut(self, con): Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The cut to add """ if not con.active: @@ -647,7 +647,7 @@ def cbLazy(self, con): """ Parameters ---------- - con: pyomo.core.base.constraint._GeneralConstraintData + con: pyomo.core.base.constraint.ConstraintData The lazy constraint to add """ if not con.active: @@ -710,7 +710,7 @@ def _add_column(self, var, obj_coef, constraints, coefficients): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) obj_coef: float constraints: list of solver constraints coefficients: list of coefficients to put on var in the associated constraint diff --git a/pyomo/solvers/plugins/solvers/mosek_direct.py b/pyomo/solvers/plugins/solvers/mosek_direct.py index 5000a2f35c4..025c71d36f0 100644 --- a/pyomo/solvers/plugins/solvers/mosek_direct.py +++ b/pyomo/solvers/plugins/solvers/mosek_direct.py @@ -492,13 +492,10 @@ def _add_constraints(self, con_seq): ptrb = (0,) + ptre[:-1] asubs = tuple(itertools.chain.from_iterable(l_ids)) avals = tuple(itertools.chain.from_iterable(l_coefs)) - qcsubi = tuple(itertools.chain.from_iterable(q_is)) - qcsubj = tuple(itertools.chain.from_iterable(q_js)) - qcval = tuple(itertools.chain.from_iterable(q_vals)) - qcsubk = tuple(i for i in sub for j in range(len(q_is[i - con_num]))) self._solver_model.appendcons(num_lq) self._solver_model.putarowlist(sub, ptrb, ptre, asubs, avals) - self._solver_model.putqcon(qcsubk, qcsubi, qcsubj, qcval) + for k, i, j, v in zip(sub, q_is, q_js, q_vals): + self._solver_model.putqconk(k, i, j, v) self._solver_model.putconboundlist(sub, bound_types, lbs, ubs) for i, s_n in enumerate(sub_names): self._solver_model.putconname(sub[i], s_n) @@ -558,7 +555,7 @@ def _add_block(self, block): Parameters ---------- - block: Block (scalar Block or single _BlockData) + block: Block (scalar Block or single BlockData) """ var_seq = tuple( block.component_data_objects( diff --git a/pyomo/solvers/plugins/solvers/mosek_persistent.py b/pyomo/solvers/plugins/solvers/mosek_persistent.py index 97f88e0cb9a..efcbb7dd9dd 100644 --- a/pyomo/solvers/plugins/solvers/mosek_persistent.py +++ b/pyomo/solvers/plugins/solvers/mosek_persistent.py @@ -85,7 +85,7 @@ def add_constraints(self, con_seq): Parameters ---------- - con_seq: tuple/list of Constraint (scalar Constraint or single _ConstraintData) + con_seq: tuple/list of Constraint (scalar Constraint or single ConstraintData) """ self._add_constraints(con_seq) @@ -95,7 +95,7 @@ def remove_var(self, solver_var): This will keep any other model components intact. Parameters ---------- - solver_var: Var (scalar Var or single _VarData) + solver_var: Var (scalar Var or single VarData) """ self.remove_vars(solver_var) @@ -106,7 +106,7 @@ def remove_vars(self, *solver_vars): This will keep any other model components intact. 
Parameters ---------- - *solver_var: Var (scalar Var or single _VarData) + *solver_var: Var (scalar Var or single VarData) """ try: var_ids = [] @@ -137,7 +137,7 @@ def remove_constraint(self, solver_con): To remove a conic-domain, you should use the remove_block method. Parameters ---------- - solver_con: Constraint (scalar Constraint or single _ConstraintData) + solver_con: Constraint (scalar Constraint or single ConstraintData) """ self.remove_constraints(solver_con) @@ -151,7 +151,7 @@ def remove_constraints(self, *solver_cons): Parameters ---------- - *solver_cons: Constraint (scalar Constraint or single _ConstraintData) + *solver_cons: Constraint (scalar Constraint or single ConstraintData) """ lq_cons = tuple( itertools.filterfalse(lambda x: isinstance(x, _ConicBase), solver_cons) @@ -205,7 +205,7 @@ def update_vars(self, *solver_vars): changing variable types and bounds. Parameters ---------- - *solver_var: Constraint (scalar Constraint or single _ConstraintData) + *solver_var: Constraint (scalar Constraint or single ConstraintData) """ try: var_ids = [] diff --git a/pyomo/solvers/plugins/solvers/persistent_solver.py b/pyomo/solvers/plugins/solvers/persistent_solver.py index 29aa3f2bbf5..3c2a9e52eab 100644 --- a/pyomo/solvers/plugins/solvers/persistent_solver.py +++ b/pyomo/solvers/plugins/solvers/persistent_solver.py @@ -12,7 +12,7 @@ from pyomo.solvers.plugins.solvers.direct_or_persistent_solver import ( DirectOrPersistentSolver, ) -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.core.kernel.block import IBlock from pyomo.core.base.suffix import active_import_suffix_generator from pyomo.core.kernel.suffix import import_suffix_generator @@ -96,7 +96,7 @@ def add_block(self, block): Parameters ---------- - block: Block (scalar Block or single _BlockData) + block: Block (scalar Block or single BlockData) """ if self._pyomo_model is None: @@ -132,7 +132,7 @@ def add_constraint(self, con): Parameters ---------- - con: Constraint (scalar Constraint or single _ConstraintData) + con: Constraint (scalar Constraint or single ConstraintData) """ if self._pyomo_model is None: @@ -206,9 +206,9 @@ def add_column(self, model, var, obj_coef, constraints, coefficients): Parameters ---------- model: pyomo ConcreteModel to which the column will be added - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) obj_coef: float, pyo.Param - constraints: list of scalar Constraints of single _ConstraintDatas + constraints: list of scalar Constraints of single ConstraintDatas coefficients: list of the coefficient to put on var in the associated constraint """ @@ -295,7 +295,7 @@ def remove_block(self, block): Parameters ---------- - block: Block (scalar Block or a single _BlockData) + block: Block (scalar Block or a single BlockData) """ # see PR #366 for discussion about handling indexed @@ -328,7 +328,7 @@ def remove_constraint(self, con): Parameters ---------- - con: Constraint (scalar Constraint or single _ConstraintData) + con: Constraint (scalar Constraint or single ConstraintData) """ # see PR #366 for discussion about handling indexed @@ -380,7 +380,7 @@ def remove_var(self, var): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) """ # see PR #366 for discussion about handling indexed @@ -455,7 +455,7 @@ def solve(self, *args, **kwds): self.available(exception_flag=True) # Collect suffix names to try and import from solution. 
- if isinstance(self._pyomo_model, _BlockData): + if isinstance(self._pyomo_model, BlockData): model_suffixes = list( name for (name, comp) in active_import_suffix_generator(self._pyomo_model) diff --git a/pyomo/solvers/plugins/solvers/xpress_direct.py b/pyomo/solvers/plugins/solvers/xpress_direct.py index 75cf8f921df..c62f76d85ce 100644 --- a/pyomo/solvers/plugins/solvers/xpress_direct.py +++ b/pyomo/solvers/plugins/solvers/xpress_direct.py @@ -667,9 +667,8 @@ def _add_constraint(self, con): if not con.active: return None - if is_fixed(con.body): - if self._skip_trivial_constraints: - return None + if self._skip_trivial_constraints and is_fixed(con.body): + return None conname = self._symbol_map.getSymbol(con, self._labeler) diff --git a/pyomo/solvers/plugins/solvers/xpress_persistent.py b/pyomo/solvers/plugins/solvers/xpress_persistent.py index 513a7fbc257..fbdc2866dcf 100644 --- a/pyomo/solvers/plugins/solvers/xpress_persistent.py +++ b/pyomo/solvers/plugins/solvers/xpress_persistent.py @@ -90,7 +90,7 @@ def update_var(self, var): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) """ # see PR #366 for discussion about handling indexed @@ -124,7 +124,7 @@ def _add_column(self, var, obj_coef, constraints, coefficients): Parameters ---------- - var: Var (scalar Var or single _VarData) + var: Var (scalar Var or single VarData) obj_coef: float constraints: list of solver constraints coefficients: list of coefficients to put on var in the associated constraint diff --git a/pyomo/solvers/tests/checks/test_CBCplugin.py b/pyomo/solvers/tests/checks/test_CBCplugin.py index 2ea0e55c5f4..ad8846509ea 100644 --- a/pyomo/solvers/tests/checks/test_CBCplugin.py +++ b/pyomo/solvers/tests/checks/test_CBCplugin.py @@ -29,7 +29,7 @@ maximize, minimize, ) -from pyomo.opt import SolverFactory, ProblemSense, TerminationCondition, SolverStatus +from pyomo.opt import SolverFactory, TerminationCondition, SolverStatus from pyomo.solvers.plugins.solvers.CBCplugin import CBCSHELL cbc_available = SolverFactory('cbc', solver_io='lp').available(exception_flag=False) @@ -62,7 +62,7 @@ def test_infeasible_lp(self): results = self.opt.solve(self.model) - self.assertEqual(ProblemSense.minimize, results.problem.sense) + self.assertEqual(minimize, results.problem.sense) self.assertEqual( TerminationCondition.infeasible, results.solver.termination_condition ) @@ -81,7 +81,7 @@ def test_unbounded_lp(self): results = self.opt.solve(self.model) - self.assertEqual(ProblemSense.maximize, results.problem.sense) + self.assertEqual(maximize, results.problem.sense) self.assertEqual( TerminationCondition.unbounded, results.solver.termination_condition ) @@ -99,7 +99,7 @@ def test_optimal_lp(self): self.assertEqual(0.0, results.problem.lower_bound) self.assertEqual(0.0, results.problem.upper_bound) - self.assertEqual(ProblemSense.minimize, results.problem.sense) + self.assertEqual(minimize, results.problem.sense) self.assertEqual( TerminationCondition.optimal, results.solver.termination_condition ) @@ -118,7 +118,7 @@ def test_infeasible_mip(self): results = self.opt.solve(self.model) - self.assertEqual(ProblemSense.minimize, results.problem.sense) + self.assertEqual(minimize, results.problem.sense) self.assertEqual( TerminationCondition.infeasible, results.solver.termination_condition ) @@ -134,7 +134,7 @@ def test_unbounded_mip(self): results = self.opt.solve(self.model) - self.assertEqual(ProblemSense.minimize, results.problem.sense) + self.assertEqual(minimize, 
results.problem.sense) self.assertEqual( TerminationCondition.unbounded, results.solver.termination_condition ) @@ -159,7 +159,7 @@ def test_optimal_mip(self): self.assertEqual(1.0, results.problem.upper_bound) self.assertEqual(results.problem.number_of_binary_variables, 2) self.assertEqual(results.problem.number_of_integer_variables, 4) - self.assertEqual(ProblemSense.maximize, results.problem.sense) + self.assertEqual(maximize, results.problem.sense) self.assertEqual( TerminationCondition.optimal, results.solver.termination_condition ) diff --git a/pyomo/solvers/tests/checks/test_CPLEXPersistent.py b/pyomo/solvers/tests/checks/test_CPLEXPersistent.py index 91a60eee9dd..442212d4fbb 100644 --- a/pyomo/solvers/tests/checks/test_CPLEXPersistent.py +++ b/pyomo/solvers/tests/checks/test_CPLEXPersistent.py @@ -101,7 +101,7 @@ def test_add_column_exceptions(self): # add indexed constraint self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.ci], [1]) - # add something not a _ConstraintData + # add something not a ConstraintData self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.x], [1]) # constraint not on solver model diff --git a/pyomo/solvers/tests/checks/test_amplfunc_merge.py b/pyomo/solvers/tests/checks/test_amplfunc_merge.py new file mode 100644 index 00000000000..2c819404d2f --- /dev/null +++ b/pyomo/solvers/tests/checks/test_amplfunc_merge.py @@ -0,0 +1,162 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +import pyomo.common.unittest as unittest +from pyomo.solvers.amplfunc_merge import amplfunc_string_merge, amplfunc_merge + + +class TestAMPLFUNCStringMerge(unittest.TestCase): + def test_merge_no_dup(self): + s1 = "my/place/l1.so\nanother/place/l1.so" + s2 = "my/place/l2.so" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 3) + # The order of lines should be maintained with the second string + # following the first + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + self.assertEqual(sm_list[2], "my/place/l2.so") + + def test_merge_empty1(self): + s1 = "" + s2 = "my/place/l2.so" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "my/place/l2.so") + + def test_merge_empty2(self): + s1 = "my/place/l2.so" + s2 = "" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "my/place/l2.so") + + def test_merge_empty_both(self): + s1 = "" + s2 = "" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "") + + def test_merge_bad_type(self): + self.assertRaises(AttributeError, amplfunc_string_merge, "", 3) + self.assertRaises(AttributeError, amplfunc_string_merge, 3, "") + self.assertRaises(AttributeError, amplfunc_string_merge, 3, 3) + self.assertRaises(AttributeError, amplfunc_string_merge, None, "") + self.assertRaises(AttributeError, amplfunc_string_merge, "", None) + self.assertRaises(AttributeError, amplfunc_string_merge, 2.3, "") + self.assertRaises(AttributeError, amplfunc_string_merge, "", 2.3) + + def test_merge_duplicate1(self): + s1 = "my/place/l1.so\nanother/place/l1.so" + s2 = "my/place/l1.so\nanother/place/l1.so" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + # The order of lines should be maintained with the second string + # following the first + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + def test_merge_duplicate2(self): + s1 = "my/place/l1.so\nanother/place/l1.so" + s2 = "my/place/l1.so" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + # The order of lines should be maintained with the second string + # following the first + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + def test_merge_extra_linebreaks(self): + s1 = "\nmy/place/l1.so\nanother/place/l1.so\n" + s2 = "\nmy/place/l1.so\n\n" + sm = amplfunc_string_merge(s1, s2) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + # The order of lines should be maintained with the second string + # following the first + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + +class TestAMPLFUNCMerge(unittest.TestCase): + def test_merge_no_dup(self): + env = { + "AMPLFUNC": "my/place/l1.so\nanother/place/l1.so", + "PYOMO_AMPLFUNC": "my/place/l2.so", + } + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 3) + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + self.assertEqual(sm_list[2], "my/place/l2.so") + + def test_merge_empty1(self): + env = {"AMPLFUNC": "", 
"PYOMO_AMPLFUNC": "my/place/l2.so"} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "my/place/l2.so") + + def test_merge_empty2(self): + env = {"AMPLFUNC": "my/place/l2.so", "PYOMO_AMPLFUNC": ""} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "my/place/l2.so") + + def test_merge_empty_both(self): + env = {"AMPLFUNC": "", "PYOMO_AMPLFUNC": ""} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "") + + def test_merge_duplicate1(self): + env = { + "AMPLFUNC": "my/place/l1.so\nanother/place/l1.so", + "PYOMO_AMPLFUNC": "my/place/l1.so\nanother/place/l1.so", + } + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + def test_merge_no_pyomo(self): + env = {"AMPLFUNC": "my/place/l1.so\nanother/place/l1.so"} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + def test_merge_no_user(self): + env = {"PYOMO_AMPLFUNC": "my/place/l1.so\nanother/place/l1.so"} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 2) + self.assertEqual(sm_list[0], "my/place/l1.so") + self.assertEqual(sm_list[1], "another/place/l1.so") + + def test_merge_nothing(self): + env = {} + sm = amplfunc_merge(env) + sm_list = sm.split("\n") + self.assertEqual(len(sm_list), 1) + self.assertEqual(sm_list[0], "") diff --git a/pyomo/solvers/tests/checks/test_gurobi_persistent.py b/pyomo/solvers/tests/checks/test_gurobi_persistent.py index a2c089207e5..812390c23a4 100644 --- a/pyomo/solvers/tests/checks/test_gurobi_persistent.py +++ b/pyomo/solvers/tests/checks/test_gurobi_persistent.py @@ -382,7 +382,7 @@ def test_add_column_exceptions(self): # add indexed constraint self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.ci], [1]) - # add something not a _ConstraintData + # add something not a ConstraintData self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.x], [1]) # constraint not on solver model diff --git a/pyomo/solvers/tests/checks/test_xpress_persistent.py b/pyomo/solvers/tests/checks/test_xpress_persistent.py index ddae860cd92..dcd36780f62 100644 --- a/pyomo/solvers/tests/checks/test_xpress_persistent.py +++ b/pyomo/solvers/tests/checks/test_xpress_persistent.py @@ -262,7 +262,7 @@ def test_add_column_exceptions(self): # add indexed constraint self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.ci], [1]) - # add something not a _ConstraintData + # add something not a ConstraintData self.assertRaises(AttributeError, opt.add_column, m, m.y, -2, [m.x], [1]) # constraint not on solver model diff --git a/pyomo/solvers/tests/mip/test_scip.py b/pyomo/solvers/tests/mip/test_scip.py index 01de0d16826..ad54daeddc0 100644 --- a/pyomo/solvers/tests/mip/test_scip.py +++ b/pyomo/solvers/tests/mip/test_scip.py @@ -106,6 +106,12 @@ def test_scip_solve_from_instance_options(self): results.write(filename=_out, times=False, format='json') self.compare_json(_out, join(currdir, "test_scip_solve_from_instance.baseline")) + def test_scip_solve_from_instance_with_reoptimization(self): + # Test scip with re-optimization option enabled + # This case changes the Scip output results which 
may break the results parser + self.scip.options['reoptimization/enable'] = True + self.test_scip_solve_from_instance() + if __name__ == "__main__": deleteFiles = False diff --git a/pyomo/solvers/tests/mip/test_scip_solve_from_instance.baseline b/pyomo/solvers/tests/mip/test_scip_solve_from_instance.baseline index a3eb9ffacec..976e4a1b82e 100644 --- a/pyomo/solvers/tests/mip/test_scip_solve_from_instance.baseline +++ b/pyomo/solvers/tests/mip/test_scip_solve_from_instance.baseline @@ -1,7 +1,7 @@ { "Problem": [ { - "Lower bound": -Infinity, + "Lower bound": 1.0, "Number of constraints": 0, "Number of objectives": 1, "Number of variables": 1, diff --git a/pyomo/util/calc_var_value.py b/pyomo/util/calc_var_value.py index b5e620fea07..42ee3119361 100644 --- a/pyomo/util/calc_var_value.py +++ b/pyomo/util/calc_var_value.py @@ -12,7 +12,7 @@ from pyomo.common.errors import IterationLimitError from pyomo.common.numeric_types import native_numeric_types, native_complex_types, value from pyomo.core.expr.calculus.derivatives import differentiate -from pyomo.core.base.constraint import Constraint, _ConstraintData +from pyomo.core.base.constraint import Constraint import logging @@ -53,9 +53,9 @@ def calculate_variable_from_constraint( Parameters: ----------- - variable: :py:class:`_VarData` + variable: :py:class:`VarData` The variable to solve for - constraint: :py:class:`_ConstraintData` or relational expression or `tuple` + constraint: :py:class:`ConstraintData` or relational expression or `tuple` The equality constraint to use to solve for the variable value. May be a `ConstraintData` object or any valid argument for ``Constraint(expr=<>)`` (i.e., a relational expression or 2- or @@ -81,10 +81,17 @@ def calculate_variable_from_constraint( """ # Leverage all the Constraint logic to process the incoming tuple/expression - if not isinstance(constraint, _ConstraintData): + if not getattr(constraint, 'ctype', None) is Constraint: constraint = Constraint(expr=constraint, name=type(constraint).__name__) constraint.construct() + if constraint.is_indexed(): + raise ValueError( + 'calculate_variable_from_constraint(): constraint must be a ' + 'scalar constraint or a single ConstraintData. 
Received ' + f'{constraint.__class__.__name__} ("{constraint.name}")' + ) + body = constraint.body lower = constraint.lb upper = constraint.ub diff --git a/pyomo/util/report_scaling.py b/pyomo/util/report_scaling.py index 201319ea92a..02b3710c334 100644 --- a/pyomo/util/report_scaling.py +++ b/pyomo/util/report_scaling.py @@ -11,9 +11,9 @@ import pyomo.environ as pyo import math -from pyomo.core.base.block import _BlockData +from pyomo.core.base.block import BlockData from pyomo.common.collections import ComponentSet -from pyomo.core.base.var import _GeneralVarData +from pyomo.core.base.var import Var from pyomo.contrib.fbbt.fbbt import compute_bounds_on_expr from pyomo.core.expr.calculus.diff_with_pyomo import reverse_sd import logging @@ -42,7 +42,7 @@ def _print_var_set(var_set): return s -def _check_var_bounds(m: _BlockData, too_large: float): +def _check_var_bounds(m: BlockData, too_large: float): vars_without_bounds = ComponentSet() vars_with_large_bounds = ComponentSet() for v in m.component_data_objects(pyo.Var, descend_into=True): @@ -73,7 +73,7 @@ def _check_coefficients( ): ders = reverse_sd(expr) for _v, _der in ders.items(): - if isinstance(_v, _GeneralVarData): + if getattr(_v, 'ctype', None) is Var: if _v.is_fixed(): continue der_lb, der_ub = compute_bounds_on_expr(_der) @@ -90,7 +90,7 @@ def _check_coefficients( def report_scaling( - m: _BlockData, too_large: float = 5e4, too_small: float = 1e-6 + m: BlockData, too_large: float = 5e4, too_small: float = 1e-6 ) -> bool: """ This function logs potentially poorly scaled parts of the model. @@ -107,7 +107,7 @@ def report_scaling( Parameters ---------- - m: _BlockData + m: BlockData The pyomo model or block too_large: float Values above too_large will generate a log entry diff --git a/pyomo/util/slices.py b/pyomo/util/slices.py index 53f6d364219..d85aa3fa926 100644 --- a/pyomo/util/slices.py +++ b/pyomo/util/slices.py @@ -98,7 +98,7 @@ def slice_component_along_sets(comp, sets, context=None): sets: `pyomo.common.collections.ComponentSet` Contains the sets to replace with slices context: `pyomo.core.base.block.Block` or - `pyomo.core.base.block._BlockData` + `pyomo.core.base.block.BlockData` Block below which to search for sets Returns: diff --git a/pyomo/util/tests/test_calc_var_value.py b/pyomo/util/tests/test_calc_var_value.py index a02d7a7d838..4bed4d5c843 100644 --- a/pyomo/util/tests/test_calc_var_value.py +++ b/pyomo/util/tests/test_calc_var_value.py @@ -101,6 +101,15 @@ def test_initialize_value(self): ): calculate_variable_from_constraint(m.x, m.lt) + m.indexed = Constraint([1, 2], rule=lambda m, i: m.x <= i) + with self.assertRaisesRegex( + ValueError, + r"calculate_variable_from_constraint\(\): constraint must be a scalar " + r"constraint or a single ConstraintData. Received IndexedConstraint " + r'\("indexed"\)', + ): + calculate_variable_from_constraint(m.x, m.indexed) + def test_linear(self): m = ConcreteModel() m.x = Var()
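The IPOPT change above delegates the AMPLFUNC / PYOMO_AMPLFUNC handling to the new pyomo.solvers.amplfunc_merge helper, whose expected behavior is pinned down by the new test_amplfunc_merge.py suite: concatenate the two newline-separated library lists, drop blank lines and duplicates, and keep AMPLFUNC entries ahead of PYOMO_AMPLFUNC entries. The sketch below is only an illustration written against those tests, not the actual pyomo/solvers/amplfunc_merge.py source.

# Illustrative sketch of a merge consistent with
# pyomo/solvers/tests/checks/test_amplfunc_merge.py; the real implementation
# lives in pyomo/solvers/amplfunc_merge.py and may differ in detail.

def amplfunc_string_merge(amplfunc, pyomo_amplfunc):
    """Merge two newline-separated library lists, skipping blank lines and
    duplicates while preserving order (entries from ``amplfunc`` first)."""
    merged = []
    for line in amplfunc.split("\n") + pyomo_amplfunc.split("\n"):
        if line and line not in merged:
            merged.append(line)
    return "\n".join(merged)


def amplfunc_merge(env):
    """Combine the AMPLFUNC and PYOMO_AMPLFUNC environment variables."""
    return amplfunc_string_merge(
        env.get("AMPLFUNC", ""), env.get("PYOMO_AMPLFUNC", "")
    )

A solver plugin then only needs the pattern introduced in the IPOPT diff above:

amplfunc = amplfunc_merge(env)
if amplfunc:
    env['AMPLFUNC'] = amplfunc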