Coverage for backend/ahuora-builder/src/ahuora_builder/flowsheet_manager.py: 81%

315 statements  

« prev     ^ index     » next       coverage.py v7.10.7, created at 2026-05-13 02:47 +0000

1from collections import defaultdict, deque 

2import re 

3from pyomo.network import SequentialDecomposition, Port 

4from pyomo.environ import ( 

5 ConcreteModel, 

6 TransformationFactory, 

7 SolverFactory, 

8 Block, 

9 Expression, 

10 Constraint, 

11 Objective, 

12 minimize, 

13 assert_optimal_termination, 

14 ScalarVar, 

15 units as pyunits, 

16 TerminationCondition 

17) 

18from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint 

19from idaes.core import FlowsheetBlock 

20from .model_statistics_optimizations import ( 

21 degrees_of_freedom, 

22 install_model_statistics_optimizations, 

23) 

24 

25install_model_statistics_optimizations() 

26 

27from idaes.core.util.model_statistics import report_statistics 

28import idaes.logger as idaeslog 

29from ahuora_builder_types import FlowsheetSchema 

30from ahuora_builder_types.flowsheet_schema import SolvedFlowsheetSchema 

31from .property_package_manager import PropertyPackageManager 

32from .port_manager import PortManager 

33from .arc_manager import ArcManager 

34from .tear_manager import TearManager 

35from .unit_model_manager import UnitModelManager 

36from .methods.adapter_library import AdapterLibrary 

37from .methods.adapter import ( 

38 serialize_properties_map, 

39 deactivate_fixed_guesses, 

40 deactivate_component, 

41 deactivate_components, 

42 add_corresponding_constraint, 

43) 

44from .methods.expression_parsing import parse_expression, ExpressionParsingError 

45from .timing import start_timing 

46from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent 

47from .methods.scaling_suffix import sanitize_scaling_suffix 

48from .properties_manager import PropertiesManager 

49from .custom.energy.power_property_package import PowerParameterBlock 

50from .custom.energy.ac_property_package import acParameterBlock 

51from .custom.energy.transformer_property_package import transformerParameterBlock 

52from pyomo.core.base.units_container import units, _PyomoUnit 

53from idaes.core.util.model_serializer import StoreSpec, from_json, to_json 

54from .diagnostics.infeasibilities import print_infeasibilities 

55 

56# CSTR Imports from example flowsheet.  

57# To be used as placeholders until bespoke functions can be developed. 

58from .custom import hda_reaction as reaction_props 

59from .custom.hda_ideal_VLE import HDAParameterBlock 

60from ahuora_builder.properties_manager import PropertyComponent 

61from ahuora_property_packages.build_package import build_package 

62 

63_log = idaeslog.getLogger(__name__) 

64 

65# from amplpy import modules 

66# Import required to allow the library to set the PATH and allow conopt to be found. 

67 

def build_flowsheet(dynamic=False, time_set=None):
    """
    Builds a flowsheet block.

    Args:
        dynamic: whether to build a dynamic (time-varying) flowsheet.
        time_set: time points for the flowsheet; defaults to [0]
            (a single steady-state point).

    Returns:
        A ConcreteModel whose ``fs`` FlowsheetBlock carries the shared
        property packages, the properties map, and the bookkeeping lists
        (guess vars, deferred constraint expressions) used by the managers.
    """
    # Use a None sentinel instead of a mutable default argument ([0]) so the
    # default list is created fresh per call rather than shared across calls.
    if time_set is None:
        time_set = [0]
    # create the model and the flowsheet block
    model = ConcreteModel()
    model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
    model.fs.guess_vars = []
    model.fs.controlled_vars = Block()
    # Always add the power property package (as there's only one and there's no different property package types)
    # TODO: should this be on-demand?
    model.fs.power_pp = PowerParameterBlock()
    model.fs.ac_pp = acParameterBlock()
    model.fs.tr_pp = transformerParameterBlock()
    # properties map: { id: pyomo variable or expression }
    # used to map symbols for a sympy expression
    model.fs.properties_map = PropertiesManager()
    # list of component constraints to add later
    model.fs.constraint_exprs = []

    # Placeholder property packages for CSTR reactor.
    model.fs.BTHM_params = HDAParameterBlock()
    # Hard-coded peng-robinson package for reactor:
    model.fs.peng_robinson = build_package("peng-robinson", ["benzene", "toluene", "hydrogen", "methane"], ["Liq", "Vap"])

    # Reaction package for the HDA reaction
    model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
        property_package=model.fs.peng_robinson
    )

    return model

99 

100 

101class FlowsheetManager: 

102 """ 

103 Manages the flowsheet, including the property packages, unit models, ports, arcs, and tears 

104 Includes methods to load, initialise, and solve the flowsheet 

105 """ 

106 

    def __init__(self, schema: FlowsheetSchema) -> None:
        """
        Stores all relevant information about the flowsheet, without actually
        loading it.

        Args:
            schema: the flowsheet description the managers are built around.

        The managers are constructed in dependency order: property packages
        before unit models, unit models (which register ports) before arcs,
        and arcs before tears.
        """
        self.timing = start_timing()
        self.timing.add_timing("initialise_flowsheet_manager")

        # Build the empty Pyomo/IDAES model shell that the managers populate.
        self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)

        self.schema = schema
        # Add property packages first, so that unit models can use them
        self.property_packages = PropertyPackageManager(self)
        # Add the port manager, so the unit models can register their ports
        self.ports = PortManager()
        # Add unit models
        self.unit_models = UnitModelManager(self)
        # Add arcs to connect the unit models together
        self.arcs = ArcManager(self)
        # set certain arcs as tears
        self.tears = TearManager(self)
        # Declaration only (no value assigned here); populated in load()
        # from model.fs.properties_map.
        self.properties_map: PropertiesManager

128 

    def load(self) -> None:
        """
        Parses the schema and loads the model.

        Load order: property packages, unit models, arcs, then spec
        expressions/constraints; finally (dynamic flowsheets only) a
        backward finite-difference transformation over the time domain.
        """
        self.timing.step_into("load_flowsheet")
        # Load property packages first, so that unit models can use them
        self.timing.add_timing("load_property_packages")
        self.property_packages.load()
        self.timing.step_into("load_unit_models")
        self.unit_models.load()
        self.timing.step_out()
        # no need to load ports separately, they are loaded by the unit models
        # Load arcs to connect the unit models together
        self.arcs.load()
        # load any expressions
        self.load_specs()
        # if dynamics, apply finite difference transformation.
        if self.schema.dynamic:
            print("performing finite difference with", len(self.model.fs.time), "time steps")
            TransformationFactory("dae.finite_difference").apply_to(
                self.model.fs,
                nfe=len(self.model.fs.time)-1, # Number of finite elements to use for discretization. We aren't adding any extra steps as our constraints dont work for that.
                wrt=self.model.fs.time,
                scheme="BACKWARD"
            )

        # Expose the properties map directly on the manager for convenience.
        self.properties_map = self.model.fs.properties_map
        self.timing.step_out()

157 

    def load_specs(self) -> None:
        """
        Loads expressions from the schema.

        First creates a Pyomo Expression (indexed by time) for each spec,
        topologically sorted so a spec referencing another is built after
        it. Then creates equality Constraints for entries queued in
        fs.constraint_exprs during unit-model loading.

        Raises:
            ExpressionParsingError: when a spec or constraint expression
                fails to parse, or a constrained component is not scalar
                or time-indexed.
        """
        fs = self.model.fs
        specs = self.schema.expressions or []

        ## Build dependency tree and sort specs before loading.
        dependencies, result_ids = self.build_spec_dependency_tree(specs)
        sorted_result_ids = self.topological_sort(dependencies, result_ids)

        # load the specs (expressions within specifications tab)
        for result_id in sorted_result_ids:
            spec_config = next(spec for spec in specs if spec["id"] == result_id)
            expression_str = spec_config["expression"]
            try:
                component_name = f"{spec_config['name']}_{spec_config['id']}"
                # The rule closure captures expression_str; it is evaluated
                # eagerly by fs.add_component() below, within this iteration,
                # so late-binding of the loop variable is not an issue here.
                def expression_rule(blk, time_index):
                    return parse_expression(expression_str, fs,time_index)

                component = Expression(fs.time, rule=expression_rule)
                fs.add_component(component_name, component)
                fs.properties_map.add(
                    spec_config["id"], component, component.name, unknown_units=True
                )
            except ExpressionParsingError as e:
                raise ExpressionParsingError(f"{e} when parsing expression '{expression_str}' for {component_name}: ")

        # load constraints (expressions for specific property infos)
        # can only handle equality constraints for now
        for component, expr_str, id in fs.constraint_exprs:
            # get the time index
            #for time_index in fs.time:
            # Only scalar (dimen 0) or time-indexed components are supported.
            if component.index_set().dimen != 0 and component.index_set() != fs.time:
                raise ExpressionParsingError(f"Cannot add constraint for {component}: only time-indexed components are supported.")
            try:
                def constraint_rule(blk, time_index):
                    expression = parse_expression(expr_str, fs,time_index)
                    # make sure the units of the expression are the same as the component
                    u1, u2 = get_attached_unit(component), get_attached_unit(expression)
                    if not check_units_equivalent(u1, u2):
                        raise ValueError(
                            f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
                        )
                    # Convert the component into the expression's units so the
                    # equality is dimensionally consistent.
                    return pyunits.convert(component[time_index], to_units=u2) == expression
                c = Constraint(component.index_set(), rule= constraint_rule)
                name = f"equality_constraint_{id}"
                fs.add_component(name, c)
                # Record the constraint against the property id so it can be
                # found (and e.g. deactivated) later.
                add_corresponding_constraint(fs, c, id)
            except ExpressionParsingError as e:
                raise ExpressionParsingError(f"Failed to parse constraint expression '{expr_str}' for {component}: {expr_str}, error: {e}")

209 

210 

211 def build_spec_dependency_tree(self, specs) -> tuple: 

212 """ 

213 Builds dependency tree for expressions based on the references in their respective expressions. 

214 """ 

215 dependencies = defaultdict( 

216 set 

217 ) # Maps an expression's result_id to a set of result_ids it depends on 

218 result_ids = set() # A set to track all result_ids 

219 

220 # Get a list of all result_id's. 

221 for spec in specs: 

222 result_id = spec["id"] 

223 result_ids.add(result_id) 

224 

225 for spec in specs: 

226 result_id = spec["id"] 

227 expression = spec["expression"] 

228 

229 # Find the result_ids that this expression depends on 

230 dependent_expressions = self.get_dependent_expressions( 

231 expression, result_ids 

232 ) 

233 

234 if dependent_expressions: 

235 # If the expression depends on another result_id, add dependency 

236 for id in dependent_expressions: 

237 dependencies[id].add(result_id) 

238 

239 return dependencies, result_ids 

240 

241 def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list: 

242 """ 

243 Gets all result_ids referenced in the expression. 

244 """ 

245 # match result_ids starting with 'id_' followed by numbers 

246 ids = re.findall(r"\b(id_\d+)\b", expression) 

247 

248 # Filter the referenced_ids to only include those that are in all_result_ids 

249 valid_referenced_ids = [] 

250 for id in ids: 

251 # get the numeric part after "id_" and check if it's in the all_result_ids 

252 numeric_id = int(id[3:]) 

253 if numeric_id in all_result_ids: 

254 valid_referenced_ids.append(numeric_id) 

255 

256 return valid_referenced_ids 

257 

258 def topological_sort(self, dependencies: dict, result_ids: set) -> list: 

259 """ 

260 Performs topological sorting on the specification dependency tree. 

261 """ 

262 # Track teh in-degree count for all expressions (edges coming into it) 

263 in_degree = defaultdict(int) 

264 

265 # Count dependencies for each expression 

266 for result_id in result_ids: 

267 for dep in dependencies[result_id]: 

268 in_degree[dep] += 1 

269 

270 # Initialise the queue with result_ids that have no dependencies (in-degree 0) 

271 dequeue = deque( 

272 [result_id for result_id in result_ids if in_degree[result_id] == 0] 

273 ) 

274 

275 sorted_result_ids = [] 

276 

277 while dequeue: 

278 result_id = dequeue.popleft() 

279 sorted_result_ids.append(result_id) 

280 

281 # loop thtough and decrement each dependent expression's in_degree, and append it to the deque if it has an in-degree of 0. 

282 for dependent_result_id in dependencies[result_id]: 

283 in_degree[dependent_result_id] -= 1 

284 if in_degree[dependent_result_id] == 0: 284 ↛ 282line 284 didn't jump to line 282 because the condition on line 284 was always true

285 dequeue.append(dependent_result_id) 

286 

287 # If there are any result_ids left with non-zero in-degree, a cycle exists so error. 

288 if len(sorted_result_ids) != len(result_ids): 288 ↛ 289line 288 didn't jump to line 289 because the condition on line 288 was never true

289 raise ValueError( 

290 "Cycle detected in the dependency graph. Check an expression does not reference itself!" 

291 ) 

292 

293 return sorted_result_ids 

294 

    def initialise(self) -> None:
        """
        Expands the arcs and initialises the model.

        Runs Pyomo's SequentialDecomposition over the flowsheet graph
        (honouring the configured tear arcs), then initialises any unit
        models the decomposition did not reach, and finally releases the
        fixed tear-guess variables.
        """
        # check if initialisation is disabled for this scenario
        if getattr(self.schema, "disable_initialization", False):
            # if disable initialisation is set to True, then we don't need to initialise the model
            # NOTE(review): this branch only logs; the actual skip happens
            # per-unit inside init_unit() below.
            _log.info("Initialisation is disabled for this scenario.")

        # We need to "expand_arcs" to make them a bidirection link that actually imposes constraints on the model.
        TransformationFactory("network.expand_arcs").apply_to(self.model)

        self.timing.step_into("initialise_model")

        # load tear guesses (including var/constraint unfixing & deactivation and/or equality constraint deactivation)
        self.tears.load()

        tears = self.tears._tears

        def init_unit(unit):
            # Initialise a single unit model. Called both by
            # SequentialDecomposition and for disconnected blocks below.
            if getattr(self.schema, "disable_initialization", False):
                return
            if (
                getattr(
                    self.schema,
                    "skip_initialization_for_units_with_initial_values",
                    False,
                )
                and self.unit_models.has_initial_values(unit)
            ):
                _log.info(
                    f"Skipping initialization for unit {unit} because initial values exist."
                )
                return

            _log.info("Initializing unit %s", unit)
            self.timing.add_timing(f"init_{unit.name}")
            try:
                #unit.display()
                unit.initialize(outlvl=idaeslog.INFO)
                #unit.report()
            except Exception as e:
                # Initialisation failures are logged but not fatal; the
                # final solve may still recover from a poor starting point.
                _log.warning(
                    "Error occurred while initializing unit %s: %s",
                    unit,
                    e,
                )

        self.timing.add_timing("setup_sequential_decomposition")
        # Use SequentialDecomposition to initialise the model
        seq = SequentialDecomposition(
            run_first_pass=True,
        )
        seq.set_tear_set(tears)
        # use create_graph to get the order of sequential decomposition, and also to
        # find any units that are not connected to the sequential decomposition
        G = seq.create_graph(self.model)
        order = seq.calculation_order(G)
        seq_blocks = []
        for o in order:
            # o is a list of "levels" of the graph,
            # all items in the first level have to no dependencies, the next level only depends on the previous levels.
            for b in o:
                seq_blocks.append(b)
        _log.info("Order of initialisation: %s", [blk.name for blk in seq_blocks])
        # set all the tear guesses before running the decomposition
        for arc in tears:
            port = arc.destination
            # guesses used are initial values for each var
            guesses = {}
            guesses = {key: get_value(var) for key, var in port.vars.items()}
            # NOTE(review): `guesses` is only logged here and never passed to
            # seq.set_guesses_for(port, guesses) — confirm whether that call
            # was intended or the tear values are picked up elsewhere.
            _log.info("Guess for %s: %s", port, guesses)

        self.timing.step_into("run_sequential_decomposition")

        # sequential decomposition completes when all vars across port
        # equalities are within tol of each other
        seq.options["tol"] = 1e-2
        seq.options["solve_tears"] = False
        seq.run(self.model, init_unit)

        self.timing.step_out()
        self.timing.add_timing("initialise_disconnected_units")
        # Initialise any unit model that is not connected to the sequential decomposition
        for blk in self.model.fs.component_data_objects(
            Block, descend_into=False, active=True
        ):
            ports = list(blk.component_objects(Port, descend_into=False))
            if len(ports) == 0:
                continue # if the block has no ports, then it is not a unit model
            if blk in seq_blocks:
                continue # already initialised by sequential decomposition
            init_unit(blk)

        # unfix guess vars
        deactivate_fixed_guesses(self.model.fs.guess_vars)

        self.timing.step_out()

393 

394 def serialise(self) -> SolvedFlowsheetSchema: 

395 self.timing.add_timing("serialise_model") 

396 

397 initial_values = {} 

398 for unit_model_id, unit_model in self.unit_models._unit_models.items(): 

399 initial_values[str(unit_model_id)] = to_json(unit_model, return_dict=True, wts=StoreSpec.value()) 

400 

401 solved_flowsheet = SolvedFlowsheetSchema( 

402 group_id=self.schema.group_id, 

403 properties=serialize_properties_map(self.model.fs), 

404 initial_values=initial_values 

405 ) 

406 

407 return solved_flowsheet 

408 

    def report_statistics(self) -> None:
        """
        Reports statistics about the model (via IDAES's report_statistics).
        """
        report_statistics(self.model)
        # I think the diagnostics toolbox is taking too long to run, so commenting it out for now.
        # dt = DiagnosticsToolbox(self.model)
        # dt.report_structural_issues()
        # dt.display_overconstrained_set()
        # dt.display_underconstrained_set()

419 

    def diagnose_problems(self) -> None:
        """
        Runs the IDAES DiagnosticsToolbox over the model and logs structural
        and numerical issues.

        Best-effort: the toolbox calls that are known to raise are wrapped
        individually so one failing check does not abort the rest.
        """
        # Imported locally: only needed when diagnostics are requested.
        from idaes.core.util import DiagnosticsToolbox

        _log.info("=== DIAGNOSTICS ===")

        report_statistics(self.model)
        dt = DiagnosticsToolbox(self.model)
        dt.report_structural_issues()
        dt.display_overconstrained_set()
        dt.display_underconstrained_set()
        #dt.display_components_with_inconsistent_units()
        try:
            dt.compute_infeasibility_explanation()
        except Exception as e:
            _log.warning(
                "Error computing infeasibility explanation: %s",
                e,
            )
        try:
            dt.report_numerical_issues()
        except Exception as e:
            _log.warning("Error reporting numerical issues: %s", e)
        try:
            dt.display_near_parallel_constraints()
        except Exception as e:
            _log.warning("Error displaying near parallel constraints: %s", e)

        dt.display_variables_at_or_outside_bounds()
        dt.display_variables_near_bounds()
        dt.display_constraints_with_extreme_jacobians()
        dt.display_constraints_with_large_residuals()
        _log.info("=== END DIAGNOSTICS ===")

452 

453 def degrees_of_freedom(self) -> int: 

454 """ 

455 Returns the degrees of freedom of the model 

456 """ 

457 return int(degrees_of_freedom(self.model)) 

458 

459 def check_model_valid(self) -> None: 

460 """ 

461 Checks if the model is valid by checking the 

462 degrees of freedom. Will raise an exception if 

463 the model is not valid. 

464 """ 

465 self.timing.add_timing("check_model_valid") 

466 

467 degrees_of_freedom = self.degrees_of_freedom() 

468 if degrees_of_freedom != 0: 468 ↛ 470line 468 didn't jump to line 470 because the condition on line 468 was never true

469 #self.model.display() # prints the vars/constraints for debugging 

470 raise Exception( 

471 f"Degrees of freedom is not 0. Degrees of freedom: {degrees_of_freedom}" 

472 ) 

473 

    def solve(self) -> None:
        """
        Solves the model. Uses petsc_dae_by_time_element for the petsc solver
        (dynamic only), or a standard SolverFactory solver otherwise.

        Raises:
            ValueError: if "petsc" is requested on a non-dynamic flowsheet.
            Whatever assert_optimal_termination raises when a solve does not
            terminate optimally.
        """
        self.timing.add_timing("solve_model")
        _log.info("=== Starting Solve ===")

        if self.schema.solver_option == "petsc":
            # Imported lazily so non-petsc runs don't require petsc.
            from idaes.core.solvers import petsc as idaes_petsc

            if not self.schema.dynamic:
                raise ValueError("The PETSc solver is only supported for dynamic flowsheets.")
            result = idaes_petsc.petsc_dae_by_time_element(
                self.model,
                time=self.model.fs.time,
                ts_options={
                    "--ts_type": "beuler",
                    "--ts_dt": 1,
                    "--ts_monitor": "",
                    "--ts_save_trajectory": 1,
                },
            )
            # Check that all time-element solves terminated successfully.
            # assert_optimal_termination raises an exception if any step failed.
            for res in result.results:
                assert_optimal_termination(res)
            return

        opt = SolverFactory(self.schema.solver_option)
        # opt.options["max_iter"] = 5000

        opt.options["max_iter"] = 1000
        try:
            res = opt.solve(self.model, tee=True)
            if res.solver.termination_condition != TerminationCondition.optimal:
                # Solve failed: dump infeasibility details and let each unit
                # model that supports it diagnose itself before raising below.
                print_infeasibilities(self.model.fs.properties_map)

                for unit_model in self.unit_models._unit_models.values():
                    if hasattr(unit_model, "diagnose"):
                        try:
                            results = unit_model.diagnose()
                            for prop,logs in results:
                                _log.info("%s %s", prop, logs)
                        # The unit diagnostics methods may be unreliable, as each unit model has different methods.
                        # So we wrap in a try-except to avoid the whole diagnostics process failing if one unit model's diagnostics fails.
                        except Exception as e:
                            _log.warning("Error Diagnosing %s: %s", unit_model, e)

            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # Known benign error for models with no free variables:
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise e

530 

    def optimize(self) -> None:
        """
        Applies the schema's optimizations to the model and re-solves.

        Combines all configured objectives into a single expression
        (minimize terms added, maximize terms subtracted), unfixes the
        requested degrees of freedom (or deactivates their corresponding
        constraints), attaches optional upper/lower bound constraints,
        then solves with the configured solver. No-op when the schema has
        no optimizations.
        """
        if self.schema.optimizations is None or self.schema.optimizations == []:
            return

        self.timing.add_timing("optimize_model")
        _log.info("=== Starting Optimization ===")

        # ipopt doesn't support multiple objectives, so we need to create
        # a single objective expression.
        # this is done by summing all objectives in the model, adding
        # or subtracting based on the sense (minimize or maximize)
        objective_expr = 0
        for schema in self.schema.optimizations:
            # get the expression component to optimize
            # TODO: This is assuming optimisation is run on a steady-state simulation. We need to change how this works to handle dynamics.
            # For now, just hardcoding time_index=0
            objective_component = self.model.fs.properties_map.get_component(schema.objective)
            # the objective component should be a scalar in non-dynamic models
            # in a dynamic model it'll be indexed across all time steps
            # We sum up all time steps so each time step is weighted equally.
            objective = sum(objective_component.values())
            # add or subtract the objective based on the sense
            sense = schema.sense
            if sense == "minimize":
                objective_expr += objective
            else:
                objective_expr -= objective

            # unfix relevant vars (add to degrees of freedom)
            for dof_info in schema.unfixed_variables:
                id = dof_info.id # Id of the propertyValue for this degree of freedom
                var: PropertyComponent = self.model.fs.properties_map.get(id)

                # TODO: may need to handle deactivating constraints,
                # for expressions that are constrained (instead of state vars)
                c = var.corresponding_constraint
                if c is not None:
                    # TODO: better typing for constraints
                    if isinstance(c, ScalarConstraint) or isinstance(c, IndexedConstraint):
                        c.deactivate()
                    else:
                        deactivate_components(c)
                else:
                    for i in var.component.values():
                        if isinstance(i, ScalarVar):
                            i.unfix()
                        # Because if not, it is ExpressionData, meaning it is already an expression and doesn't need to be unfixed. (we've already checked if there is a constraint for it above too.)

                # TODO: set attributes for upper and lower bounds of property infos. i.e. use propertyinfo id.
                # Var is either a Variable or Expression
                # set the minimum or maximum bounds for this variable if they are enabled
                #self.model.upper_bound_12 = Constraint(expr= var <= upper_bound_value )

                upper_bound = dof_info.upper_bound
                lower_bound = dof_info.lower_bound

                # Reuse `c` for the underlying component the bounds apply to.
                c = var.component

                # The rule closures capture `c` and the bound values; Pyomo
                # constructs the Constraint when it is attached via setattr
                # below, i.e. still within this loop iteration.
                if upper_bound is not None:
                    def upper_bound_rule(model,index):
                        return c[index] <= upper_bound
                    upper_bound_constraint = Constraint(c.index_set(),rule=upper_bound_rule)
                    setattr(self.model,"upper_bound_" + str(id), upper_bound_constraint)

                if lower_bound is not None:
                    def lower_bound_rule(model,index):
                        return c[index] >= lower_bound
                    lower_bound_constraint = Constraint(c.index_set(),rule=lower_bound_rule)
                    setattr(self.model,"lower_bound_" + str(id), lower_bound_constraint)

        # add the objective to the model
        self.model.objective = Objective(expr=objective_expr, sense=minimize)

        # solve the model with the objective
        opt = SolverFactory(self.schema.solver_option)

        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000

        try:
            res = opt.solve(self.model, tee=True)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # Known benign error for models with no free variables:
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise e