Coverage for backend/idaes_service/solver/flowsheet_manager.py: 87% (277 statements)

from collections import defaultdict, deque
import re

from pyomo.network import SequentialDecomposition, Port
from pyomo.environ import (
    ConcreteModel,
    TransformationFactory,
    SolverFactory,
    Block,
    Expression,
    Constraint,
    Objective,
    minimize,
    assert_optimal_termination,
    ScalarVar,
    units as pyunits,
    TerminationCondition,
)
from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint
from idaes.core import FlowsheetBlock
from idaes.core.util.model_statistics import report_statistics, degrees_of_freedom
import idaes.logger as idaeslog
from idaes.core.util import DiagnosticsToolbox
from common.models.idaes import FlowsheetSchema
from common.models.idaes.flowsheet_schema import SolvedFlowsheetSchema
from .property_package_manager import PropertyPackageManager
from .port_manager import PortManager
from .arc_manager import ArcManager
from .tear_manager import TearManager
from .unit_model_manager import UnitModelManager
from .methods.adapter_library import AdapterLibrary
from .methods.adapter import (
    serialize_properties_map,
    deactivate_fixed_guesses,
    deactivate_component,
    deactivate_components,
    add_corresponding_constraint,
)
from .methods.expression_parsing import parse_expression, ExpressionParsingError
from .timing import start_timing
from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent
from .properties_manager import PropertiesManager
from .custom.energy.power_property_package import PowerParameterBlock
from .custom.energy.ac_property_package import acParameterBlock
from .custom.energy.transformer_property_package import transformerParameterBlock
from pyomo.core.base.units_container import units, _PyomoUnit
from idaes.core.util.model_serializer import StoreSpec, from_json, to_json
from .diagnostics.infeasibilities import print_infeasibilities

# CSTR imports from the example flowsheet.
# To be used as placeholders until bespoke functions can be developed.
from .custom import hda_reaction as reaction_props
from .custom.hda_ideal_VLE import HDAParameterBlock
from ..solver.properties_manager import PropertyComponent
from property_packages.build_package import build_package

# from amplpy import modules
# Import required to allow the library to set the PATH and allow conopt to be found.

def build_flowsheet(dynamic=False, time_set=None):
    """
    Builds a ConcreteModel containing a flowsheet block with the shared
    property packages attached.
    """
    # Avoid a mutable default argument: create a fresh list per call.
    if time_set is None:
        time_set = [0]

    # create the model and the flowsheet block
    model = ConcreteModel()
    model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
    model.fs.guess_vars = []
    model.fs.controlled_vars = Block()

    # Always add the power property package (there is only one, and there are
    # no alternative property package types for it).
    # TODO: should this be on-demand?
    model.fs.power_pp = PowerParameterBlock()
    model.fs.ac_pp = acParameterBlock()
    model.fs.tr_pp = transformerParameterBlock()

    # properties map: { id: pyomo variable or expression }
    # used to map symbols for a sympy expression
    model.fs.properties_map = PropertiesManager()
    # list of component constraints to add later
    model.fs.constraint_exprs = []

    # Placeholder property packages for the CSTR reactor.
    model.fs.BTHM_params = HDAParameterBlock()
    # Hard-coded Peng-Robinson package for the reactor:
    model.fs.peng_robinson = build_package(
        "peng-robinson", ["benzene", "toluene", "hydrogen", "methane"], ["Liq", "Vap"]
    )

    # Reaction package for the HDA reaction
    model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
        property_package=model.fs.peng_robinson
    )

    return model
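
# Illustrative sketch (not executed here) of how build_flowsheet is typically
# consumed. The attribute names are the ones created above; the example
# time_set values are arbitrary:
#
#   model = build_flowsheet()                           # steady-state, time_set=[0]
#   dyn = build_flowsheet(dynamic=True, time_set=[0, 1, 2])
#   dyn.fs.properties_map                               # PropertiesManager instance
#   len(dyn.fs.time)                                    # 3 time points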

class FlowsheetManager:
    """
    Manages the flowsheet, including the property packages, unit models,
    ports, arcs, and tears.
    Includes methods to load, initialise, and solve the flowsheet.
    """

    def __init__(self, schema: FlowsheetSchema) -> None:
        """
        Stores all relevant information about the flowsheet, without actually loading it.
        """
        self.timing = start_timing()
        self.timing.add_timing("initialise_flowsheet_manager")

        self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)

        self.schema = schema
        # Add property packages first, so that unit models can use them
        self.property_packages = PropertyPackageManager(self)
        # Add the port manager, so the unit models can register their ports
        self.ports = PortManager()
        # Add unit models
        self.unit_models = UnitModelManager(self)
        # Add arcs to connect the unit models together
        self.arcs = ArcManager(self)
        # set certain arcs as tears
        self.tears = TearManager(self)

    def load(self) -> None:
        """
        Parses the schema and loads the model.
        """
        self.timing.step_into("load_flowsheet")
        # Load property packages first, so that unit models can use them
        self.timing.add_timing("load_property_packages")
        self.property_packages.load()
        self.timing.step_into("load_unit_models")
        self.unit_models.load()
        self.timing.step_out()
        # No need to load ports separately; they are loaded by the unit models.
        # Load arcs to connect the unit models together
        self.arcs.load()
        # load any expressions
        self.load_specs()
        # if dynamic, apply the finite difference transformation
        if self.schema.dynamic:
            print("performing finite difference with", len(self.model.fs.time), "time steps")
            TransformationFactory("dae.finite_difference").apply_to(
                self.model.fs,
                # Number of finite elements for the discretisation. We don't
                # add any extra steps, as our constraints don't work with them.
                nfe=len(self.model.fs.time) - 1,
                wrt=self.model.fs.time,
                scheme="BACKWARD",
            )

        self.timing.step_out()

    def load_specs(self) -> None:
        """
        Loads expressions from the schema.
        """
        fs = self.model.fs
        specs = self.schema.expressions or []

        # Build the dependency tree and sort the specs before loading them.
        dependencies, result_ids = self.build_spec_dependency_tree(specs)
        sorted_result_ids = self.topological_sort(dependencies, result_ids)

        # load the specs (expressions within the specifications tab)
        for result_id in sorted_result_ids:
            spec_config = next(spec for spec in specs if spec["id"] == result_id)
            expression_str = spec_config["expression"]
            component_name = f"{spec_config['name']}_{spec_config['id']}"
            try:
                def expression_rule(blk, time_index):
                    return parse_expression(expression_str, fs, time_index)

                component = Expression(fs.time, rule=expression_rule)
                fs.add_component(component_name, component)
                fs.properties_map.add(
                    spec_config["id"], component, component.name, unknown_units=True
                )
            except ExpressionParsingError as e:
                raise ExpressionParsingError(
                    f"{e} when parsing expression '{expression_str}' for {component_name}"
                )

        # load constraints (expressions for specific property infos)
        # can only handle equality constraints for now
        for component, expr_str, id in fs.constraint_exprs:
            if component.index_set().dimen != 0 and component.index_set() != fs.time:
                raise ExpressionParsingError(
                    f"Cannot add constraint for {component}: only time-indexed components are supported."
                )
            try:
                def constraint_rule(blk, time_index):
                    expression = parse_expression(expr_str, fs, time_index)
                    # make sure the units of the expression match the component
                    u1, u2 = get_attached_unit(component), get_attached_unit(expression)
                    if not check_units_equivalent(u1, u2):
                        raise ValueError(
                            f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
                        )
                    return pyunits.convert(component[time_index], to_units=u2) == expression

                c = Constraint(component.index_set(), rule=constraint_rule)
                name = f"equality_constraint_{id}"
                fs.add_component(name, c)
                add_corresponding_constraint(fs, c, id)
            except ExpressionParsingError as e:
                raise ExpressionParsingError(
                    f"Failed to parse constraint expression '{expr_str}' for {component}: {e}"
                )
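
    # Illustrative sketch (assumed schema shape, inferred from the keys used
    # above): each spec dict carries "id", "name", and "expression", and an
    # expression may reference another spec via an "id_<n>" token.
    #
    #   specs = [
    #       {"id": 1, "name": "feed_total", "expression": "F1 + F2"},
    #       {"id": 2, "name": "feed_ratio", "expression": "id_1 / F2"},
    #   ]
    #
    # load_specs() would then build "feed_total_1" before "feed_ratio_2",
    # because spec 2 references id_1.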

    def build_spec_dependency_tree(self, specs) -> tuple:
        """
        Builds a dependency tree for expressions based on the references in
        their respective expressions.
        """
        # Maps a result_id to the set of result_ids that depend on it
        # (i.e. its dependents, the expressions that reference it).
        dependencies = defaultdict(set)
        result_ids = set()  # A set to track all result_ids

        # Get a list of all result_ids.
        for spec in specs:
            result_id = spec["id"]
            result_ids.add(result_id)

        for spec in specs:
            result_id = spec["id"]
            expression = spec["expression"]

            # Find the result_ids that this expression depends on
            dependent_expressions = self.get_dependent_expressions(
                expression, result_ids
            )

            # If the expression references another result_id, record this
            # expression as a dependent of that result_id.
            for id in dependent_expressions:
                dependencies[id].add(result_id)

        return dependencies, result_ids
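
    # Illustrative sketch: for the two example specs sketched after
    # load_specs above (feed_total/feed_ratio), the tree comes out as
    #
    #   dependencies == {1: {2}}   # spec 2 references id_1, so 2 depends on 1
    #   result_ids   == {1, 2}
    #
    # i.e. the mapping points from a spec to its dependents, which is the
    # edge direction Kahn's algorithm in topological_sort expects.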

    def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list:
        """
        Gets all result_ids referenced in the expression.
        """
        # match references of the form 'id_' followed by digits
        ids = re.findall(r"\b(id_\d+)\b", expression)

        # Filter the referenced ids to only include those in all_result_ids
        valid_referenced_ids = []
        for id in ids:
            # get the numeric part after "id_" and check whether it is a known result_id
            numeric_id = int(id[3:])
            if numeric_id in all_result_ids:
                valid_referenced_ids.append(numeric_id)

        return valid_referenced_ids
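
    # Illustrative sketch of the regex above (standard `re` behaviour):
    #
    #   re.findall(r"\b(id_\d+)\b", "id_1 / F2 + id_34")  ->  ["id_1", "id_34"]
    #
    # so with all_result_ids == {1, 2}, only 1 survives the filter.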

    def topological_sort(self, dependencies: dict, result_ids: set) -> list:
        """
        Performs a topological sort on the specification dependency tree.
        """
        # Track the in-degree count for all expressions (edges coming into them)
        in_degree = defaultdict(int)

        # Count dependencies for each expression
        for result_id in result_ids:
            for dep in dependencies[result_id]:
                in_degree[dep] += 1

        # Initialise the queue with result_ids that have no dependencies (in-degree 0)
        queue = deque(
            [result_id for result_id in result_ids if in_degree[result_id] == 0]
        )

        sorted_result_ids = []

        while queue:
            result_id = queue.popleft()
            sorted_result_ids.append(result_id)

            # Decrement each dependent expression's in-degree, and append it
            # to the queue once its in-degree reaches 0.
            for dependent_result_id in dependencies[result_id]:
                in_degree[dependent_result_id] -= 1
                if in_degree[dependent_result_id] == 0:
                    queue.append(dependent_result_id)

        # If any result_ids are left with a non-zero in-degree, a cycle exists, so error.
        if len(sorted_result_ids) != len(result_ids):
            raise ValueError(
                "Cycle detected in the dependency graph. Check an expression does not reference itself!"
            )

        return sorted_result_ids
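
    # Illustrative walk-through of Kahn's algorithm with the example tree:
    #
    #   dependencies == {1: {2}}, result_ids == {1, 2}
    #   in_degree    == {2: 1}          # one edge into spec 2
    #   queue starts as deque([1])      # spec 1 has in-degree 0
    #   pop 1 -> decrement 2 -> queue [2] -> pop 2
    #   result: [1, 2]                  # dependencies resolve before dependents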

    def initialise(self) -> None:
        """
        Expands the arcs and initialises the model.
        """
        # check if initialisation is disabled for this scenario
        if getattr(self.schema, "disable_initialization", False):
            # init_unit below becomes a no-op, so the model is left uninitialised
            print("Initialisation is disabled for this scenario.")

        # We need to "expand_arcs" to turn each arc into a bidirectional link
        # that actually imposes constraints on the model.
        TransformationFactory("network.expand_arcs").apply_to(self.model)

        self.timing.step_into("initialise_model")

        # load tear guesses (including var/constraint unfixing & deactivation
        # and/or equality constraint deactivation)
        self.tears.load()

        tears = self.tears._tears

        def init_unit(unit):
            if getattr(self.schema, "disable_initialization", False):
                return

            print(f"Initializing unit {unit}")
            self.timing.add_timing(f"init_{unit.name}")
            # unit.display()
            unit.initialize(outlvl=idaeslog.INFO)
            # unit.report()

        self.timing.add_timing("setup_sequential_decomposition")
        # Use SequentialDecomposition to initialise the model
        seq = SequentialDecomposition(
            # run_first_pass=True,
            iterLim=1,
        )
        seq.set_tear_set(tears)
        # use create_graph to get the order of sequential decomposition, and also to
        # find any units that are not connected to the sequential decomposition
        G = seq.create_graph(self.model)
        order = seq.calculation_order(G)
        seq_blocks = []
        for o in order:
            seq_blocks.append(o[0])
        print("Order of initialisation:", [blk.name for blk in seq_blocks])

        # log the tear guesses (the initial value of each port var) before
        # running the decomposition
        for arc in tears:
            port = arc.destination
            guesses = {key: get_value(var) for key, var in port.vars.items()}
            print(f"Guess for {port}: {guesses}")

        self.timing.step_into("run_sequential_decomposition")

        # sequential decomposition completes when all vars across port
        # equalities are within tol of each other
        seq.options["tol"] = 1e-2
        # seq.options["solve_tears"] = False
        res = seq.run(self.model, init_unit)

        self.timing.step_out()
        self.timing.add_timing("initialise_disconnected_units")
        # Initialise any unit model that is not connected to the sequential decomposition
        for blk in self.model.fs.component_data_objects(
            Block, descend_into=False, active=True
        ):
            ports = list(blk.component_objects(Port, descend_into=False))
            if len(ports) == 0:
                continue  # if the block has no ports, it is not a unit model
            if blk in seq_blocks:
                continue  # already initialised by sequential decomposition
            init_unit(blk)

        # unfix guess vars
        deactivate_fixed_guesses(self.model.fs.guess_vars)

        self.timing.step_out()
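
    # Note: the loop above only *logs* the tear guesses. If they ever need to
    # be pushed into the decomposition explicitly, Pyomo's
    # SequentialDecomposition exposes set_guesses_for; a minimal sketch,
    # assuming the same tears/port structure as above:
    #
    #   for arc in tears:
    #       port = arc.destination
    #       seq.set_guesses_for(
    #           port, {key: get_value(var) for key, var in port.vars.items()}
    #       )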

    def serialise(self) -> SolvedFlowsheetSchema:
        self.timing.add_timing("serialise_model")

        initial_values = {}
        for unit_model_id, unit_model in self.unit_models._unit_models.items():
            initial_values[str(unit_model_id)] = to_json(
                unit_model, return_dict=True, wts=StoreSpec.value()
            )

        solved_flowsheet = SolvedFlowsheetSchema(
            id=self.schema.id,
            properties=serialize_properties_map(self.model.fs),
            initial_values=initial_values,
        )

        return solved_flowsheet

    def report_statistics(self) -> None:
        """
        Reports statistics about the model.
        """
        report_statistics(self.model)
        # The diagnostics toolbox appears to take too long to run, so it is
        # commented out for now.
        # dt = DiagnosticsToolbox(self.model)
        # dt.report_structural_issues()
        # dt.display_overconstrained_set()
        # dt.display_underconstrained_set()

    def diagnose_problems(self) -> None:
        print("=== DIAGNOSTICS ===")
        report_statistics(self.model)
        dt = DiagnosticsToolbox(self.model)
        dt.report_structural_issues()
        dt.display_overconstrained_set()
        dt.display_underconstrained_set()
        # dt.display_components_with_inconsistent_units()
        try:
            dt.compute_infeasibility_explanation()
        except Exception as e:
            print(f"{e}")  # the error is probably because the model is feasible
        dt.report_numerical_issues()
        dt.display_near_parallel_constraints()
        dt.display_variables_at_or_outside_bounds()
        print("=== END DIAGNOSTICS ===")

    def degrees_of_freedom(self) -> int:
        """
        Returns the degrees of freedom of the model.
        """
        return int(degrees_of_freedom(self.model))

    def check_model_valid(self) -> None:
        """
        Checks that the model is valid by checking the degrees of freedom.
        Raises an exception if the model is not valid.
        """
        self.timing.add_timing("check_model_valid")
        # use a local name that doesn't shadow the imported degrees_of_freedom
        dof = self.degrees_of_freedom()
        if dof != 0:
            # self.model.display()  # prints the vars/constraints for debugging
            raise Exception(
                f"Degrees of freedom is not 0. Degrees of freedom: {dof}"
            )

    def solve(self) -> None:
        """
        Solves the model.
        """
        self.timing.add_timing("solve_model")
        print("=== Starting Solve ===")

        opt = SolverFactory(self.schema.solver_option)
        # opt.options["max_iter"] = 5000

        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000
        try:
            res = opt.solve(self.model, tee=True)
            if res.solver.termination_condition != TerminationCondition.optimal:
                print_infeasibilities(self.model.fs.properties_map)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise

    def optimize(self) -> None:
        if self.schema.optimizations is None or self.schema.optimizations == []:
            return

        self.timing.add_timing("optimize_model")
        print("=== Starting Optimization ===")

        # ipopt doesn't support multiple objectives, so we need to create
        # a single objective expression.
        # this is done by summing all objectives in the model, adding
        # or subtracting based on the sense (minimize or maximize)
        objective_expr = 0
        for schema in self.schema.optimizations:
            # get the expression component to optimize
            # TODO: this assumes the optimisation is run on a steady-state
            # simulation. We need to change how this works to handle dynamics.
            objective_component = self.model.fs.properties_map.get_component(schema.objective)
            # the objective component should be a scalar in non-dynamic models;
            # in a dynamic model it will be indexed across all time steps.
            # We sum up all time steps so each time step is weighted equally.
            objective = sum(objective_component.values())
            # add or subtract the objective based on the sense
            sense = schema.sense
            if sense == "minimize":
                objective_expr += objective
            else:
                objective_expr -= objective

            # unfix relevant vars (add to degrees of freedom)
            for dof_info in schema.unfixed_variables:
                id = dof_info.id  # id of the propertyValue for this degree of freedom
                var: PropertyComponent = self.model.fs.properties_map.get(id)

                # TODO: may need to handle deactivating constraints,
                # for expressions that are constrained (instead of state vars)
                c = var.corresponding_constraint
                if c is not None:
                    # TODO: better typing for constraints
                    if isinstance(c, (ScalarConstraint, IndexedConstraint)):
                        c.deactivate()
                    else:
                        deactivate_components(c)
                else:
                    for i in var.component.values():
                        if isinstance(i, ScalarVar):
                            i.unfix()
                        # Otherwise it is ExpressionData, i.e. already an
                        # expression and doesn't need to be unfixed (and we've
                        # already checked above whether it has a constraint).

                # TODO: set attributes for upper and lower bounds of property
                # infos, i.e. use the propertyinfo id.
                # var is either a Variable or an Expression. Set the minimum
                # or maximum bounds for this variable if they are enabled,
                # e.g. self.model.upper_bound_12 = Constraint(expr=var <= upper_bound_value)

                upper_bound = dof_info.upper_bound
                lower_bound = dof_info.lower_bound

                c = var.component

                if upper_bound is not None:
                    def upper_bound_rule(model, index):
                        return c[index] <= upper_bound
                    upper_bound_constraint = Constraint(c.index_set(), rule=upper_bound_rule)
                    setattr(self.model, "upper_bound_" + str(id), upper_bound_constraint)

                if lower_bound is not None:
                    def lower_bound_rule(model, index):
                        return c[index] >= lower_bound
                    lower_bound_constraint = Constraint(c.index_set(), rule=lower_bound_rule)
                    setattr(self.model, "lower_bound_" + str(id), lower_bound_constraint)

        # add the objective to the model
        self.model.objective = Objective(expr=objective_expr, sense=minimize)

        # solve the model with the objective
        opt = SolverFactory(self.schema.solver_option)

        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000

        try:
            res = opt.solve(self.model, tee=True)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise
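
# Illustrative sketch (not executed here) of the intended lifecycle, using
# only methods defined on FlowsheetManager above; `schema` is assumed to be a
# populated FlowsheetSchema:
#
#   manager = FlowsheetManager(schema)
#   manager.load()                # build property packages, units, arcs, specs
#   manager.initialise()          # expand arcs, run sequential decomposition
#   manager.check_model_valid()   # raises if degrees of freedom != 0
#   manager.solve()
#   manager.optimize()            # no-op when schema.optimizations is empty
#   solved = manager.serialise()  # SolvedFlowsheetSchema for the caller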