
from collections import defaultdict, deque
import re
from pyomo.network import SequentialDecomposition, Port
from pyomo.environ import (
    ConcreteModel,
    TransformationFactory,
    SolverFactory,
    Block,
    Expression,
    Constraint,
    Objective,
    minimize,
    assert_optimal_termination,
    ScalarVar,
    units as pyunits,
)
from pyomo.core.base.constraint import ScalarConstraint
from idaes.core import FlowsheetBlock
from idaes.core.util.model_statistics import report_statistics, degrees_of_freedom
import idaes.logger as idaeslog
from idaes.core.util import DiagnosticsToolbox
from common.models.idaes import FlowsheetSchema
from common.models.idaes.flowsheet_schema import SolvedFlowsheetSchema
from .property_package_manager import PropertyPackageManager
from .port_manager import PortManager
from .arc_manager import ArcManager
from .tear_manager import TearManager
from .unit_model_manager import UnitModelManager
from .methods.adapter_library import AdapterLibrary
from .methods.adapter import (
    serialize_properties_map,
    deactivate_fixed_guesses,
    deactivate_component,
    deactivate_components,
    add_corresponding_constraint,
)
from .methods.expression_parsing import parse_expression, ExpressionParsingError
from .timing import start_timing
from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent
from .properties_manager import PropertiesManager
from .custom.energy.power_property_package import PowerParameterBlock
from .custom.energy.ac_property_package import acParameterBlock
from .custom.energy.transformer_property_package import transformerParameterBlock
from pyomo.core.base.units_container import units, _PyomoUnit
from idaes.core.util.model_serializer import StoreSpec, from_json, to_json


# CSTR imports from the example flowsheet.
# To be used as placeholders until bespoke functions can be developed.
from .custom import hda_reaction as reaction_props
from .custom.hda_ideal_VLE import HDAParameterBlock
from ..solver.properties_manager import PropertyComponent
from property_packages.build_package import build_package

from amplpy import modules
# Import required to allow the library to set the PATH and allow conopt to be found.


def build_flowsheet(dynamic=False, time_set=[0]):
    """
    Builds a flowsheet block.
    """
    # create the model and the flowsheet block
    model = ConcreteModel()
    model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
    model.fs.guess_vars = []
    model.fs.controlled_vars = Block()
    # Always add the power property package (there is only one, and there are
    # no variant property package types).
    # TODO: should this be on-demand?
    model.fs.power_pp = PowerParameterBlock()
    model.fs.ac_pp = acParameterBlock()
    model.fs.tr_pp = transformerParameterBlock()
    # properties map: { id: pyomo variable or expression }
    # used to map symbols for a sympy expression
    model.fs.properties_map = PropertiesManager()
    # list of component constraints to add later
    model.fs.constraint_exprs = []

    # Placeholder property packages for the CSTR reactor.
    model.fs.BTHM_params = HDAParameterBlock()
    # Hard-coded Peng-Robinson package for the reactor:
    model.fs.peng_robinson = build_package(
        "peng-robinson", ["benzene", "toluene", "hydrogen", "methane"], ["Liq", "Vap"]
    )

    # Reaction package for the HDA reaction
    model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
        property_package=model.fs.peng_robinson
    )

    return model



class FlowsheetManager:
    """
    Manages the flowsheet, including the property packages, unit models,
    ports, arcs, and tears.
    Includes methods to load, initialise, and solve the flowsheet.
    """

    def __init__(self, schema: FlowsheetSchema) -> None:
        """
        Stores all relevant information about the flowsheet, without actually loading it.
        """
        self.timing = start_timing()
        self.timing.add_timing("initialise_flowsheet_manager")

        self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)

        self.schema = schema
        # Add property packages first, so that unit models can use them
        self.property_packages = PropertyPackageManager(self)
        # Add the port manager, so the unit models can register their ports
        self.ports = PortManager()
        # Add unit models
        self.unit_models = UnitModelManager(self)
        # Add arcs to connect the unit models together
        self.arcs = ArcManager(self)
        # set certain arcs as tears
        self.tears = TearManager(self)


    def load(self) -> None:
        """
        Parses the schema and loads the model.
        """
        self.timing.step_into("load_flowsheet")
        # Load property packages first, so that unit models can use them
        self.timing.add_timing("load_property_packages")
        self.property_packages.load()
        self.timing.step_into("load_unit_models")
        self.unit_models.load()
        self.timing.step_out()
        # No need to load ports separately; they are loaded by the unit models.
        # Load arcs to connect the unit models together
        self.arcs.load()
        # load any expressions
        self.load_specs()
        # if dynamic, apply the finite difference transformation
        if self.schema.dynamic:
            print("performing finite difference with", len(self.model.fs.time), "time steps")
            TransformationFactory("dae.finite_difference").apply_to(
                self.model.fs,
                # number of finite elements for the discretisation; we don't add
                # any extra steps because our constraints don't support them
                nfe=len(self.model.fs.time) - 1,
                wrt=self.model.fs.time,
                scheme="BACKWARD",
            )

        self.timing.step_out()


    def load_specs(self) -> None:
        """
        Loads expressions from the schema.
        """
        fs = self.model.fs
        specs = self.schema.expressions or []

        # Build the dependency tree and sort the specs before loading, so that
        # each expression is created after the expressions it references.
        dependencies, result_ids = self.build_spec_dependency_tree(specs)
        sorted_result_ids = self.topological_sort(dependencies, result_ids)
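        # For example, if spec id 2 has the expression "2 * id_1 + 5", it
        # depends on spec id 1, so the sorted order places 1 before 2.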


        # load the specs (expressions within the specifications tab)
        for result_id in sorted_result_ids:
            spec_config = next(spec for spec in specs if spec["id"] == result_id)
            expression_str = spec_config["expression"]
            try:
                component_name = f"{spec_config['name']}_{spec_config['id']}"

                def expression_rule(blk, time_index):
                    return parse_expression(expression_str, fs, time_index)

                component = Expression(fs.time, rule=expression_rule)
                fs.add_component(component_name, component)
                fs.properties_map.add(
                    spec_config["id"], component, component.name, unknown_units=True
                )
            except ExpressionParsingError as e:
                raise ExpressionParsingError(
                    f"{e} when parsing expression '{expression_str}' for {component_name}"
                )

        # load constraints (expressions for specific property infos)
        # can only handle equality constraints for now
        for component, expr_str, id in fs.constraint_exprs:
            if component.index_set().dimen != 0 and component.index_set() != fs.time:
                raise ExpressionParsingError(
                    f"Cannot add constraint for {component}: only time-indexed components are supported."
                )
            try:
                def constraint_rule(blk, time_index):
                    expression = parse_expression(expr_str, fs, time_index)
                    # make sure the units of the expression match the component's
                    u1, u2 = get_attached_unit(component), get_attached_unit(expression)
                    if not check_units_equivalent(u1, u2):
                        raise ValueError(
                            f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
                        )
                    return pyunits.convert(component[time_index], to_units=u2) == expression

                c = Constraint(component.index_set(), rule=constraint_rule)
                name = f"equality_constraint_{component.name}"
                fs.add_component(name, c)
                add_corresponding_constraint(fs, c, id)
            except ExpressionParsingError as e:
                raise ExpressionParsingError(
                    f"Failed to parse constraint expression '{expr_str}' for {component}: {e}"
                )


    def build_spec_dependency_tree(self, specs) -> tuple:
        """
        Builds the dependency tree for expressions based on the references in
        their respective expressions.
        """
        # Maps a result_id to the set of result_ids that depend on it
        # (its outgoing edges in the dependency graph)
        dependencies = defaultdict(set)
        result_ids = set()  # A set to track all result_ids

        # Get a list of all result_ids.
        for spec in specs:
            result_id = spec["id"]
            result_ids.add(result_id)

        for spec in specs:
            result_id = spec["id"]
            expression = spec["expression"]

            # Find the result_ids that this expression depends on
            dependent_expressions = self.get_dependent_expressions(
                expression, result_ids
            )

            if dependent_expressions:
                # Record this expression as a dependent of each result_id it references
                for id in dependent_expressions:
                    dependencies[id].add(result_id)

        return dependencies, result_ids


    def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list:
        """
        Gets all result_ids referenced in the expression.
        """
        # match references of the form 'id_' followed by digits
        ids = re.findall(r"\b(id_\d+)\b", expression)
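        # e.g. "2 * id_12 + id_7" yields ["id_12", "id_7"]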


        # Filter the referenced ids to only those present in all_result_ids
        valid_referenced_ids = []
        for id in ids:
            # get the numeric part after "id_" and check whether it is in all_result_ids
            numeric_id = int(id[3:])
            if numeric_id in all_result_ids:
                valid_referenced_ids.append(numeric_id)

        return valid_referenced_ids


    def topological_sort(self, dependencies: dict, result_ids: set) -> list:
        """
        Performs a topological sort on the specification dependency tree.
        """

        # Track the in-degree for each expression (number of edges coming into it)
        in_degree = defaultdict(int)

        # Count the incoming edges for each expression
        for result_id in result_ids:
            for dep in dependencies[result_id]:
                in_degree[dep] += 1

        # Initialise the queue with result_ids that have no dependencies (in-degree 0)
        queue = deque(
            [result_id for result_id in result_ids if in_degree[result_id] == 0]
        )

        sorted_result_ids = []

        while queue:
            result_id = queue.popleft()
            sorted_result_ids.append(result_id)

            # Decrement each dependent expression's in-degree, and append it to
            # the queue once its in-degree reaches 0.
            for dependent_result_id in dependencies[result_id]:
                in_degree[dependent_result_id] -= 1
                if in_degree[dependent_result_id] == 0:
                    queue.append(dependent_result_id)

        # If any result_ids are left with a non-zero in-degree, a cycle exists, so error.
        if len(sorted_result_ids) != len(result_ids):
            raise ValueError(
                "Cycle detected in the dependency graph. Check that no expression references itself!"
            )

        return sorted_result_ids


    def initialise(self) -> None:
        """
        Expands the arcs and initialises the model.
        """
        # check if initialisation is disabled for this scenario
        if getattr(self.schema, "disable_initialization", False):
            # the per-unit initialisation below is skipped (see init_unit)
            print("Initialisation is disabled for this scenario.")

        # We need to "expand_arcs" to turn each arc into a bidirectional link
        # that actually imposes constraints on the model.
        TransformationFactory("network.expand_arcs").apply_to(self.model)
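        # After expansion, each arc contributes equality constraints that link
        # the matching variables on its source and destination ports.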


        self.timing.step_into("initialise_model")

        # load tear guesses (including var/constraint unfixing & deactivation
        # and/or equality constraint deactivation)
        self.tears.load()

        tears = self.tears._tears
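        # A tear breaks a recycle loop so that the units can be initialised in
        # sequence; the tear guesses stand in for upstream values that have not
        # been computed yet.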


        def init_unit(unit):
            if getattr(self.schema, "disable_initialization", False):
                return

            print(f"Initializing unit {unit}")
            self.timing.add_timing(f"init_{unit.name}")
            # unit.display()
            unit.initialize(outlvl=idaeslog.INFO)
            # unit.report()

        self.timing.add_timing("setup_sequential_decomposition")
        # Use SequentialDecomposition to initialise the model
        seq = SequentialDecomposition(
            # run_first_pass=True,
            iterLim=1,
        )
        seq.set_tear_set(tears)
        # use create_graph to get the order of sequential decomposition, and also to
        # find any units that are not connected to the sequential decomposition
        G = seq.create_graph(self.model)
        order = seq.calculation_order(G)
        seq_blocks = []
        for o in order:
            seq_blocks.append(o[0])
        print("Order of initialisation:", [blk.name for blk in seq_blocks])
        # log the tear guesses before running the decomposition
        # (the guess values themselves were fixed by self.tears.load() above)
        for arc in tears:
            port = arc.destination
            # the guesses used are the initial values of each var
            guesses = {key: get_value(var) for key, var in port.vars.items()}
            print(f"Guess for {port}: {guesses}")

        self.timing.step_into("run_sequential_decomposition")

        # sequential decomposition completes when all vars across port
        # equalities are within tol of each other
        seq.options["tol"] = 1e-2
        # seq.options["solve_tears"] = False
        res = seq.run(self.model, init_unit)

        self.timing.step_out()
        self.timing.add_timing("initialise_disconnected_units")
        # Initialise any unit model that is not connected to the sequential decomposition
        for blk in self.model.fs.component_data_objects(
            Block, descend_into=False, active=True
        ):
            ports = list(blk.component_objects(Port, descend_into=False))
            if len(ports) == 0:
                continue  # if the block has no ports, it is not a unit model
            if blk in seq_blocks:
                continue  # already initialised by sequential decomposition
            init_unit(blk)

        # unfix guess vars
        deactivate_fixed_guesses(self.model.fs.guess_vars)

        self.timing.step_out()


    def serialise(self) -> SolvedFlowsheetSchema:
        self.timing.add_timing("serialise_model")

        initial_values = {}
        for unit_model_id, unit_model in self.unit_models._unit_models.items():
            initial_values[str(unit_model_id)] = to_json(
                unit_model, return_dict=True, wts=StoreSpec.value()
            )

        solved_flowsheet = SolvedFlowsheetSchema(
            id=self.schema.id,
            properties=serialize_properties_map(self.model.fs),
            initial_values=initial_values,
        )

        return solved_flowsheet


    def report_statistics(self) -> None:
        """
        Reports statistics about the model.
        """
        report_statistics(self.model)
        # I think the diagnostics toolbox is taking too long to run, so it is
        # commented out for now.
        # dt = DiagnosticsToolbox(self.model)
        # dt.report_structural_issues()
        # dt.display_overconstrained_set()
        # dt.display_underconstrained_set()

    def diagnose_problems(self) -> None:
        print("=== DIAGNOSTICS ===")
        report_statistics(self.model)
        dt = DiagnosticsToolbox(self.model)
        dt.report_structural_issues()
        dt.display_overconstrained_set()
        dt.display_underconstrained_set()
        # dt.display_components_with_inconsistent_units()
        try:
            dt.compute_infeasibility_explanation()
        except Exception as e:
            print(f"{e}")  # the error is probably because the model is feasible
        dt.report_numerical_issues()
        dt.display_near_parallel_constraints()
        dt.display_variables_at_or_outside_bounds()
        print("=== END DIAGNOSTICS ===")


    def degrees_of_freedom(self) -> int:
        """
        Returns the degrees of freedom of the model.
        """
        return int(degrees_of_freedom(self.model))

    def check_model_valid(self) -> None:
        """
        Checks whether the model is valid by checking the degrees of freedom.
        Raises an exception if the model is not valid.
        """
        self.timing.add_timing("check_model_valid")
        dof = self.degrees_of_freedom()
        if dof != 0:
            # self.model.display()  # prints the vars/constraints for debugging
            raise Exception(
                f"Degrees of freedom is not 0. Degrees of freedom: {dof}"
            )


    def solve(self) -> None:
        """
        Solves the model.
        """
        self.timing.add_timing("solve_model")
        print("=== Starting Solve ===")

        opt = SolverFactory(self.schema.solver_option)

        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000
        try:
            res = opt.solve(self.model, tee=True)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):

                # https://github.com/Pyomo/pyomo/pull/3445
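                # Pyomo can raise this ValueError when no free variables are
                # left for the solver, which we treat as already solved.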

                pass
            else:
                raise e


    def optimize(self) -> None:
        if self.schema.optimizations is None or self.schema.optimizations == []:
            return

        self.timing.add_timing("optimize_model")
        print("=== Starting Optimization ===")

        # ipopt doesn't support multiple objectives, so we need to create a
        # single objective expression. This is done by summing all objectives
        # in the model, adding or subtracting each one based on its sense
        # (minimize or maximize).
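        # e.g. minimising cost while maximising revenue becomes the single
        # objective: minimize (cost - revenue).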

        objective_expr = 0
        for schema in self.schema.optimizations:
            # get the expression component to optimize
            # TODO: this assumes optimisation is run on a steady-state simulation;
            # we need to change how this works to handle dynamics.
            objective_component = self.model.fs.properties_map.get_component(schema.objective)
            # the objective component is a scalar in non-dynamic models;
            # in a dynamic model it is indexed across all time steps.
            # We sum over all time steps so each time step is weighted equally.
            objective = sum(objective_component.values())
            # add or subtract the objective based on the sense
            sense = schema.sense
            if sense == "minimize":
                objective_expr += objective
            else:
                objective_expr -= objective

            # unfix relevant vars (add to degrees of freedom)
            for dof_info in schema.unfixed_variables:
                id = dof_info.id  # id of the propertyValue for this degree of freedom
                var: PropertyComponent = self.model.fs.properties_map.get(id)

                # TODO: may need to handle deactivating constraints,
                # for expressions that are constrained (instead of state vars)
                c = var.corresponding_constraint
                if c is not None:
                    deactivate_components(c)
                else:
                    for i in var.component.values():
                        if isinstance(i, ScalarVar):
                            i.unfix()
                        # Otherwise it is ExpressionData, i.e. already an expression,
                        # so it doesn't need to be unfixed (and any constraint for it
                        # was already handled above).

                # TODO: set attributes for upper and lower bounds of property infos,
                # i.e. use the propertyinfo id.
                # var is either a Variable or an Expression. Set the minimum or
                # maximum bounds for this variable if they are enabled, e.g.
                # self.model.upper_bound_12 = Constraint(expr=var <= upper_bound_value)
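                # Bounds are imposed as explicit constraints (rather than via
                # var.setlb/setub) because the component here may be an
                # Expression, which has no native bounds.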


                upper_bound = dof_info.upper_bound
                lower_bound = dof_info.lower_bound

                c = var.component

                if upper_bound is not None:
                    def upper_bound_rule(model, index):
                        return c[index] <= upper_bound

                    upper_bound_constraint = Constraint(c.index_set(), rule=upper_bound_rule)
                    setattr(self.model, "upper_bound_" + str(id), upper_bound_constraint)

                if lower_bound is not None:
                    def lower_bound_rule(model, index):
                        return c[index] >= lower_bound

                    lower_bound_constraint = Constraint(c.index_set(), rule=lower_bound_rule)
                    setattr(self.model, "lower_bound_" + str(id), lower_bound_constraint)

        # add the objective to the model
        self.model.objective = Objective(expr=objective_expr, sense=minimize)
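        # sense is always minimize: any maximised objectives were negated above.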


        # solve the model with the objective
        opt = SolverFactory(self.schema.solver_option)

        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000

        try:
            res = opt.solve(self.model, tee=True)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise e