Coverage for backend/idaes_service/solver/flowsheet_manager.py: 88%
274 statements
coverage.py v7.10.7, created at 2025-12-18 04:00 +0000
1from collections import defaultdict, deque
2import re
3from pyomo.network import SequentialDecomposition, Port
4from pyomo.environ import (
5 ConcreteModel,
6 TransformationFactory,
7 SolverFactory,
8 Block,
9 Expression,
10 Constraint,
11 Objective,
12 minimize,
13 assert_optimal_termination,
14 ScalarVar,
15 units as pyunits,
16)
17from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint
18from idaes.core import FlowsheetBlock
19from idaes.core.util.model_statistics import report_statistics, degrees_of_freedom
20import idaes.logger as idaeslog
21from idaes.core.util import DiagnosticsToolbox
22from common.models.idaes import FlowsheetSchema
23from common.models.idaes.flowsheet_schema import SolvedFlowsheetSchema
24from .property_package_manager import PropertyPackageManager
25from .port_manager import PortManager
26from .arc_manager import ArcManager
27from .tear_manager import TearManager
28from .unit_model_manager import UnitModelManager
29from .methods.adapter_library import AdapterLibrary
30from .methods.adapter import (
31 serialize_properties_map,
32 deactivate_fixed_guesses,
33 deactivate_component,
34 deactivate_components,
35 add_corresponding_constraint,
36)
37from .methods.expression_parsing import parse_expression, ExpressionParsingError
38from .timing import start_timing
39from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent
40from .properties_manager import PropertiesManager
41from .custom.energy.power_property_package import PowerParameterBlock
42from .custom.energy.ac_property_package import acParameterBlock
43from .custom.energy.transformer_property_package import transformerParameterBlock
44from pyomo.core.base.units_container import units, _PyomoUnit
45from idaes.core.util.model_serializer import StoreSpec, from_json, to_json
47# CSTR Imports from example flowsheet.
48# To be used as placeholders until bespoke functions can be developed.
49from .custom import hda_reaction as reaction_props
50from .custom.hda_ideal_VLE import HDAParameterBlock
51from ..solver.properties_manager import PropertyComponent
52from property_packages.build_package import build_package
54# from amplpy import modules
55# Import required to allow the library to set the PATH and allow conopt to be found.
57def build_flowsheet(dynamic=False,time_set=[0]):
58 """
59 Builds a flowsheet block
60 """
61 # create the model and the flowsheet block
62 model = ConcreteModel()
63 model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
64 model.fs.guess_vars = []
65 model.fs.controlled_vars = Block()
66 # Always add the power property package (there is only one, and there are no alternative property package types)
67 # TODO: should this be on-demand?
68 model.fs.power_pp = PowerParameterBlock()
69 model.fs.ac_pp = acParameterBlock()
70 model.fs.tr_pp = transformerParameterBlock()
71 # properties map: { id: pyomo variable or expression }
72 # used to map symbols for a sympy expression
73 model.fs.properties_map = PropertiesManager()
74 # list of component constraints to add later
75 model.fs.constraint_exprs = []
77 # Placeholder property packages for the CSTR reactor.
78 model.fs.BTHM_params = HDAParameterBlock()
79 # Hard-coded peng-robinson package for the reactor:
80 model.fs.peng_robinson = build_package("peng-robinson",["benzene", "toluene", "hydrogen", "methane"], ["Liq","Vap"])
82 # Reaction package for the HDA reaction
83 model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
84 property_package=model.fs.peng_robinson
85 )
87 return model
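# Illustrative usage (hypothetical values, not taken from any schema): build_flowsheet()
# returns a steady-state model with the single time point 0, while
# build_flowsheet(dynamic=True, time_set=[0, 30, 60]) returns a dynamic model whose
# fs.time set holds those three points, measured in seconds (time_units=units.s).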
90class FlowsheetManager:
91 """
92 Manages the flowsheet, including the property packages, unit models, ports, arcs, and tears
93 Includes methods to load, initialise, and solve the flowsheet
94 """
96 def __init__(self, schema: FlowsheetSchema) -> None:
97 """
98 Stores all relevant information about the flowsheet, without actually loading it
99 """
100 self.timing = start_timing()
101 self.timing.add_timing("initialise_flowsheet_manager")
103 self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)
105 self.schema = schema
106 # Add property packages first, so that unit models can use them
107 self.property_packages = PropertyPackageManager(self)
108 # Add the port manager, so the unit models can register their ports
109 self.ports = PortManager()
110 # Add unit models
111 self.unit_models = UnitModelManager(self)
112 # Add arcs to connect the unit models together
113 self.arcs = ArcManager(self)
114 # set certain arcs as tears
115 self.tears = TearManager(self)
117 def load(self) -> None:
118 """
119 Parses the schema and loads the model
120 """
121 self.timing.step_into("load_flowsheet")
122 # Load property packages first, so that unit models can use them
123 self.timing.add_timing("load_property_packages")
124 self.property_packages.load()
125 self.timing.step_into("load_unit_models")
126 self.unit_models.load()
127 self.timing.step_out()
128 # no need to load ports separately, they are loaded by the unit models
129 # Load arcs to connect the unit models together
130 self.arcs.load()
131 # load any expressions
132 self.load_specs()
133 # if dynamic, apply the finite difference transformation
134 if self.schema.dynamic:
135 print("performing finite difference with", len(self.model.fs.time), "time steps")
136 TransformationFactory("dae.finite_difference").apply_to(
137 self.model.fs,
138 nfe=len(self.model.fs.time)-1, # Number of finite elements for the discretization. We don't add extra steps because our constraints don't support them.
139 wrt=self.model.fs.time,
140 scheme="BACKWARD"
141 )
144 self.timing.step_out()
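    # Note on the discretisation above (a sketch, assuming the usual Pyomo DAE behaviour):
    # with time_set=[0, 30, 60] the transformation uses nfe = 2 finite elements, and the
    # BACKWARD scheme approximates each derivative as
    #     dx/dt at t_k  ~  (x[t_k] - x[t_{k-1}]) / (t_k - t_{k-1})
    # i.e. backward Euler, adding one discretisation constraint per state per non-initial time point.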
146 def load_specs(self) -> None:
147 """
148 Loads expressions from the schema
149 """
150 fs = self.model.fs
151 specs = self.schema.expressions or []
153 ## Build dependency tree and sort specs before loading.
154 dependencies, result_ids = self.build_spec_dependency_tree(specs)
155 sorted_result_ids = self.topological_sort(dependencies, result_ids)
157 # load the specs (expressions within specifications tab)
158 for result_id in sorted_result_ids:
159 spec_config = next(spec for spec in specs if spec["id"] == result_id)
160 expression_str = spec_config["expression"]
161 try:
162 component_name = f"{spec_config['name']}_{spec_config['id']}"
163 def expression_rule(blk, time_index):
164 return parse_expression(expression_str, fs,time_index)
166 component = Expression(fs.time, rule=expression_rule)
167 fs.add_component(component_name, component)
168 fs.properties_map.add(
169 spec_config["id"], component, component.name, unknown_units=True
170 )
171 except ExpressionParsingError as e:
172 raise ExpressionParsingError(f"{e} when parsing expression '{expression_str}' for {component_name}: ")
174 # load constraints (expressions for specific property infos)
175 # can only handle equality constraints for now
176 for component, expr_str, id in fs.constraint_exprs:
177 # get the time index
178 #for time_index in fs.time:
179 if component.index_set().dimen != 0 and component.index_set() != fs.time:
180 raise ExpressionParsingError(f"Cannot add constraint for {component}: only time-indexed components are supported.")
181 try:
182 def constraint_rule(blk, time_index):
183 expression = parse_expression(expr_str, fs,time_index)
184 # make sure the units of the expression are the same as the component
185 u1, u2 = get_attached_unit(component), get_attached_unit(expression)
186 if not check_units_equivalent(u1, u2):
187 raise ValueError(
188 f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
189 )
190 return pyunits.convert(component[time_index], to_units=u2) == expression
191 c = Constraint(component.index_set(), rule=constraint_rule)
192 name = f"equality_constraint_{id}"
193 fs.add_component(name, c)
194 add_corresponding_constraint(fs, c, id)
195 except ExpressionParsingError as e:
196 raise ExpressionParsingError(f"Failed to parse constraint expression '{expr_str}' for {component}: {expr_str}, error: {e}")
199 def build_spec_dependency_tree(self, specs) -> tuple:
200 """
201 Builds dependency tree for expressions based on the references in their respective expressions.
202 """
203 dependencies = defaultdict(
204 set
205 ) # Maps a result_id to the set of result_ids whose expressions reference it
206 result_ids = set() # A set to track all result_ids
208 # Get a list of all result_id's.
209 for spec in specs:
210 result_id = spec["id"]
211 result_ids.add(result_id)
213 for spec in specs:
214 result_id = spec["id"]
215 expression = spec["expression"]
217 # Find the result_ids that this expression depends on
218 dependent_expressions = self.get_dependent_expressions(
219 expression, result_ids
220 )
222 if dependent_expressions:
223 # Record this spec as a dependent of every result_id its expression references
224 for id in dependent_expressions:
225 dependencies[id].add(result_id)
227 return dependencies, result_ids
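    # Worked example (hypothetical specs): given
    #     specs = [{"id": 1, "name": "a", "expression": "id_2 + 5"},
    #              {"id": 2, "name": "b", "expression": "3"}]
    # this returns dependencies == {2: {1}} and result_ids == {1, 2}; each key is a
    # prerequisite and its value is the set of specs whose expressions reference it.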
229 def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list:
230 """
231 Gets all result_ids referenced in the expression.
232 """
233 # match result_ids starting with 'id_' followed by numbers
234 ids = re.findall(r"\b(id_\d+)\b", expression)
236 # Filter the referenced_ids to only include those that are in all_result_ids
237 valid_referenced_ids = []
238 for id in ids:
239 # get the numeric part after "id_" and check if it's in the all_result_ids
240 numeric_id = int(id[3:])
241 if numeric_id in all_result_ids:
242 valid_referenced_ids.append(numeric_id)
244 return valid_referenced_ids
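    # Illustrative behaviour (mgr is a FlowsheetManager instance; values are made up):
    #     >>> mgr.get_dependent_expressions("id_3 * 2 + id_7", {3, 7, 9})
    #     [3, 7]
    #     >>> mgr.get_dependent_expressions("id_3 + id_99", {3, 7})
    #     [3]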
246 def topological_sort(self, dependencies: dict, result_ids: set) -> list:
247 """
248 Performs topological sorting on the specification dependency tree.
249 """
250 # Track the in-degree for each expression (number of edges coming into it)
251 in_degree = defaultdict(int)
253 # Count dependencies for each expression
254 for result_id in result_ids:
255 for dep in dependencies[result_id]:
256 in_degree[dep] += 1
258 # Initialise the queue with result_ids that have no dependencies (in-degree 0)
259 dequeue = deque(
260 [result_id for result_id in result_ids if in_degree[result_id] == 0]
261 )
263 sorted_result_ids = []
265 while dequeue:
266 result_id = dequeue.popleft()
267 sorted_result_ids.append(result_id)
269 # loop through and decrement each dependent expression's in_degree; append it to the queue once its in-degree reaches 0.
270 for dependent_result_id in dependencies[result_id]:
271 in_degree[dependent_result_id] -= 1
272 if in_degree[dependent_result_id] == 0:
273 dequeue.append(dependent_result_id)
275 # If there are any result_ids left with non-zero in-degree, a cycle exists so error.
276 if len(sorted_result_ids) != len(result_ids):
277 raise ValueError(
278 "Cycle detected in the dependency graph. Check an expression does not reference itself!"
279 )
281 return sorted_result_ids
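    # Worked example continuing the one above: with dependencies == {2: {1}} and
    # result_ids == {1, 2}, in_degree ends up as {1: 1}, so only 2 starts in the queue;
    # popping 2 releases 1 and the method returns [2, 1], meaning spec 2 is loaded
    # before the spec that references it.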
283 def initialise(self) -> None:
284 """
285 Expands the arcs and initialises the model
286 """
287 # check if initialisation is disabled for this scenario
288 if getattr(self.schema, "disable_initialization", False):
289 # when disable_initialization is True, the per-unit initialisation is skipped (init_unit returns early), but the arcs are still expanded
290 print("Initialisation is disabled for this scenario.")
292 # We need to "expand_arcs" to turn each arc into a bidirectional link that actually imposes equality constraints on the model.
293 TransformationFactory("network.expand_arcs").apply_to(self.model)
295 self.timing.step_into("initialise_model")
297 # load tear guesses (including var/constraint unfixing & deactivation and/or equality constraint deactivation)
298 self.tears.load()
300 tears = self.tears._tears
302 def init_unit(unit):
303 if getattr(self.schema, "disable_initialization", False):
304 return
306 print(f"Initializing unit {unit}")
307 self.timing.add_timing(f"init_{unit.name}")
308 #unit.display()
309 unit.initialize(outlvl=idaeslog.INFO)
310 #unit.report()
312 self.timing.add_timing("setup_sequential_decomposition")
313 # Use SequentialDecomposition to initialise the model
314 seq = SequentialDecomposition(
315 #run_first_pass=True,
316 iterLim=1,
317 )
318 seq.set_tear_set(tears)
319 # use create_graph to get the order of sequential decomposition, and also to
320 # find any units that are not connected to the sequential decomposition
321 G = seq.create_graph(self.model)
322 order = seq.calculation_order(G)
323 seq_blocks = []
324 for o in order:
325 seq_blocks.append(o[0])
326 print("Order of initialisation:", [blk.name for blk in seq_blocks])
327 # report the current values at each tear destination port; these act as the initial guesses for the decomposition
328 for arc in tears:
329 port = arc.destination
330 # guesses used are initial values for each var
331 guesses = {}
332 guesses = {key: get_value(var) for key, var in port.vars.items()}
333 print(f"Guess for {port}: {guesses}")
335 self.timing.step_into("run_sequential_decomposition")
337 # sequential decomposition completes when all vars across port
338 # equalities are within tol of each other
339 seq.options["tol"] = 1e-2
340 # seq.options["solve_tears"] = False
341 res = seq.run(self.model, init_unit)
343 self.timing.step_out()
344 self.timing.add_timing("initialise_disconnected_units")
345 # Initialise any unit model that is not connected to the sequential decomposition
346 for blk in self.model.fs.component_data_objects(
347 Block, descend_into=False, active=True
348 ):
349 ports = list(blk.component_objects(Port, descend_into=False))
350 if len(ports) == 0:
351 continue # if the block has no ports, then it is not a unit model
352 if blk in seq_blocks:
353 continue # already initialised by sequential decomposition
354 init_unit(blk)
356 # unfix guess vars
357 deactivate_fixed_guesses(self.model.fs.guess_vars)
359 self.timing.step_out()
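    # Rough picture of the initialisation above (hypothetical flowsheet): for
    # Feed -> Heater -> Reactor with a recycle Reactor -> Heater declared as a tear,
    # the tear breaks the cycle so calculation_order() yields an acyclic order
    # (Feed, Heater, Reactor); init_unit() runs on each block in that order, the tear
    # destination port values act as the recycle guesses, and any block with ports that
    # never appears in the order is initialised afterwards by the trailing loop.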
361 def serialise(self) -> SolvedFlowsheetSchema:
362 self.timing.add_timing("serialise_model")
364 initial_values = {}
365 for unit_model_id, unit_model in self.unit_models._unit_models.items():
366 initial_values[str(unit_model_id)] = to_json(unit_model, return_dict=True, wts=StoreSpec.value())
368 solved_flowsheet = SolvedFlowsheetSchema(
369 id=self.schema.id,
370 properties=serialize_properties_map(self.model.fs),
371 initial_values=initial_values
372 )
374 return solved_flowsheet
376 def report_statistics(self) -> None:
377 """
378 Reports statistics about the model
379 """
380 report_statistics(self.model)
381 # I think the diagnostics toolbox is taking too long to run, so commenting it out for now.
382 # dt = DiagnosticsToolbox(self.model)
383 # dt.report_structural_issues()
384 # dt.display_overconstrained_set()
385 # dt.display_underconstrained_set()
387 def diagnose_problems(self) -> None:
388 print("=== DIAGNOSTICS ===")
389 report_statistics(self.model)
390 dt = DiagnosticsToolbox(self.model)
391 dt.report_structural_issues()
392 dt.display_overconstrained_set()
393 dt.display_underconstrained_set()
394 #dt.display_components_with_inconsistent_units()
395 try:
396 dt.compute_infeasibility_explanation()
397 except Exception as e:
398 print(f"{e}") # error is probably because it is feasible
399 dt.report_numerical_issues()
400 dt.display_near_parallel_constraints()
401 dt.display_variables_at_or_outside_bounds()
402 print("=== END DIAGNOSTICS ===")
404 def degrees_of_freedom(self) -> int:
405 """
406 Returns the degrees of freedom of the model
407 """
408 return int(degrees_of_freedom(self.model))
410 def check_model_valid(self) -> None:
411 """
412 Checks if the model is valid by checking the
413 degrees of freedom. Will raise an exception if
414 the model is not valid.
415 """
416 self.timing.add_timing("check_model_valid")
417 degrees_of_freedom = self.degrees_of_freedom()
418 if degrees_of_freedom != 0:
419 #self.model.display() # prints the vars/constraints for debugging
420 raise Exception(
421 f"Degrees of freedom is not 0. Degrees of freedom: {degrees_of_freedom}"
422 )
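    # Example (illustrative numbers): a model with 10 unfixed variables but only 8
    # independent equality constraints has 2 degrees of freedom, so check_model_valid()
    # raises until 2 more values are fixed or 2 more constraints are added.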
424 def solve(self) -> None:
425 """
426 Solves the model
427 """
428 self.timing.add_timing("solve_model")
429 print("=== Starting Solve ===")
431 opt = SolverFactory(self.schema.solver_option)
432 # opt.options["max_iter"] = 5000
434 if self.schema.solver_option != "conopt":
435 opt.options["max_iter"] = 1000
436 try:
437 res = opt.solve(self.model, tee=True)
438 assert_optimal_termination(res)
439 except ValueError as e:
440 if str(e).startswith("No variables appear"):
441 # https://github.com/Pyomo/pyomo/pull/3445
442 pass
443 else:
444 raise e
446 def optimize(self) -> None:
447 if self.schema.optimizations is None or self.schema.optimizations == []:
448 return
450 self.timing.add_timing("optimize_model")
451 print("=== Starting Optimization ===")
453 # ipopt doesn't support multiple objectives, so we need to create
454 # a single objective expression.
455 # this is done by summing all objectives in the model, adding
456 # or subtracting based on the sense (minimize or maximize)
457 objective_expr = 0
458 for schema in self.schema.optimizations:
459 # get the expression component to optimize
460 # TODO: this assumes optimisation is run on a steady-state simulation. We need to change how this works to handle dynamics.
461 # For now, the objective simply sums the component over every time step (see below).
462 objective_component = self.model.fs.properties_map.get_component(schema.objective)
463 # the objective component should be a scalar in non-dynamic models
464 # in a dynamic model it'll be indexed across all time steps
465 # We sum up all time steps so each time step is weighted equally.
466 objective = sum(objective_component.values())
467 # add or subtract the objective based on the sense
468 sense = schema.sense
469 if sense == "minimize":
470 objective_expr += objective
471 else:
472 objective_expr -= objective
476 # unfix relevant vars (add to degrees of freedom)
477 for dof_info in schema.unfixed_variables:
478 id = dof_info.id # Id of the propertyValue for this degree of freedom
479 var: PropertyComponent = self.model.fs.properties_map.get(id)
482 # TODO: may need to handle deactivating constraints,
483 # for expressions that are constrained (instead of state vars)
484 c = var.corresponding_constraint
485 if c is not None:
486 # TODO: better typing for constraints
487 if isinstance(c, ScalarConstraint) or isinstance(c, IndexedConstraint):
488 c.deactivate()
489 else:
490 deactivate_components(c)
491 else:
492 for i in var.component.values():
493 if isinstance(i, ScalarVar):
494 i.unfix()
495 # Otherwise it is ExpressionData, i.e. already an expression, so there is nothing to unfix (and any corresponding constraint was handled above).
498 # TODO: set attributes for upper and lower bounds of property infos. i.e. use propertyinfo id.
499 # Var is either a Variable or Expression
500 # set the minimum or maximum bounds for this variable if they are enabled
501 #self.model.upper_bound_12 = Constraint(expr= var <= upper_bound_value )
503 upper_bound = dof_info.upper_bound
504 lower_bound = dof_info.lower_bound
506 c = var.component
508 if upper_bound is not None:
509 def upper_bound_rule(model,index):
510 return c[index] <= upper_bound
511 upper_bound_constraint = Constraint(c.index_set(),rule=upper_bound_rule)
512 setattr(self.model,"upper_bound_" + str(id), upper_bound_constraint)
514 if lower_bound is not None:
515 def lower_bound_rule(model,index):
516 return c[index] >= lower_bound
517 lower_bound_constraint = Constraint(c.index_set(),rule=lower_bound_rule)
518 setattr(self.model,"lower_bound_" + str(id), lower_bound_constraint)
521 # add the objective to the model
522 self.model.objective = Objective(expr=objective_expr, sense=minimize)
524 # solve the model with the objective
525 opt = SolverFactory(self.schema.solver_option)
527 if self.schema.solver_option != "conopt":
528 opt.options["max_iter"] = 1000
530 try:
531 res = opt.solve(self.model, tee=True)
532 assert_optimal_termination(res)
533 except ValueError as e:
534 if str(e).startswith("No variables appear"):
535 # https://github.com/Pyomo/pyomo/pull/3445
536 pass
537 else:
538 raise e
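    # Sketch of the combined objective (hypothetical optimizations schema): with
    #     [(objective=cost, sense="minimize"), (objective=yield, sense="maximize")]
    # the single Objective becomes  minimize( sum_t cost[t] - sum_t yield[t] ),
    # and each unfixed variable with id N gains optional upper_bound_N / lower_bound_N
    # constraints over its index set before the model is re-solved.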