Coverage for backend/ahuora-builder/src/ahuora_builder/flowsheet_manager.py: 88%
280 statements
« prev ^ index » next coverage.py v7.10.7, created at 2026-03-26 20:57 +0000
« prev ^ index » next coverage.py v7.10.7, created at 2026-03-26 20:57 +0000
1from collections import defaultdict, deque
2import re
3from pyomo.network import SequentialDecomposition, Port
4from pyomo.environ import (
5 ConcreteModel,
6 TransformationFactory,
7 SolverFactory,
8 Block,
9 Expression,
10 Constraint,
11 Objective,
12 minimize,
13 assert_optimal_termination,
14 ScalarVar,
15 units as pyunits,
16 TerminationCondition
17)
18from pyomo.core.base.constraint import ScalarConstraint, IndexedConstraint
19from idaes.core import FlowsheetBlock
20from idaes.core.util.model_statistics import report_statistics, degrees_of_freedom
21import idaes.logger as idaeslog
22from idaes.core.util import DiagnosticsToolbox
23from ahuora_builder_types import FlowsheetSchema
24from ahuora_builder_types.flowsheet_schema import SolvedFlowsheetSchema
25from .property_package_manager import PropertyPackageManager
26from .port_manager import PortManager
27from .arc_manager import ArcManager
28from .tear_manager import TearManager
29from .unit_model_manager import UnitModelManager
30from .methods.adapter_library import AdapterLibrary
31from .methods.adapter import (
32 serialize_properties_map,
33 deactivate_fixed_guesses,
34 deactivate_component,
35 deactivate_components,
36 add_corresponding_constraint,
37)
38from .methods.expression_parsing import parse_expression, ExpressionParsingError
39from .timing import start_timing
40from .methods.units_handler import get_value, get_attached_unit, check_units_equivalent
41from .methods.scaling_suffix import sanitize_scaling_suffix
42from .properties_manager import PropertiesManager
43from .custom.energy.power_property_package import PowerParameterBlock
44from .custom.energy.ac_property_package import acParameterBlock
45from .custom.energy.transformer_property_package import transformerParameterBlock
46from pyomo.core.base.units_container import units, _PyomoUnit
47from idaes.core.util.model_serializer import StoreSpec, from_json, to_json
48from .diagnostics.infeasibilities import print_infeasibilities
50# CSTR Imports from example flowsheet.
51# To be used as placeholders until bespoke functions can be developed.
52from .custom import hda_reaction as reaction_props
53from .custom.hda_ideal_VLE import HDAParameterBlock
54from ahuora_builder.properties_manager import PropertyComponent
55from ahuora_property_packages.build_package import build_package
58# from amplpy import modules
59# Import required to allow the library to set the PATH and allow conopt to be found.
def build_flowsheet(dynamic=False, time_set=None):
    """
    Build a ConcreteModel containing a FlowsheetBlock plus the shared
    property and reaction packages every flowsheet needs.

    Args:
        dynamic: whether to build a dynamic (time-discretised) flowsheet.
        time_set: list of time points for the flowsheet. Defaults to [0]
            (steady state). A fresh list is created per call to avoid the
            shared-mutable-default-argument pitfall.

    Returns:
        The ConcreteModel with ``model.fs`` populated.
    """
    # avoid a mutable default argument shared across calls
    if time_set is None:
        time_set = [0]
    # create the model and the flowsheet block
    model = ConcreteModel()
    model.fs = FlowsheetBlock(dynamic=dynamic, time_set=time_set, time_units=units.s)
    model.fs.guess_vars = []
    model.fs.controlled_vars = Block()
    # Always add the power property package (as there's only one and there's no different property package types)
    # TODO: should this be on-demand?
    model.fs.power_pp = PowerParameterBlock()
    model.fs.ac_pp = acParameterBlock()
    model.fs.tr_pp = transformerParameterBlock()
    # properties map: { id: pyomo variable or expression }
    # used to map symbols for a sympy expression
    model.fs.properties_map = PropertiesManager()
    # list of component constraints to add later
    model.fs.constraint_exprs = []

    # Placeholder property packages for CSTR reactor.
    model.fs.BTHM_params = HDAParameterBlock()
    # Hard-coded peng-robinson package for reactor:
    model.fs.peng_robinson = build_package(
        "peng-robinson", ["benzene", "toluene", "hydrogen", "methane"], ["Liq", "Vap"]
    )

    # Reaction package for the HDA reaction
    model.fs.reaction_params = reaction_props.HDAReactionParameterBlock(
        property_package=model.fs.peng_robinson
    )

    return model
94class FlowsheetManager:
95 """
96 Manages the flowsheet, including the property packages, unit models, ports, arcs, and tears
97 Includes methods to load, initialise, and solve the flowsheet
98 """
    def __init__(self, schema: FlowsheetSchema) -> None:
        """
        Stores all relevant information about the flowsheet, without actually loading it

        Args:
            schema: the flowsheet description to build from. ``load()`` must
                be called afterwards to actually populate the model.
        """
        self.timing = start_timing()
        self.timing.add_timing("initialise_flowsheet_manager")

        # build the bare model (flowsheet block + shared property packages)
        self.model = build_flowsheet(dynamic=schema.dynamic, time_set=schema.time_set)

        self.schema = schema
        # NOTE: construction order matters — each manager receives `self`
        # and later managers rely on state registered by earlier ones.
        # Add property packages first, so that unit models can use them
        self.property_packages = PropertyPackageManager(self)
        # Add the port manager, so the unit models can register their ports
        self.ports = PortManager()
        # Add unit models
        self.unit_models = UnitModelManager(self)
        # Add arcs to connect the unit models together
        self.arcs = ArcManager(self)
        # set certain arcs as tears
        self.tears = TearManager(self)
        # declared here for typing; populated in load() from model.fs.properties_map
        self.properties_map: PropertiesManager
    def load(self) -> None:
        """
        Parses the schema and loads the model

        Loads property packages, unit models, arcs and spec expressions (in
        that order — unit models need the property packages, arcs need the
        unit models' ports), then applies the finite-difference
        transformation for dynamic flowsheets.
        """
        self.timing.step_into("load_flowsheet")
        # Load property packages first, so that unit models can use them
        self.timing.add_timing("load_property_packages")
        self.property_packages.load()
        self.timing.step_into("load_unit_models")
        self.unit_models.load()
        self.timing.step_out()
        # no need to load ports separately, they are loaded by the unit models
        # Load arcs to connect the unit models together
        self.arcs.load()
        # load any expressions
        self.load_specs()
        # if dynamics, apply finite difference transformation
        if self.schema.dynamic:
            print("performing finite difference with", len(self.model.fs.time), "time steps")
            TransformationFactory("dae.finite_difference").apply_to(
                self.model.fs,
                nfe=len(self.model.fs.time)-1, # Number of finite elements to use for discretization. We aren't adding any extra steps as our constraints dont work for that.
                wrt=self.model.fs.time,
                scheme="BACKWARD"
            )

        self.properties_map = self.model.fs.properties_map
        self.timing.step_out()
    def load_specs(self) -> None:
        """
        Loads expressions from the schema

        Spec expressions are sorted topologically first so that an
        expression referencing another spec (via ``id_<n>``) is created
        after its dependency. Constraint expressions queued in
        ``fs.constraint_exprs`` are then turned into time-indexed equality
        constraints.

        Raises:
            ExpressionParsingError: if an expression cannot be parsed, or a
                constraint targets a non-time-indexed component.
        """
        fs = self.model.fs
        specs = self.schema.expressions or []

        ## Build dependency tree and sort specs before loading.
        dependencies, result_ids = self.build_spec_dependency_tree(specs)
        sorted_result_ids = self.topological_sort(dependencies, result_ids)

        # load the specs (expressions within specifications tab)
        for result_id in sorted_result_ids:
            spec_config = next(spec for spec in specs if spec["id"] == result_id)
            expression_str = spec_config["expression"]
            try:
                component_name = f"{spec_config['name']}_{spec_config['id']}"
                # closure over expression_str is safe: the rule is evaluated
                # when the Expression component is constructed by
                # add_component, within this same loop iteration
                def expression_rule(blk, time_index):
                    return parse_expression(expression_str, fs,time_index)
                component = Expression(fs.time, rule=expression_rule)
                fs.add_component(component_name, component)
                fs.properties_map.add(
                    spec_config["id"], component, component.name, unknown_units=True
                )
            except ExpressionParsingError as e:
                raise ExpressionParsingError(f"{e} when parsing expression '{expression_str}' for {component_name}: ")

        # load constraints (expressions for specific property infos)
        # can only handle equality constraints for now
        for component, expr_str, id in fs.constraint_exprs:
            # get the time index
            #for time_index in fs.time:
            # only scalar or time-indexed components are supported
            if component.index_set().dimen != 0 and component.index_set() != fs.time:
                raise ExpressionParsingError(f"Cannot add constraint for {component}: only time-indexed components are supported.")
            try:
                def constraint_rule(blk, time_index):
                    expression = parse_expression(expr_str, fs,time_index)
                    # make sure the units of the expression are the same as the component
                    u1, u2 = get_attached_unit(component), get_attached_unit(expression)
                    if not check_units_equivalent(u1, u2):
                        raise ValueError(
                            f"Failed to add constraint for {component}: units do not match (expected {u1}, got {u2})"
                        )
                    return pyunits.convert(component[time_index], to_units=u2) == expression
                c = Constraint(component.index_set(), rule= constraint_rule)
                name = f"equality_constraint_{id}"
                fs.add_component(name, c)
                # register the constraint so optimize() can deactivate it later
                add_corresponding_constraint(fs, c, id)
            except ExpressionParsingError as e:
                raise ExpressionParsingError(f"Failed to parse constraint expression '{expr_str}' for {component}: {expr_str}, error: {e}")
204 def build_spec_dependency_tree(self, specs) -> tuple:
205 """
206 Builds dependency tree for expressions based on the references in their respective expressions.
207 """
208 dependencies = defaultdict(
209 set
210 ) # Maps an expression's result_id to a set of result_ids it depends on
211 result_ids = set() # A set to track all result_ids
213 # Get a list of all result_id's.
214 for spec in specs:
215 result_id = spec["id"]
216 result_ids.add(result_id)
218 for spec in specs:
219 result_id = spec["id"]
220 expression = spec["expression"]
222 # Find the result_ids that this expression depends on
223 dependent_expressions = self.get_dependent_expressions(
224 expression, result_ids
225 )
227 if dependent_expressions:
228 # If the expression depends on another result_id, add dependency
229 for id in dependent_expressions:
230 dependencies[id].add(result_id)
232 return dependencies, result_ids
234 def get_dependent_expressions(self, expression: str, all_result_ids: set) -> list:
235 """
236 Gets all result_ids referenced in the expression.
237 """
238 # match result_ids starting with 'id_' followed by numbers
239 ids = re.findall(r"\b(id_\d+)\b", expression)
241 # Filter the referenced_ids to only include those that are in all_result_ids
242 valid_referenced_ids = []
243 for id in ids:
244 # get the numeric part after "id_" and check if it's in the all_result_ids
245 numeric_id = int(id[3:])
246 if numeric_id in all_result_ids:
247 valid_referenced_ids.append(numeric_id)
249 return valid_referenced_ids
251 def topological_sort(self, dependencies: dict, result_ids: set) -> list:
252 """
253 Performs topological sorting on the specification dependency tree.
254 """
255 # Track teh in-degree count for all expressions (edges coming into it)
256 in_degree = defaultdict(int)
258 # Count dependencies for each expression
259 for result_id in result_ids:
260 for dep in dependencies[result_id]:
261 in_degree[dep] += 1
263 # Initialise the queue with result_ids that have no dependencies (in-degree 0)
264 dequeue = deque(
265 [result_id for result_id in result_ids if in_degree[result_id] == 0]
266 )
268 sorted_result_ids = []
270 while dequeue:
271 result_id = dequeue.popleft()
272 sorted_result_ids.append(result_id)
274 # loop thtough and decrement each dependent expression's in_degree, and append it to the deque if it has an in-degree of 0.
275 for dependent_result_id in dependencies[result_id]:
276 in_degree[dependent_result_id] -= 1
277 if in_degree[dependent_result_id] == 0: 277 ↛ 275line 277 didn't jump to line 275 because the condition on line 277 was always true
278 dequeue.append(dependent_result_id)
280 # If there are any result_ids left with non-zero in-degree, a cycle exists so error.
281 if len(sorted_result_ids) != len(result_ids): 281 ↛ 282line 281 didn't jump to line 282 because the condition on line 281 was never true
282 raise ValueError(
283 "Cycle detected in the dependency graph. Check an expression does not reference itself!"
284 )
286 return sorted_result_ids
    def initialise(self) -> None:
        """
        Expands the arcs and initialises the model

        Uses Pyomo's SequentialDecomposition over the flowsheet graph (with
        the configured tear arcs) to initialise each unit in dependency
        order, then initialises any unit model not reachable through the
        decomposition. Finally unfixes the tear-guess variables.
        """
        # check if initialisation is disabled for this scenario
        if getattr(self.schema, "disable_initialization", False):
            # if disable initialisation is set to True, then we don't need to initialise the model
            # (note: execution still continues — init_unit below re-checks the
            # flag and returns early for every unit)
            print("Initialisation is disabled for this scenario.")

        # We need to "expand_arcs" to make them a bidirection link that actually imposes constraints on the model.
        TransformationFactory("network.expand_arcs").apply_to(self.model)

        self.timing.step_into("initialise_model")

        # load tear guesses (including var/constraint unfixing & deactivation and/or equality constraint deactivation)
        self.tears.load()

        tears = self.tears._tears

        def init_unit(unit):
            # callback passed to SequentialDecomposition.run for each unit
            if getattr(self.schema, "disable_initialization", False):
                return

            print(f"Initializing unit {unit}")
            self.timing.add_timing(f"init_{unit.name}")
            #unit.display()
            unit.initialize(outlvl=idaeslog.INFO)
            #unit.report()

        self.timing.add_timing("setup_sequential_decomposition")
        # Use SequentialDecomposition to initialise the model
        seq = SequentialDecomposition(
            #run_first_pass=True,
            iterLim=1,
        )
        seq.set_tear_set(tears)
        # use create_graph to get the order of sequential decomposition, and also to
        # find any units that are not connected to the sequential decomposition
        G = seq.create_graph(self.model)
        order = seq.calculation_order(G)
        seq_blocks = []
        for o in order:
            seq_blocks.append(o[0])
        print("Order of initialisation:", [blk.name for blk in seq_blocks])
        # set all the tear guesses before running the decomposition
        for arc in tears:
            port = arc.destination
            # guesses used are initial values for each var
            guesses = {}
            guesses = {key: get_value(var) for key, var in port.vars.items()}
            print(f"Guess for {port}: {guesses}")
            # NOTE(review): `guesses` is built and printed but never handed to
            # `seq` (e.g. via seq.set_guesses_for(port, guesses)) — confirm
            # whether that call was intended here.

        self.timing.step_into("run_sequential_decomposition")

        # sequential decomposition completes when all vars across port
        # equalities are within tol of each other
        seq.options["tol"] = 1e-2
        # seq.options["solve_tears"] = False
        res = seq.run(self.model, init_unit)

        self.timing.step_out()
        self.timing.add_timing("initialise_disconnected_units")
        # Initialise any unit model that is not connected to the sequential decomposition
        for blk in self.model.fs.component_data_objects(
            Block, descend_into=False, active=True
        ):
            ports = list(blk.component_objects(Port, descend_into=False))
            if len(ports) == 0:
                continue # if the block has no ports, then it is not a unit model
            if blk in seq_blocks:
                continue # already initialised by sequential decomposition
            init_unit(blk)

        # unfix guess vars
        deactivate_fixed_guesses(self.model.fs.guess_vars)

        self.timing.step_out()
367 def serialise(self) -> SolvedFlowsheetSchema:
368 self.timing.add_timing("serialise_model")
370 initial_values = {}
371 for unit_model_id, unit_model in self.unit_models._unit_models.items():
372 initial_values[str(unit_model_id)] = to_json(unit_model, return_dict=True, wts=StoreSpec.value())
374 solved_flowsheet = SolvedFlowsheetSchema(
375 group_id=self.schema.group_id,
376 properties=serialize_properties_map(self.model.fs),
377 initial_values=initial_values
378 )
380 return solved_flowsheet
    def report_statistics(self) -> None:
        """
        Reports statistics about the model

        Prints model size statistics via the IDAES `report_statistics`
        helper. The heavier DiagnosticsToolbox checks live in
        `diagnose_problems` instead.
        """
        report_statistics(self.model)
        # I think the diagnostics toolbox is taking too long to run, so commenting it out for now.
        # dt = DiagnosticsToolbox(self.model)
        # dt.report_structural_issues()
        # dt.display_overconstrained_set()
        # dt.display_underconstrained_set()
    def diagnose_problems(self) -> None:
        """
        Run the IDAES DiagnosticsToolbox over the model and print a full
        structural and numerical diagnostics report to stdout.
        """
        print("=== DIAGNOSTICS ===")
        report_statistics(self.model)
        dt = DiagnosticsToolbox(self.model)
        dt.report_structural_issues()
        dt.display_overconstrained_set()
        dt.display_underconstrained_set()
        #dt.display_components_with_inconsistent_units()
        try:
            dt.compute_infeasibility_explanation()
        except Exception as e:
            # best-effort diagnostics: swallow and report rather than abort
            print(f"{e}") # error is probably because it is feasible
        dt.report_numerical_issues()
        dt.display_near_parallel_constraints()
        dt.display_variables_at_or_outside_bounds()
        print("=== END DIAGNOSTICS ===")
410 def degrees_of_freedom(self) -> int:
411 """
412 Returns the degrees of freedom of the model
413 """
414 return int(degrees_of_freedom(self.model))
416 def check_model_valid(self) -> None:
417 """
418 Checks if the model is valid by checking the
419 degrees of freedom. Will raise an exception if
420 the model is not valid.
421 """
422 self.timing.add_timing("check_model_valid")
423 degrees_of_freedom = self.degrees_of_freedom()
424 if degrees_of_freedom != 0:
425 #self.model.display() # prints the vars/constraints for debugging
426 raise Exception(
427 f"Degrees of freedom is not 0. Degrees of freedom: {degrees_of_freedom}"
428 )
    def solve(self) -> None:
        """
        Solves the model

        On a non-optimal termination the per-constraint infeasibilities are
        printed before `assert_optimal_termination` raises. A known Pyomo
        ValueError ("No variables appear...") is deliberately swallowed.
        """
        self.timing.add_timing("solve_model")
        print("=== Starting Solve ===")

        opt = SolverFactory(self.schema.solver_option)
        # opt.options["max_iter"] = 5000

        # max_iter is skipped for conopt — presumably it doesn't accept the
        # ipopt-style option name; confirm against the solver interface
        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000
        try:
            res = opt.solve(self.model, tee=True)
            if res.solver.termination_condition != TerminationCondition.optimal:
                # dump infeasibilities for debugging before raising below
                print_infeasibilities(self.model.fs.properties_map)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise e
    def optimize(self) -> None:
        """
        Solve the model against the schema's optimization objectives.

        Builds a single combined objective (sum of minimized terms minus
        maximized terms, since the solver only supports one objective),
        unfixes the requested degrees of freedom, adds optional bound
        constraints, then re-solves the model. No-op when the schema has no
        optimizations.
        """
        if self.schema.optimizations is None or self.schema.optimizations == []:
            return

        self.timing.add_timing("optimize_model")
        print("=== Starting Optimization ===")

        # ipopt doesn't support multiple objectives, so we need to create
        # a single objective expression.
        # this is done by summing all objectives in the model, adding
        # or subtracting based on the sense (minimize or maximize)
        objective_expr = 0
        # NOTE(review): the loop variable `schema` shadows the name of
        # `self.schema` locally — a rename would improve clarity
        for schema in self.schema.optimizations:
            # get the expression component to optimize
            # TODO: This is assuming optimisation is run on a steady-state simulation. We need to change how this works to handle dynamics.
            # For now, just hardcoding time_index=0
            objective_component = self.model.fs.properties_map.get_component(schema.objective)
            # the objective component should be a scalar in non-dynamic models
            # in a dynamic model it'll be indexed across all time steps
            # We sum up all time steps so each time step is weighted equally.
            objective = sum(objective_component.values())
            # add or subtract the objective based on the sense
            sense = schema.sense
            if sense == "minimize":
                objective_expr += objective
            else:
                objective_expr -= objective

            # unfix relevant vars (add to degrees of freedom)
            for dof_info in schema.unfixed_variables:
                id = dof_info.id # Id of the propertyValue for this degree of freedom
                var: PropertyComponent = self.model.fs.properties_map.get(id)

                # TODO: may need to handle deactivating constraints,
                # for expressions that are constrained (instead of state vars)
                c = var.corresponding_constraint
                if c is not None:
                    # TODO: better typing for constraints
                    if isinstance(c, ScalarConstraint) or isinstance(c, IndexedConstraint):
                        c.deactivate()
                    else:
                        deactivate_components(c)
                else:
                    for i in var.component.values():
                        if isinstance(i, ScalarVar):
                            i.unfix()
                        # Because if not, it is ExpressionData, meaning it is already an expression and doesn't need to be unfixed. (we've already checked if there is a constraint for it above too.)

                # TODO: set attributes for upper and lower bounds of property infos. i.e. use propertyinfo id.
                # Var is either a Variable or Expression
                # set the minimum or maximum bounds for this variable if they are enabled
                #self.model.upper_bound_12 = Constraint(expr= var <= upper_bound_value )

                upper_bound = dof_info.upper_bound
                lower_bound = dof_info.lower_bound

                # `c` is reused here for the component itself (Var/Expression)
                c = var.component

                # closures over `c`/`upper_bound` are safe: the Constraint is
                # constructed by setattr within this same loop iteration
                if upper_bound is not None:
                    def upper_bound_rule(model,index):
                        return c[index] <= upper_bound
                    upper_bound_constraint = Constraint(c.index_set(),rule=upper_bound_rule)
                    setattr(self.model,"upper_bound_" + str(id), upper_bound_constraint)

                if lower_bound is not None:
                    def lower_bound_rule(model,index):
                        return c[index] >= lower_bound
                    lower_bound_constraint = Constraint(c.index_set(),rule=lower_bound_rule)
                    setattr(self.model,"lower_bound_" + str(id), lower_bound_constraint)

        # add the objective to the model
        # (maximize terms were negated above, so sense is always minimize)
        self.model.objective = Objective(expr=objective_expr, sense=minimize)

        # solve the model with the objective
        opt = SolverFactory(self.schema.solver_option)

        # max_iter skipped for conopt — presumably it doesn't accept the
        # ipopt-style option name; confirm against the solver interface
        if self.schema.solver_option != "conopt":
            opt.options["max_iter"] = 1000

        try:
            res = opt.solve(self.model, tee=True)
            assert_optimal_termination(res)
        except ValueError as e:
            if str(e).startswith("No variables appear"):
                # https://github.com/Pyomo/pyomo/pull/3445
                pass
            else:
                raise e