Coverage for backend/django/flowsheetInternals/unitops/viewsets/DuplicateSimulationObject.py: 92% of 223 statements
(report generated by coverage.py v7.10.7, created at 2026-05-13 02:47 +0000)
from itertools import chain

from django.db import transaction
from django.db.models import ExpressionWrapper, F, FloatField, Max, Min, Prefetch

from core.auxiliary.enums.unitOpData import SimulationObjectClass
from core.auxiliary.models.PropertySet import PropertySet
from core.auxiliary.models.PropertyInfo import PropertyInfo
from core.auxiliary.models.PropertyValue import PropertyValue, PropertyValueIntermediate
from core.auxiliary.models.ControlValue import ControlValue
from core.auxiliary.models.IndexedItem import IndexedItem
from core.auxiliary.models.RecycleData import RecycleData
from flowsheetInternals.unitops.models.Port import Port
from flowsheetInternals.graphicData.models.groupingModel import Grouping
from flowsheetInternals.graphicData.models.graphicObjectModel import GraphicObject
from flowsheetInternals.unitops.models import SimulationObject
class Coords:
    """Simple 2-D point used for centre and offset calculations."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self):
        return f"Coords(x={self.x}, y={self.y})"
def calc_centre_simulation_objects(simulation_object_collection):
    """Return the midpoint of the bounding box of all graphic objects
    attached to the given simulation objects, as a Coords.

    The box is computed in one aggregate query: min of x/y and max of the
    far edges (x + width, y + height).

    NOTE(review): if no graphic object matches, every aggregate is None and
    the arithmetic below raises TypeError -- callers appear to guarantee a
    non-empty selection with graphics; confirm that invariant.
    """
    bounds = GraphicObject.objects.filter(
        simulationObject__in=simulation_object_collection
    ).aggregate(
        min_x=Min('x'),
        max_edge_x=Max(ExpressionWrapper(F('x') + F('width'), output_field=FloatField())),
        min_y=Min('y'),
        max_edge_y=Max(ExpressionWrapper(F('y') + F('height'), output_field=FloatField()))
    )
    centre_x = (bounds['min_x'] + bounds['max_edge_x']) / 2
    centre_y = (bounds['min_y'] + bounds['max_edge_y']) / 2
    return Coords(centre_x, centre_y)
class SimulationObjectDuplicator:
    """Builds duplicates of simulation objects plus their related records.

    Each ``duplicate_*`` method mirrors one family of related models
    (groupings, graphics, ports, recycle data, properties/values/indexed
    items). Copies are built in memory and bulk-inserted either here or by
    the caller, so the caller controls insert ordering for FK integrity.
    """

    def __init__(self, flowsheet_id):
        # Flowsheet every duplicated record is attached to.
        self.flowsheet_id = flowsheet_id

    def create_duplicate_simulation_objects(self, original_simulation_objects):
        """Build unsaved copies of each original, renamed '<name> copy'.

        Returns:
            (original -> duplicate map, list of new unsaved SimulationObjects).
            The caller must bulk_create the list before the duplicates gain pks.
        """
        original_to_duplicate_map = {}
        new_simulation_objects = []
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = SimulationObject(
                flowsheet=original_simulation_object.flowsheet,
                objectType=original_simulation_object.objectType,
                componentName=original_simulation_object.componentName + ' copy'
            )
            original_to_duplicate_map[original_simulation_object] = new_simulation_object
            new_simulation_objects.append(new_simulation_object)
        return original_to_duplicate_map, new_simulation_objects

    def duplicate_groupings(self, original_simulation_objects, original_to_duplicate_map):
        """Copy the Grouping of every Group-type simulation object.

        Returns a map of original grouping pk -> new Grouping, used later to
        re-parent duplicated graphic objects.
        """
        grouping_map = {}
        for original_simulation_object in original_simulation_objects:
            if original_simulation_object.objectType != SimulationObjectClass.Group:
                continue
            original_grouping = getattr(original_simulation_object, "grouping", None)
            if original_grouping is None:
                continue
            new_grouping = Grouping.objects.create(
                flowsheet=original_grouping.flowsheet,
                simulationObject=original_to_duplicate_map[original_simulation_object],
                abstractionType=original_grouping.abstractionType
            )
            grouping_map[original_grouping.pk] = new_grouping
            # NOTE(review): the duplicate grouping shares the ORIGINAL
            # PropertyInfo rows (m2m link, not copies) -- confirm intended.
            property_infos = list(original_grouping.propertyInfos.all())
            if property_infos:
                new_grouping.propertyInfos.set(property_infos)
        return grouping_map

    def duplicate_graphics(self, delta, original_simulation_objects, original_to_duplicate_map, grouping_map):
        """Build unsaved GraphicObject copies, positions offset by ``delta``.

        Also stubs out an unsaved PropertySet for every original that has one;
        the heavy property copy happens later in duplicate_properties().

        Returns:
            (list of unsaved GraphicObjects,
             map of old PropertySet pk -> new unsaved PropertySet).
        """
        all_graphics = []
        property_set_map = {}
        delta_x = delta.x if delta else 0
        delta_y = delta.y if delta else 0
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = original_to_duplicate_map[original_simulation_object]
            try:
                old_set = original_simulation_object.properties
            except PropertySet.DoesNotExist:
                old_set = None

            if old_set is not None:
                new_set = PropertySet(simulationObject=new_simulation_object, flowsheet_id=self.flowsheet_id)
                property_set_map[old_set.pk] = new_set

            for graphic_object in original_simulation_object.graphicObject.all():
                if graphic_object is None:
                    continue
                # Re-parent into the duplicated group when the group was part
                # of the selection; otherwise keep the original group.
                new_group = grouping_map.get(graphic_object.group_id, graphic_object.group)
                new_graphics_object = GraphicObject(
                    simulationObject=new_simulation_object,
                    width=graphic_object.width,
                    height=graphic_object.height,
                    x=graphic_object.x + delta_x,
                    y=graphic_object.y + delta_y,
                    group=new_group,
                    flowsheet_id=self.flowsheet_id,
                    visible=graphic_object.visible,
                    rotation=graphic_object.rotation,
                    flipped=graphic_object.flipped
                )
                all_graphics.append(new_graphics_object)
        return all_graphics, property_set_map

    def duplicate_port_data(self, original_simulation_objects, original_to_duplicate_map):
        """Build unsaved Port copies for every port of the originals.

        Returns:
            (ports to bulk_create,
             an always-empty updates list kept for interface compatibility --
             streams are wired afterwards in update_streams(),
             map of original port pk -> new Port).
        """
        all_ports_to_create = []
        all_ports_updates = []
        all_original_ports = Port.objects.filter(unitOp__in=original_simulation_objects).select_related('unitOp')
        port_map = {}
        for port in all_original_ports:
            new_unitOp = original_to_duplicate_map[port.unitOp]
            new_port = Port(
                displayName=port.displayName,
                key=port.key,
                index=port.index,
                direction=port.direction,
                unitOp=new_unitOp,
                flowsheet_id=self.flowsheet_id
            )
            port_map[port.pk] = new_port
            all_ports_to_create.append(new_port)
        return all_ports_to_create, all_ports_updates, port_map

    def duplicate_recycle_data(self, original_simulation_objects, original_to_duplicate_map):
        """Copy RecycleData rows, re-pointing tearObject at duplicates.

        Inserts the copies immediately and returns (new RecycleData,
        new tear object) pairs for apply_recycle_updates().
        """
        recycle_data_to_create = []
        recycle_updates = []
        for original_simulation_object in original_simulation_objects:
            try:
                original_recycle_data = original_simulation_object.recycleData
            except RecycleData.DoesNotExist:
                continue

            new_simulation_object = original_to_duplicate_map.get(original_simulation_object)
            if not new_simulation_object:
                continue

            tear_object = original_recycle_data.tearObject
            # Only remap the tear target when it was part of the selection.
            new_tear_object = original_to_duplicate_map.get(tear_object) if tear_object else None

            new_recycle_data = RecycleData(
                flowsheet_id=self.flowsheet_id,
                simulationObject=new_simulation_object,
                tearObject=new_tear_object
            )
            recycle_data_to_create.append(new_recycle_data)
            recycle_updates.append((new_recycle_data, new_tear_object))

        if recycle_data_to_create:
            RecycleData.objects.bulk_create(recycle_data_to_create)

        return recycle_updates

    def apply_recycle_updates(self, recycle_updates):
        """Re-apply tear targets once the duplicates have primary keys.

        NOTE(review): assumes RecycleData defines a custom ``update(...)``
        method -- plain Django model instances have no ``update()``; the
        branch was never exercised in coverage, so confirm before relying on it.
        """
        for recycle_data, tear_object in recycle_updates:
            if tear_object is not None:
                recycle_data.update(tear_object)

    def update_streams(self, original_simulation_objects, original_to_duplicate_map, port_map):
        """Point each duplicated port's stream at the duplicated stream.

        When the connected stream was not part of the selection, the port's
        own duplicated unit op is used as the fallback stream (TODO confirm
        that fallback is intended).
        Returns the list of modified Port instances for bulk_update.
        """
        connected_ports = list(chain.from_iterable(
            [list(sim_obj.connectedPorts.all()) for sim_obj in original_simulation_objects]
        ))
        all_ports_updates = []
        for connected_port in connected_ports:
            if connected_port.unitOp in original_to_duplicate_map:
                new_unitOp = original_to_duplicate_map[connected_port.unitOp]
                new_port = port_map.get(connected_port.pk)
                if new_port:
                    new_port.stream = original_to_duplicate_map.get(connected_port.stream, new_unitOp)
                    all_ports_updates.append(new_port)
        return all_ports_updates

    def duplicate_properties(self, property_set_map):
        """Deep-copy the property graph for every duplicated PropertySet.

        Inserts, in FK-dependency order: PropertySets, PropertyInfos,
        PropertyValues, ControlValues, IndexedItems, and finally the
        value<->indexed-item through records.
        """
        all_property_sets = list(property_set_map.values())
        PropertySet.objects.bulk_create(all_property_sets)

        all_original_set_ids = list(property_set_map.keys())
        original_property_infos = list(PropertyInfo.objects.filter(set_id__in=all_original_set_ids))
        original_property_values = list(
            PropertyValue.objects
            .filter(property_id__in=[property_info.pk for property_info in original_property_infos])
            .prefetch_related('indexedItems')
        )

        # Map each original property value pk to its indexed item pks.
        original_indexed_item_map = {}
        for original_property_value in original_property_values:
            indexed_item_ids = [indexed_item.pk for indexed_item in original_property_value.indexedItems.all()]
            original_indexed_item_map.setdefault(original_property_value.pk, []).extend(indexed_item_ids)

        # Create and bulk-insert property infos.
        property_info_map = {}
        all_property_infos = []
        for original_property_info in original_property_infos:
            new_info = PropertyInfo(
                set=property_set_map[original_property_info.set_id],
                type=original_property_info.type,
                unitType=original_property_info.unitType,
                unit=original_property_info.unit,
                key=original_property_info.key,
                displayName=original_property_info.displayName,
                index=original_property_info.index,
                flowsheet_id=self.flowsheet_id
            )
            property_info_map[original_property_info.pk] = new_info
            all_property_infos.append(new_info)
        PropertyInfo.objects.bulk_create(all_property_infos)

        # Create and bulk-insert property values.
        value_map = {}
        all_property_values = []
        for original_property_value in original_property_values:
            new_val = PropertyValue(
                enabled=original_property_value.enabled,
                value=original_property_value.value,
                displayValue=original_property_value.displayValue,
                formula=original_property_value.formula,
                property=property_info_map[original_property_value.property_id],
                flowsheet_id=self.flowsheet_id
            )
            value_map[original_property_value.pk] = new_val
            all_property_values.append(new_val)
        PropertyValue.objects.bulk_create(all_property_values)

        # Duplicate control values linked to the duplicated property values.
        original_value_ids = list(value_map.keys())
        original_control_values = ControlValue.objects.filter(
            setPoint_id__in=original_value_ids
        ).select_related('manipulated', 'setPoint')
        new_control_values = []
        for cv in original_control_values:
            new_setpoint = value_map.get(cv.setPoint_id)        # new property value receiving the set point
            new_manipulated = value_map.get(cv.manipulated_id)  # new property value receiving the manipulated link
            if new_setpoint and new_manipulated:
                new_control_values.append(
                    ControlValue(
                        setPoint=new_setpoint,
                        manipulated=new_manipulated,
                        flowsheet_id=self.flowsheet_id
                    )
                )
        if new_control_values:
            ControlValue.objects.bulk_create(new_control_values)

        # Create and bulk-insert indexed items.
        all_indexed_items = []
        # Gather all unique indexed item IDs from original_indexed_item_map.
        original_indexed_ids = {
            pk for id_list in original_indexed_item_map.values() for pk in id_list
        }
        original_indexed_items = IndexedItem.objects.filter(pk__in=original_indexed_ids)
        # Loop invariant hoisted: the owner was previously recomputed per item.
        # NOTE(review): ALL duplicated indexed items are owned by the FIRST
        # duplicated value's simulation object, even when they belong to
        # values of a different object -- confirm for multi-object selections.
        owner_simulation_object = None
        if original_indexed_ids:
            owner_simulation_object = next(iter(value_map.values())).property.set.simulationObject
        indexed_item_map = {}
        for original_indexed_item in original_indexed_items:
            new_indexed_item = IndexedItem(
                owner=owner_simulation_object,
                key=original_indexed_item.key,
                displayName=original_indexed_item.displayName,
                type=original_indexed_item.type,
                flowsheet_id=self.flowsheet_id
            )
            indexed_item_map[original_indexed_item.pk] = new_indexed_item
            all_indexed_items.append(new_indexed_item)
        IndexedItem.objects.bulk_create(all_indexed_items)

        # Create and bulk-insert the through records linking duplicated
        # property values to their duplicated indexed items.
        all_intermediates = []
        for original_property_value_pk, idx_list in original_indexed_item_map.items():
            for idx_pk in idx_list:
                new_int = PropertyValueIntermediate(
                    propertyvalue_id=value_map[original_property_value_pk].pk,
                    indexeditem_id=indexed_item_map[idx_pk].pk
                )
                all_intermediates.append(new_int)
        PropertyValueIntermediate.objects.bulk_create(all_intermediates)

    def duplicate_packages(self, original_simulation_objects, original_to_duplicate_map):
        """Copy each original's propertyPackageType onto its duplicate.

        Saves one row per object (N+1 writes); acceptable for typical
        selection sizes and kept to preserve existing save() side effects.
        """
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = original_to_duplicate_map[original_simulation_object]
            new_simulation_object.propertyPackageType = original_simulation_object.propertyPackageType
            new_simulation_object.save()
class DuplicateSimulationObject:
    """Request handler that duplicates a selection of simulation objects.

    Expands group selections into their members, then copies every related
    record inside a single transaction, placing the copies centred on the
    requested (x, y).
    """

    def handle_duplication_request(self, flowsheet: int, validated_data):
        """Duplicate the objects named in validated_data['objectIDs'].

        No-op when the selection is empty. All inserts/updates happen inside
        one atomic transaction, ordered so FK targets exist before referrers.
        """
        object_ids = validated_data.get('objectIDs') or []
        if not object_ids:
            return

        with transaction.atomic():
            expanded_ids = self._expand_object_ids(object_ids)
            if not expanded_ids:
                return

            original_simulation_objects = list(
                SimulationObject.objects
                .filter(pk__in=expanded_ids)
                .select_related('flowsheet', 'grouping')
                .prefetch_related(
                    'properties',
                    'graphicObject',
                    Prefetch('connectedPorts', queryset=Port.objects.select_related('unitOp', 'stream')),
                    Prefetch('grouping__graphicObjects', queryset=GraphicObject.objects.select_related('simulationObject')),
                    'grouping__propertyInfos'
                )
            )

            if not original_simulation_objects:
                return

            # Offset = requested drop point minus the centre of the originals,
            # so the copies land centred on (x, y).
            old_centre = calc_centre_simulation_objects(original_simulation_objects)
            new_centre = Coords(validated_data.get('x'), validated_data.get('y'))
            delta = Coords(new_centre.x - old_centre.x, new_centre.y - old_centre.y)

            # Create the bare duplicates first so every later record has an
            # object (with a pk after bulk_create) to point at.
            duplicator = SimulationObjectDuplicator(flowsheet)
            original_to_duplicate_map, new_simulation_objects = duplicator.create_duplicate_simulation_objects(original_simulation_objects)
            SimulationObject.objects.bulk_create(new_simulation_objects)

            recycle_updates = duplicator.duplicate_recycle_data(original_simulation_objects, original_to_duplicate_map)

            grouping_map = duplicator.duplicate_groupings(original_simulation_objects, original_to_duplicate_map)

            # Duplicate the graphics, properties, and packages.
            all_graphics, property_set_map = duplicator.duplicate_graphics(
                delta, original_simulation_objects, original_to_duplicate_map, grouping_map
            )

            duplicator.duplicate_packages(original_simulation_objects, original_to_duplicate_map)

            # Duplicate the ports, then wire their streams.
            all_ports_to_create, all_ports_updates, port_map = duplicator.duplicate_port_data(
                original_simulation_objects, original_to_duplicate_map
            )
            Port.objects.bulk_create(all_ports_to_create)

            ports_updates = duplicator.update_streams(original_simulation_objects, original_to_duplicate_map, port_map)
            Port.objects.bulk_update(ports_updates, ['stream'])

            GraphicObject.objects.bulk_create(all_graphics)
            duplicator.duplicate_properties(property_set_map)
            duplicator.apply_recycle_updates(recycle_updates)

            # NOTE(review): 'flowsheet' and 'objectType' are set at creation
            # and never changed afterwards, so this bulk_update looks like a
            # no-op -- confirm whether a different field set was intended.
            simulation_objects_to_update = list(original_to_duplicate_map.values())
            SimulationObject.objects.bulk_update(simulation_objects_to_update, ['flowsheet', 'objectType'])

    def _expand_object_ids(self, object_ids):
        """Recursively collect all simulation objects contained within selected groups."""
        if not object_ids:
            return set()

        discovered = set()
        queue = set(object_ids)

        while queue:
            batch_ids = list(queue)
            queue.clear()
            queryset = (
                SimulationObject.objects
                .filter(pk__in=batch_ids)
                .select_related('grouping')
                .prefetch_related(
                    Prefetch(
                        'grouping__graphicObjects',
                        queryset=GraphicObject.objects.select_related('simulationObject')
                    )
                )
            )

            for simulation_object in queryset:
                if simulation_object.pk in discovered:
                    continue

                discovered.add(simulation_object.pk)

                grouping = getattr(simulation_object, 'grouping', None)
                if simulation_object.objectType == SimulationObjectClass.Group and grouping is not None:
                    # Enqueue the group's children so nested groups expand too.
                    for graphic_object in grouping.graphicObjects.all():
                        child = graphic_object.simulationObject
                        if child and child.pk not in discovered:
                            queue.add(child.pk)

        return discovered