Coverage for backend/django/flowsheetInternals/unitops/viewsets/DuplicateSimulationObject.py: 92%

226 statements  

coverage.py v7.10.7, created at 2026-02-12 01:47 +0000

from django.db import transaction
from django.db.models import Prefetch
from itertools import chain
from core.auxiliary.enums.unitOpData import SimulationObjectClass
from core.auxiliary.models.PropertySet import PropertySet
from core.auxiliary.models.PropertyInfo import PropertyInfo
from core.auxiliary.models.PropertyValue import PropertyValue, PropertyValueIntermediate
from core.auxiliary.models.ControlValue import ControlValue
from core.auxiliary.models.IndexedItem import IndexedItem
from core.auxiliary.models.RecycleData import RecycleData
from flowsheetInternals.propertyPackages.models.SimulationObjectPropertyPackages import SimulationObjectPropertyPackages
from flowsheetInternals.unitops.models.Port import Port
from flowsheetInternals.graphicData.models.groupingModel import Grouping
from flowsheetInternals.graphicData.models.graphicObjectModel import GraphicObject
from flowsheetInternals.unitops.models import SimulationObject

class Coords:
    def __init__(self, x, y):
        self.x = x
        self.y = y
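
# A quick sanity check of the centre calculation below, using hypothetical
# numbers: given two graphic objects, one at (0, 0) sized 10x10 and one at
# (20, 20) sized 10x10, the aggregate yields min_x=0, max_edge_x=30, min_y=0,
# max_edge_y=30, so the bounding-box centre is ((0+30)/2, (0+30)/2) = (15, 15).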

def calc_centre_simulation_objects(simulation_object_collection):
    from django.db.models import Min, Max, F, ExpressionWrapper, FloatField
    aggregated = GraphicObject.objects.filter(
        simulationObject__in=simulation_object_collection
    ).aggregate(
        min_x=Min('x'),
        max_edge_x=Max(ExpressionWrapper(F('x') + F('width'), output_field=FloatField())),
        min_y=Min('y'),
        max_edge_y=Max(ExpressionWrapper(F('y') + F('height'), output_field=FloatField()))
    )
    centre_x = (aggregated['min_x'] + aggregated['max_edge_x']) / 2
    centre_y = (aggregated['min_y'] + aggregated['max_edge_y']) / 2
    return Coords(centre_x, centre_y)
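
# SimulationObjectDuplicator copies a selection in phases: bare
# SimulationObject rows first, then groupings, graphics, ports, recycle data,
# properties, and property packages. Cross-references between duplicates are
# resolved through "original -> duplicate" maps built along the way.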

class SimulationObjectDuplicator:

    def __init__(self, flowsheet_id):
        self.flowsheet_id = flowsheet_id
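
    # Builds unsaved SimulationObject clones (componentName suffixed with
    # ' copy') plus a map from each original to its duplicate; the caller is
    # responsible for bulk-creating the returned list.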

    def create_duplicate_simulation_objects(self, original_simulation_objects):
        original_to_duplicate_map = {}
        new_simulation_objects = []
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = SimulationObject(
                flowsheet=original_simulation_object.flowsheet,
                objectType=original_simulation_object.objectType,
                componentName=original_simulation_object.componentName + ' copy'
            )
            original_to_duplicate_map[original_simulation_object] = new_simulation_object
            new_simulation_objects.append(new_simulation_object)
        return original_to_duplicate_map, new_simulation_objects
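
    # For each selected object of type Group, creates the matching Grouping row
    # and records it keyed by the original grouping's pk so duplicated graphics
    # can be re-parented onto the new group.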

    def duplicate_groupings(self, original_simulation_objects, original_to_duplicate_map):
        grouping_map = {}
        for original_simulation_object in original_simulation_objects:
            if original_simulation_object.objectType != SimulationObjectClass.Group:
                continue
            original_grouping = getattr(original_simulation_object, "grouping", None)
            if original_grouping is None:  # coverage: condition never true in the test suite
                continue
            new_grouping = Grouping.objects.create(
                flowsheet=original_grouping.flowsheet,
                simulationObject=original_to_duplicate_map[original_simulation_object],
                abstractionType=original_grouping.abstractionType
            )
            grouping_map[original_grouping.pk] = new_grouping
            property_infos = list(original_grouping.propertyInfos.all())
            if property_infos:  # coverage: condition never true in the test suite
                new_grouping.propertyInfos.set(property_infos)
        return grouping_map
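
    # Clones each graphic object, shifted by delta so the copies land around
    # the requested paste position, and stages one new PropertySet per
    # duplicated object (bulk-created later in duplicate_properties). Groups
    # are remapped via grouping_map; graphics whose group was not part of the
    # selection keep their original group.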

    def duplicate_graphics(self, delta, original_simulation_objects, original_to_duplicate_map, grouping_map):
        all_graphics = []
        property_set_map = {}
        delta_x = delta.x if delta else 0
        delta_y = delta.y if delta else 0
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = original_to_duplicate_map[original_simulation_object]
            try:
                old_set = original_simulation_object.properties
            except PropertySet.DoesNotExist:
                old_set = None

            if old_set is not None:  # coverage: condition always true in the test suite
                new_set = PropertySet(simulationObject=new_simulation_object, flowsheet_id=self.flowsheet_id)
                property_set_map[old_set.pk] = new_set

            for graphic_object in original_simulation_object.graphicObject.all():
                if graphic_object is None:  # coverage: condition never true in the test suite
                    continue
                new_group = grouping_map.get(graphic_object.group_id, graphic_object.group)
                new_graphics_object = GraphicObject(
                    simulationObject=new_simulation_object,
                    width=graphic_object.width,
                    height=graphic_object.height,
                    x=graphic_object.x + delta_x,
                    y=graphic_object.y + delta_y,
                    group=new_group,
                    flowsheet_id=self.flowsheet_id,
                    visible=graphic_object.visible,
                    rotation=graphic_object.rotation,
                    flipped=graphic_object.flipped
                )
                all_graphics.append(new_graphics_object)
        return all_graphics, property_set_map
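
    # Copies every Port attached to the selected unit ops. Stream references
    # are not set here; update_streams fills them in once all duplicated ports
    # exist.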

    def duplicate_port_data(self, original_simulation_objects, original_to_duplicate_map):
        all_ports_to_create = []
        all_ports_updates = []  # currently always returned empty; stream updates happen in update_streams
        all_original_ports = Port.objects.filter(unitOp__in=original_simulation_objects).select_related('unitOp')
        port_map = {}
        for port in all_original_ports:
            new_unitOp = original_to_duplicate_map[port.unitOp]
            new_port = Port(
                displayName=port.displayName,
                key=port.key,
                index=port.index,
                direction=port.direction,
                unitOp=new_unitOp,
                flowsheet_id=self.flowsheet_id
            )
            port_map[port.pk] = new_port
            all_ports_to_create.append(new_port)
        return all_ports_to_create, all_ports_updates, port_map
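
    # Duplicates RecycleData rows. The tear object is remapped when it was part
    # of the selection; otherwise the duplicate is created with tearObject=None.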

    def duplicate_recycle_data(self, original_simulation_objects, original_to_duplicate_map):
        recycle_data_to_create = []
        recycle_updates = []
        for original_simulation_object in original_simulation_objects:
            try:
                original_recycle_data = original_simulation_object.recycleData
            except RecycleData.DoesNotExist:
                continue

            new_simulation_object = original_to_duplicate_map.get(original_simulation_object)
            if not new_simulation_object:  # coverage: condition never true in the test suite
                continue

            tear_object = original_recycle_data.tearObject
            new_tear_object = original_to_duplicate_map.get(tear_object) if tear_object else None

            new_recycle_data = RecycleData(
                flowsheet_id=self.flowsheet_id,
                simulationObject=new_simulation_object,
                tearObject=new_tear_object
            )
            recycle_data_to_create.append(new_recycle_data)
            recycle_updates.append((new_recycle_data, new_tear_object))

        if recycle_data_to_create:
            RecycleData.objects.bulk_create(recycle_data_to_create)

        return recycle_updates
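
    # Applies the deferred tear-object updates. This relies on a RecycleData
    # instance method named update (presumably project-specific, not Django's
    # QuerySet.update); per the coverage data the branch is never exercised.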

    def apply_recycle_updates(self, recycle_updates):
        for recycle_data, tear_object in recycle_updates:
            if tear_object is not None:  # coverage: condition never true in the test suite
                recycle_data.update(tear_object)
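
    # Re-points the duplicated ports' stream references: when a connected
    # port's stream was itself duplicated, the new port receives the duplicate;
    # otherwise it falls back to the duplicated unit op itself.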

    def update_streams(self, original_simulation_objects, original_to_duplicate_map, port_map):
        connected_ports = list(chain.from_iterable(
            sim_obj.connectedPorts.all() for sim_obj in original_simulation_objects
        ))
        all_ports_updates = []
        for connected_port in connected_ports:
            if connected_port.unitOp in original_to_duplicate_map:
                new_unitOp = original_to_duplicate_map[connected_port.unitOp]
                new_port = port_map.get(connected_port.pk)
                if new_port:  # coverage: condition always true in the test suite
                    new_port.stream = original_to_duplicate_map.get(connected_port.stream, new_unitOp)
                    all_ports_updates.append(new_port)
        return all_ports_updates
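
    # Deep-copies the property graph stage by stage: PropertySet ->
    # PropertyInfo -> PropertyValue -> ControlValue / IndexedItem, plus the
    # PropertyValueIntermediate rows that link values to indexed items. Each
    # stage bulk-creates its objects and keeps an "original pk -> new instance"
    # map so the next stage can rewire its foreign keys.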

    def duplicate_properties(self, property_set_map):
        all_property_sets = list(property_set_map.values())
        PropertySet.objects.bulk_create(all_property_sets)

        all_original_set_ids = list(property_set_map.keys())
        original_property_infos = list(PropertyInfo.objects.filter(set_id__in=all_original_set_ids))
        original_property_values = list(
            PropertyValue.objects
            .filter(property_id__in=[property_info.pk for property_info in original_property_infos])
            .prefetch_related('indexedItems')
        )

        # creates a map of original property values to their associated indexed items
        original_indexed_item_map = {}
        for original_property_value in original_property_values:
            indexed_item_ids = [indexed_item.pk for indexed_item in original_property_value.indexedItems.all()]
            if original_property_value.pk not in original_indexed_item_map:  # coverage: condition always true in the test suite
                original_indexed_item_map[original_property_value.pk] = []
            original_indexed_item_map[original_property_value.pk] += indexed_item_ids

        # creates and bulk inserts property infos
        property_info_map = {}
        all_property_infos = []
        for original_property_info in original_property_infos:
            new_info = PropertyInfo(
                set=property_set_map[original_property_info.set_id],
                type=original_property_info.type,
                unitType=original_property_info.unitType,
                unit=original_property_info.unit,
                key=original_property_info.key,
                displayName=original_property_info.displayName,
                index=original_property_info.index,
                flowsheet_id=self.flowsheet_id
            )
            property_info_map[original_property_info.pk] = new_info
            all_property_infos.append(new_info)
        PropertyInfo.objects.bulk_create(all_property_infos)

        # creates and bulk inserts property values
        value_map = {}
        all_property_values = []
        for original_property_value in original_property_values:
            new_val = PropertyValue(
                enabled=original_property_value.enabled,
                value=original_property_value.value,
                displayValue=original_property_value.displayValue,
                formula=original_property_value.formula,
                property=property_info_map[original_property_value.property_id],
                flowsheet_id=self.flowsheet_id
            )
            value_map[original_property_value.pk] = new_val
            all_property_values.append(new_val)
        PropertyValue.objects.bulk_create(all_property_values)

        # duplicate control values linked to the property values
        original_value_ids = list(value_map.keys())
        original_control_values = ControlValue.objects.filter(
            setPoint_id__in=original_value_ids
        ).select_related('manipulated', 'setPoint')
        new_control_values = []
        for cv in original_control_values:
            new_setpoint = value_map.get(cv.setPoint_id)  # new property value to attach the set point to
            new_manipulated = value_map.get(cv.manipulated_id)  # new property value to attach the manipulated variable to
            if new_setpoint and new_manipulated:  # coverage: condition always true in the test suite
                new_control_values.append(
                    ControlValue(
                        setPoint=new_setpoint,
                        manipulated=new_manipulated,
                        flowsheet_id=self.flowsheet_id
                    )
                )
        if new_control_values:
            ControlValue.objects.bulk_create(new_control_values)

        # creates and bulk inserts indexed items
        all_indexed_items = []
        # gather all unique indexed item IDs from the original_indexed_item_map
        original_indexed_ids = {
            pk for id_list in original_indexed_item_map.values() for pk in id_list
        }
        original_indexed_items = IndexedItem.objects.filter(pk__in=original_indexed_ids)
        indexed_item_map = {}
        for original_indexed_item in original_indexed_items:
            # note: this expression is loop-invariant, so every duplicated indexed
            # item is assigned the owner of the first duplicated property value
            owner_simulation_object = list(value_map.values())[0].property.set.simulationObject
            new_indexed_item = IndexedItem(
                owner=owner_simulation_object,
                key=original_indexed_item.key,
                displayName=original_indexed_item.displayName,
                type=original_indexed_item.type,
                flowsheet_id=self.flowsheet_id
            )
            indexed_item_map[original_indexed_item.pk] = new_indexed_item
            all_indexed_items.append(new_indexed_item)
        IndexedItem.objects.bulk_create(all_indexed_items)
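
        # The loop below writes the many-to-many links directly through the
        # intermediate model; conceptually it is the bulk equivalent of calling
        # new_value.indexedItems.add(new_item) per pair (hypothetical
        # illustration; the exact manager API depends on the through model).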

        # creates and bulk inserts intermediary records
        # linking duplicated property values to their associated indexed items
        all_intermediates = []
        for original_property_value_pk, idx_list in original_indexed_item_map.items():
            for idx_pk in idx_list:
                new_int = PropertyValueIntermediate(
                    propertyvalue_id=value_map[original_property_value_pk].pk,
                    indexeditem_id=indexed_item_map[idx_pk].pk
                )
                all_intermediates.append(new_int)
        PropertyValueIntermediate.objects.bulk_create(all_intermediates)

    def duplicate_packages(self, original_simulation_objects, original_to_duplicate_map):
        new_property_packages = []
        for original_simulation_object in original_simulation_objects:
            new_simulation_object = original_to_duplicate_map[original_simulation_object]
            for original_package in original_simulation_object.propertyPackages.all():
                new_property_packages.append(
                    SimulationObjectPropertyPackages(
                        simulationObject=new_simulation_object,
                        name=original_package.name,
                        propertyPackage=original_package.propertyPackage,
                        flowsheet_id=self.flowsheet_id
                    )
                )
        SimulationObjectPropertyPackages.objects.bulk_create(new_property_packages)
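
# Request handler: expands any selected groups into their members, loads the
# originals with related data prefetched, then runs the duplicator phases
# inside a single transaction so a failure leaves no partial copy behind.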

class DuplicateSimulationObject:
    def handle_duplication_request(self, flowsheet: int, validated_data):
        object_ids = validated_data.get('objectIDs') or []
        if not object_ids:  # coverage: condition never true in the test suite
            return

        with transaction.atomic():
            expanded_ids = self._expand_object_ids(object_ids)
            if not expanded_ids:  # coverage: condition never true in the test suite
                return

            original_simulation_objects = list(
                SimulationObject.objects
                .filter(pk__in=expanded_ids)
                .select_related('flowsheet', 'grouping')
                .prefetch_related(
                    'properties',
                    'propertyPackages',
                    'graphicObject',
                    Prefetch('connectedPorts', queryset=Port.objects.select_related('unitOp', 'stream')),
                    Prefetch('grouping__graphicObjects', queryset=GraphicObject.objects.select_related('simulationObject')),
                    'grouping__propertyInfos'
                )
            )

            if not original_simulation_objects:  # coverage: condition never true in the test suite
                return

            # calculate the centre of the original simulation objects
            old_centre = calc_centre_simulation_objects(original_simulation_objects)
            # calculate the new centre of the duplicated simulation objects
            new_centre = Coords(validated_data.get('x'), validated_data.get('y'))
            delta = Coords(new_centre.x - old_centre.x, new_centre.y - old_centre.y)
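
            # Worked example with hypothetical numbers: if the selection's
            # bounding box is centred at (10, 20) and the paste request targets
            # (110, 70), then delta = (100, 50) and every duplicated graphic is
            # shifted by that amount in duplicate_graphics.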

            # create a new simulation object for each original simulation object
            duplicator = SimulationObjectDuplicator(flowsheet)
            original_to_duplicate_map, new_simulation_objects = duplicator.create_duplicate_simulation_objects(
                original_simulation_objects
            )
            SimulationObject.objects.bulk_create(new_simulation_objects)

            recycle_updates = duplicator.duplicate_recycle_data(original_simulation_objects, original_to_duplicate_map)

            grouping_map = duplicator.duplicate_groupings(original_simulation_objects, original_to_duplicate_map)

            # duplicate the graphics, properties, and packages
            all_graphics, property_set_map = duplicator.duplicate_graphics(
                delta, original_simulation_objects, original_to_duplicate_map, grouping_map
            )

            duplicator.duplicate_packages(original_simulation_objects, original_to_duplicate_map)

            # duplicate the ports and update the streams
            all_ports_to_create, all_ports_updates, port_map = duplicator.duplicate_port_data(
                original_simulation_objects, original_to_duplicate_map
            )
            Port.objects.bulk_create(all_ports_to_create)

            ports_updates = duplicator.update_streams(original_simulation_objects, original_to_duplicate_map, port_map)
            Port.objects.bulk_update(ports_updates, ['stream'])

            GraphicObject.objects.bulk_create(all_graphics)
            duplicator.duplicate_properties(property_set_map)
            duplicator.apply_recycle_updates(recycle_updates)

            simulation_objects_to_update = list(original_to_duplicate_map.values())
            SimulationObject.objects.bulk_update(simulation_objects_to_update, ['flowsheet', 'objectType'])
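
    # The expansion below is a breadth-first traversal: each pass fetches a
    # whole batch of ids in one query, and members of any Group found are
    # queued for the next pass, so nesting depth costs one query per level
    # rather than one per object.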

    def _expand_object_ids(self, object_ids):
        """Recursively collect all simulation objects contained within selected groups."""
        if not object_ids:  # coverage: condition never true in the test suite
            return set()

        discovered = set()
        queue = set(object_ids)

        while queue:
            batch_ids = list(queue)
            queue.clear()
            queryset = (
                SimulationObject.objects
                .filter(pk__in=batch_ids)
                .select_related('grouping')
                .prefetch_related(
                    Prefetch(
                        'grouping__graphicObjects',
                        queryset=GraphicObject.objects.select_related('simulationObject')
                    )
                )
            )

            for simulation_object in queryset:
                if simulation_object.pk in discovered:
                    continue

                discovered.add(simulation_object.pk)

                grouping = getattr(simulation_object, 'grouping', None)
                if simulation_object.objectType == SimulationObjectClass.Group and grouping is not None:
                    for graphic_object in grouping.graphicObjects.all():
                        child = graphic_object.simulationObject
                        if child and child.pk not in discovered:  # coverage: condition always true in the test suite
                            queue.add(child.pk)

        return discovered