Coverage for backend/core/auxiliary/views/UploadMSSData.py: 88%
60 statements
« prev ^ index » next coverage.py v7.10.7, created at 2025-11-06 23:27 +0000
« prev ^ index » next coverage.py v7.10.7, created at 2025-11-06 23:27 +0000
1from rest_framework.response import Response
2from core.auxiliary.models.SolveState import SolveValue, SolveState
3from core.auxiliary.serializers.SolveStateSerializer import SolveValueSerializer, SolveStateSerializer
4from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
5from core.auxiliary.models.Expression import Expression
6from rest_framework.decorators import api_view
7from drf_spectacular.utils import extend_schema
8from rest_framework import serializers
9from core.validation import api_view_validate
class UploadDataSerializer(serializers.Serializer):
    """Validates the bulk-upload payload for solve data.

    Expected shape::

        {
            "data": {
                "heater_enthalpy": [1, 2, 3, 4, 5],
                "heater_temperature": [1, 2, 3, 4, 5]
            },
            "flowsheet": 1,
            "scenario": 1
        }
    """
    # Database ids of the target flowsheet and scenario.
    flowsheet = serializers.IntegerField()
    scenario = serializers.IntegerField()
    # Mapping of expression name -> list of float values (one per solve row).
    data = serializers.DictField(
        child=serializers.ListField(child=serializers.FloatField())
    )
@api_view_validate
@extend_schema(request=UploadDataSerializer, responses=None)
@api_view(['POST'])
def upload_data(request) -> Response:
    """Bulk-upload solve data for a flowsheet/scenario.

    Expects a JSON body matching ``UploadDataSerializer``:
    ``{"flowsheet": <id>, "scenario": <id>, "data": {<expr name>: [float, ...]}}``.

    Creates any missing Expression and SolveState rows for the scenario, then
    inserts or updates one SolveValue per (expression, row index).

    Returns 400 on invalid payload or mismatched column lengths, 200 on success.
    """
    try:
        serializer = UploadDataSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        validated_data = serializer.validated_data
        data = validated_data.get('data')
        flowsheet_id = validated_data.get('flowsheet')
        scenario_id = validated_data.get('scenario')
    except Exception as e:
        # NOTE(review): payload is JSON, not CSV — message text kept for
        # client compatibility; confirm before rewording.
        return Response(status=400, data=f"Invalid csv data: {e}")

    # Empty mapping: nothing to create or update. Guard here because the
    # row-count computation below would otherwise raise StopIteration.
    if not data:
        return Response(status=200, data="Data uploaded successfully")

    # All columns must have the same number of rows; the original code took
    # the first column's length and could IndexError (or silently drop
    # values) when lengths differed.
    lengths = {len(values) for values in data.values()}
    if len(lengths) != 1:
        return Response(status=400, data="All data columns must have the same length")
    num_rows = lengths.pop()

    # Step 1: Create any missing expressions under the given scenario/flowsheet.
    Expression.objects.bulk_create(
        [
            Expression(name=key, scenario_id=scenario_id, flowsheet_id=flowsheet_id)
            for key in data
        ],
        ignore_conflicts=True,
    )

    # Step 2: Create solve states for row indices not yet present.
    existing_indices = set(
        SolveState.objects.filter(scenario_id=scenario_id).values_list("index", flat=True)
    )
    new_states = [
        SolveState(index=i, flowsheet_id=flowsheet_id, scenario_id=scenario_id)
        for i in range(num_rows)
        if i not in existing_indices
    ]
    if new_states:
        SolveState.objects.bulk_create(new_states)

    # Re-query so newly created states are included in the index map.
    solve_state_map = {
        s.index: s
        for s in SolveState.objects.filter(scenario_id=scenario_id).order_by("index")
    }

    # Step 3: Fetch expressions with their existing solve values prefetched.
    expressions = Expression.objects.filter(scenario_id=scenario_id).prefetch_related("solveValues")
    expr_map = {expr.name: expr for expr in expressions}

    # Step 4: Map existing values: expression name -> {solve index -> SolveValue}.
    existing_values = {
        expr.name: {sv.solve.index: sv for sv in expr.solveValues.all()}
        for expr in expressions
    }

    # Step 5: Insert or update SolveValues.
    # (Bug fix: the original duplicated this entire loop body, so every
    # pre-existing SolveValue was saved twice per row. The duplicate copy,
    # which also lacked the `else` create-branch, has been removed.)
    for i in range(num_rows):
        solve = solve_state_map[i]
        for expr_name, values in data.items():
            value = values[i]
            expr = expr_map[expr_name]
            existing = existing_values.get(expr_name, {}).get(i)
            if existing:
                existing.value = value
                existing.save()
            else:
                SolveValue.objects.create(value=value, expression=expr, solve=solve, flowsheet_id=flowsheet_id)

    return Response(status=200, data="Data uploaded successfully")