Cizz22 4 months ago
parent ea677ac6f2
commit e0f6e535c1

@@ -1,13 +1,10 @@
 {
     "series": [
-        "3BSS-H611",
         {
-            "parallel_no_redundancy": ["3ATT-N501A", "3ATT-N501B"]
+            "parallel_no_redundancy": ["3ATT-N503A", "3ATT-N503B"]
         },
-        "3BSS-H621",
-        {
-            "parallel_no_redundancy": ["3ATT-N502A", "3ATT-N502B"]
-        },
-        "3BSS-H631"
+        "3BRS-H611",
+        "3BRS-H621",
+        "3BRS-H631"
     ]
 }

@@ -1,10 +1,10 @@
 {
     "series": [
-        "3SCW-PF001",
+        "3GSS-H010",
         {
             "parallel": [
-                {"series": ["3SCW-H023A", "3SCW-M001A", "3SCW-P001A"]},
-                {"series": ["3SCW-H023B", "3SCW-M001B", "3SCW-P001B"]}
+                {"series": ["3GSS-M011A", "3GSS-F011A"]},
+                {"series": ["3GSS-M011B", "3GSS-F011B"]}
             ]
         }
     ]

File diff suppressed because it is too large

@@ -624,24 +624,15 @@
             },
             "3CRH-W002",
             {
                 "series": [
-                    "3BSS-H611",
                     {
-                        "parallel_no_redundancy": [
-                            "3ATT-N501A",
-                            "3ATT-N501B"
-                        ]
+                        "parallel_no_redundancy": ["3ATT-N503A", "3ATT-N503B"]
                     },
-                    "3BSS-H621",
-                    {
-                        "parallel_no_redundancy": [
-                            "3ATT-N502A",
-                            "3ATT-N502B"
-                        ]
-                    },
-                    "3BSS-H631"
+                    "3BRS-H611",
+                    "3BRS-H621",
+                    "3BRS-H631"
                 ]
             },
             {
                 "series": [
                     {
@@ -1196,29 +1187,17 @@
             },
             {
                 "series": [
                     {
                         "series": [
-                            "3SCW-PF001",
+                            "3GSS-H010",
                             {
                                 "parallel": [
-                                    {
-                                        "series": [
-                                            "3SCW-H023A",
-                                            "3SCW-M001A",
-                                            "3SCW-P001A"
-                                        ]
-                                    },
-                                    {
-                                        "series": [
-                                            "3SCW-H023B",
-                                            "3SCW-M001B",
-                                            "3SCW-P001B"
-                                        ]
-                                    }
+                                    {"series": ["3GSS-M011A", "3GSS-F011A"]},
+                                    {"series": ["3GSS-M011B", "3GSS-F011B"]}
                                 ]
                             }
                         ]
                     },
                     "3GEN-GM001",
                     {
                         "series": [

@ -1,12 +1,10 @@
import json import json
import logging import logging
from typing import Dict, Union from typing import Dict, Union, Tuple
from decimal import Decimal, getcontext from decimal import Decimal, getcontext
import math import math
from src.aeros_simulation.service import get_simulation_with_calc_result from src.aeros_simulation.service import get_simulation_with_calc_result
from src.database.core import DbSession
from src.logging import setup_logging
# Set high precision for decimal calculations # Set high precision for decimal calculations
getcontext().prec = 50 getcontext().prec = 50
@ -14,7 +12,6 @@ getcontext().prec = 50
Structure = Union[str, Dict[str, list]] Structure = Union[str, Dict[str, list]]
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
setup_logging(logger=log)
def prod(iterable): def prod(iterable):
"""Compute product of all elements in iterable with high precision.""" """Compute product of all elements in iterable with high precision."""
@ -28,7 +25,7 @@ def prod(iterable):
def system_availability(structure: Structure, availabilities: Dict[str, float]) -> float: def system_availability(structure: Structure, availabilities: Dict[str, float]) -> float:
"""Recursively compute system availability with precise calculations.""" """Recursively compute system availability with precise calculations."""
if isinstance(structure, str): # base case if isinstance(structure, str): # base case - component
if structure not in availabilities: if structure not in availabilities:
raise ValueError(f"Component '{structure}' not found in availabilities") raise ValueError(f"Component '{structure}' not found in availabilities")
return float(Decimal(str(availabilities[structure]))) return float(Decimal(str(availabilities[structure])))
@ -39,7 +36,7 @@ def system_availability(structure: Structure, availabilities: Dict[str, float])
if not components: # Handle empty series if not components: # Handle empty series
return 1.0 return 1.0
# Convert to Decimal for precise calculation # Series: A_system = A1 * A2 * ... * An
product = Decimal('1.0') product = Decimal('1.0')
for s in components: for s in components:
availability = system_availability(s, availabilities) availability = system_availability(s, availabilities)
@ -51,7 +48,7 @@ def system_availability(structure: Structure, availabilities: Dict[str, float])
if not components: # Handle empty parallel if not components: # Handle empty parallel
return 0.0 return 0.0
# Calculate 1 - prod(1 - availability) with high precision # Parallel: A_system = 1 - (1-A1) * (1-A2) * ... * (1-An)
product = Decimal('1.0') product = Decimal('1.0')
for s in components: for s in components:
availability = system_availability(s, availabilities) availability = system_availability(s, availabilities)
@ -62,8 +59,9 @@ def system_availability(structure: Structure, availabilities: Dict[str, float])
return float(result) return float(result)
elif "parallel_no_redundancy" in structure: elif "parallel_no_redundancy" in structure:
# Load sharing - system availability is minimum of components
components = structure["parallel_no_redundancy"] components = structure["parallel_no_redundancy"]
if not components: # Handle empty parallel_no_redundancy if not components:
return 0.0 return 0.0
availabilities_list = [system_availability(s, availabilities) for s in components] availabilities_list = [system_availability(s, availabilities) for s in components]
@@ -88,140 +86,207 @@ def get_all_components(structure: Structure) -> set:
     return components

-def compute_contributions(structure: Structure, availabilities: Dict[str, float]):
-    """
-    Compute contributions of each component to system availability.
-    Handles nested structures recursively with precise calculations.
-    """
-    # Convert all availabilities to precise decimals for internal calculations
-    precise_availabilities = {k: float(Decimal(str(v))) for k, v in availabilities.items()}
-
-    # Validate inputs
-    all_components = get_all_components(structure)
-    missing_components = all_components - set(precise_availabilities.keys())
-    if missing_components:
-        missing_availabilities = {node: 1.0 for node in missing_components} # Changed from 100 to 1.0 (assuming availability is 0-1 range)
-        log.warning(f"Missing Component: {missing_components}")
-        precise_availabilities.update(missing_availabilities)
-
-    baseline = system_availability(structure, precise_availabilities)
-    deltas = {c: Decimal('0.0') for c in precise_availabilities}
-
-    def force_component_down(substructure: Structure, component: str, avail_copy: Dict[str, float]):
-        """Recursively set a component's availability to 0 in the structure."""
-        if isinstance(substructure, str):
-            if substructure == component:
-                avail_copy[substructure] = 0.0
-        elif isinstance(substructure, dict):
-            for component_list in substructure.values():
-                for sub_component in component_list:
-                    force_component_down(sub_component, component, avail_copy)
-
-    def recurse_contributions(substructure: Structure, avail: Dict[str, float], weight: Decimal):
-        """
-        Recursively assign contributions with precise arithmetic.
-        weight = fraction of total system availability change attributed to this substructure.
-        """
-        if isinstance(substructure, str):
-            deltas[substructure] += weight
-            return
-
-        if isinstance(substructure, dict):
-            if "series" in substructure:
-                # In series, each component contributes equally to the weight
-                for s in substructure["series"]:
-                    recurse_contributions(s, avail, weight)
-
-            elif "parallel" in substructure:
-                # For parallel systems, calculate delta contribution for each branch
-                for s in substructure["parallel"]:
-                    av_copy = avail.copy()
-                    # Get all components in this branch and force them down
-                    branch_components = get_all_components(s)
-                    for comp in branch_components:
-                        force_component_down(substructure, comp, av_copy)
-
-                    reduced = system_availability(substructure, av_copy)
-                    delta = Decimal(str(baseline)) - Decimal(str(reduced))
-
-                    if delta > 0: # Only distribute weight if there's actual contribution
-                        baseline_decimal = Decimal(str(baseline))
-                        contribution_weight = (delta * weight / baseline_decimal) if baseline_decimal > 0 else Decimal('0')
-                        recurse_contributions(s, avail, contribution_weight)
-
-            elif "parallel_no_redundancy" in substructure:
-                components = substructure["parallel_no_redundancy"]
-                component_values = []
-                for s in components:
-                    component_values.append((s, system_availability(s, avail)))
-
-                # Find minimum availability with proper float comparison
-                min_val = min(val for _, val in component_values)
-
-                # Use small epsilon for float comparison
-                epsilon = Decimal('1e-10')
-                weakest_components = [comp for comp, val in component_values
-                                      if abs(Decimal(str(val)) - Decimal(str(min_val))) < epsilon]
-
-                # Distribute weight equally among weakest components
-                if weakest_components:
-                    weight_per_weakest = weight / Decimal(str(len(weakest_components)))
-                else:
-                    weight_per_weakest = Decimal('0')
-
-                for s in components:
-                    if s in weakest_components:
-                        recurse_contributions(s, avail, weight_per_weakest)
-                    else:
-                        recurse_contributions(s, avail, Decimal('0.0'))
-
-    # Start recursion with full baseline weight
-    if baseline > 0:
-        recurse_contributions(structure, precise_availabilities, Decimal(str(baseline)))
-
-    # Convert deltas back to float for final calculations
-    deltas_float = {k: float(v) for k, v in deltas.items()}
-    total_delta = sum(deltas_float.values())
-
-    # Calculate percentages with precision handling
-    if total_delta > 1e-10: # Use small epsilon instead of direct comparison to 0
-        total_delta_decimal = Decimal(str(total_delta))
-        percentages = {c: float(Decimal(str(d)) / total_delta_decimal) for c, d in deltas_float.items()}
-    else:
-        percentages = {c: 0.0 for c in deltas_float.keys()}
-
-    # Ensure percentages sum to 1.0 (or very close) by normalizing
-    percentage_sum = sum(percentages.values())
-    if percentage_sum > 1e-10: # Only normalize if sum is meaningful
-        percentages = {k: v / percentage_sum for k, v in percentages.items()}
-
-    return baseline, deltas_float, percentages
-
-
-def calculate_contribution(availabilities):
-    """Calculate contribution with input validation and normalization."""
-    with open('src/aeros_contribution/result.json', 'r') as model_file:
-        structure = json.load(model_file)
-
-    # Normalize availabilities to 0-1 range if they appear to be percentages
-    normalized_availabilities = {}
-    for k, v in availabilities.items():
-        if v > 1.0:
-            normalized_availabilities[k] = v / 100.0
-        else:
-            normalized_availabilities[k] = v
-        # Clamp to valid range [0, 1]
-        normalized_availabilities[k] = max(0.0, min(1.0, normalized_availabilities[k]))
-
-    baseline, deltas, percentages = compute_contributions(structure, normalized_availabilities)
-
-    log.info(f"System EAF: {baseline:.10f}")
-    return percentages
+def birnbaum_importance(structure: Structure, availabilities: Dict[str, float], component: str) -> float:
+    """
+    Calculate Birnbaum importance for a component.
+
+    Birnbaum importance = A_system/A_component
+    This is approximated as:
+    I_B = A_system(A_i=1) - A_system(A_i=0)
+    Where A_i is the availability of component i.
+    """
+    # Create copies for calculations
+    avail_up = availabilities.copy()
+    avail_down = availabilities.copy()
+
+    # Set component availability to 1 (perfect)
+    avail_up[component] = 1.0
+
+    # Set component availability to 0 (failed)
+    avail_down[component] = 0.0
+
+    # Calculate system availability in both cases
+    system_up = system_availability(structure, avail_up)
+    system_down = system_availability(structure, avail_down)
+
+    # Birnbaum importance is the difference
+    return system_up - system_down
+
+
+def criticality_importance(structure: Structure, availabilities: Dict[str, float], component: str) -> float:
+    """
+    Calculate Criticality importance for a component.
+
+    Criticality importance = Birnbaum importance * (1 - A_component) / (1 - A_system)
+    This represents the probability that component i is critical to system failure.
+    """
+    birnbaum = birnbaum_importance(structure, availabilities, component)
+    system_avail = system_availability(structure, availabilities)
+    component_avail = availabilities[component]
+
+    if system_avail >= 1.0: # Perfect system
+        return 0.0
+
+    criticality = birnbaum * (1.0 - component_avail) / (1.0 - system_avail)
+    return criticality
+
+
+def fussell_vesely_importance(structure: Structure, availabilities: Dict[str, float], component: str) -> float:
+    """
+    Calculate Fussell-Vesely importance for a component.
+
+    FV importance = (A_system - A_system(A_i=0)) / A_system
+    This represents the fractional decrease in system availability when component i fails.
+    """
+    system_avail = system_availability(structure, availabilities)
+    if system_avail <= 0.0:
+        return 0.0
+
+    # Calculate system availability with component failed
+    avail_down = availabilities.copy()
+    avail_down[component] = 0.0
+    system_down = system_availability(structure, avail_down)
+
+    fv = (system_avail - system_down) / system_avail
+    return fv
+
+
+def compute_all_importance_measures(structure: Structure, availabilities: Dict[str, float]) -> Dict[str, Dict[str, float]]:
+    """
+    Compute all importance measures for each component.
+
+    Returns:
+        Dictionary with component names as keys and importance measures as values
+    """
+    # Normalize availabilities to 0-1 range if needed
+    normalized_availabilities = {}
+    for k, v in availabilities.items():
+        if v > 1.0:
+            # Assume it's a percentage, convert to fraction
+            normalized_availabilities[k] = v / 100.0
+        else:
+            normalized_availabilities[k] = v
+        # Clamp to valid range [0, 1]
+        normalized_availabilities[k] = max(0.0, min(1.0, normalized_availabilities[k]))
+
+    # Get all components in the system
+    all_components = get_all_components(structure)
+
+    # Check for missing components
+    missing_components = all_components - set(normalized_availabilities.keys())
+    if missing_components:
+        log.warning(f"Missing components (assuming 100% availability): {missing_components}")
+        for comp in missing_components:
+            normalized_availabilities[comp] = 1.0
+
+    # Calculate system baseline availability
+    system_avail = system_availability(structure, normalized_availabilities)
+
+    # Calculate importance measures for each component
+    results = {}
+    total_birnbaum = 0.0
+
+    for component in all_components:
+        if component in normalized_availabilities:
+            birnbaum = birnbaum_importance(structure, normalized_availabilities, component)
+            criticality = criticality_importance(structure, normalized_availabilities, component)
+            fv = fussell_vesely_importance(structure, normalized_availabilities, component)
+
+            results[component] = {
+                'birnbaum_importance': birnbaum,
+                'criticality_importance': criticality,
+                'fussell_vesely_importance': fv,
+                'component_availability': normalized_availabilities[component]
+            }
+            total_birnbaum += birnbaum
+
+    # Calculate contribution percentages based on Birnbaum importance
+    if total_birnbaum > 0:
+        for component in results:
+            contribution_pct = results[component]['birnbaum_importance'] / total_birnbaum
+            results[component]['contribution_percentage'] = contribution_pct
+    else:
+        for component in results:
+            results[component]['contribution_percentage'] = 0.0
+
+    # Add system-level information
+    results['_system_info'] = {
+        'system_availability': system_avail,
+        'system_unavailability': 1.0 - system_avail,
+        'total_birnbaum_importance': total_birnbaum
+    }
+
+    return results
+
+
+def calculate_contribution_accurate(availabilities: Dict[str, float], structure_file: str = 'src/aeros_contribution/result.json') -> Dict[str, Dict[str, float]]:
+    """
+    Calculate component contributions using proper importance measures.
+
+    Args:
+        availabilities: Dictionary of component availabilities
+        structure_file: Path to RBD structure JSON file
+
+    Returns:
+        Dictionary containing all importance measures and contributions
+    """
+    try:
+        with open(structure_file, 'r') as model_file:
+            structure = json.load(model_file)
+    except FileNotFoundError:
+        raise FileNotFoundError(f"Structure file not found: {structure_file}")
+    except json.JSONDecodeError:
+        raise ValueError(f"Invalid JSON in structure file: {structure_file}")
+
+    # Compute all importance measures
+    results = compute_all_importance_measures(structure, availabilities)
+
+    # Extract system information
+    system_info = results.pop('_system_info')
+
+    # Log results
+    log.info(f"System Availability: {system_info['system_availability']:.6f}")
+    log.info(f"System Unavailability: {system_info['system_unavailability']:.6f}")
+
+    # Sort components by Birnbaum importance (most critical first)
+    sorted_components = sorted(results.items(),
+                               key=lambda x: x[1]['birnbaum_importance'],
+                               reverse=True)
+
+    print("\n=== COMPONENT IMPORTANCE ANALYSIS ===")
+    print(f"System Availability: {system_info['system_availability']:.6f} ({system_info['system_availability']*100:.4f}%)")
+    print(f"System Unavailability: {system_info['system_unavailability']:.6f}")
+    print("\nComponent Rankings (by Birnbaum Importance):")
+    print(f"{'Component':<20} {'Availability':<12} {'Birnbaum':<12} {'Criticality':<12} {'F-V':<12} {'Contribution%':<12}")
+    print("-" * 92)
+
+    for component, measures in sorted_components:
+        print(f"{component:<20} {measures['component_availability']:<12.6f} "
+              f"{measures['birnbaum_importance']:<12.6f} {measures['criticality_importance']:<12.6f} "
+              f"{measures['fussell_vesely_importance']:<12.6f} {measures['contribution_percentage']*100:<12.4f}")
+
+    # Return results with system info included
+    # results['_system_info'] = system_info
+    return results
+
+
+# Legacy function for backwards compatibility
+def calculate_contribution(availabilities):
+    """Legacy function - redirects to improved version."""
+    try:
+        return calculate_contribution_accurate(availabilities)
+    except Exception as e:
+        log.error(f"Error in contribution calculation: {e}")
+        raise

 async def update_contribution_bulk_mappings(*, db_session, simulation_id):
@@ -235,19 +300,22 @@ async def update_contribution_bulk_mappings(*, db_session, simulation_id):

     # Ensure availability values are properly normalized
     availabilities = {}
     for calc in calc_results:
-        availability = calc.availability # Convert percentage to fraction
-        # Clamp to valid range and handle potential precision issues
-        availability = max(0.0, min(1.0, availability))
+        availability = calc.availability
         availabilities[calc.aeros_node.node_name] = availability

-    contribution = calculate_contribution(availabilities)
+    importance = calculate_contribution(availabilities)

     # Prepare bulk update data with rounded contributions to avoid precision issues in DB
     for calc in calc_results:
-        contribution_value = contribution.get(calc.aeros_node.node_name, 0.0)
         # Round to reasonable precision for database storage
-        calc.contribution = round(contribution_value, 10)
+        eq_importance = importance.get(calc.aeros_node.node_name, {})
+        if not eq_importance:
+            continue
+        calc.contribution = importance.get(calc.aeros_node.node_name).get('birnbaum_importance', 0)
+        calc.criticality = importance.get(calc.aeros_node.node_name).get('criticality_importance', 0)

     await db_session.commit()
-    return contribution
+    return importance
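
The replacement service above computes three standard importance measures: Birnbaum I_B = A_sys(A_i=1) - A_sys(A_i=0), criticality I_C = I_B * (1 - A_i) / (1 - A_sys), and Fussell-Vesely I_FV = (A_sys - A_sys(A_i=0)) / A_sys. A hand-worked sketch on an invented two-pump example follows; the names and availability values are assumptions, not data from this repository.

# Hand-worked illustration only: one header in series with two pumps in parallel.
A = {"HEADER": 0.99, "PUMP-A": 0.95, "PUMP-B": 0.90}

def sys_avail(a):
    # series(HEADER, parallel(PUMP-A, PUMP-B))
    return a["HEADER"] * (1 - (1 - a["PUMP-A"]) * (1 - a["PUMP-B"]))

base = sys_avail(A)  # 0.99 * (1 - 0.05 * 0.10) = 0.98505

def birnbaum(comp):
    # I_B = A_sys(A_i = 1) - A_sys(A_i = 0)
    return sys_avail({**A, comp: 1.0}) - sys_avail({**A, comp: 0.0})

def criticality(comp):
    # I_C = I_B * (1 - A_i) / (1 - A_sys)
    return birnbaum(comp) * (1 - A[comp]) / (1 - base)

def fussell_vesely(comp):
    # I_FV = (A_sys - A_sys(A_i = 0)) / A_sys
    return (base - sys_avail({**A, comp: 0.0})) / base

for comp in A:
    print(comp, round(birnbaum(comp), 4), round(criticality(comp), 4), round(fussell_vesely(comp), 4))
# HEADER is the single point of failure and dominates every measure (I_B = 0.995).
# PUMP-A: I_B = 0.99 * 0.10 = 0.099; PUMP-B: I_B = 0.99 * 0.05 = 0.0495,
# small because the other pump usually covers for the failed one.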

@@ -128,6 +128,7 @@ class AerosSimulationCalcResult(Base, DefaultMixin):
     mttr = Column(Integer, nullable=True)
     parameters = Column(JSON, nullable=True)
     contribution = Column(Float, nullable=True)
+    criticality = Column(Float, nullable=True)
     aeros_simulation_id = Column(
         UUID(as_uuid=True), ForeignKey("rbd_tr_aeros_simulation.id"), nullable=False

@@ -30,6 +30,7 @@ from .service import (
     # execute_simulation,
     get_all,
     get_custom_parameters,
+    get_default_simulation,
     get_simulation_by_id,
     get_simulation_with_calc_result,
     get_simulation_with_plot_result,
@@ -81,7 +82,7 @@ async def run_simulations(
     )
     simulation_id = simulation.id
-    # simulation_id = "dece6294-13c0-4dce-82d6-5d79b66e730e"
+    # simulation_id = "efa8ef4c-0417-4d2d-95f3-41e4283737ab"

     project = await get_project(db_session=db_session)
@@ -91,7 +92,7 @@ async def run_simulations(
     sim_data["projectName"] = project.project_name

-    # ##background_tasks.add_task(execute_simulation, db_session=db_session ,simulation_id=simulation_id, sim_data=sim_data)
+    # # ##background_tasks.add_task(execute_simulation, db_session=db_session ,simulation_id=simulation_id, sim_data=sim_data)

     results = await update_equipment_for_simulation(
         db_session=db_session, project_name=project.project_name, schematic_name=simulation_in.SchematicName, custom_input=simulation_in.CustomInput
@@ -124,6 +125,10 @@ async def run_simulations(
 )
 async def get_simulation_result(db_session: DbSession, simulation_id, schematic_name: Optional[str] = Query(None), node_type = Query(None, alias="nodetype")):
     """Get simulation result."""
+    if simulation_id == 'default':
+        simulation = await get_default_simulation(db_session=db_session)
+        simulation_id = simulation.id
+
     simulation_result = await get_simulation_with_calc_result(
         db_session=db_session, simulation_id=simulation_id, schematic_name=schematic_name, node_type=node_type
     )
@@ -140,6 +145,11 @@ async def get_simulation_result(db_session: DbSession, simulation_id, schematic_
 )
 async def get_simulation_result_plant(db_session: DbSession, simulation_id):
     """Get simulation result."""
+    if simulation_id == 'default':
+        simulation = await get_default_simulation(db_session=db_session)
+        simulation_id = simulation.id
+
     simulation_result = await get_plant_calc_result(
         db_session=db_session, simulation_id=simulation_id
     )
@@ -158,6 +168,10 @@ async def get_simulation_result_plant(db_session: DbSession, simulation_id):
 )
 async def get_simulation_result_plot(db_session: DbSession, simulation_id):
     """Get simulation result."""
+    if simulation_id == 'default':
+        simulation = await get_default_simulation(db_session=db_session)
+        simulation_id = simulation.id
+
     simulation_result = await get_simulation_with_plot_result(
         db_session=db_session, simulation_id=simulation_id
     )
@@ -175,6 +189,11 @@ async def get_simulation_result_plot(db_session: DbSession, simulation_id):
 )
 async def get_simulation_result_plot_per_node(db_session: DbSession, simulation_id, node_id, use_location_tag: Optional[int] = Query(0)):
     """Get simulation result."""
+    if simulation_id == 'default':
+        simulation = await get_default_simulation(db_session=db_session)
+        simulation_id = simulation.id
+
     simulation_result = await get_simulation_with_plot_result(
         db_session=db_session, simulation_id=simulation_id, node_id=node_id, use_location_tag=use_location_tag
     )
@@ -189,6 +208,11 @@ async def get_simulation_result_plot_per_node(db_session: DbSession, simulation_
 @router.get("/result/ranking/{simulation_id}", response_model=StandardResponse[List[SimulationRankingParameters]])
 async def get_simulation_result_ranking(db_session: DbSession, simulation_id):
     """Get simulation result."""
+    if simulation_id == 'default':
+        simulation = await get_default_simulation(db_session=db_session)
+        simulation_id = simulation.id
+
     simulation_result = await get_result_ranking(db_session=db_session, simulation_id=simulation_id)
     return {
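
With these view changes, each result endpoint accepts the literal id "default", which get_default_simulation resolves to the most recent completed simulation flagged is_default before the usual lookup runs. A quick client-side illustration; the host and router prefix are assumptions, only the "/result/ranking/{simulation_id}" path segment is taken from the decorator above.

# Illustration only - base URL and prefix are assumed, not defined in this diff.
import httpx

resp = httpx.get("http://localhost:8000/result/ranking/default")
resp.raise_for_status()
print(resp.json())  # same payload shape as querying an explicit simulation UUID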

@@ -63,6 +63,7 @@ class SimulationCalc(BaseModel):
     derating_hours: Optional[float]
     aeros_node: SimulationNode
     contribution: Optional[float] = 0
+    criticality: Optional[float]

 class SimulationPlot(BaseModel):
     id: UUID

@@ -78,6 +78,18 @@ async def get_simulation_by_id(
     results = await db_session.execute(query)
     return results.scalar()

+
+async def get_default_simulation(
+    *,
+    db_session:DbSession
+):
+    query = select(AerosSimulation)
+    query = query.where(AerosSimulation.status == "completed").where(AerosSimulation.is_default == True)
+    query = query.order_by(AerosSimulation.created_at.desc()).limit(1)
+    results = await db_session.execute(query)
+    return results.scalar()
+
+
 async def get_simulation_node_by(*, db_session: DbSession, **kwargs):
     """Get a simulation node by column."""
     # Build WHERE conditions from kwargs
@@ -382,7 +394,7 @@ async def execute_simulation(*, db_session: DbSession, simulation_id: Optional[U
     try:
         if not is_saved:
             response = await client.post(
-                f"{AEROS_BASE_URL_OLD}/api/Simulation/RunSimulation",
+                f"{AEROS_BASE_URL}/api/Simulation/RunSimulation",
                 json=sim_data,
                 headers={"Content-Type": "application/json"},
             )
@@ -864,12 +876,16 @@ async def save_recusive_simulation_result_node(*, db_session: DbSession, data, s
     # continue looping through all plot data, check if it regular node and schemmaticName = highest parent schematic ID, save
     # If schematicName = Parent schematic name, but not regular node, that mean that node is schematic and should have children
     # search for children schematic and save them
+    with open("model/structure_name.json", 'r') as structure_file:
+        structure_data = json.load(structure_file)
+
+    structure_dict = {
+        result["node_name"]: result["structure_name"]
+        for result in structure_data
+        if result["node_name"] is not None
+    }
+
     plotResult = data["plotNodeOuts"]
-    structure_names = {result["nodeName"]:result["structureName"] for result in data["nodeResultOuts"]}
     results = []

     for result in plotResult:
@@ -882,7 +898,7 @@ async def save_recusive_simulation_result_node(*, db_session: DbSession, data, s
                 node_type="RegularNode",
                 schematic_name=schematic_name,
                 aeros_schematic_id=aeros_schematic_id,
-                structure_name=structure_names.get(result["nodeName"])
+                structure_name=structure_dict.get(result["nodeName"])
             )
             results.append(node)
@@ -896,7 +912,7 @@ async def save_recusive_simulation_result_node(*, db_session: DbSession, data, s
                 schematic_id=schematic_id,
                 node_type="SchematicNode",
                 aeros_schematic_id=aeros_schematic_id,
-                structure_name=structure_names.get(result["nodeName"])
+                structure_name=structure_dict.get(result["nodeName"])
             )
             results.append(schematic)
