add endpoint to get eaf contribution

main
Cizz22 4 months ago
parent 0447eaa82b
commit 213fd0617a

@@ -376,7 +376,8 @@ async def update_equipment_for_simulation(*, db_session: DbSession, project_name
                 "mttr": eq["cmDisP1"],
                 "distribution": eq["relDisType"],
                 "beta": eq["relDisP1"],
-                "eta": 0
+                "eta": 0,
+                "parameters": {}
             }
             continue

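For orientation, the reliability entry that this branch of update_equipment_for_simulation builds appears to have roughly the following shape; the key names come from the hunk above, while the concrete values are purely illustrative:

# Illustrative sketch only: key names are taken from the diff, values are invented.
eq_reliability_entry = {
    "mttr": 24.0,               # eq["cmDisP1"]
    "distribution": "weibull",  # eq["relDisType"]
    "beta": 1.3,                # eq["relDisP1"]
    "eta": 0,
    "parameters": {},           # newly added so downstream readers can rely on the key
}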
@@ -160,3 +160,10 @@ class AerosSimulationPlotResult(Base, DefaultMixin):
     aeros_simulation = relationship(
         "AerosSimulation", back_populates="plot_results", lazy="raise"
     )
+
+
+class EafContribution(Base, DefaultMixin):
+    __tablename__ = "rbd_ms_eaf_contribution"
+
+    location_tag = Column(String, nullable=False)
+    eaf_contribution = Column(Float, nullable=False)

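As a side note (not part of this commit), a minimal sketch of reading the stored contributions back through the new model, assuming the async SQLAlchemy session used elsewhere in the project:

# Hypothetical helper: list location tags ordered by EAF contribution, highest first.
from sqlalchemy import select
from src.aeros_simulation.model import EafContribution


async def top_eaf_contributors(db_session, limit: int = 10):
    stmt = (
        select(EafContribution)
        .order_by(EafContribution.eaf_contribution.desc())
        .limit(limit)
    )
    rows = (await db_session.execute(stmt)).scalars().all()
    return [(row.location_tag, row.eaf_contribution) for row in rows]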
@@ -1,9 +1,13 @@
+from collections import defaultdict
 from datetime import datetime
 from typing import List, Optional
 from uuid import UUID

 from fastapi import APIRouter, BackgroundTasks, HTTPException, background, status, Query
+from sqlalchemy import select

+from src.aeros_equipment.model import AerosEquipment
+from src.aeros_simulation.model import EafContribution
 from src.auth.service import CurrentUser
 from src.database.core import DbSession
 from src.database.service import CommonParameters
@@ -33,7 +37,7 @@ from .service import (
     get_plant_calc_result
 )
-from .simulation_save_service import execute_simulation
+from .simulation_save_service import calculate_plant_eaf, execute_simulation
 from src.aeros_equipment.schema import EquipmentWithCustomParameters
@@ -77,6 +81,8 @@ async def run_simulations(
     )
     simulation_id = simulation.id
+    #simulation_id = "2e0755bf-8cce-4743-9659-8d9920d556e7"
     project = await get_project(db_session=db_session)

     try:
@@ -85,7 +91,7 @@ async def run_simulations(
         sim_data["projectName"] = project.project_name
-        ##background_tasks.add_task(execute_simulation, db_session=db_session ,simulation_id=simulation_id, sim_data=sim_data)
+        # ##background_tasks.add_task(execute_simulation, db_session=db_session ,simulation_id=simulation_id, sim_data=sim_data)
         results = await update_equipment_for_simulation(
             db_session=db_session, project_name=project.project_name, schematic_name=simulation_in.SchematicName, custom_input=simulation_in.CustomInput
@@ -99,6 +105,8 @@ async def run_simulations(
             db_session=db_session, simulation_id=simulation_id, sim_data=sim_data, is_saved=True, eq_update=results
         )
+
+        await calculate_plant_eaf(db_session=db_session, simulation_id=simulation_id)
         return {
             "data": str(simulation_id),
             "status": "success",
@@ -209,6 +217,90 @@ async def get_custom_parameters_controller(db_session: DbSession):
         "message": "Simulation result retrieved successfully",
     }

+
+@router.post("/calculate_eaf_contribution", response_model=StandardResponse[dict])
+async def calculate_contribution(
+    db_session: DbSession,
+    simulation_in: SimulationInput,
+    batch_num: int = Query(0, ge=0)
+):
"""RUN Simulation"""
+    #simulation_id = "2e0755bf-8cce-4743-9659-8d9920d556e7"
+    project = await get_project(db_session=db_session)
+    main_edh = 43.00797663527534
+
+    try:
+        contribution_results = defaultdict()
+
+        simulations_eq = select(AerosEquipment)
+        eqs = (await db_session.execute(simulations_eq)).scalars().all()
+
+        batch_size = 100
+        start_index = batch_num * batch_size
+        end_index = start_index + batch_size
+        if end_index > len(eqs):
+            end_index = len(eqs)
+        eqs = eqs[start_index:end_index]
+
+        for eq in eqs:
+            simulation = await create_simulation(
+                db_session=db_session, simulation_in=simulation_in
+            )
+            sim_data = simulation_in.model_dump(exclude={"SimulationName"})
+            sim_data["HubCnnId"] = str(simulation.id)
+            sim_data["projectName"] = project.project_name
+
+            custom_input = {
+                eq.node_name: {
+                    "mttr": 8760,
+                    "failure_rate": 0.1,
+                }
+            }
+
+            results = await update_equipment_for_simulation(
+                db_session=db_session, project_name=project.project_name, schematic_name=simulation_in.SchematicName, custom_input=custom_input
+            )
+            # await update_simulation(
+            #     db_session=db_session, simulation_id=simulation_id, data={"reliability": results}
+            # )
+            await execute_simulation(
+                db_session=db_session, simulation_id=simulation.id, sim_data=sim_data, is_saved=True, eq_update=results
+            )
+
+            eaf, edh = await calculate_plant_eaf(db_session=db_session, simulation_id=simulation.id)
+            eaf_contribution = (main_edh - edh)/main_edh if main_edh else 0
+
+            contribution_results[eq.node_name] = {
+                "eaf": eaf,
+                "edh": edh,
+                "eaf_contribution": eaf_contribution
+            }
+
+            eaf_conf = EafContribution(
+                location_tag=eq.node_name,
+                eaf_contribution=eaf_contribution
+            )
+            db_session.add(eaf_conf)
+            await db_session.commit()
+
+            await db_session.delete(simulation)
+
+        return {
+            "data": contribution_results,
+            "status": "success",
+            "message": "Simulation created successfully",
+        }
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
+        )

 # @router.get("/status/{simulation_id}", response_model=StandardResponse[None])
 # async def get_simulation_status(simulation_id: str):

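A rough client-side sketch for the new route (not part of the commit). The router prefix and base URL are not visible in this diff, and the SimulationInput payload values are guesses, so treat all of them as assumptions; each call processes one batch of 100 equipments, i.e. indices [batch_num*100, batch_num*100 + 100):

# Hypothetical usage of POST /calculate_eaf_contribution; adjust URL, prefix and payload to the real deployment.
import asyncio

import httpx


async def request_contribution_batch(batch_num: int = 0):
    payload = {
        "SimulationName": "eaf-contribution-run",  # field names follow SimulationInput usage in the diff
        "SchematicName": "main-schematic",         # illustrative value
        "CustomInput": {},
    }
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post(
            "/calculate_eaf_contribution",         # prepend the router prefix if one is configured
            params={"batch_num": batch_num},
            json=payload,
            timeout=None,                          # each batch re-runs up to 100 simulations
        )
        resp.raise_for_status()
        return resp.json()["data"]


if __name__ == "__main__":
    print(asyncio.run(request_contribution_batch(0)))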
@@ -12,7 +12,7 @@ import ijson
 from fastapi import HTTPException, status

 from src.aeros_simulation.model import AerosSimulationCalcResult, AerosSimulationPlotResult
-from src.aeros_simulation.service import get_all_aeros_node, get_or_save_node, get_simulation_by_id
+from src.aeros_simulation.service import get_all_aeros_node, get_or_save_node, get_plant_calc_result, get_simulation_by_id, get_simulation_with_plot_result
 from src.aeros_simulation.utils import calculate_eaf
 from src.config import AEROS_BASE_URL
 from src.database.core import DbSession
@@ -189,13 +189,13 @@ async def create_calc_result_object(
         "eta": 0, "beta": 0, "mttr": 0, "parameters": {}
     })

-    eaf, derating_hours = calculate_eaf(
-        available_hours=result["totalUpTime"],
-        period_hours=result["totalUpTime"] + result["totalDowntime"],
-        actual_production=result["production"],
-        ideal_production=result["idealProduction"],
-        downtime_hours=result["totalDowntime"]
-    )
+    # eaf, derating_hours = calculate_eaf(
+    #     available_hours=result["totalUpTime"],
+    #     period_hours=result["totalUpTime"] + result["totalDowntime"],
+    #     actual_production=result["production"],
+    #     ideal_production=result["idealProduction"],
+    #     downtime_hours=result["totalDowntime"]
+    # )

     efor = (result["totalDowntime"] / (result["totalDowntime"] + result["totalUpTime"])) * 100 if (result["totalDowntime"] + result["totalUpTime"]) > 0 else 0
@@ -232,11 +232,39 @@ async def create_calc_result_object(
         stg_output=result["stgOutput"],
         average_level=result["averageLevel"],
         potential_production=result["potentialProduction"],
-        eaf=eaf,
+        eaf=0,
         efor=efor,
-        derating_hours=derating_hours,
+        derating_hours=0,
         beta=eq_reliability["beta"] if node_type == "RegularNode" else None,
         eta=eq_reliability["eta"] if node_type == "RegularNode" else None,
         mttr=eq_reliability["mttr"] if node_type == "RegularNode" else None,
         parameters=eq_reliability["parameters"] if node_type == "RegularNode" else None
     )
+
+
+async def calculate_plant_eaf(
+    db_session: DbSession, simulation_id: UUID
+):
+    """Calculate overall plant EAF from individual node results."""
+    plant_calc_data = await get_plant_calc_result(
+        db_session=db_session, simulation_id=simulation_id
+    )
+    plant_plot_data = await get_simulation_with_plot_result(
+        db_session=db_session, simulation_id=simulation_id, node_id="plant"
+    )
+
+    eaf, derated_hours = calculate_eaf(
+        available_hours=plant_calc_data.total_uptime,
+        period_hours=plant_calc_data.total_uptime + plant_calc_data.total_downtime,
+        actual_production=plant_calc_data.production,
+        ideal_production=plant_calc_data.ideal_production,
+        downtime_hours=plant_calc_data.total_downtime,
+        plot_data=plant_plot_data.timestamp_outs
+    )
+
+    plant_calc_data.eaf = eaf
+    plant_calc_data.derating_hours = derated_hours
+
+    await db_session.commit()
+
+    return eaf, derated_hours

@@ -1,5 +1,11 @@
 import json
+import logging
+
+from src.logging import setup_logging
+
+log = logging.getLogger(__name__)
+setup_logging(log)


 def calculate_eaf(
     available_hours: float,
@@ -27,12 +33,12 @@ def calculate_eaf(
     try:
         # Calculate lost production
         max_capacity = 660
-        derate_production = ideal_production - actual_production - (max_capacity * downtime_hours)
+        derate_production = ideal_production - actual_production

         # Calculate total equivalent derate and outage hours
-        derate_equivalent_hours = derate_production / max_capacity if max_capacity > 0 else 0
+        edh = calculate_equivalent_derate_hours(plot_data, max_flow_rate=max_capacity)

         # Calculate EAF
-        effective_available_hours = available_hours - derate_equivalent_hours
+        effective_available_hours = available_hours - edh

-        return (effective_available_hours / period_hours) * 100 if period_hours > 0 else 0, derate_equivalent_hours
+        return (effective_available_hours / period_hours) * 100 if period_hours > 0 else 0, edh
     except Exception as e:
         print("Error calculating EAF:", e)
         raise
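To make the revised formula concrete, a small worked example with invented numbers (the 660 MW max capacity is the constant hard-coded above):

# EAF = (available_hours - EDH) / period_hours * 100, per the hunk above; numbers are illustrative.
available_hours = 8500.0  # totalUpTime
period_hours = 8760.0     # totalUpTime + totalDowntime
edh = 43.0                # equivalent derate hours returned by calculate_equivalent_derate_hours

eaf = (available_hours - edh) / period_hours * 100 if period_hours > 0 else 0
print(round(eaf, 2))      # -> 96.54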
@@ -61,33 +67,30 @@ def calculate_eaf(
-def calculate_derating(data_list, max_flow_rate: float = 660) -> float:
+def calculate_equivalent_derate_hours(data_list, max_flow_rate: float = 660) -> float:
     """
-    Calculate total time when flow rate is below maximum.
-    Method 2: Time intervals AFTER each measurement point.
-    This assumes each data point represents the start of a period with that flow rate.
+    Calculate Equivalent Forced Derated Hours (EFDH).
+
+    Each data point represents the start of a period with that flow rate,
+    valid until the next cumulativeTime.
     """
-    # Sort data by cumulative time to ensure proper order
     sorted_data = sorted(data_list, key=lambda x: x['cumulativeTime'])

-    total_time_below_max = 0.0
-
-    print("=== Method 2: Time intervals AFTER each measurement ===")
+    total_equivalent_derate_hours = 0.0

     for i in range(len(sorted_data) - 1):
-        current_data = sorted_data[i]
-        next_data = sorted_data[i + 1]
-
-        # If current flow rate is below max, add this time interval
-        if current_data['flowRate'] < max_flow_rate and current_data['flowRate'] != 0:
-            # Time interval until next measurement
-            time_interval = next_data['cumulativeTime'] - current_data['cumulativeTime']
-            total_time_below_max += time_interval
-            print(f"Derating hours: {time_interval:.2f}")
-
-    return total_time_below_max
+        current = sorted_data[i]
+        next_ = sorted_data[i + 1]
+
+        time_interval = next_['cumulativeTime'] - current['cumulativeTime']
+        derating = max_flow_rate - current['flowRate']
+
+        if derating > 0 and derating < max_flow_rate:  # Only count when capacity is reduced
+            log.info(f"Time Interval: {time_interval}, Derating: {derating}, Max Flow Rate: {max_flow_rate}")
+            total_equivalent_derate_hours += (time_interval * derating / max_flow_rate)
+
+    return total_equivalent_derate_hours


 def stream_large_array(filepath, key):

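A short worked example for the rewritten helper (data invented for illustration): each point's flow rate holds until the next cumulativeTime, and only partial deratings (0 < derating < max_flow_rate) are accumulated, so full outages and the final point contribute nothing:

# Assumes the helper lives in src.aeros_simulation.utils alongside calculate_eaf, as the imports above suggest.
from src.aeros_simulation.utils import calculate_equivalent_derate_hours

points = [
    {"cumulativeTime": 0,   "flowRate": 660},  # full capacity -> derating 0, skipped
    {"cumulativeTime": 100, "flowRate": 330},  # half capacity for 50 h -> 50 * 330 / 660 = 25 EDH
    {"cumulativeTime": 150, "flowRate": 0},    # full outage for 50 h -> derating == max, not counted here
    {"cumulativeTime": 200, "flowRate": 660},  # last point: no following interval, never counted
]

print(calculate_equivalent_derate_hours(points, max_flow_rate=660))  # -> 25.0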