reliability target

feature/reliability_stat
Cizz22 5 months ago
parent bcc96f664b
commit a2812aea02

@@ -4,9 +4,10 @@ from fastapi import APIRouter, HTTPException, status
 from fastapi.params import Query
 from src.database.core import DbSession
+from src.auth.service import Token
 from src.models import StandardResponse
-from .service import get_eaf_timeline
+from .service import get_eaf_timeline, run_rbd_simulation, get_simulation_results
 router = APIRouter()
@@ -28,9 +29,10 @@ router = APIRouter()
 # )
-@router.get("", response_model=StandardResponse[List[Dict]])
+@router.get("", response_model=StandardResponse[dict])
 async def get_target_reliability(
     db_session: DbSession,
+    token: Token,
     oh_session_id: Optional[str] = Query(None),
     eaf_input: float = Query(0.5),
     duration: int = Query(8000),
@@ -42,11 +44,21 @@ async def get_target_reliability(
             detail="oh_session_id is required",
         )
-    results = await get_eaf_timeline(
-        db_session=db_session,
-        oh_session_id=oh_session_id,
-        eaf_input=eaf_input,
-        oh_duration=duration
+    # results = await get_eaf_timeline(
+    #     db_session=db_session,
+    #     oh_session_id=oh_session_id,
+    #     eaf_input=eaf_input,
+    #     oh_duration=duration
+    # )
+    simulation_id = await run_rbd_simulation(
+        sim_hours=duration,
+        token=token
+    )
+    results = await get_simulation_results(
+        simulation_id=simulation_id['data'],
+        token=token
     )
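With this change the endpoint no longer synthesizes an EAF timeline locally: it starts a simulation on the external RBD service sized to the requested `duration`, then fetches the results by the id returned in the response's `data` field. A minimal client-side sketch of calling the updated endpoint, assuming the router is mounted at `/target-reliability` and that the `Token` dependency reads a Bearer header (neither is shown in this diff):

```python
# Hedged usage sketch; mount path, token transport, and token value are assumptions.
import asyncio
import httpx

async def fetch_target_reliability() -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.get(
            "/target-reliability",  # assumed mount point for this router
            params={"oh_session_id": "OH-001", "eaf_input": 0.5, "duration": 8000},
            headers={"Authorization": "Bearer <token>"},
        )
        resp.raise_for_status()
        return resp.json()  # StandardResponse envelope wrapping the simulation results

asyncio.run(fetch_target_reliability())
```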

@@ -17,177 +17,9 @@ from collections import defaultdict
 import asyncio
-RBD_SERVICE_API = "https://example.com/api"
+RBD_SERVICE_API = "http://192.168.1.82:8000/rbd"
 client = httpx.AsyncClient(timeout=300.0)
-# async def get_all_target_reliability(
-#     *, db_session: DbSession, scope_name: str, eaf_threshold: float = 100.0
-# ):
-#     """Get all overhaul overview with EAF values that sum to 100%, aggregated by system."""
-#     equipments = await get_by_scope_name(db_session=db_session, scope_name=scope_name)
-#     equipment_system = await get_equipment_level_by_no(db_session=db_session, level=1)
-#     equipment_subsystem = await get_equipment_level_by_no(
-#         db_session=db_session, level=2
-#     )
-#     # If no equipments found, return empty list
-#     if not equipments:
-#         return []
-#     import random
-#     n = len(equipments)
-#     base_value = 100 / n  # Even distribution as base
-#     # Generate EAF values with ±30% variation from base
-#     eaf_values = [
-#         base_value + random.uniform(-0.3 * base_value, 0.3 * base_value)
-#         for _ in range(n)
-#     ]
-#     # Normalize to ensure sum is 100
-#     total = sum(eaf_values)
-#     eaf_values = [(v * 100 / total) for v in eaf_values]
-#     # Create result array of dictionaries
-#     result = [
-#         {
-#             "id": equipment.id,
-#             "assetnum": equipment.assetnum,
-#             "location_tag": equipment.master_equipment.location_tag,
-#             "name": equipment.master_equipment.name,
-#             "parent_id": equipment.master_equipment.parent_id,  # Add parent_id to identify the system
-#             "eaf": round(eaf, 4),  # Add EAF value
-#         }
-#         for equipment, eaf in zip(equipments, eaf_values)
-#     ]
-#     # Group equipment by system
-#     sub_system = {
-#         subsystem.id: subsystem.parent_id for subsystem in equipment_subsystem
-#     }
-#     systems = {
-#         system.id: {"name": system.name, "total_eaf": 0, "equipments": []}
-#         for system in equipment_system
-#     }
-#     for equipment in result:
-#         if equipment["parent_id"] in sub_system:
-#             systems[sub_system[equipment["parent_id"]]]["equipments"].append(equipment)
-#             systems[sub_system[equipment["parent_id"]]]["total_eaf"] += equipment["eaf"]
-#     # Convert the systems dictionary to a list of aggregated results
-#     aggregated_result = [
-#         {
-#             "system_id": system_id,
-#             "system_name": system_data["name"],
-#             "total_eaf": round(system_data["total_eaf"], 4),
-#             "equipments": system_data["equipments"],
-#         }
-#         for system_id, system_data in systems.items()
-#     ]
-#     # Sort the aggregated result by total_eaf in descending order
-#     aggregated_result.sort(key=lambda x: x["total_eaf"], reverse=True)
-#     # Filter systems up to the threshold
-#     cumulative_eaf = 0
-#     filtered_aggregated_result = []
-#     for system in aggregated_result:
-#         cumulative_eaf += system["total_eaf"]
-#         filtered_aggregated_result.append(system)
-#         if cumulative_eaf >= eaf_threshold:
-#             break
-#     return filtered_aggregated_result
-# async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str, oh_duration = 8000) -> List[dict]:
-#     """
-#     Generate a timeline of EAF values based on input parameters.
-#     Args:
-#         eaf_input (float): EAF value to check against thresholds
-#         oh_session_id (str): OH session identifier
-#     Returns:
-#         List[dict]: List of dictionaries containing dates and their EAF values
-#     """
-#     # Define EAF thresholds
-#     MIN_EAF = 30
-#     MAX_EAF = 80
-#     # Get OH session
-#     oh_session = await get_overhaul(db_session=db_session, overhaul_session_id=oh_session_id)
-#     # Dummy OH session dates
-#     oh_session_start = oh_session.start_date
-#     oh_session_end = oh_session_start + timedelta(hours=oh_duration)
-#     # Initialize result list
-#     results = []
-#     # Determine date range based on EAF input
-#     if MIN_EAF <= eaf_input <= MAX_EAF:
-#         start_date = oh_session_start
-#         end_date = oh_session_end
-#     elif eaf_input < MIN_EAF:
-#         # If below minimum, extend end date by 12 months
-#         start_date = oh_session_start
-#         end_date = oh_session_end + timedelta(days=360)
-#     else:  # eaf_input > MAX_EAF
-#         # If above maximum, reduce end date by 6 months
-#         start_date = oh_session_start
-#         end_date = oh_session_end - timedelta(days=180)
-#     total_hours = (end_date - start_date).total_seconds() / 3600
-#     # Generate random down periods
-#     results = []
-#     # Generate down periods for each EAF scenario
-#     down_periods = {
-#         'eaf1': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
-#         'eaf2': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
-#         'eaf3': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
-#         'eaf4': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90)
-#     }
-#     # Define EAF values for downtime periods
-#     eaf_downtime_values = {
-#         'eaf1': 0.8,
-#         'eaf2': 0.65,
-#         'eaf3': 0.35,
-#         'eaf4': 0
-#     }
-#     # Generate hourly entries
-#     current_time = start_date
-#     while current_time <= end_date:
-#         time_str = current_time.strftime('%Y-%m-%d %H:00:00')
-#         # Initialize dictionary for this hour with default values (system up)
-#         hourly_entry = {
-#             'date': time_str,
-#             'eaf1_value': 1.0,
-#             'eaf2_value': 0.75,
-#             'eaf3_value': 0.6,
-#             'eaf4_value': 0.3
-#         }
-#         # Check each EAF scenario
-#         for eaf_key in down_periods:
-#             # Check if current hour is in any down period for this EAF
-#             for period_start, period_end in down_periods[eaf_key]:
-#                 if period_start <= current_time <= period_end:
-#                     hourly_entry[f'{eaf_key}_value'] = eaf_downtime_values[eaf_key]
-#                     break
-#         results.append(hourly_entry)
-#         current_time += timedelta(hours=1)
 # return results
 async def run_rbd_simulation(*, sim_hours: int, token):
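The body of `run_rbd_simulation` falls outside this hunk. Given the base URL and the Bearer-token header pattern visible in `get_simulation_results`, a plausible sketch follows; the run endpoint path and payload keys are assumptions, not confirmed by this diff:

```python
# Hedged sketch only; the actual implementation is not shown in this commit.
import httpx

RBD_SERVICE_API = "http://192.168.1.82:8000/rbd"

async def run_rbd_simulation(*, sim_hours: int, token: str) -> dict:
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
    }
    run_url = f"{RBD_SERVICE_API}/aeros/simulation/run"  # assumed path
    async with httpx.AsyncClient(timeout=300.0) as client:
        response = await client.post(run_url, headers=headers, json={"sim_hours": sim_hours})
        response.raise_for_status()
        # The router reads the new simulation id from the 'data' field of this envelope.
        return response.json()
```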
@@ -217,8 +49,8 @@ async def get_simulation_results(*, simulation_id: str, token: str):
         "Content-Type": "application/json"
     }
-    calc_result_url = f"{RBD_SERVICE_API}/aeros/simulation/result/calc/{simulation_id}"
-    plot_result_url = f"{RBD_SERVICE_API}/aeros/simulation/result/plot/{simulation_id}"
+    calc_result_url = f"{RBD_SERVICE_API}/aeros/simulation/result/calc/{simulation_id}?nodetype=RegularNode"
+    plot_result_url = f"{RBD_SERVICE_API}/aeros/simulation/result/plot/{simulation_id}?nodetype=RegularNode"
     async with httpx.AsyncClient(timeout=300.0) as client:
         calc_task = client.get(calc_result_url, headers=headers)
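The hunk ends just after the first request is created, so the two result calls are presumably awaited together. A completion sketch using `asyncio.gather`, assuming both endpoints return JSON and that the function merges the two payloads (the merged shape is an assumption):

```python
# Hedged completion sketch; only the request setup appears in the diff.
import asyncio
import httpx

async def fetch_simulation_results(calc_result_url: str, plot_result_url: str, headers: dict) -> dict:
    async with httpx.AsyncClient(timeout=300.0) as client:
        # Issue both GETs concurrently instead of sequentially.
        calc_response, plot_response = await asyncio.gather(
            client.get(calc_result_url, headers=headers),
            client.get(plot_result_url, headers=headers),
        )
    calc_response.raise_for_status()
    plot_response.raise_for_status()
    return {"calc": calc_response.json(), "plot": plot_response.json()}
```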
