|
|
|
@ -28,7 +28,7 @@ from .schema import (CalculationResultsRead,
|
|
|
|
CalculationTimeConstrainsParametersCreate,
|
|
|
|
CalculationTimeConstrainsParametersCreate,
|
|
|
|
CalculationTimeConstrainsRead, OptimumResult)
|
|
|
|
CalculationTimeConstrainsRead, OptimumResult)
|
|
|
|
|
|
|
|
|
|
|
|
from .utils import analyze_monthly_metrics, calculate_failures_per_month, calculate_risk_cost_per_failure, create_time_series_data, get_monthly_risk_analysis, get_months_between
|
|
|
|
from .utils import analyze_monthly_metrics, calculate_failures_per_month, calculate_risk_cost_per_failure, create_time_series_data, get_monthly_risk_analysis, get_months_between, plant_simulation_metrics
|
|
|
|
from src.equipment_sparepart.model import ScopeEquipmentPart
|
|
|
|
from src.equipment_sparepart.model import ScopeEquipmentPart
|
|
|
|
import copy
|
|
|
|
import copy
|
|
|
|
import random
|
|
|
|
import random
|
|
|
|
@ -115,9 +115,9 @@ class OptimumCostModelWithSpareparts:
|
|
|
|
await self.session.close()
|
|
|
|
await self.session.close()
|
|
|
|
self.session = None
|
|
|
|
self.session = None
|
|
|
|
|
|
|
|
|
|
|
|
async def get_failures_prediction(self, simulation_id: str, location_tag: str, birnbaum_importance: float):
|
|
|
|
async def get_failures_prediction(self, simulation_id: str, location_tag: str, birnbaum_importance: float, use_location_tag: int = 1):
|
|
|
|
"""Get failure predictions for equipment from simulation service"""
|
|
|
|
"""Get failure predictions for equipment from simulation service"""
|
|
|
|
plot_result_url = f"{self.api_base_url}/aeros/simulation/result/plot/{simulation_id}/{location_tag}?use_location_tag=1"
|
|
|
|
plot_result_url = f"{self.api_base_url}/aeros/simulation/result/plot/{simulation_id}/{location_tag}?use_location_tag={use_location_tag}"
|
|
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
try:
|
|
|
|
response = requests.get(
|
|
|
|
response = requests.get(
|
|
|
|
@ -140,8 +140,8 @@ class OptimumCostModelWithSpareparts:
|
|
|
|
self.logger.warning(f"No plot data available for {location_tag}")
|
|
|
|
self.logger.warning(f"No plot data available for {location_tag}")
|
|
|
|
return None
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
time_series = create_time_series_data(plot_data, 43830)
|
|
|
|
time_series = create_time_series_data(plot_data, (self.time_window_months * 24 * 31))
|
|
|
|
monthly_data = analyze_monthly_metrics(time_series)
|
|
|
|
monthly_data = analyze_monthly_metrics(time_series, self.last_oh_date)
|
|
|
|
|
|
|
|
|
|
|
|
return monthly_data
|
|
|
|
return monthly_data
|
|
|
|
|
|
|
|
|
|
|
|
@ -362,7 +362,15 @@ class OptimumCostModelWithSpareparts:
|
|
|
|
eq["Location"]: eq
|
|
|
|
eq["Location"]: eq
|
|
|
|
for eq in data
|
|
|
|
for eq in data
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
plant_monthly_metrics = await self.get_failures_prediction(simulation_id=simulation_id, location_tag="plant", use_location_tag=0, birnbaum_importance=0)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
REFERENCE_CAPACITY = 630 # or 550
|
|
|
|
|
|
|
|
COST_PER_MWH = 1_000_000 # rupiah
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
plant_capacity_loss_money = [metrics['derated_mwh'] * COST_PER_MWH for metrics in plant_monthly_metrics.values()]
|
|
|
|
|
|
|
|
cumulative_loss_money = np.cumsum(plant_capacity_loss_money)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
for equipment in equipments:
|
|
|
|
for equipment in equipments:
|
|
|
|
location_tag = equipment.location_tag
|
|
|
|
location_tag = equipment.location_tag
|
|
|
|
birnbaum = equipment_birnbaum.get(location_tag, 0.0)
|
|
|
|
birnbaum = equipment_birnbaum.get(location_tag, 0.0)
|
|
|
|
@ -418,13 +426,14 @@ class OptimumCostModelWithSpareparts:
|
|
|
|
|
|
|
|
|
|
|
|
# Phase 3: Generate final results and database objects
|
|
|
|
# Phase 3: Generate final results and database objects
|
|
|
|
fleet_results = []
|
|
|
|
fleet_results = []
|
|
|
|
total_corrective_costs = np.zeros(max_interval)
|
|
|
|
total_corrective_costs = np.zeros(max_interval) + cumulative_loss_money[0:max_interval]
|
|
|
|
total_preventive_costs = np.zeros(max_interval)
|
|
|
|
total_preventive_costs = np.zeros(max_interval)
|
|
|
|
total_procurement_costs = np.zeros(max_interval)
|
|
|
|
total_procurement_costs = np.zeros(max_interval)
|
|
|
|
total_costs = np.zeros(max_interval)
|
|
|
|
total_costs = np.zeros(max_interval)
|
|
|
|
|
|
|
|
|
|
|
|
total_fleet_procurement_cost = 0
|
|
|
|
total_fleet_procurement_cost = 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
for equipment in equipments:
|
|
|
|
for equipment in equipments:
|
|
|
|
location_tag = equipment.location_tag
|
|
|
|
location_tag = equipment.location_tag
|
|
|
|
|
|
|
|
|
|
|
|
@ -499,6 +508,7 @@ class OptimumCostModelWithSpareparts:
|
|
|
|
# Update calculation with results
|
|
|
|
# Update calculation with results
|
|
|
|
calculation.optimum_oh_day = fleet_optimal_index
|
|
|
|
calculation.optimum_oh_day = fleet_optimal_index
|
|
|
|
calculation.max_interval = max_interval
|
|
|
|
calculation.max_interval = max_interval
|
|
|
|
|
|
|
|
calculation.rbd_simulation_id = simulation_id
|
|
|
|
|
|
|
|
|
|
|
|
# Save all results to database
|
|
|
|
# Save all results to database
|
|
|
|
db_session.add_all(fleet_results)
|
|
|
|
db_session.add_all(fleet_results)
|
|
|
|
@ -717,190 +727,6 @@ async def run_simulation_with_spareparts(*, db_session, calculation, token: str,
|
|
|
|
await optimum_oh_model._close_session()
|
|
|
|
await optimum_oh_model._close_session()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_corrective_cost_time_chart(
    material_cost: float,
    service_cost: float,
    location_tag: str,
    token,
    start_date: datetime,
    end_date: datetime
) -> Tuple[np.ndarray, np.ndarray]:
    """Build monthly corrective-cost and failure-count series for one equipment.

    Historical failures (months up to today) are fetched from the
    reliability service and accumulated cumulatively by month; predicted
    failures (months after today) come from the number-of-failures
    endpoint and are used as-is. Months inside [start_date, end_date]
    with no data are filled from the nearest month that has data.

    Args:
        material_cost: Material cost, summed with service_cost and spread
            over ``latest_num`` failures to get a cost per failure.
        service_cost: Service cost of corrective work.
        location_tag: Equipment location tag used in the service URLs.
        token: Bearer token forwarded to the reliability service.
        start_date: First day of the analysis window.
        end_date: Last day of the analysis window.

    Returns:
        Tuple ``(corrective_costs, monthly_failure)``: per-month cost and
        failure-count arrays, in chronological month order.

    Raises:
        Exception: If the historical-failures request fails.
        ValueError: If the derived failure count falls below 1.

    NOTE(review): assumes both endpoints return
    ``{"data": [{"date": "%d %b %Y", "num_fail": ...}, ...]}`` —
    confirm against the service.
    """
    today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)

    # month (datetime at day 1) -> failure count for that month
    monthly_data = {}
    # Fallback / divisor failure count; kept >= 1 so the cost-per-failure
    # division below cannot divide by zero.
    latest_num = 1

    # --- Historical portion: anything on or before today -----------------
    historical_start = start_date if start_date <= today else None
    historical_end = min(today, end_date)

    if historical_start and historical_start <= historical_end:
        url_history = f"http://192.168.1.82:8000/reliability/main/failures/{location_tag}/{historical_start.strftime('%Y-%m-%d')}/{historical_end.strftime('%Y-%m-%d')}"
        try:
            response = requests.get(
                url_history,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {token}",
                },
            )
            history_data = response.json()

            # Accumulate raw failures per calendar month.
            history_dict = {}
            for item in history_data["data"]:
                # BUGFIX: was ``datetime.datetime.strptime`` /
                # ``datetime.datetime(...)``, an AttributeError given that
                # ``datetime`` is the class here (see ``datetime.now()``
                # and ``datetime(y, m, 1)`` elsewhere in this function).
                date = datetime.strptime(item["date"], "%d %b %Y")
                month_key = datetime(date.year, date.month, 1)
                if month_key not in history_dict:
                    history_dict[month_key] = 0
                if item["num_fail"] is not None:
                    history_dict[month_key] += item["num_fail"]

            sorted_months = sorted(history_dict.keys())
            if sorted_months:
                failures = np.array([history_dict[month] for month in sorted_months])
                cum_failure = np.cumsum(failures)

                # The chart shows cumulative history. (enumerate replaces
                # the old per-month ``list.index`` lookup, which was O(n^2).)
                monthly_failures = {
                    month_key: int(cum_failure[idx])
                    for idx, month_key in enumerate(sorted_months)
                }
                monthly_data.update(monthly_failures)

                # Use the last month's raw failure count (minimum 1) as
                # the cost-per-failure divisor.
                if failures.size > 0:
                    latest_num = max(1, failures[-1])
        except Exception as e:
            raise Exception(f"Error fetching historical data: {e}")

    # --- Predicted portion: anything after today -------------------------
    if end_date >= start_date:
        url_prediction = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/{start_date.strftime('%Y-%m-%d')}/{end_date.strftime('%Y-%m-%d')}"
        try:
            response = requests.get(
                url_prediction,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {token}",
                },
            )
            prediction_data = response.json()

            if prediction_data["data"]:
                for item in prediction_data["data"]:
                    date = datetime.strptime(item["date"], "%d %b %Y")
                    # Predictions only override months strictly after today.
                    if date > today:
                        month_key = datetime(date.year, date.month, 1)
                        monthly_data[month_key] = item["num_fail"] if item["num_fail"] is not None else 0

                # Prefer the newest prediction as the divisor when present.
                last_prediction = prediction_data["data"][-1]["num_fail"]
                if last_prediction is not None:
                    latest_num = max(1, round(last_prediction))
        except Exception as e:
            # Best-effort: a failed prediction fetch leaves gaps that the
            # fill loop below papers over.
            print(f"Error fetching prediction data: {e}")

    # --- Fill any month in [start, end] that has no data -----------------
    current_date = datetime(start_date.year, start_date.month, 1)
    end_month = datetime(end_date.year, end_date.month, 1)

    while current_date <= end_month:
        if current_date not in monthly_data:
            prev_months = [m for m in monthly_data.keys() if m < current_date]
            if prev_months:
                # Carry the most recent earlier month forward.
                monthly_data[current_date] = monthly_data[max(prev_months)]
            else:
                future_months = [m for m in monthly_data.keys() if m > current_date]
                if future_months:
                    # No earlier data: borrow the earliest future month.
                    monthly_data[current_date] = monthly_data[min(future_months)]
                else:
                    # No data anywhere: fall back to the default count.
                    monthly_data[current_date] = latest_num

        # Advance one calendar month.
        if current_date.month == 12:
            current_date = datetime(current_date.year + 1, 1, 1)
        else:
            current_date = datetime(current_date.year, current_date.month + 1, 1)

    # Counts in chronological month order.
    complete_data = [monthly_data[month] for month in sorted(monthly_data.keys())]

    if latest_num < 1:
        raise ValueError("Number of failures cannot be negative", latest_num)

    monthly_failure = np.array(complete_data)
    cost_per_failure = (material_cost + service_cost) / latest_num

    # BUGFIX: removed leftover debug statements — an unconditional
    # ``raise Exception(monthly_data, location_tag)`` here (and a
    # tag-specific ``raise Exception("tes", ...)`` for '3TR-TF005'
    # earlier) made this function unable to ever return.
    corrective_costs = monthly_failure * cost_per_failure

    return corrective_costs, monthly_failure
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_overhaul_cost_by_time_chart(
    overhaul_cost: float, months_num: int, numEquipments: int, decay_base: float = 1.01
) -> np.ndarray:
    """Amortise the per-equipment overhaul cost over an interval of months.

    The cost attributed to month ``t`` (1-based) is
    ``(overhaul_cost / numEquipments) / t`` — the per-equipment cost
    spread hyperbolically over time.

    Args:
        overhaul_cost: Total overhaul cost; must be >= 0.
        months_num: Number of months to chart; must be > 0.
        numEquipments: Number of equipments sharing the cost; must be > 0.
        decay_base: Unused; kept for backward compatibility with existing
            callers (an earlier decay-based formula was removed — see the
            commented-out variant in history).

    Returns:
        Array of length ``months_num`` with the cost assigned to each month.

    Raises:
        ValueError: If any argument is out of range.
    """
    if overhaul_cost < 0:
        raise ValueError("Overhaul cost cannot be negative")
    if months_num <= 0:
        raise ValueError("months_num must be positive")
    # BUGFIX: guard the divisor — previously ``numEquipments == 0`` raised
    # a bare ZeroDivisionError instead of a descriptive ValueError.
    if numEquipments <= 0:
        raise ValueError("numEquipments must be positive")

    months = np.arange(1, months_num + 1)
    cost_per_equipment = overhaul_cost / numEquipments
    return cost_per_equipment / months
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def create_param_and_data(
|
|
|
|
async def create_param_and_data(
|
|
|
|
*,
|
|
|
|
*,
|
|
|
|
db_session: DbSession,
|
|
|
|
db_session: DbSession,
|
|
|
|
@ -927,7 +753,7 @@ async def create_param_and_data(
|
|
|
|
return calculationData
|
|
|
|
return calculationData
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_calculation_result(db_session: DbSession, calculation_id: str):
|
|
|
|
async def get_calculation_result(db_session: DbSession, calculation_id: str, token):
|
|
|
|
"""
|
|
|
|
"""
|
|
|
|
Get calculation results with improved error handling, performance, and sparepart details
|
|
|
|
Get calculation results with improved error handling, performance, and sparepart details
|
|
|
|
"""
|
|
|
|
"""
|
|
|
|
@ -982,12 +808,20 @@ async def get_calculation_result(db_session: DbSession, calculation_id: str):
|
|
|
|
'total_procurement_items': 0,
|
|
|
|
'total_procurement_items': 0,
|
|
|
|
'critical_procurement_items': 0
|
|
|
|
'critical_procurement_items': 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
plant_monthly_metrics = await plant_simulation_metrics(simulation_id=scope_calculation.rbd_simulation_id, location_tag="plant", use_location_tag=0, token=token, last_oh_date=prev_oh_scope.end_date, max_interval=scope_calculation.max_interval)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
REFERENCE_CAPACITY = 630 # or 550
|
|
|
|
|
|
|
|
COST_PER_MWH = 1_000_000 # rupiah
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
plant_capacity_loss_money = [metrics['derated_mwh'] * COST_PER_MWH for metrics in plant_monthly_metrics.values()]
|
|
|
|
|
|
|
|
cumulative_loss_money = np.cumsum(plant_capacity_loss_money)
|
|
|
|
|
|
|
|
|
|
|
|
# Process each month
|
|
|
|
# Process each month
|
|
|
|
for month_index in range(data_num):
|
|
|
|
for month_index in range(data_num):
|
|
|
|
month_result = {
|
|
|
|
month_result = {
|
|
|
|
"overhaul_cost": 0.0,
|
|
|
|
"overhaul_cost": 0.0,
|
|
|
|
"corrective_cost": 0.0,
|
|
|
|
"corrective_cost": cumulative_loss_money[month_index],
|
|
|
|
"procurement_cost": 0.0,
|
|
|
|
"procurement_cost": 0.0,
|
|
|
|
"num_failures": 0.0,
|
|
|
|
"num_failures": 0.0,
|
|
|
|
"day": month_index + 1,
|
|
|
|
"day": month_index + 1,
|
|
|
|
|