|
|
|
|
@ -1,16 +1,11 @@
|
|
|
|
|
from typing import List
|
|
|
|
|
from typing import Optional
|
|
|
|
|
from typing import Tuple
|
|
|
|
|
import datetime
|
|
|
|
|
from typing import List, Optional, Tuple
|
|
|
|
|
from uuid import UUID
|
|
|
|
|
|
|
|
|
|
import numpy as np
|
|
|
|
|
from fastapi import HTTPException
|
|
|
|
|
from fastapi import status
|
|
|
|
|
from sqlalchemy import and_
|
|
|
|
|
from sqlalchemy import case
|
|
|
|
|
from sqlalchemy import func
|
|
|
|
|
from sqlalchemy import select
|
|
|
|
|
from sqlalchemy import update
|
|
|
|
|
import requests
|
|
|
|
|
from fastapi import HTTPException, status
|
|
|
|
|
from sqlalchemy import and_, case, func, select, update
|
|
|
|
|
from sqlalchemy.orm import joinedload
|
|
|
|
|
|
|
|
|
|
from src.database.core import DbSession
|
|
|
|
|
@ -18,20 +13,17 @@ from src.overhaul_activity.service import get_all_by_session_id
|
|
|
|
|
from src.overhaul_scope.service import get as get_scope
|
|
|
|
|
from src.workorder.model import MasterWorkOrder
|
|
|
|
|
|
|
|
|
|
from .model import CalculationData
|
|
|
|
|
from .model import CalculationEquipmentResult
|
|
|
|
|
from .model import CalculationResult
|
|
|
|
|
from .schema import CalculationResultsRead
|
|
|
|
|
from .schema import CalculationTimeConstrainsParametersCreate
|
|
|
|
|
from .schema import CalculationTimeConstrainsRead
|
|
|
|
|
from .schema import OptimumResult
|
|
|
|
|
from .schema import CalculationSelectedEquipmentUpdate
|
|
|
|
|
|
|
|
|
|
import requests
|
|
|
|
|
import datetime
|
|
|
|
|
from .model import (CalculationData, CalculationEquipmentResult,
|
|
|
|
|
CalculationResult)
|
|
|
|
|
from .schema import (CalculationResultsRead,
|
|
|
|
|
CalculationSelectedEquipmentUpdate,
|
|
|
|
|
CalculationTimeConstrainsParametersCreate,
|
|
|
|
|
CalculationTimeConstrainsRead, OptimumResult)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_overhaul_cost_by_time_chart(overhaul_cost: float, days: int,numEquipments:int ,decay_base: float = 1.01) -> np.ndarray:
|
|
|
|
|
def get_overhaul_cost_by_time_chart(
|
|
|
|
|
overhaul_cost: float, days: int, numEquipments: int, decay_base: float = 1.01
|
|
|
|
|
) -> np.ndarray:
|
|
|
|
|
if overhaul_cost < 0:
|
|
|
|
|
raise ValueError("Overhaul cost cannot be negative")
|
|
|
|
|
if days <= 0:
|
|
|
|
|
@ -40,7 +32,7 @@ def get_overhaul_cost_by_time_chart(overhaul_cost: float, days: int,numEquipment
|
|
|
|
|
exponents = np.arange(0, days)
|
|
|
|
|
cost_per_equipment = overhaul_cost / numEquipments
|
|
|
|
|
# Using a slower decay base to spread the budget depletion over more days
|
|
|
|
|
results = cost_per_equipment / (decay_base ** exponents)
|
|
|
|
|
results = cost_per_equipment / (decay_base**exponents)
|
|
|
|
|
results = np.where(np.isfinite(results), results, 0)
|
|
|
|
|
return results
|
|
|
|
|
|
|
|
|
|
@ -61,7 +53,10 @@ def get_overhaul_cost_by_time_chart(overhaul_cost: float, days: int,numEquipment
|
|
|
|
|
# results = np.where(np.isfinite(results), results, 0)
|
|
|
|
|
# return results
|
|
|
|
|
|
|
|
|
|
async def get_corrective_cost_time_chart(material_cost: float, service_cost: float, location_tag: str, token) -> Tuple[np.ndarray, np.ndarray]:
|
|
|
|
|
|
|
|
|
|
async def get_corrective_cost_time_chart(
|
|
|
|
|
material_cost: float, service_cost: float, location_tag: str, token
|
|
|
|
|
) -> Tuple[np.ndarray, np.ndarray]:
|
|
|
|
|
"""
|
|
|
|
|
Fetch failure data from API and calculate corrective costs, ensuring 365 days of data.
|
|
|
|
|
|
|
|
|
|
@ -74,14 +69,14 @@ async def get_corrective_cost_time_chart(material_cost: float, service_cost: flo
|
|
|
|
|
Returns:
|
|
|
|
|
Tuple of (corrective_costs, daily_failure_rate)
|
|
|
|
|
"""
|
|
|
|
|
url = f'http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/2024-01-01/2024-12-31'
|
|
|
|
|
url = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/2024-01-01/2024-12-31"
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
response = requests.get(
|
|
|
|
|
url,
|
|
|
|
|
headers={
|
|
|
|
|
'Content-Type': 'application/json',
|
|
|
|
|
'Authorization': f'Bearer {token}'
|
|
|
|
|
"Content-Type": "application/json",
|
|
|
|
|
"Authorization": f"Bearer {token}",
|
|
|
|
|
},
|
|
|
|
|
)
|
|
|
|
|
data = response.json()
|
|
|
|
|
@ -92,8 +87,8 @@ async def get_corrective_cost_time_chart(material_cost: float, service_cost: flo
|
|
|
|
|
|
|
|
|
|
# Create a dictionary of existing data
|
|
|
|
|
data_dict = {
|
|
|
|
|
datetime.datetime.strptime(item['date'], '%d %b %Y'): item['num_fail']
|
|
|
|
|
for item in data['data']
|
|
|
|
|
datetime.datetime.strptime(item["date"], "%d %b %Y"): item["num_fail"]
|
|
|
|
|
for item in data["data"]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
# Fill in missing dates with nearest available value
|
|
|
|
|
@ -121,6 +116,7 @@ async def get_corrective_cost_time_chart(material_cost: float, service_cost: flo
|
|
|
|
|
print(f"Error fetching or processing data: {str(e)}")
|
|
|
|
|
raise
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# def get_corrective_cost_time_chart(material_cost: float, service_cost: float, days: int, numEquipments: int) -> Tuple[np.ndarray, np.ndarray]:
|
|
|
|
|
# day_points = np.arange(0, days)
|
|
|
|
|
|
|
|
|
|
@ -147,12 +143,18 @@ async def get_corrective_cost_time_chart(material_cost: float, service_cost: flo
|
|
|
|
|
# return corrective_costs, daily_failure_rate
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def create_param_and_data(*, db_session: DbSession, calculation_param_in: CalculationTimeConstrainsParametersCreate, created_by: str, parameter_id: Optional[UUID] = None):
|
|
|
|
|
async def create_param_and_data(
|
|
|
|
|
*,
|
|
|
|
|
db_session: DbSession,
|
|
|
|
|
calculation_param_in: CalculationTimeConstrainsParametersCreate,
|
|
|
|
|
created_by: str,
|
|
|
|
|
parameter_id: Optional[UUID] = None,
|
|
|
|
|
):
|
|
|
|
|
"""Creates a new document."""
|
|
|
|
|
if calculation_param_in.ohSessionId is None:
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
status_code=status.HTTP_400_BAD_REQUEST,
|
|
|
|
|
detail="overhaul_session_id is required"
|
|
|
|
|
detail="overhaul_session_id is required",
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
calculationData = await CalculationData.create_with_param(
|
|
|
|
|
@ -161,36 +163,39 @@ async def create_param_and_data(*, db_session: DbSession, calculation_param_in:
|
|
|
|
|
avg_failure_cost=calculation_param_in.costPerFailure,
|
|
|
|
|
overhaul_cost=calculation_param_in.overhaulCost,
|
|
|
|
|
created_by=created_by,
|
|
|
|
|
params_id=parameter_id
|
|
|
|
|
params_id=parameter_id,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return calculationData
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_calculation_result(db_session: DbSession, calculation_id: str):
|
|
|
|
|
days=365
|
|
|
|
|
scope_calculation = await get_calculation_data_by_id(db_session=db_session, calculation_id=calculation_id)
|
|
|
|
|
days = 365
|
|
|
|
|
scope_calculation = await get_calculation_data_by_id(
|
|
|
|
|
db_session=db_session, calculation_id=calculation_id
|
|
|
|
|
)
|
|
|
|
|
if not scope_calculation:
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
status_code=status.HTTP_404_NOT_FOUND,
|
|
|
|
|
detail="A data with this id does not exist.",
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
scope_overhaul = await get_scope(db_session=db_session, overhaul_session_id=scope_calculation.overhaul_session_id)
|
|
|
|
|
scope_overhaul = await get_scope(
|
|
|
|
|
db_session=db_session, overhaul_session_id=scope_calculation.overhaul_session_id
|
|
|
|
|
)
|
|
|
|
|
if not scope_overhaul:
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
status_code=status.HTTP_404_NOT_FOUND,
|
|
|
|
|
detail="A data with this id does not exist.",
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
calculation_results = []
|
|
|
|
|
for i in range(days):
|
|
|
|
|
result = {
|
|
|
|
|
"overhaul_cost": 0,
|
|
|
|
|
"corrective_cost": 0,
|
|
|
|
|
"num_failures": 0,
|
|
|
|
|
"day": i + 1
|
|
|
|
|
"day": i + 1,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for eq in scope_calculation.equipment_results:
|
|
|
|
|
@ -200,10 +205,8 @@ async def get_calculation_result(db_session: DbSession, calculation_id: str):
|
|
|
|
|
result["overhaul_cost"] += float(eq.overhaul_costs[i])
|
|
|
|
|
result["num_failures"] += int(eq.daily_failures[i])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
calculation_results.append(CalculationResultsRead(**result))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Check if calculation already exist
|
|
|
|
|
return CalculationTimeConstrainsRead(
|
|
|
|
|
id=scope_calculation.id,
|
|
|
|
|
@ -211,18 +214,22 @@ async def get_calculation_result(db_session: DbSession, calculation_id: str):
|
|
|
|
|
scope=scope_overhaul.type,
|
|
|
|
|
results=calculation_results,
|
|
|
|
|
optimum_oh=scope_calculation.optimum_oh_day,
|
|
|
|
|
equipment_results=scope_calculation.equipment_results
|
|
|
|
|
equipment_results=scope_calculation.equipment_results,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_calculation_data_by_id(
    db_session: DbSession, calculation_id
) -> Optional[CalculationData]:
    """Fetch a single CalculationData row by primary key.

    Eager-loads the ``equipment_results`` and ``parameter`` relationships
    via ``joinedload`` so callers can read them without triggering lazy
    loads after the session work is done.

    Args:
        db_session: Async database session.
        calculation_id: Primary-key value of the CalculationData row.

    Returns:
        The matching ``CalculationData`` with relationships populated, or
        ``None`` when no row matches (``Result.scalar()`` yields ``None``
        on an empty result).
    """
    stmt = (
        select(CalculationData)
        .filter(CalculationData.id == calculation_id)
        .options(
            joinedload(CalculationData.equipment_results),
            joinedload(CalculationData.parameter),
        )
    )

    result = await db_session.execute(stmt)
    # unique() is required: joinedload against a collection relationship
    # duplicates the parent row once per child in the raw result set.
    return result.unique().scalar()
|
|
|
|
|
|
|
|
|
|
@ -287,20 +294,21 @@ async def get_calculation_data_by_id(db_session: DbSession, calculation_id) -> C
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def create_calculation_result_service(
|
|
|
|
|
db_session: DbSession,
|
|
|
|
|
calculation: CalculationData,
|
|
|
|
|
token: str
|
|
|
|
|
db_session: DbSession, calculation: CalculationData, token: str
|
|
|
|
|
) -> CalculationTimeConstrainsRead:
|
|
|
|
|
days = 365 # Changed to 365 days as per requirement
|
|
|
|
|
|
|
|
|
|
# Get all equipment for this calculation session
|
|
|
|
|
equipments = await get_all_by_session_id(db_session=db_session, overhaul_session_id=calculation.overhaul_session_id)
|
|
|
|
|
scope = await get_scope(db_session=db_session, overhaul_session_id=calculation.overhaul_session_id)
|
|
|
|
|
|
|
|
|
|
calculation_data = await get_calculation_data_by_id(db_session=db_session, calculation_id=calculation.id)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
equipments = await get_all_by_session_id(
|
|
|
|
|
db_session=db_session, overhaul_session_id=calculation.overhaul_session_id
|
|
|
|
|
)
|
|
|
|
|
scope = await get_scope(
|
|
|
|
|
db_session=db_session, overhaul_session_id=calculation.overhaul_session_id
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
calculation_data = await get_calculation_data_by_id(
|
|
|
|
|
db_session=db_session, calculation_id=calculation.id
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Store results for each equipment
|
|
|
|
|
equipment_results: List[CalculationEquipmentResult] = []
|
|
|
|
|
@ -313,31 +321,33 @@ async def create_calculation_result_service(
|
|
|
|
|
material_cost=eq.material_cost,
|
|
|
|
|
service_cost=eq.service_cost,
|
|
|
|
|
token=token,
|
|
|
|
|
location_tag=eq.equipment.location_tag
|
|
|
|
|
location_tag=eq.equipment.location_tag,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
overhaul_cost_points = get_overhaul_cost_by_time_chart(
|
|
|
|
|
calculation_data.parameter.overhaul_cost,
|
|
|
|
|
days=len(corrective_costs),
|
|
|
|
|
numEquipments=len(equipments)
|
|
|
|
|
)
|
|
|
|
|
numEquipments=len(equipments),
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Calculate individual equipment optimum points
|
|
|
|
|
equipment_total_cost = corrective_costs + overhaul_cost_points
|
|
|
|
|
equipment_optimum_index = np.argmin(equipment_total_cost)
|
|
|
|
|
equipment_failure_sum = sum(daily_failures[:equipment_optimum_index])
|
|
|
|
|
|
|
|
|
|
equipment_results.append(CalculationEquipmentResult(
|
|
|
|
|
corrective_costs=corrective_costs.tolist(),
|
|
|
|
|
overhaul_costs=overhaul_cost_points.tolist(),
|
|
|
|
|
daily_failures=daily_failures.tolist(),
|
|
|
|
|
assetnum=eq.assetnum,
|
|
|
|
|
material_cost=eq.material_cost,
|
|
|
|
|
service_cost=eq.service_cost,
|
|
|
|
|
optimum_day=int(equipment_optimum_index + 1),
|
|
|
|
|
calculation_data_id=calculation.id,
|
|
|
|
|
master_equipment=eq.equipment
|
|
|
|
|
))
|
|
|
|
|
equipment_results.append(
|
|
|
|
|
CalculationEquipmentResult(
|
|
|
|
|
corrective_costs=corrective_costs.tolist(),
|
|
|
|
|
overhaul_costs=overhaul_cost_points.tolist(),
|
|
|
|
|
daily_failures=daily_failures.tolist(),
|
|
|
|
|
assetnum=eq.assetnum,
|
|
|
|
|
material_cost=eq.material_cost,
|
|
|
|
|
service_cost=eq.service_cost,
|
|
|
|
|
optimum_day=int(equipment_optimum_index + 1),
|
|
|
|
|
calculation_data_id=calculation.id,
|
|
|
|
|
master_equipment=eq.equipment,
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Add to totals
|
|
|
|
|
total_corrective_costs += corrective_costs
|
|
|
|
|
@ -345,7 +355,6 @@ async def create_calculation_result_service(
|
|
|
|
|
|
|
|
|
|
db_session.add_all(equipment_results)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Calculate optimum points using total costs
|
|
|
|
|
total_cost = total_corrective_costs + overhaul_cost_points
|
|
|
|
|
optimum_oh_index = np.argmin(total_cost)
|
|
|
|
|
@ -355,7 +364,7 @@ async def create_calculation_result_service(
|
|
|
|
|
overhaul_cost=float(overhaul_cost_points[optimum_oh_index]),
|
|
|
|
|
corrective_cost=float(total_corrective_costs[optimum_oh_index]),
|
|
|
|
|
num_failures=int(numbers_of_failure),
|
|
|
|
|
days=int(optimum_oh_index + 1)
|
|
|
|
|
days=int(optimum_oh_index + 1),
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# # Create calculation results for database
|
|
|
|
|
@ -376,7 +385,6 @@ async def create_calculation_result_service(
|
|
|
|
|
|
|
|
|
|
await db_session.commit()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Return results including individual equipment data
|
|
|
|
|
return CalculationTimeConstrainsRead(
|
|
|
|
|
id=calculation.id,
|
|
|
|
|
@ -384,26 +392,34 @@ async def create_calculation_result_service(
|
|
|
|
|
scope=scope.type,
|
|
|
|
|
results=[],
|
|
|
|
|
optimum_oh=optimum,
|
|
|
|
|
equipment_results=equipment_results
|
|
|
|
|
equipment_results=equipment_results,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_calculation_by_reference_and_parameter(
    *, db_session: DbSession, calculation_reference_id, parameter_id
):
    """Look up the CalculationData matching a reference id AND a parameter id.

    Used to check whether a calculation already exists for a given
    (reference, parameter) pair before creating a new one.

    Args:
        db_session: Async database session.
        calculation_reference_id: Value compared against ``CalculationData.reference_id``.
        parameter_id: Value compared against ``CalculationData.parameter_id``.

    Returns:
        The matching ``CalculationData``, or ``None`` when no row satisfies
        both conditions.
    """
    stmt = select(CalculationData).filter(
        and_(
            CalculationData.reference_id == calculation_reference_id,
            CalculationData.parameter_id == parameter_id,
        )
    )

    result = await db_session.execute(stmt)

    return result.scalar()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_calculation_result_by_day(*, db_session: DbSession, calculation_id, simulation_day):
|
|
|
|
|
stmt = select(CalculationResult).filter(and_(
|
|
|
|
|
CalculationResult.day == simulation_day,
|
|
|
|
|
CalculationResult.calculation_data_id == calculation_id
|
|
|
|
|
))
|
|
|
|
|
async def get_calculation_result_by_day(
|
|
|
|
|
*, db_session: DbSession, calculation_id, simulation_day
|
|
|
|
|
):
|
|
|
|
|
stmt = select(CalculationResult).filter(
|
|
|
|
|
and_(
|
|
|
|
|
CalculationResult.day == simulation_day,
|
|
|
|
|
CalculationResult.calculation_data_id == calculation_id,
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
result = await db_session.execute(stmt)
|
|
|
|
|
|
|
|
|
|
@ -411,21 +427,22 @@ async def get_calculation_result_by_day(*, db_session: DbSession, calculation_id
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_avg_cost_by_asset(*, db_session: DbSession, assetnum: str):
    """Compute the average ``MasterWorkOrder.total_cost_max`` for one asset.

    Args:
        db_session: Async database session.
        assetnum: Asset number filtering the work orders.

    Returns:
        The average cost as returned by SQL ``AVG``, or ``None`` when the
        asset has no work orders (``AVG`` over an empty set is NULL).
    """
    stmt = select(func.avg(MasterWorkOrder.total_cost_max).label("average_cost")).where(
        MasterWorkOrder.assetnum == assetnum
    )

    result = await db_session.execute(stmt)
    return result.scalar_one_or_none()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def bulk_update_equipment(*, db: DbSession, selected_equipments: List[CalculationSelectedEquipmentUpdate], calculation_data_id: UUID):
|
|
|
|
|
async def bulk_update_equipment(
|
|
|
|
|
*,
|
|
|
|
|
db: DbSession,
|
|
|
|
|
selected_equipments: List[CalculationSelectedEquipmentUpdate],
|
|
|
|
|
calculation_data_id: UUID,
|
|
|
|
|
):
|
|
|
|
|
# Create a dictionary mapping assetnum to is_included status
|
|
|
|
|
case_mappings = {
|
|
|
|
|
asset.assetnum: asset.is_included
|
|
|
|
|
for asset in selected_equipments
|
|
|
|
|
}
|
|
|
|
|
case_mappings = {asset.assetnum: asset.is_included for asset in selected_equipments}
|
|
|
|
|
|
|
|
|
|
# Get all assetnums that need to be updated
|
|
|
|
|
assetnums = list(case_mappings.keys())
|
|
|
|
|
@ -441,9 +458,13 @@ async def bulk_update_equipment(*, db: DbSession, selected_equipments: List[Calc
|
|
|
|
|
update(CalculationEquipmentResult)
|
|
|
|
|
.where(CalculationEquipmentResult.calculation_data_id == calculation_data_id)
|
|
|
|
|
.where(CalculationEquipmentResult.assetnum.in_(assetnums))
|
|
|
|
|
.values({
|
|
|
|
|
"is_included": case(*when_clauses) # Unpack the when clauses as separate arguments
|
|
|
|
|
})
|
|
|
|
|
.values(
|
|
|
|
|
{
|
|
|
|
|
"is_included": case(
|
|
|
|
|
*when_clauses
|
|
|
|
|
) # Unpack the when clauses as separate arguments
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
await db.execute(stmt)
|
|
|
|
|
|