diff --git a/src/calculation_time_constrains/service.py b/src/calculation_time_constrains/service.py
index 3204b1e..49fe394 100644
--- a/src/calculation_time_constrains/service.py
+++ b/src/calculation_time_constrains/service.py
@@ -20,6 +20,7 @@ from .schema import (CalculationResultsRead,
                      CalculationTimeConstrainsParametersCreate,
                      CalculationTimeConstrainsRead, OptimumResult)
+from src.utils import get_latest_numOfFail
 
 
 def get_overhaul_cost_by_time_chart(
     overhaul_cost: float, days: int, numEquipments: int, decay_base: float = 1.01
@@ -29,10 +30,10 @@ def get_overhaul_cost_by_time_chart(
     if days <= 0:
         raise ValueError("Days must be positive")
 
-    exponents = np.arange(0, days)
+    exponents = np.arange(1, days+1)
     cost_per_equipment = overhaul_cost / numEquipments
     # Using a slower decay base to spread the budget depletion over more days
-    results = cost_per_equipment / (decay_base**exponents)
+    results = cost_per_equipment - ((cost_per_equipment / days) * exponents)
     results = np.where(np.isfinite(results), results, 0)
 
     return results
@@ -55,7 +56,7 @@ def get_overhaul_cost_by_time_chart(
 
 
 async def get_corrective_cost_time_chart(
-    material_cost: float, service_cost: float, location_tag: str, token
+    material_cost: float, service_cost: float, location_tag: str, token, max_days: int
 ) -> Tuple[np.ndarray, np.ndarray]:
     """
     Fetch failure data from API and calculate corrective costs, ensuring 365 days of data.
@@ -69,7 +70,11 @@ async def get_corrective_cost_time_chart(
     Returns:
         Tuple of (corrective_costs, daily_failure_rate)
     """
-    url = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/2024-01-01/2024-12-31"
+    start_date = datetime.datetime(2025, 1, 1)
+
+    # Calculate end date (max_days days after start date)
+    end_date = start_date + datetime.timedelta(days=max_days)
+    url = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/{start_date.strftime('%Y-%m-%d')}/{end_date.strftime('%Y-%m-%d')}"
 
     try:
         response = requests.get(
@@ -81,9 +86,17 @@ async def get_corrective_cost_time_chart(
         )
         data = response.json()
 
+        # Get the latest number of failures from today's data
+        # latest_num_of_fail: float = get_latest_numOfFail(location_tag=location_tag, token=token)
+
+        latest_num = data['data'][-1]['num_fail']
+
+        if not latest_num:
+            latest_num = 1
+
         # Create a complete date range for 2024
-        start_date = datetime.datetime(2024, 1, 1)
-        date_range = [start_date + datetime.timedelta(days=x) for x in range(365)]
+        start_date = datetime.datetime(2025, 1, 1)
+        date_range = [start_date + datetime.timedelta(days=x) for x in range(max_days)]
 
         # Create a dictionary of existing data
         data_dict = {
@@ -91,6 +104,7 @@ async def get_corrective_cost_time_chart(
             for item in data["data"]
         }
 
+        # Fill in missing dates with nearest available value
         complete_data = []
         last_known_value = 0 # Default value if no data is available
 
@@ -102,7 +116,6 @@ async def get_corrective_cost_time_chart(
                 last_known_value = data_dict[date]
                 complete_data.append(last_known_value)
             else:
-                not_full_data.append(location_tag)
                 complete_data.append(0)
         # Convert to numpy array
         daily_failure = np.array(complete_data)
@@ -110,7 +123,11 @@ async def get_corrective_cost_time_chart(
         # failure_counts = np.cumsum(daily_failure)
 
         # Calculate corrective costs
-        cost_per_failure = material_cost + service_cost
+        cost_per_failure = (material_cost + service_cost) / latest_num
+
+        if cost_per_failure == 0:
+            raise ValueError("Cost per failure cannot be zero")
+
         corrective_costs = daily_failure * cost_per_failure
 
         return corrective_costs, daily_failure
@@ -173,7 +190,7 @@ async def create_param_and_data(
 
 
 async def get_calculation_result(db_session: DbSession, calculation_id: str):
-    days = 365
+    days = 667
     scope_calculation = await get_calculation_data_by_id(
         db_session=db_session, calculation_id=calculation_id
     )
@@ -299,7 +316,7 @@ async def get_calculation_data_by_id(
 async def create_calculation_result_service(
     db_session: DbSession, calculation: CalculationData, token: str
 ) -> CalculationTimeConstrainsRead:
-    days = 365 # Changed to 365 days as per requirement
+    days = 667 # Changed to 667 days as per requirement
 
     # Get all equipment for this calculation session
     equipments = await get_all_by_session_id(
@@ -325,6 +342,7 @@ async def create_calculation_result_service(
                 service_cost=eq.service_cost,
                 token=token,
                 location_tag=eq.equipment.location_tag,
+                max_days=667,
             )
             # corrective_costs, daily_failures = get_corrective_cost_time_chart(
             #     material_cost=eq.material_cost,
diff --git a/src/utils.py b/src/utils.py
index 12e7023..4b9dff1 100644
--- a/src/utils.py
+++ b/src/utils.py
@@ -88,3 +88,27 @@ def parse_date_string(date_str: str) -> Optional[datetime]:
 
 def time_now():
     return datetime.now(pytz.timezone(TIMEZONE))
+
+import requests
+
+def get_latest_numOfFail(location_tag, token) -> float:
+    today = datetime.today().strftime("%Y-%m-%d")
+    url_today = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/2016-01-01/{today}"
+
+    try:
+        response = requests.get(url_today, headers={
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {token}",
+        })
+        data = response.json()
+
+        # The most recent entry holds the latest number of failures
+        latest_num = data['data'][-1]['num_fail']
+
+        if not latest_num:
+            latest_num = 0
+
+        return latest_num
+    except requests.exceptions.RequestException as e:
+        print(f"Error fetching data: {e}")
+        return 0
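
For reference, a minimal standalone sketch (not part of the patch; the input values are illustrative only) of how the reworked schedule in get_overhaul_cost_by_time_chart behaves: with exponents = np.arange(1, days+1), the remaining per-equipment budget is depleted linearly and reaches exactly zero on the final day, instead of decaying geometrically by decay_base.

import numpy as np

# Illustrative inputs only; real values come from the calculation session.
overhaul_cost, numEquipments, days = 1_000_000.0, 4, 667

exponents = np.arange(1, days + 1)
cost_per_equipment = overhaul_cost / numEquipments
# Same expression as the patched line in service.py
results = cost_per_equipment - ((cost_per_equipment / days) * exponents)

print(results[0])   # cost_per_equipment * (1 - 1/days) on day 1
print(results[-1])  # 0.0 on the last day: the budget is fully depleted

This spreads the budget evenly across the same 667-day horizon used for the corrective-cost calculation elsewhere in the patch.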