fix: add dummy data function

feature/reliability_stat
Cizz22 7 months ago
parent dacca03f60
commit a3bec8cf0c

@@ -14,3 +14,6 @@ COLLECTOR_PORT=5432
COLLECTOR_CREDENTIAL_USER=postgres
COLLECTOR_CREDENTIAL_PASSWORD=postgres
COLLECTOR_NAME=digital_twin
BACKEND_SERVER=

@@ -1,8 +1,8 @@
import datetime
from typing import defaultdict, Coroutine, List, Optional, Tuple, Dict
from typing import Coroutine, List, Optional, Tuple, Dict
from uuid import UUID
import calendar
from src.config import REALIBILITY_SERVICE_API
import numpy as np
import requests
from fastapi import HTTPException, status
@@ -26,16 +26,22 @@ from .schema import (CalculationResultsRead,
from .utils import get_months_between
from src.scope_equipment_part.model import ScopeEquipmentPart
import copy
import random
import math
class ReliabilityService:
"""Service class for handling reliability API calls"""
def __init__(self, base_url: str = "http://192.168.1.82:8000"):
def __init__(self, base_url: str = "http://192.168.1.82:8000", use_dummy_data=False):
self.base_url = base_url
self.use_dummy_data = use_dummy_data
async def get_number_of_failures(self, location_tag, start_date, end_date, token, max_interval=24):
if self.use_dummy_data:
return self._generate_dummy_failure_data(location_tag, start_date, end_date, max_interval)
url_prediction = (
f"http://192.168.1.82:8000/reliability/main/number-of-failures/"
f"{self.base_url}/main/number-of-failures/"
f"{location_tag}/{start_date.strftime('%Y-%m-%d')}/{end_date.strftime('%Y-%m-%d')}"
)
results = {}
@@ -106,11 +112,75 @@ class ReliabilityService:
results = dict(sorted(results.items()))
return results
def _generate_dummy_failure_data(self, location_tag: str, start_date: datetime.date, end_date: datetime.date, max_interval: int = 24) -> Dict[datetime.date, float]:
"""
Generate realistic dummy failure prediction data for demonstration purposes.
Creates a realistic pattern with seasonal variations and some randomness.
"""
results = {}
# Seed from location_tag so each location gets repeatable values within a run
# (built-in hash() of strings is randomized per interpreter process)
random.seed(hash(location_tag) % 1000)
# Base parameters for realistic failure patterns
base_monthly_failures = random.uniform(0.25, 1.25) # Base failures per month
seasonal_amplitude = random.uniform(0.3, 0.8) # Seasonal variation strength
trend_slope = random.uniform(-0.01, 0.02) # Long-term trend (slight increase over time)
noise_level = random.uniform(0.1, 0.3) # Random variation
# Equipment-specific factor, drawn uniformly between 1.0 and 1.9
equipment_factor = random.uniform(1.0, 1.9)
current = start_date.replace(day=1)
# Normalize end_date to a datetime so it compares cleanly with the month-end datetimes below
end_dt = end_date if isinstance(end_date, datetime.datetime) else datetime.datetime(end_date.year, end_date.month, end_date.day)
cumulative_failures = 0
month_count = 0
for _ in range(max_interval):
last_day = calendar.monthrange(current.year, current.month)[1]
last_day_date = datetime.datetime(current.year, current.month, last_day)
# Stop if we've passed the end_date
if last_day_date > end_dt:
break
# Calculate seasonal factor (higher in summer/winter, lower in spring/fall)
seasonal_factor = 1 + seasonal_amplitude * math.sin(2 * math.pi * current.month / 12)
# Calculate trend factor (gradual increase over time)
trend_factor = 1 + trend_slope * month_count
# Calculate noise (random variation)
noise_factor = 1 + random.uniform(-noise_level, noise_level)
# Calculate monthly failures (non-cumulative)
monthly_failures = (base_monthly_failures *
equipment_factor *
seasonal_factor *
trend_factor *
noise_factor)
# Ensure minimum realistic value
monthly_failures = max(0.1, monthly_failures)
# Add to cumulative total
cumulative_failures += monthly_failures
# Store cumulative value rounded to 3 decimal places
results[last_day_date] = round(cumulative_failures, 3)
# Move to next month
month_count += 1
if current.month == 12:
current = current.replace(year=current.year + 1, month=1)
else:
current = current.replace(month=current.month + 1)
return dict(sorted(results.items()))
async def get_equipment_foh(self, location_tag: str, token: str) -> float:
"""
Get forced outage hours for equipment
"""
url = f"{self.base_url}/reliability/asset/mdt/{location_tag}"
url = f"{self.base_url}/asset/mdt/{location_tag}"
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {token}",
@@ -308,7 +378,7 @@ class OverhaulCalculator:
if months_since_overhaul == 0:
expected_failures = 0 # No failures immediately after overhaul
else:
expected_failures = failures_by_month.get(months_since_overhaul, 0)
expected_failures = round(failures_by_month.get(months_since_overhaul, 0))
equivalent_force_derated_hours = 0 # Can be enhanced based on requirements
failure_cost = (
@@ -410,7 +480,8 @@ class OverhaulCalculator:
location_tag=equipment.equipment.location_tag,
start_date=start_date,
end_date=end_date,
token=token
token=token,
max_interval=max_interval
)
@@ -438,7 +509,7 @@ class OverhaulCalculator:
preventive_costs = [r["preventive_cost"] for r in all_results]
procurement_costs = [r["procurement_cost"] for r in all_results]
procurement_details = [r["procurement_details"] for r in all_results]
failures = list(predicted_failures.values())
failures = [round(r) for r in predicted_failures.values()]
fleet_results.append(
@@ -518,7 +589,8 @@ async def run_simulation(*, db_session: DbSession, calculation: CalculationData,
} for sparepart in sparepars_query.scalars().all()
}
reliability_service = ReliabilityService()
reliability_service = ReliabilityService(base_url=REALIBILITY_SERVICE_API, use_dummy_data=True)
spare_parts_service = SparePartsService(spareparts)
optimum_calculator_service = OverhaulCalculator(reliability_service, spare_parts_service)
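
The new use_dummy_data flag is what decides whether get_number_of_failures calls the live reliability API or short-circuits into the generator above; a hedged sketch of the dummy-mode path (the base URL, tag, and token are placeholders, not values from this commit):

import asyncio
import datetime

async def demo():
    # Dummy mode returns generated data before any HTTP request is made
    service = ReliabilityService(
        base_url="http://localhost:8000/reliability",  # placeholder
        use_dummy_data=True,
    )
    return await service.get_number_of_failures(
        location_tag="BO-1234",              # hypothetical tag
        start_date=datetime.date(2024, 1, 1),
        end_date=datetime.date(2024, 12, 31),
        token="unused-in-dummy-mode",
        max_interval=12,
    )

print(asyncio.run(demo()))
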

@@ -74,3 +74,4 @@ MAXIMO_BASE_URL = config("MAXIMO_BASE_URL", default="http://example.com")
MAXIMO_API_KEY = config("MAXIMO_API_KEY", default="keys")
AUTH_SERVICE_API = config("AUTH_SERVICE_API", default="http://192.168.1.82:8000/auth")
REALIBILITY_SERVICE_API = config("REALIBILITY_SERVICE_API", default="http://192.168.1.82:8000/reliability")
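
Assuming the config() helper reads overrides from the environment or a .env file (as the existing entries suggest), the new setting can be supplied the same way; the value shown is simply the default added in this commit:

REALIBILITY_SERVICE_API=http://192.168.1.82:8000/reliability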
