feature/reliability_stat
Cizz22 3 months ago
parent 8089dcb9b5
commit 82141a1992

@ -10,7 +10,7 @@ from src.models import StandardResponse
from .schema import (OverhaulActivityCreate, OverhaulActivityPagination,
OverhaulActivityRead, OverhaulActivityUpdate)
from .service import create, delete, get, get_all, update
from .service import add_multiple_equipment_to_session, create, delete, get, get_all, update
router = APIRouter()
@ -42,20 +42,22 @@ async def get_scope_equipments(
)
@router.post("/{overhaul_session}", response_model=StandardResponse[List[str]])
@router.post("/{overhaul_session_id}", response_model=StandardResponse[None])
async def create_overhaul_equipment(
db_session: DbSession,
collector_db_session: CollectorDbSession,
overhaul_activty_in: OverhaulActivityCreate,
overhaul_session: str,
overhaul_session_id: UUID,
):
activity = await create(
activity = await add_multiple_equipment_to_session(
db_session=db_session,
overhaul_activty_in=overhaul_activty_in,
overhaul_session_id=overhaul_session,
collector_db=collector_db_session,
overhaul_session_id=overhaul_session_id,
location_tags=overhaul_activty_in.location_tags
)
return StandardResponse(data=activity, message="Data created successfully")
return StandardResponse(data=None, message="Data created successfully")
@router.get(

@ -13,8 +13,7 @@ class OverhaulActivityBase(DefultBase):
class OverhaulActivityCreate(OverhaulActivityBase):
assetnums: List[str]
scope_name: str
location_tags: List[str]
class OverhaulActivityUpdate(OverhaulActivityBase):

@ -135,6 +135,8 @@ async def get_all(
results.append(res)
results.sort(key=lambda x: x.material_cost, reverse=True)
equipments['items'] = results
return equipments
@ -195,6 +197,222 @@ async def get_standard_scope_by_session_id(*, db_session: DbSession, overhaul_se
return results
async def add_equipment_to_session(
    *,
    db_session: DbSession,
    collector_db: CollectorDbSession,  # kept for interface parity; not used here
    overhaul_session_id: UUID,
    location_tag: str
) -> Optional[StandardScope]:
    """
    Add a single piece of equipment to an existing overhaul session.

    Creates a dummy workscope group when needed and silently skips equipment
    that is already part of the session or cannot be resolved.

    Args:
        db_session: Database session.
        collector_db: Collector database session (unused; kept for API parity).
        overhaul_session_id: The UUID of the overhaul session.
        location_tag: The location tag of the equipment to add.

    Returns:
        The newly created/updated StandardScope record, or None when the
        equipment is unknown, already in the session, or an error occurred
        (best-effort semantics: errors are logged and swallowed).
    """
    try:
        # Resolve the overhaul session (provides the maintenance type).
        overhaul = await get_session(db_session=db_session, overhaul_session_id=overhaul_session_id)

        # The equipment must exist in the master data before it can be scoped.
        master_eq_query = select(MasterEquipment).filter(
            MasterEquipment.location_tag == location_tag
        )
        master_eq_result = await db_session.execute(master_eq_query)
        master_equipment = master_eq_result.scalar_one_or_none()
        if not master_equipment:
            print("equipment not found in master")
            return None

        # Check whether the equipment already has a StandardScope record.
        existing_query = select(StandardScope).filter(
            StandardScope.location_tag == location_tag
        )
        existing_result = await db_session.execute(existing_query)
        existing_equipment = existing_result.scalar_one_or_none()

        if existing_equipment:
            # Equipment already in StandardScope; check if it is in the current
            # session by matching the session's maintenance-type criteria.
            check_query = (
                select(StandardScope)
                .outerjoin(StandardScope.oh_history)
                .join(StandardScope.workscope_groups)
                .join(EquipmentWorkscopeGroup.workscope_group)
                .join(MasterActivity.oh_types)
                .join(WorkscopeOHType.oh_type)
                .filter(StandardScope.location_tag == location_tag)
                .filter(MaintenanceType.name == overhaul.maintenance_type.name)
                .filter(
                    (StandardScope.is_alternating_oh == False)  # noqa: E712 — SQLAlchemy predicate
                    # BUGFIX: `is None` was evaluated by Python at build time
                    # (always False); `== None` builds the intended missing-
                    # relationship / IS NULL predicate.
                    | (StandardScope.oh_history == None)  # noqa: E711
                    | (
                        StandardScope.oh_history.has(
                            EquipmentOHHistory.last_oh_type != overhaul.maintenance_type.name
                        )
                    )
                )
            )
            check_result = await db_session.execute(check_query)
            in_session = check_result.scalar_one_or_none()
            if in_session:
                # Already in the current session: skip silently.
                return None

            # Equipment exists but is not in this session; attach the dummy
            # workscope group so it matches the session criteria.
            dummy_workscope = await get_or_create_dummy_workscope(
                db_session=db_session,
                maintenance_type_name=overhaul.maintenance_type.name
            )
            if dummy_workscope not in existing_equipment.workscope_groups:
                existing_equipment.workscope_groups.append(dummy_workscope)
                await db_session.commit()
                await db_session.refresh(existing_equipment)
            return existing_equipment

        # Equipment not in StandardScope at all: create a new record linked to
        # the dummy workscope group.
        dummy_workscope = await get_or_create_dummy_workscope(
            db_session=db_session,
            maintenance_type_name=overhaul.maintenance_type.name
        )
        new_equipment = StandardScope(
            location_tag=location_tag,
            is_alternating_oh=False,
            # NOTE(review): other StandardScope fields may be required by the
            # model — confirm against the schema.
        )
        new_equipment.workscope_groups = [dummy_workscope]
        db_session.add(new_equipment)
        await db_session.commit()
        await db_session.refresh(new_equipment)
        return new_equipment

    except Exception as e:
        # Deliberate best-effort: log and swallow so one bad tag does not
        # abort a bulk add; the session is rolled back to stay usable.
        print(f"Error adding equipment {location_tag}: {str(e)}")
        await db_session.rollback()
        return None
async def get_or_create_dummy_workscope(
    *,
    db_session: DbSession,
    maintenance_type_name: str
) -> EquipmentWorkscopeGroup:
    """
    Get or create a dummy workscope group for manually-included equipment.

    Args:
        db_session: Database session.
        maintenance_type_name: Name of the maintenance type (e.g., "A", "B").

    Returns:
        EquipmentWorkscopeGroup: The existing or newly created dummy group.
    """
    dummy_name = f"Included Equipment Workscope - {maintenance_type_name}"

    # Look for an existing dummy group linked to this maintenance type.
    # NOTE(review): this filters MasterActivity.name while creation below sets
    # `workscope=dummy_name` — if `name` and `workscope` are different columns
    # the lookup will never match and duplicates will accumulate. Confirm the
    # correct column against the MasterActivity model.
    query = (
        select(EquipmentWorkscopeGroup)
        .join(EquipmentWorkscopeGroup.workscope_group)
        .join(MasterActivity.oh_types)
        .join(WorkscopeOHType.oh_type)
        .filter(MasterActivity.name == dummy_name)
        .filter(MaintenanceType.name == maintenance_type_name)
    )
    result = await db_session.execute(query)
    existing = result.scalar_one_or_none()
    if existing:
        return existing

    # Resolve the maintenance type row; scalar_one() raises if it is missing,
    # which is intentional — an unknown type is a caller error.
    mt_query = select(MaintenanceType).filter(MaintenanceType.name == maintenance_type_name)
    mt_result = await db_session.execute(mt_query)
    maintenance_type = mt_result.scalar_one()

    # Create the MasterActivity (workscope group).
    master_activity = MasterActivity(
        workscope=dummy_name,
    )
    # Link the group to the maintenance type.
    workscope_oh_type = WorkscopeOHType(
        workscope_group=master_activity,
        oh_type=maintenance_type
    )
    equipment_workscope_group = EquipmentWorkscopeGroup(
        workscope_group=master_activity,
        # NOTE(review): other required fields may exist on this model.
    )
    # BUGFIX: the WorkscopeOHType link must be added explicitly — being
    # referenced by master_activity does not cascade it into the session,
    # so without this the OH-type link was never persisted and the existence
    # query above could never find the group again.
    db_session.add_all([workscope_oh_type, equipment_workscope_group])
    await db_session.commit()
    await db_session.refresh(equipment_workscope_group)
    return equipment_workscope_group
async def add_multiple_equipment_to_session(
    *,
    db_session: DbSession,
    collector_db: CollectorDbSession,
    overhaul_session_id: UUID,
    location_tags: List[str]
) -> List[StandardScope]:
    """
    Add several pieces of equipment to an existing overhaul session.

    Delegates each tag to ``add_equipment_to_session``; tags that are skipped
    (already present) or that fail are simply omitted from the result, so a
    single bad tag never aborts the batch.

    Args:
        db_session: Database session.
        collector_db: Collector database session.
        overhaul_session_id: The UUID of the overhaul session.
        location_tags: Location tags of the equipment to add.

    Returns:
        The StandardScope records that were actually created or updated.
    """
    return [
        scope
        for tag in location_tags
        if (
            scope := await add_equipment_to_session(
                db_session=db_session,
                collector_db=collector_db,
                overhaul_session_id=overhaul_session_id,
                location_tag=tag,
            )
        )
    ]
async def get_all_by_session_id(*, db_session: DbSession, overhaul_session_id):
query = (
Select(OverhaulActivity)
@ -208,39 +426,6 @@ async def get_all_by_session_id(*, db_session: DbSession, overhaul_session_id):
return results.scalars().all()
# async def create(*, db_session: DbSession, overhaul_activty_in: OverhaulActivityCreate, overhaul_session_id: UUID):
# # Check if the combination of assetnum and activity_id already exists
# existing_equipment_query = (
# Select(OverhaulActivity)
# .where(
# OverhaulActivity.assetnum == overhaul_activty_in.assetnum,
# OverhaulActivity.overhaul_scope_id == overhaul_session_id
# )
# )
# result = await db_session.execute(existing_equipment_query)
# existing_activity = result.scalar_one_or_none()
# # If the combination exists, raise an exception or return the existing activity
# if existing_activity:
# raise ValueError("This assetnum already exist.")
# activity = OverhaulActivity(
# **overhaul_activty_in.model_dump(),
# overhaul_scope_id=overhaul_session_id)
# db_session.add(activity)
# await db_session.commit()
# # Refresh and load relationships using joinedload
# query = (
# Select(OverhaulActivity)
# .options(joinedload(OverhaulActivity.equipment))
# .where(OverhaulActivity.id == activity.id)
# )
# result = await db_session.execute(query)
# activity_with_relationship = result.scalar_one()
# return activity_with_relationship
async def create(
*,

@ -125,6 +125,7 @@ class SparepartRequirement:
quantity_required: int
lead_time: int
sparepart_name: str
unit_cost: float
@dataclass
class SparepartStock:
@ -147,6 +148,7 @@ class ProcurementRecord:
order_date: date
expected_delivery_date: date
status: ProcurementStatus
po_vendor_delivery_date: date
class SparepartManager:
"""Manages sparepart availability and procurement for overhaul optimization"""
@ -277,61 +279,17 @@ class SparepartManager:
sparepart_id = requirement.sparepart_id
needed_quantity = requirement.quantity_required
sparepart_name = requirement.sparepart_name
unit_cost = requirement.unit_cost
current_stock = adjusted_stocks.get(sparepart_id, 0)
existing_sparepart_orders = [order for order in existing_orders
if order.sparepart_id == sparepart_id]
total_ordered_quantity = sum(order.quantity for order in existing_sparepart_orders
if order.status in [ProcurementStatus.PLANNED, ProcurementStatus.ORDERED])
total_ordered_quantity = sum(order.quantity for order in existing_sparepart_orders)
effective_stock = current_stock + total_ordered_quantity
# if sparepart_id not in adjusted_stocks:
# missing_parts.append({
# 'sparepart_id': sparepart_id,
# 'sparepart_name': requirement.sparepart_name,
# 'required': needed_quantity,
# 'available': 0,
# 'shortage': needed_quantity,
# 'criticality': "warning"
# })
# continue
# available_stock = adjusted_stocks[sparepart_id]
# if available_stock < needed_quantity:
# shortage = needed_quantity - available_stock
# missing_parts.append({
# 'sparepart_id': sparepart_id,
# 'sparepart_name': requirement.sparepart_name,
# 'required': needed_quantity,
# 'available': available_stock,
# 'shortage': shortage,
# 'criticality': "warning"
# })
# # Calculate procurement needs
# if sparepart_id in self.sparepart_stocks:
# stock_info = self.sparepart_stocks[sparepart_id]
# procurement_cost = shortage * stock_info.unit_cost
# total_procurement_cost += procurement_cost
# # Calculate when to order (considering lead time)
# order_month = max(0, target_month - requirement.lead_time)
# procurement_needed.append({
# 'sparepart_id': sparepart_id,
# 'sparepart_name': requirement.sparepart_name,
# 'quantity_needed': shortage,
# 'unit_cost': stock_info.unit_cost,
# 'total_cost': procurement_cost,
# 'order_by_month': order_month,
# 'lead_time_months': requirement.lead_time,
# 'criticality': "warning"
# })
if effective_stock >= needed_quantity:
# Sufficient stock available (including from existing orders)
if existing_sparepart_orders:
@ -381,8 +339,7 @@ class SparepartManager:
# Calculate additional procurement needed
if sparepart_id in self.sparepart_stocks:
stock_info = self.sparepart_stocks[sparepart_id]
procurement_cost = shortage * stock_info.unit_cost
procurement_cost = shortage * unit_cost
total_procurement_cost += procurement_cost
# Calculate when to order (considering lead time)
@ -394,7 +351,7 @@ class SparepartManager:
'sparepart_id': sparepart_id,
'sparepart_name': sparepart_name,
'quantity_needed': shortage,
'unit_cost': stock_info.unit_cost,
'unit_cost': unit_cost,
'total_cost': procurement_cost,
'order_by_month': order_month,
'recommended_order_date': order_date.isoformat(),
@ -774,33 +731,64 @@ location_sparepart_stats AS (
GROUP BY asset_location, itemnum
),
pr_po_combined AS (
-- Combine PR and PO data by num to get issue_date and estimated_arrival_date
-- Combine PR and PO data by num to get issue_date and delivery dates
SELECT
mspl.item_num,
mspl.num,
mspl.unit_cost,
mspl.qty_ordered,
MAX(CASE WHEN mspo.type = 'PR' THEN mspo.issue_date END) as issue_date,
MAX(CASE WHEN mspo.type = 'PO' THEN mspo.vendeliverydate END) as vendeliverydate,
MAX(CASE WHEN mspo.type = 'PO' THEN mspo.estimated_arrival_date END) as estimated_arrival_date
FROM public.maximo_sparepart_pr_po_line mspl
INNER JOIN public.maximo_sparepart_pr_po mspo
ON mspl.num = mspo.num
WHERE mspo.type IN ('PR', 'PO')
GROUP BY mspl.item_num, mspl.num
GROUP BY mspl.item_num, mspl.num, mspl.unit_cost, mspl.qty_ordered
),
leadtime_stats AS (
-- Calculate lead time statistics for each item
-- Prioritize vendeliverydate over estimated_arrival_date
SELECT
item_num,
ROUND(AVG(
EXTRACT(EPOCH FROM (estimated_arrival_date - issue_date)) / 86400 / 30.44
), 1) as avg_leadtime_months,
ROUND(MIN(
EXTRACT(EPOCH FROM (estimated_arrival_date - issue_date)) / 86400 / 30.44
), 1) as min_leadtime_months,
COUNT(*) as leadtime_sample_size
ROUND(CAST(AVG(
EXTRACT(EPOCH FROM (
COALESCE(vendeliverydate, estimated_arrival_date) - issue_date
)) / 86400 / 30.44
) AS NUMERIC), 1) as avg_leadtime_months,
ROUND(CAST(MIN(
EXTRACT(EPOCH FROM (
COALESCE(vendeliverydate, estimated_arrival_date) - issue_date
)) / 86400 / 30.44
) AS NUMERIC), 1) as min_leadtime_months,
ROUND(CAST(MAX(
EXTRACT(EPOCH FROM (
COALESCE(vendeliverydate, estimated_arrival_date) - issue_date
)) / 86400 / 30.44
) AS NUMERIC), 1) as max_leadtime_months,
COUNT(*) as leadtime_sample_size,
-- Additional metrics for transparency
COUNT(CASE WHEN vendeliverydate IS NOT NULL THEN 1 END) as vendelivery_count,
COUNT(CASE WHEN vendeliverydate IS NULL AND estimated_arrival_date IS NOT NULL THEN 1 END) as estimated_only_count
FROM pr_po_combined
WHERE issue_date IS NOT NULL
AND estimated_arrival_date IS NOT NULL
AND estimated_arrival_date > issue_date
AND COALESCE(vendeliverydate, estimated_arrival_date) IS NOT NULL
AND COALESCE(vendeliverydate, estimated_arrival_date) > issue_date
GROUP BY item_num
),
cost_stats AS (
-- Calculate cost statistics for each item
SELECT
item_num,
ROUND(CAST(AVG(unit_cost) AS NUMERIC), 2) as avg_unit_cost,
ROUND(CAST(MIN(unit_cost) AS NUMERIC), 2) as min_unit_cost,
ROUND(CAST(MAX(unit_cost) AS NUMERIC), 2) as max_unit_cost,
COUNT(*) as cost_sample_size,
-- Total value statistics
ROUND(CAST(AVG(unit_cost * qty_ordered) AS NUMERIC), 2) as avg_order_value,
ROUND(CAST(SUM(unit_cost * qty_ordered) AS NUMERIC), 2) as total_value_ordered
FROM pr_po_combined
WHERE unit_cost IS NOT NULL AND unit_cost > 0
GROUP BY item_num
),
item_descriptions AS (
@ -820,15 +808,29 @@ SELECT
COALESCE(id.description, 'No description available') as item_description,
lss.total_wo_count,
lss.total_qty_used,
ROUND(lss.avg_qty_per_wo, 2) as avg_qty_per_wo,
ROUND(CAST(lss.avg_qty_per_wo AS NUMERIC), 2) as avg_qty_per_wo,
lss.min_qty_used,
lss.max_qty_used,
-- Lead time metrics
COALESCE(lt.avg_leadtime_months, 0) as avg_leadtime_months,
COALESCE(lt.min_leadtime_months, 0) as min_leadtime_months,
COALESCE(lt.leadtime_sample_size, 0) as leadtime_sample_size
COALESCE(lt.max_leadtime_months, 0) as max_leadtime_months,
COALESCE(lt.leadtime_sample_size, 0) as leadtime_sample_size,
COALESCE(lt.vendelivery_count, 0) as vendelivery_count,
COALESCE(lt.estimated_only_count, 0) as estimated_only_count,
-- Cost metrics
COALESCE(cs.avg_unit_cost, 0) as avg_unit_cost,
COALESCE(cs.min_unit_cost, 0) as min_unit_cost,
COALESCE(cs.max_unit_cost, 0) as max_unit_cost,
COALESCE(cs.cost_sample_size, 0) as cost_sample_size,
COALESCE(cs.avg_order_value, 0) as avg_order_value,
COALESCE(cs.total_value_ordered, 0) as total_value_ordered,
-- Estimated total cost for average OH
ROUND(CAST(COALESCE(lss.avg_qty_per_wo * cs.avg_unit_cost, 0) AS NUMERIC), 2) as estimated_cost_per_oh
FROM location_sparepart_stats lss
LEFT JOIN item_descriptions id ON lss.itemnum = id.item_num
LEFT JOIN leadtime_stats lt ON lss.itemnum = lt.item_num
LEFT JOIN cost_stats cs ON lss.itemnum = cs.item_num
ORDER BY lss.asset_location, lss.itemnum;""")
equipment_requirements_query = await db_session.execute(query)
@ -839,7 +841,8 @@ ORDER BY lss.asset_location, lss.itemnum;""")
sparepart_id=req_record.itemnum,
quantity_required=float(req_record.avg_qty_per_wo),
lead_time=float(req_record.avg_leadtime_months),
sparepart_name=req_record.item_description
sparepart_name=req_record.item_description,
unit_cost=float(req_record.avg_unit_cost)
)
equipment_requirements[req_record.asset_location].append(requirement)
@ -849,19 +852,92 @@ ORDER BY lss.asset_location, lss.itemnum;""")
# Load procurement records (PO/PR)
procurement_query = await get_all(db_session=db_session)
query = text("""
WITH active_pos AS (
-- Get all POs that are NOT complete (not in inventory yet) and NOT closed
SELECT
pl.item_num,
h.num as po_number,
pl.qty_received,
pl.qty_ordered,
h.estimated_arrival_date,
h.vendeliverydate,
h.receipts as po_receipts,
h.status as po_status,
pl.description,
pl.unit_cost,
pl.line_cost
FROM public.maximo_sparepart_pr_po h
JOIN public.maximo_sparepart_pr_po_line pl ON h.num = pl.num
WHERE h.type = 'PO'
-- Exclude POs where receipts = 'COMPLETE'
AND (h.receipts IS NULL OR h.receipts != 'COMPLETE')
-- Exclude closed POs
AND (h.status IS NULL OR h.status != 'CLOSE')
),
po_with_pr_date AS (
-- Join with PR to get the issue_date
SELECT
po.*,
pr.issue_date as pr_issue_date
FROM active_pos po
LEFT JOIN public.maximo_sparepart_pr_po pr
ON pr.num = po.po_number
AND pr.type = 'PR'
)
SELECT
po.item_num,
po.description,
po.line_cost,
po.unit_cost,
COALESCE(i.curbaltotal, 0) as current_balance_total,
po.po_number,
po.pr_issue_date,
po.po_status,
po.po_receipts,
COALESCE(po.qty_received, 0) as po_qty_received,
COALESCE(po.qty_ordered, 0) as po_qty_ordered,
po.estimated_arrival_date as po_estimated_arrival_date,
po.vendeliverydate as po_vendor_delivery_date
FROM po_with_pr_date po
LEFT JOIN public.maximo_inventory i ON po.item_num = i.itemnum
ORDER BY po.item_num, po.pr_issue_date DESC;
""")
# Execute the query
result = await db_session.execute(query)
# Fetch all results and convert to list of dictionaries
procurement_query = []
for row in result:
procurement_query.append({
"item_num": row.item_num,
"description": row.description,
"line_cost": row.line_cost,
"unit_cost": row.unit_cost,
"current_balance_total": float(row.current_balance_total) if row.current_balance_total is not None else 0.0,
"po_number": row.po_number,
"pr_issue_date": row.pr_issue_date,
"po_status": row.po_status,
"po_receipts": row.po_receipts,
"po_qty_received": float(row.po_qty_received) if row.po_qty_received is not None else 0.0,
"po_qty_ordered": float(row.po_qty_ordered) if row.po_qty_ordered is not None else 0.0,
"po_estimated_arrival_date": row.po_estimated_arrival_date,
"po_vendor_delivery_date": row.po_vendor_delivery_date
})
for proc_record in procurement_query:
procurement = ProcurementRecord(
po_pr_id=proc_record["pr_number"],
po_pr_id=proc_record["po_number"],
sparepart_id=proc_record["item_num"],
sparepart_name=proc_record["description"],
quantity=proc_record["pr_qty_ordered"],
quantity=proc_record["po_qty_ordered"],
unit_cost=proc_record["unit_cost"],
total_cost=proc_record["line_cost"],
order_date=proc_record['pr_issue_date'],
expected_delivery_date=proc_record['po_estimated_arrival_date'],
po_vendor_delivery_date=proc_record['po_vendor_delivery_date'],
status=ProcurementStatus("ordered"),
)
sparepart_manager.add_procurement_record(procurement)

Loading…
Cancel
Save