first commit

main
Cizz22 7 months ago
parent 76e1493876
commit 7e179093f7

@ -121,7 +121,7 @@
"errorLens.enabled": false,
"[python]": {
"editor.formatOnSave": false,
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.defaultFormatter": "mikoz.black-py",
"editor.formatOnType": false
},
"python.languageServer": "Jedi",

22
Jenkinsfile vendored

@ -1,15 +1,15 @@
pipeline {
agent any
environment {
// Replace with your Docker Hub username/organization
DOCKER_HUB_USERNAME = 'aimodocker'
// Use credentials for Docker Hub
DOCKER_CREDENTIALS = credentials('aimodocker')
// Replace with your image name
IMAGE_NAME = 'oh-service'
IMAGE_NAME = 'rbd-service'
// Replace with your docker compose service name
SERVICE_NAME = 'oh-app'
SERVICE_NAME = 'rbd-app'
// Variable for Git commit hash
GIT_COMMIT_HASH = ''
@ -18,7 +18,7 @@ pipeline {
// SSH_CREDENTIALS_USR = 'aimo'
// SSH_SERVER_IP = '192.168.1.82'
}
stages {
stage('Checkout') {
steps {
@ -31,7 +31,7 @@ pipeline {
}
}
}
stage('Docker Login') {
steps {
sh '''
@ -39,7 +39,7 @@ pipeline {
'''
}
}
stage('Build Docker Image') {
steps {
script {
@ -51,7 +51,7 @@ pipeline {
}
}
}
stage('Push to Docker Hub') {
steps {
sh """
@ -61,7 +61,7 @@ pipeline {
"""
}
}
// stage('Deploy') {
// steps {
// script {
@ -78,12 +78,12 @@ pipeline {
// }
// }
}
post {
always {
// Clean up
sh 'docker logout'
// Clean up local images
script {
try {
@ -104,4 +104,4 @@ pipeline {
echo 'Failed to build/push/deploy Docker image!'
}
}
}
}

18
poetry.lock generated

@ -638,6 +638,22 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "isort"
version = "6.0.1"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
files = [
{file = "isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615"},
{file = "isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450"},
]
[package.extras]
colors = ["colorama"]
plugins = ["setuptools"]
[[package]]
name = "jinja2"
version = "3.1.4"
@ -2035,4 +2051,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = "^3.11"
content-hash = "34cff1e8de752f5b94a7012e1810f7f86fc5b7f6978da8ff5adc8e1e4516ef59"
content-hash = "dce48738423bd996d6e51e639ac38bf8f3c5e5ad4baef51fdf06bd049cf15e00"

@ -1,5 +1,5 @@
[tool.poetry]
name = "optimumohservice"
name = "rbdservice"
version = "0.1.0"
description = ""
authors = ["Cizz22 <cisatraa@gmail.com>"]
@ -26,6 +26,7 @@ temporalio = "^1.8.0"
pandas = "^2.2.3"
psycopg2-binary = "^2.9.10"
greenlet = "^3.1.1"
isort = "^6.0.1"
[build-system]

@ -0,0 +1,85 @@
from sqlalchemy import (
JSON,
UUID,
Column,
DateTime,
Float,
ForeignKey,
Integer,
Numeric,
String,
)
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin
class AerosEquipment(Base, DefaultMixin):
    """ORM model for AEROS equipment nodes (table ``rbd_ms_aeros_equipment``).

    Rows mirror equipment nodes known to the external AEROS service.
    Distribution parameters are fetched live from AEROS by the service layer
    rather than stored locally — see the commented-out columns below, kept
    as a record of the previously planned schema.
    """

    __tablename__ = "rbd_ms_aeros_equipment"

    # Basic equipment information
    node_name = Column(String, nullable=True)  # AEROS node/equipment name
    # Plant location tag; on import the service sets this to the same value
    # as node_name — TODO confirm whether they are meant to diverge later.
    location_tag = Column(String, nullable=True)

    # max_flowrate = Column(Numeric, nullable=True)
    # design_flowrate = Column(Numeric, nullable=True)
    # flowrate_unit = Column(String, nullable=True)
    # # Release discharge parameters
    # rel_dis_type = Column(String, nullable=True)
    # rel_dis_p1 = Column(Numeric, nullable=True)
    # rel_dis_p2 = Column(Numeric, nullable=True)
    # rel_dis_p3 = Column(Numeric, nullable=True)
    # rel_dis_unit_code = Column(String, nullable=True)
    # # CM discharge parameters
    # cm_dis_type = Column(String, nullable=True)
    # cm_dis_p1 = Column(Numeric, nullable=True)
    # cm_dis_p2 = Column(Numeric, nullable=True)
    # cm_dis_p3 = Column(Numeric, nullable=True)
    # cm_dis_unit_code = Column(String, nullable=True)
    # # IP discharge parameters
    # ip_dis_type = Column(String, nullable=True)
    # ip_dis_p1 = Column(Numeric, nullable=True)
    # ip_dis_p2 = Column(Numeric, nullable=True)
    # ip_dis_p3 = Column(Numeric, nullable=True)
    # ip_dis_unit_code = Column(String, nullable=True)
    # # PM discharge parameters
    # pm_dis_type = Column(String, nullable=True)
    # pm_dis_p1 = Column(Numeric, nullable=True)
    # pm_dis_p2 = Column(Numeric, nullable=True)
    # pm_dis_p3 = Column(Numeric, nullable=True)
    # pm_dis_unit_code = Column(String, nullable=True)
    # # OH discharge parameters
    # oh_dis_type = Column(String, nullable=True)
    # oh_dis_p1 = Column(Numeric, nullable=True)
    # oh_dis_p2 = Column(Numeric, nullable=True)
    # oh_dis_p3 = Column(Numeric, nullable=True)
    # oh_dis_unit_code = Column(String, nullable=True)

    # One-to-many details; lazy="raise" forces callers to eager-load
    # (e.g. via selectinload) instead of triggering implicit lazy loads.
    aeros_equipment_details = relationship(
        "AerosEquipmentDetail", back_populates="aeros_equipment", lazy="raise"
    )
class AerosEquipmentDetail(Base, DefaultMixin):
    """Child row of :class:`AerosEquipment` (table ``rbd_ms_aeros_equipment_detail``)."""

    __tablename__ = "rbd_ms_aeros_equipment_detail"

    # FK to the owning AerosEquipment row.
    aeros_equipment_id = Column(
        UUID(as_uuid=True), ForeignKey("rbd_ms_aeros_equipment.id"), nullable=False
    )
    location_tag = Column(String, nullable=True)

    # Back-reference; lazy="raise" requires explicit eager loading.
    aeros_equipment = relationship(
        "AerosEquipment", back_populates="aeros_equipment_details", lazy="raise"
    )
class MasterEquipment(Base, DefaultMixin):
    """Read-only view of the master equipment registry (table ``ms_equipment_master``).

    Only ``location_tag`` is mapped here; the service layer uses it to seed
    AerosEquipment rows.
    """

    __tablename__ = "ms_equipment_master"

    location_tag = Column(String, nullable=True)

@ -0,0 +1,30 @@
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .service import save_default_equipment
# from .schema import (OverhaulScheduleCreate, OverhaulSchedulePagination, OverhaulScheduleUpdate)
router = APIRouter()
# @router.post("", response_model=StandardResponse[SimulationPagination])
# async def get_all_simulation(db_session: DbSession, common: CommonParameters):
# """Get all simulation."""
# results = await get_all(common)
@router.get("", response_model=StandardResponse[None])
async def save_default_equipments(
    db_session: DbSession, project_name: str = "trialapi"
):
    """Re-seed the local AEROS equipment table from the AEROS service.

    NOTE(review): this is a state-changing operation (``save_default_equipment``
    deletes and re-creates rows) exposed as HTTP GET — consider POST.
    ``project_name`` defaults to "trialapi" and is forwarded unchanged.
    """
    await save_default_equipment(db_session=db_session, project_name=project_name)
    return {"data": None, "status": "success", "message": "Success"}

@ -0,0 +1,184 @@
from datetime import datetime
from enum import Enum
from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import DefultBase, Pagination
# from src.overhaul_scope.schema import ScopeRead
# from src.scope_equipment_job.schema import ScopeEquipmentJobRead
# from src.job.schema import ActivityMasterRead
class EquipmentBase(DefultBase):
    """Common base for equipment schemas; adds no fields of its own."""
# class OverhaulScheduleCreate(OverhaulScheduleBase):
# year: int
# plan_duration: Optional[int] = Field(None)
# planned_outage: Optional[int] = Field(None)
# actual_shutdown: Optional[int] = Field(None)
# start: datetime
# finish: datetime
# remark: Optional[str] = Field(None)
# class OverhaulScheduleUpdate(OverhaulScheduleBase):
# start: datetime
# finish: datetime
# class OverhaulScheduleRead(OverhaulScheduleBase):
# id: UUID
# year: int
# plan_duration: Optional[int]
# planned_outage: Optional[int]
# actual_shutdown: Optional[int]
# start: datetime
# finish: datetime
# remark: Optional[str]
# class OverhaulSchedulePagination(Pagination):
# items: List[OverhaulScheduleRead] = []
class FlowrateUnit(str, Enum):
    """Flow-rate time base accepted by the AEROS API.

    Subclasses ``str`` so members serialize as their plain string values.
    """

    PER_DAY = "PerDay"
    PER_HOUR = "PerHour"
    PER_MINUTE = "PerMinute"
class DistributionType(str, Enum):
    """Statistical distribution families supported for AEROS parameters."""

    LOGNORMAL = "Lognormal"
    NORMAL = "Normal"
    FIXED = "Fixed"
    UNIFORM = "Uniform"
    EXPONENTIAL = "Exponential"
class UnitCode(str, Enum):
    """Time-unit codes used by distribution parameters in the AEROS API."""

    U_DAY = "UDay"
    U_HOUR = "UHour"
    U_MINUTE = "UMinute"
class EquipmentConfiguration(EquipmentBase):
    """
    Schema for equipment configuration with flow rates and distribution parameters.

    All fields are required. Aliases are camelCase — presumably to match the
    external AEROS API payload verbatim; confirm before renaming. Each
    distribution (reliability, CM, IP, PM, OH) is described by a type, three
    numeric parameters and a time-unit code.
    """

    equipment_name: str = Field(
        ..., alias="equipmentName", description="Name of the equipment"
    )
    max_flowrate: float = Field(
        ..., alias="maxFlowrate", ge=0, description="Maximum flow rate"
    )
    design_flowrate: float = Field(
        ..., alias="designFlowrate", ge=0, description="Design flow rate"
    )
    flowrate_unit: FlowrateUnit = Field(
        ..., alias="flowrateUnit", description="Unit for flow rate"
    )

    # Reliability Distribution Parameters
    rel_dis_type: DistributionType = Field(
        ..., alias="relDisType", description="Reliability distribution type"
    )
    rel_dis_p1: float = Field(
        ..., alias="relDisP1", description="Reliability distribution parameter 1"
    )
    rel_dis_p2: float = Field(
        ..., alias="relDisP2", description="Reliability distribution parameter 2"
    )
    rel_dis_p3: float = Field(
        ..., alias="relDisP3", description="Reliability distribution parameter 3"
    )
    rel_dis_unit_code: UnitCode = Field(
        ..., alias="relDisUnitCode", description="Reliability distribution unit code"
    )

    # Corrective Maintenance Distribution Parameters
    cm_dis_type: DistributionType = Field(
        ..., alias="cmDisType", description="Corrective maintenance distribution type"
    )
    cm_dis_p1: float = Field(
        ...,
        alias="cmDisP1",
        description="Corrective maintenance distribution parameter 1",
    )
    cm_dis_p2: float = Field(
        ...,
        alias="cmDisP2",
        description="Corrective maintenance distribution parameter 2",
    )
    cm_dis_p3: float = Field(
        ...,
        alias="cmDisP3",
        description="Corrective maintenance distribution parameter 3",
    )
    cm_dis_unit_code: UnitCode = Field(
        ...,
        alias="cmDisUnitCode",
        description="Corrective maintenance distribution unit code",
    )

    # Inspection Distribution Parameters
    ip_dis_type: DistributionType = Field(
        ..., alias="ipDisType", description="Inspection distribution type"
    )
    ip_dis_p1: float = Field(
        ..., alias="ipDisP1", description="Inspection distribution parameter 1"
    )
    ip_dis_p2: float = Field(
        ..., alias="ipDisP2", description="Inspection distribution parameter 2"
    )
    ip_dis_p3: float = Field(
        ..., alias="ipDisP3", description="Inspection distribution parameter 3"
    )
    ip_dis_unit_code: UnitCode = Field(
        ..., alias="ipDisUnitCode", description="Inspection distribution unit code"
    )

    # Preventive Maintenance Distribution Parameters
    pm_dis_type: DistributionType = Field(
        ..., alias="pmDisType", description="Preventive maintenance distribution type"
    )
    pm_dis_p1: float = Field(
        ...,
        alias="pmDisP1",
        description="Preventive maintenance distribution parameter 1",
    )
    pm_dis_p2: float = Field(
        ...,
        alias="pmDisP2",
        description="Preventive maintenance distribution parameter 2",
    )
    pm_dis_p3: float = Field(
        ...,
        alias="pmDisP3",
        description="Preventive maintenance distribution parameter 3",
    )
    pm_dis_unit_code: UnitCode = Field(
        ...,
        alias="pmDisUnitCode",
        description="Preventive maintenance distribution unit code",
    )

    # Overhaul Distribution Parameters
    oh_dis_type: DistributionType = Field(
        ..., alias="ohDisType", description="Overhaul distribution type"
    )
    oh_dis_p1: float = Field(
        ..., alias="ohDisP1", description="Overhaul distribution parameter 1"
    )
    oh_dis_p2: float = Field(
        ..., alias="ohDisP2", description="Overhaul distribution parameter 2"
    )
    oh_dis_p3: float = Field(
        ..., alias="ohDisP3", description="Overhaul distribution parameter 3"
    )
    oh_dis_unit_code: UnitCode = Field(
        ..., alias="ohDisUnitCode", description="Overhaul distribution unit code"
    )

@ -0,0 +1,152 @@
from typing import List, Optional
from uuid import UUID
import httpx
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, func
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.config import AEROS_BASE_URL
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
from .model import AerosEquipment, AerosEquipmentDetail, MasterEquipment
from .schema import EquipmentConfiguration
client = httpx.AsyncClient(timeout=300.0)
async def get_all(*, common):
    """Return paginated AerosEquipment rows enriched with live AEROS data.

    Loads equipment (with details eagerly) from the database, then replaces
    the page items with the distribution data fetched from the AEROS API for
    those node names.

    Raises:
        HTTPException 500: when the AEROS call fails.
    """
    query = Select(AerosEquipment).options(
        selectinload(AerosEquipment.aeros_equipment_details)
    )
    # BUG FIX: search_filter_sort_paginate is a coroutine (it is awaited in
    # aeros_simulation.service); without this await, `results` was a
    # coroutine object and `results["items"]` raised TypeError.
    results = await search_filter_sort_paginate(model=query, **common)
    requestedNode = {
        "projectName": "ParallelNode",
        "equipmentName": [node.node_name for node in results["items"]],
    }
    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/UpdateDisParams/GetUpdatedNodeDistributions",
            json=requestedNode,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        aerosEquipmentResult = response.json()
    except Exception as e:
        # Surface any transport/HTTP error as a 500 with the original message.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
    results["items"] = aerosEquipmentResult
    return results
async def get_by_id(*, db_session: DbSession, id: UUID):
    """Fetch one AerosEquipment by primary key plus its live AEROS distributions.

    Returns a ``(equipment, aeros_data)`` tuple.

    Raises:
        HTTPException 404: when no equipment row matches ``id``.
        HTTPException 500: when the AEROS call fails.
    """
    stmt = (
        Select(AerosEquipment)
        .where(AerosEquipment.id == id)
        .options(selectinload(AerosEquipment.aeros_equipment_details))
    )
    equipment = (await db_session.execute(stmt)).scalar()
    if not equipment:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="AerosEquipment not found"
        )

    payload = {
        "projectName": "ParallelNode",
        "equipmentName": [equipment.node_name],
    }
    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/UpdateDisParams/GetUpdatedNodeDistributions",
            json=payload,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        distributions = response.json()
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
    return equipment, distributions
async def update_node(
    *, db_session: DbSession, equipment_nodes: List[EquipmentConfiguration]
):
    """Push updated equipment distribution parameters to the AEROS service.

    Raises:
        HTTPException 500: when the AEROS call fails.
    """
    payload = {"projectName": "ParallelNode", "regNodeInputs": equipment_nodes}
    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/UpdateDisParams/UpdateEquipmentDistributions",
            json=payload,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
    return response.json()
async def save_default_equipment(*, db_session: DbSession, project_name: str):
    """Rebuild the local AerosEquipment table from the AEROS service.

    Collects all master-equipment location tags, asks AEROS for the node
    distributions of those names, then replaces every AerosEquipment row
    with one per returned node.

    Raises:
        HTTPException 500: when the AEROS call or the persistence step fails.
    """
    master_stmt = Select(MasterEquipment).where(
        MasterEquipment.location_tag.isnot(None)
    )
    master_rows = await db_session.execute(master_stmt)
    reg_nodes = [row.location_tag for row in master_rows.scalars().all()]
    payload = {"projectName": project_name, "equipmentNames": reg_nodes}

    # Drop stale rows; the DELETE is only persisted by the commit below,
    # so a failed AEROS call leaves the table untouched.
    await db_session.execute(Delete(AerosEquipment))

    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/UpdateDisParams/GetUpdatedNodeDistributions",
            json=payload,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        results = response.json()

        # Persist one row per node returned by AEROS.
        db_session.add_all(
            [
                AerosEquipment(
                    node_name=equipment["equipmentName"],
                    location_tag=equipment["equipmentName"],
                )
                for equipment in results
            ]
        )
        await db_session.commit()
        return results
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )

@ -0,0 +1,12 @@
from sqlalchemy import JSON, UUID, Column, DateTime, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin
class AerosProject(Base, DefaultMixin):
    """Imported AEROS project (table ``rbd_ms_aeros_project``)."""

    __tablename__ = "rbd_ms_aeros_project"

    project_name = Column(String, nullable=False)
    # Filesystem path of the stored .aro file on the AEROS (Windows) host.
    aro_file_path = Column(String, nullable=False)

@ -0,0 +1,68 @@
import os
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .schema import AerosProjectInput
from .service import import_aro_project
router = APIRouter()
@router.get("", response_model=StandardResponse[None])
async def save_all_default(db_session: DbSession, project_name: str = "trialapi"):
    """Import the ARO project file and seed default project data.

    NOTE(review): the ``project_name`` query parameter is accepted but never
    used — ``import_aro_project`` is called without it; wire it through or
    drop the parameter. Also a state-changing operation exposed as GET.
    """
    await import_aro_project(db_session=db_session)
    return {"data": None, "status": "success", "message": "Success"}
# @router.post("/import")
# async def upload_with_validation(aeros_project_in: AerosProjectInput):
# # Check file extension
# return {
# "message": "File uploaded successfully",
# "filename": file.filename,
# "size": len(content)
# }
# @router.post("", response_model=StandardResponse[None])
# async def create_overhaul_equipment_jobs(
# db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
# ):
# await create(
# db_session=db_session,
# overhaul_job_in=overhaul_job_in,
# )
# return StandardResponse(
# data=None,
# message="Data created successfully",
# )
# @router.put("/{overhaul_job_id}", response_model=StandardResponse[None])
# async def update_overhaul_schedule(
# db_session: DbSession, overhaul_job_id: str, overhaul_job_in: OverhaulScheduleUpdate
# ):
# await update(db_session=db_session, overhaul_schedule_id=overhaul_job_id, overhaul_job_in=overhaul_job_in)
# return StandardResponse(
# data=None,
# message="Data updated successfully",
# )
# @router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
# async def delete_overhaul_equipment_job(db_session: DbSession, overhaul_job_id):
# await delete(db_session=db_session, overhaul_schedule_id=overhaul_job_id)
# return StandardResponse(
# data=None,
# message="Data deleted successfully",
# )

@ -0,0 +1,50 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from fastapi import File, UploadFile
from pydantic import Field
from src.models import DefultBase, Pagination
# class OverhaulScheduleBase(DefultBase):
# pass
class AerosProjectBase(DefultBase):
    """Common base for AEROS project schemas; adds no fields of its own."""
class AerosProjectInput(AerosProjectBase):
    """Input schema for importing an ARO project file."""

    # Name of the schematic inside the project file.
    schematic_name: str
    # NOTE(review): File(...) as a field default inside a Pydantic model —
    # FastAPI normally takes UploadFile directly as an endpoint parameter;
    # confirm this parses as intended before enabling the import route.
    aro_file: UploadFile = File(..., description="ARO file")
# class OverhaulScheduleCreate(OverhaulScheduleBase):
# year: int
# plan_duration: Optional[int] = Field(None)
# planned_outage: Optional[int] = Field(None)
# actual_shutdown: Optional[int] = Field(None)
# start: datetime
# finish: datetime
# remark: Optional[str] = Field(None)
# class OverhaulScheduleUpdate(OverhaulScheduleBase):
# start: datetime
# finish: datetime
# class OverhaulScheduleRead(OverhaulScheduleBase):
# id: UUID
# year: int
# plan_duration: Optional[int]
# planned_outage: Optional[int]
# actual_shutdown: Optional[int]
# start: datetime
# finish: datetime
# remark: Optional[str]
# class OverhaulSchedulePagination(Pagination):
# items: List[OverhaulScheduleRead] = []

@ -0,0 +1,103 @@
import os
from typing import Optional
import httpx
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, func
from sqlalchemy.orm import selectinload
from src.aeros_equipment.service import save_default_equipment
from src.aeros_simulation.service import save_default_simulation_node
from src.auth.service import CurrentUser
from src.config import AEROS_BASE_URL
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
from .model import AerosProject
from .schema import AerosProjectInput
ALLOWED_EXTENSIONS = {".aro"}
MAX_FILE_SIZE = 5 * 1024 * 1024 # 5MB
client = httpx.AsyncClient(timeout=300.0)
async def import_aro_project(*, db_session: DbSession, project_name: str = "trialapi"):
    """Register an ARO project file with the AEROS app and seed default data.

    Records the project + file path locally, tells AEROS to import the file,
    then initializes default equipment and simulation nodes.

    Args:
        db_session: Database session.
        project_name: AEROS project name; defaults to "trialapi" for
            backward compatibility with existing callers.

    Raises:
        HTTPException 500: when the AEROS import call fails.
    """
    # TODO: file upload handling (extension/size validation, saving the
    # uploaded .aro to the Windows host) is not implemented yet; until then
    # a fixed sample path is used.
    aro_path = r"C:/Users/user/Documents/Aeros/sample_project.aro"

    aeros_project = AerosProject(project_name=project_name, aro_file_path=aro_path)
    db_session.add(aeros_project)
    await db_session.commit()

    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/Project/ImportAROFile",
            # BUG FIX: the request body must be the stored .aro file path
            # (e.g. "C/dsad/dsad.aro"), not the literal string "/".
            json=aro_path,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )

    await _initialize_default_project_data(
        db_session=db_session,
        project_name=project_name
    )
async def _initialize_default_project_data(
    *,
    db_session: DbSession,
    project_name: str
) -> None:
    """Seed default equipment and simulation nodes for a freshly imported project.

    Commits on success; rolls back and re-raises on any failure so the
    caller sees the original error.

    Args:
        db_session: Database session.
        project_name: Name of the project to initialize.
    """
    try:
        await save_default_equipment(
            db_session=db_session,
            project_name=project_name
        )
        await save_default_simulation_node(
            db_session=db_session,
            project_name=project_name
        )
        await db_session.commit()
    except Exception:
        await db_session.rollback()
        raise

@ -0,0 +1,132 @@
from sqlalchemy import JSON, UUID, Column, DateTime, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin
class AerosSimulation(Base, DefaultMixin):
    """One simulation run (table ``rbd_tr_aeros_simulation``)."""

    __tablename__ = "rbd_tr_aeros_simulation"

    # Lifecycle state; the service layer writes "proccessing" [sic] and
    # "failed", and queries for "completed".
    status = Column(String, nullable=False)
    started_at = Column(DateTime, nullable=True)
    completed_at = Column(DateTime, nullable=True)
    input = Column(JSON, nullable=True)   # raw simulation request payload
    result = Column(JSON, nullable=True)  # raw AEROS response
    error = Column(JSON, nullable=True)   # failure details, if any

    # Result sets; lazy="raise" forces explicit eager loading.
    calc_results = relationship(
        "AerosSimulationCalcResult", back_populates="aeros_simulation", lazy="raise"
    )
    plot_results = relationship(
        "AerosSimulationPlotResult", back_populates="aeros_simulation", lazy="raise"
    )
class AerosNode(Base, DefaultMixin):
    """AEROS schematic node hierarchy (table ``rbd_ms_aeros_node``).

    Nodes form a tree via ``schematic_id`` → ``ref_schematic_id`` on the
    same table.
    """

    __tablename__ = "rbd_ms_aeros_node"

    node_type = Column(String, nullable=False)
    original_node_id = Column(Integer, nullable=False)
    node_id = Column(Integer, nullable=False)  # AEROS-side node identifier
    node_name = Column(String, nullable=False)
    structure_name = Column(String, nullable=False)
    schematic_name = Column(String, nullable=False)
    # NOTE(review): self-referential FK targets the non-PK column
    # ref_schematic_id — that column needs a UNIQUE constraint for the FK
    # to be valid on most backends; confirm the migration.
    schematic_id = Column(
        Integer, ForeignKey("rbd_ms_aeros_node.ref_schematic_id"), nullable=False
    )
    original_schematic_id = Column(Integer, nullable=False)
    ref_schematic_id = Column(Integer, nullable=False)
    # [sic] "orignal" typo kept — renaming requires a DB migration and a
    # change to the AEROS payload key it is loaded from.
    orignal_ref_schematic_id = Column(Integer, nullable=False)

    # Tree navigation: parent resolves schematic_id against another row's
    # ref_schematic_id; children is the reverse collection.
    parent = relationship(
        "AerosNode",
        remote_side=[ref_schematic_id],
        back_populates="children",
        foreign_keys=[schematic_id],
    )
    children = relationship(
        "AerosNode", back_populates="parent", foreign_keys=[schematic_id]
    )
    calc_results = relationship(
        "AerosSimulationCalcResult", back_populates="aeros_node"
    )
    plot_results = relationship(
        "AerosSimulationPlotResult", back_populates="aeros_node"
    )
class AerosSimulationCalcResult(Base, DefaultMixin):
    """Per-node numeric results of one simulation run
    (table ``rbd_tr_aeros_simulation_calc_result``).
    """

    __tablename__ = "rbd_tr_aeros_simulation_calc_result"

    # Availability / production aggregates
    total_downtime = Column(Float, nullable=False)
    total_uptime = Column(Float, nullable=False)
    num_events = Column(Integer, nullable=False)
    production = Column(Float, nullable=False)
    production_std = Column(Float, nullable=False)
    ideal_production = Column(Float, nullable=False)
    availability = Column(Float, nullable=False)
    efficiency = Column(Float, nullable=False)
    effective_loss = Column(Float, nullable=False)
    # Maintenance-event counters and downtimes (cm/pm/ip/oh = corrective,
    # preventive, inspection, overhaul — inferred from the sibling schema
    # module's field descriptions; confirm against AEROS docs).
    num_cm = Column(Integer, nullable=False)
    cm_waiting_time = Column(Float, nullable=False)
    total_cm_downtime = Column(Float, nullable=False)
    num_pm = Column(Integer, nullable=False)
    total_pm_downtime = Column(Float, nullable=False)
    num_ip = Column(Integer, nullable=False)
    total_ip_downtime = Column(Float, nullable=False)
    num_oh = Column(Integer, nullable=False)
    total_oh_downtime = Column(Float, nullable=False)
    t_wait_for_crew = Column(Float, nullable=False)
    t_wait_for_spare = Column(Float, nullable=False)
    # Storage-level duration buckets
    duration_at_full = Column(Float, nullable=False)
    duration_above_hh = Column(Float, nullable=False)
    duration_above_h = Column(Float, nullable=False)
    duration_below_l = Column(Float, nullable=False)
    duration_below_ll = Column(Float, nullable=False)
    duration_at_empty = Column(Float, nullable=False)
    # Optional storage/production extras
    stg_input = Column(Float, nullable=True)
    stg_output = Column(Float, nullable=True)
    average_level = Column(Float, nullable=True)
    potential_production = Column(Float, nullable=True)
    eaf = Column(Float, nullable=True)  # equivalent availability factor — TODO confirm

    aeros_simulation_id = Column(
        UUID(as_uuid=True), ForeignKey("rbd_tr_aeros_simulation.id"), nullable=False
    )
    aeros_node_id = Column(
        UUID(as_uuid=True), ForeignKey("rbd_ms_aeros_node.id"), nullable=False
    )

    aeros_node = relationship("AerosNode", back_populates="calc_results", lazy="raise")
    aeros_simulation = relationship(
        "AerosSimulation", back_populates="calc_results", lazy="raise"
    )
class AerosSimulationPlotResult(Base, DefaultMixin):
    """Per-node time-series data of one simulation run
    (table ``rbd_tr_aeros_simulation_plot_result``).
    """

    __tablename__ = "rbd_tr_aeros_simulation_plot_result"

    max_flow_rate = Column(Float, nullable=False)
    storage_capacity = Column(Float, nullable=False)
    # Parallel arrays stored as JSON — presumably one entry per timestamp
    # in timestamp_outs; confirm against the AEROS response shape.
    point_availabilities = Column(JSON, nullable=False)
    point_flowrates = Column(JSON, nullable=False)
    timestamp_outs = Column(JSON, nullable=False)

    aeros_simulation_id = Column(
        UUID(as_uuid=True), ForeignKey("rbd_tr_aeros_simulation.id"), nullable=False
    )
    aeros_node_id = Column(
        UUID(as_uuid=True), ForeignKey("rbd_ms_aeros_node.id"), nullable=False
    )

    aeros_node = relationship("AerosNode", back_populates="plot_results", lazy="raise")
    aeros_simulation = relationship(
        "AerosSimulation", back_populates="plot_results", lazy="raise"
    )

@ -0,0 +1,147 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, BackgroundTasks, HTTPException, background, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .schema import (
SimulationCalcResult,
SimulationInput,
SimulationPagination,
SimulationPlotResult,
)
from .service import (
create_simulation,
execute_simulation,
get_all,
get_custom_parameters,
get_simulation_by_id,
get_simulation_with_calc_result,
get_simulation_with_plot_result,
)
router = APIRouter()
active_simulations = {}
@router.post("", response_model=StandardResponse[SimulationPagination])
async def get_all_simulation(db_session: DbSession, common: CommonParameters):
    """Get all simulations (paginated/filtered via the common parameters)."""
    results = await get_all(common)
    # BUG FIX: the handler computed `results` but never returned them, so the
    # endpoint always responded with null data despite declaring
    # StandardResponse[SimulationPagination].
    return {
        "data": results,
        "status": "success",
        "message": "Success",
    }
@router.post("/run", response_model=StandardResponse[str])
async def run_simulations(
    db_session: DbSession,
    simulation_in: SimulationInput,
    background_tasks: BackgroundTasks,
):
    """Create a simulation record and run it synchronously against AEROS.

    Returns the new simulation id in the response payload.

    NOTE(review): ``background_tasks`` is injected but unused — the
    background-execution call is commented out below, so the request blocks
    until the simulation finishes; confirm whether that is intended.
    """
    simulation = await create_simulation(
        db_session=db_session, simulation_in=simulation_in
    )
    simulation_id = simulation.id
    try:
        sim_data = simulation_in.model_dump()
        # Presumably lets AEROS correlate hub/progress messages with this
        # run — TODO confirm the HubCnnId semantics.
        sim_data["HubCnnId"] = str(simulation_id)
        ##background_tasks.add_task(execute_simulation, db_session=db_session ,simulation_id=simulation_id, sim_data=sim_data)
        await execute_simulation(
            db_session=db_session, simulation_id=simulation_id, sim_data=sim_data, is_saved=True
        )
        return {
            "data": str(simulation_id),
            "status": "success",
            "message": "Simulation created successfully",
        }
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
@router.get(
    "/result/calc/{simulation_id}",
    response_model=StandardResponse[SimulationCalcResult],
)
async def get_simulation_result(db_session: DbSession, simulation_id):
    """Return the stored calculation results for one simulation."""
    calc_result = await get_simulation_with_calc_result(
        db_session=db_session, simulation_id=simulation_id
    )
    response = {
        "data": calc_result,
        "status": "success",
        "message": "Simulation result retrieved successfully",
    }
    return response
@router.get(
    "/result/plot/{simulation_id}",
    response_model=StandardResponse[SimulationPlotResult],
)
async def get_simulation_result_plot(db_session: DbSession, simulation_id):
    """Return the stored plot (time-series) results for one simulation."""
    plot_result = await get_simulation_with_plot_result(
        db_session=db_session, simulation_id=simulation_id
    )
    response = {
        "data": plot_result,
        "status": "success",
        "message": "Simulation result retrieved successfully",
    }
    return response
@router.get("/custom_parameters", response_model=StandardResponse[list])
async def get_custom_parameters_controller(db_session: DbSession):
    """Return ``{node_name: eaf}`` entries from the latest completed simulation.

    Raises:
        HTTPException 404: when no completed simulation exists yet.
    """
    latest_simulation = await get_simulation_by_id(
        db_session=db_session, simulation_id=None, is_completed=True
    )
    # Robustness: get_simulation_by_id returns None when no row matches;
    # without this guard `.id` below raised AttributeError and surfaced as
    # an opaque 500.
    if latest_simulation is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No completed simulation found",
        )
    custom_parameters = await get_custom_parameters(
        db_session=db_session, simulation_id=latest_simulation.id
    )
    results = [{node.aeros_node.node_name: node.eaf} for node in custom_parameters]
    return {
        "data": results,
        "status": "success",
        "message": "Simulation result retrieved successfully",
    }
# @router.get("/status/{simulation_id}", response_model=StandardResponse[None])
# async def get_simulation_status(simulation_id: str):
# """Get simulation status."""
# if simulation_id not in active_simulations:
# raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Simulation not found")
# return active_simulations[simulation_id]
# @router.post("/cancel/{simulation_id}", response_model=StandardResponse[None])
# async def cancel_simulation(simulation_id: str):
# """Cancel simulation."""
# if simulation_id not in active_simulations:
# raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Simulation not found")
# active_simulations[simulation_id].update({
# "status": "cancelled",
# "cancelled_at": datetime.now()
# })
# return active_simulations[simulation_id]

@ -0,0 +1,83 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import BaseModel, Pagination
# Pydantic models for request/response validation
class SimulationInput(BaseModel):
    """Request body for launching an AEROS simulation run.

    Field names are PascalCase/camelCase — presumably they must match the
    external AEROS API payload verbatim (the body is sent via
    ``model_dump()``); confirm before renaming.
    """

    projectName: str = "OffshoreGas"
    SchematicName: str = "Main"
    SimSeed: int = 1
    SimDuration: int = 3
    DurationUnit: str = "UYear"  # time unit code for SimDuration
    SimNumRun: int = 1           # number of Monte-Carlo runs
class SimulationNode(BaseModel):
    """Read model for the AerosNode reference embedded in result payloads."""

    id: UUID
    node_type: Optional[str]
    node_id: Optional[int]
    node_name: Optional[str]
    structure_name: Optional[str]
    schematic_name: Optional[str]
    schematic_id: Optional[int]
class SimulationCalc(BaseModel):
    """Read model for one per-node calculation result row.

    NOTE(review): the event counters (num_events, num_cm, num_pm, num_ip,
    num_oh) are declared ``float`` here although the ORM columns are
    Integer — harmless widening on output, but confirm it is intentional.
    """

    id: UUID
    total_downtime: float
    total_uptime: float
    num_events: float
    production: float
    production_std: float
    ideal_production: float
    availability: float
    efficiency: float
    effective_loss: float
    num_cm: float
    cm_waiting_time: float
    total_cm_downtime: float
    num_pm: float
    total_pm_downtime: float
    num_ip: float
    total_ip_downtime: float
    num_oh: float
    total_oh_downtime: float
    t_wait_for_crew: float
    t_wait_for_spare: float
    duration_at_full: float
    duration_above_hh: float
    duration_above_h: float
    duration_below_l: float
    duration_below_ll: float
    duration_at_empty: float
    stg_input: float
    # Associated node metadata.
    aeros_node: SimulationNode
class SimulationPlot(BaseModel):
    """Read model for one per-node plot (time-series) result row."""

    id: UUID
    max_flow_rate: float
    storage_capacity: float
    # Parallel lists — presumably aligned with timestamp_outs; confirm.
    point_availabilities: list
    point_flowrates: list
    timestamp_outs: list
    aeros_node: SimulationNode
class SimulationCalcResult(BaseModel):
    """A simulation together with all of its per-node calculation results."""

    id: UUID
    calc_results: List[SimulationCalc]
class SimulationPlotResult(BaseModel):
    """A simulation together with all of its per-node plot results."""

    id: UUID
    plot_results: List[SimulationPlot]
class SimulationPagination(Pagination):
    """Paginated simulation listing.

    NOTE(review): ``items`` is typed as SimulationCalc, but the listing
    endpoint paginates AerosSimulation rows — the item type looks wrong;
    confirm against the service's ``get_all``.
    """

    items: List[SimulationCalc] = []

@ -0,0 +1,382 @@
from datetime import datetime
from typing import Optional
from uuid import UUID
import httpx
from fastapi import HTTPException, status
from sqlalchemy import delete, select
from sqlalchemy.orm import selectinload
from src.config import AEROS_BASE_URL
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from .model import (
AerosNode,
AerosSimulation,
AerosSimulationCalcResult,
AerosSimulationPlotResult,
)
from .schema import SimulationInput
client = httpx.AsyncClient(timeout=300.0)
active_simulations = {}
async def get_all(common: CommonParameters):
    """Paginated listing of AerosSimulation rows."""
    stmt = select(AerosSimulation)
    return await search_filter_sort_paginate(model=stmt, **common)
async def get_simulation_by_id(
    *,
    db_session: DbSession,
    simulation_id: Optional[UUID] = None,
    is_completed: bool = False,
):
    """Get a simulation by id, optionally restricted to completed runs.

    With ``simulation_id=None`` this falls back to the first row ordered by
    id ascending. NOTE(review): callers treat that fallback as the "latest"
    completed simulation, but ``id.asc()`` over a UUID key is not
    chronological — ordering by a created/completed timestamp descending is
    probably intended; confirm.

    Returns ``None`` when nothing matches.
    """
    query = select(AerosSimulation)
    if is_completed:
        query = query.where(AerosSimulation.status == "completed")
    if simulation_id:
        query = query.where(AerosSimulation.id == simulation_id)
    else:
        query = query.order_by(AerosSimulation.id.asc()).limit(1)
    results = await db_session.execute(query)
    return results.scalar()
async def get_simulation_node_by(*, db_session: DbSession, **kwargs):
    """Fetch a single AerosNode matching the given column filters.

    Each keyword argument is interpreted as an ``AerosNode.<key> == value``
    filter. Keys that do not name an AerosNode attribute are silently
    ignored when at least one valid key is present (pre-existing behavior;
    beware that a typo'd column name is dropped rather than rejected).

    Raises:
        ValueError: if no keyword argument names a valid column.
    """
    conditions = [
        getattr(AerosNode, key) == value
        for key, value in kwargs.items()
        if hasattr(AerosNode, key)
    ]
    if not conditions:
        # Include the offending keys so the caller can spot the typo.
        raise ValueError(
            f"No valid column conditions provided (got keys: {sorted(kwargs)})"
        )

    result = await db_session.execute(select(AerosNode).where(*conditions))
    return result.scalar()
async def get_or_save_node(*, db_session: DbSession, node_data: dict):
    """Return the AerosNode for ``node_data["nodeId"]``, creating it if absent."""
    existing = await get_simulation_node_by(
        db_session=db_session, node_id=node_data["nodeId"]
    )
    if existing:
        return existing

    # Map the camelCase API payload keys onto the snake_case ORM columns.
    column_to_payload_key = {
        "node_type": "nodeType",
        "original_node_id": "originalNodeId",
        "node_id": "nodeId",
        "node_name": "nodeName",
        "structure_name": "structureName",
        "schematic_name": "schematicName",
        "schematic_id": "schematicId",
        "original_schematic_id": "originalSchematicId",
        "ref_schematic_id": "refSchematicId",
        "orignal_ref_schematic_id": "orignalRefSchematicId",
    }
    new_node = AerosNode(
        **{column: node_data[key] for column, key in column_to_payload_key.items()}
    )
    db_session.add(new_node)
    await db_session.commit()
    return new_node
async def execute_simulation(
    *,
    db_session: DbSession,
    simulation_id: Optional[UUID] = None,
    sim_data: dict,
    is_saved: bool = False,
):
    """Run a simulation on the AEROS service and optionally persist results.

    Args:
        db_session: Active database session.
        simulation_id: Id of the tracking AerosSimulation row (required when
            ``is_saved`` is True).
        sim_data: JSON payload forwarded to the AEROS RunSimulation endpoint.
        is_saved: When True, store the raw response on the simulation row and
            fan the result out into the calc/plot result tables.

    Returns:
        The decoded JSON response from the AEROS API.

    Raises:
        HTTPException: 500 wrapping any network/HTTP/persistence failure.
    """
    try:
        response = await client.post(
            f"{AEROS_BASE_URL}/api/Simulation/RunSimulation",
            json=sim_data,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        result = response.json()

        if is_saved:
            simulation = await get_simulation_by_id(
                db_session=db_session, simulation_id=simulation_id
            )
            # NOTE(review): "proccessing" is misspelled but kept as-is -- other
            # code may filter on this exact status string; confirm before fixing.
            simulation.status = "proccessing"
            simulation.result = result
            await db_session.commit()

            await save_simulation_result(
                db_session=db_session, simulation_id=simulation_id, result=result
            )

        return result
    except Exception as e:
        # BUGFIX: only flag the tracking row when this run is tied to one.
        # Previously a failure with simulation_id=None fetched an *arbitrary*
        # simulation (first by id) and marked it failed, and crashed with
        # AttributeError if no row existed at all.
        if simulation_id is not None:
            simulation = await get_simulation_by_id(
                db_session=db_session, simulation_id=simulation_id
            )
            if simulation is not None:
                simulation.status = "failed"
                simulation.error = str(e)
                await db_session.commit()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
async def save_simulation_result(
    *, db_session: DbSession, simulation_id: UUID, result: dict
):
    """Persist the calc and plot outputs of a completed simulation run.

    Maps every entry of ``result["nodeResultOuts"]`` and
    ``result["plotNodeOuts"]`` onto its stored AerosNode, bulk-inserts the
    per-node rows, then marks the simulation completed. On any failure the
    simulation row is marked failed and a 500 HTTPException is raised.

    Args:
        db_session: Active database session.
        simulation_id: Id of the AerosSimulation being finalized.
        result: Raw JSON response from the AEROS RunSimulation endpoint.

    Note: the original code rebound the names ``calc_result``/``plot_result``
    (the input lists) to ORM instances inside the loops and shadowed the
    ``result`` parameter with the loop variable; renamed throughout.
    """
    node_results = result["nodeResultOuts"]
    plot_node_results = result["plotNodeOuts"]

    # Pre-fetch the AerosNode row for every nodeId seen in the calc results.
    # Plot results reuse this map; ids missing from it fall back below.
    available_nodes = {
        node["nodeId"]: await get_simulation_node_by(
            db_session=db_session, node_id=node["nodeId"]
        )
        for node in node_results
    }

    calc_objects = []
    plot_objects = []

    try:
        for node_result in node_results:
            node = available_nodes.get(node_result["nodeId"])
            if not node:
                # The "Main" node is apparently not found by id, so fall back
                # to a lookup by name; any other unknown node is skipped.
                if node_result["nodeName"] == "Main":
                    node = await get_simulation_node_by(
                        db_session=db_session, node_name=node_result["nodeName"]
                    )
                else:
                    continue
            calc_objects.append(
                AerosSimulationCalcResult(
                    aeros_simulation_id=simulation_id,
                    aeros_node_id=node.id,
                    total_downtime=node_result["totalDowntime"],
                    total_uptime=node_result["totalUpTime"],
                    num_events=node_result["numEvents"],
                    production=node_result["production"],
                    production_std=node_result["productionStd"],
                    ideal_production=node_result["idealProduction"],
                    availability=node_result["availability"],
                    efficiency=node_result["efficiency"],
                    effective_loss=node_result["effectiveLoss"],
                    num_cm=node_result["numCM"],
                    cm_waiting_time=node_result["cmWaitingTime"],
                    total_cm_downtime=node_result["totalCMDowntime"],
                    num_pm=node_result["numPM"],
                    total_pm_downtime=node_result["totalPMDowntime"],
                    num_ip=node_result["numIP"],
                    total_ip_downtime=node_result["totalIPDowntime"],
                    num_oh=node_result["numOH"],
                    total_oh_downtime=node_result["totalOHDowntime"],
                    t_wait_for_crew=node_result["tWaitForCrew"],
                    t_wait_for_spare=node_result["tWaitForSpare"],
                    duration_at_full=node_result["durationAtFull"],
                    duration_above_hh=node_result["durationAboveHH"],
                    duration_above_h=node_result["durationAboveH"],
                    duration_below_l=node_result["durationBelowL"],
                    duration_below_ll=node_result["durationBelowLL"],
                    duration_at_empty=node_result["durationAtEmpty"],
                    stg_input=node_result["stgInput"],
                    stg_output=node_result["stgOutput"],
                    average_level=node_result["averageLevel"],
                    potential_production=node_result["potentialProduction"],
                    # NOTE(review): raises ZeroDivisionError (marking the run
                    # failed via the except below) if idealProduction is 0 --
                    # confirm whether a 0 fallback is wanted instead.
                    eaf=node_result["production"] / node_result["idealProduction"],
                )
            )

        for plot_node_result in plot_node_results:
            node = available_nodes.get(plot_node_result["nodeId"])
            if not node:
                if plot_node_result["nodeName"] == "Main":
                    node = await get_simulation_node_by(
                        db_session=db_session, node_name=plot_node_result["nodeName"]
                    )
                else:
                    continue
            plot_objects.append(
                AerosSimulationPlotResult(
                    aeros_simulation_id=simulation_id,
                    aeros_node_id=node.id,
                    max_flow_rate=plot_node_result["maxFlowrate"],
                    storage_capacity=plot_node_result["storageCapacity"],
                    point_availabilities=plot_node_result["pointAvailabilities"],
                    point_flowrates=plot_node_result["pointFlowrates"],
                    timestamp_outs=plot_node_result["timeStampOuts"],
                )
            )
    except Exception as e:
        simulation = await get_simulation_by_id(
            db_session=db_session, simulation_id=simulation_id
        )
        simulation.status = "failed"
        simulation.result = str(e)
        await db_session.commit()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )

    db_session.add_all(calc_objects)
    db_session.add_all(plot_objects)

    simulation = await get_simulation_by_id(
        db_session=db_session, simulation_id=simulation_id
    )
    simulation.status = "completed"
    simulation.completed_at = datetime.now()
    await db_session.commit()
    return
async def save_default_simulation_node(
    *, db_session: DbSession, project_name: str = "trialapi"
):
    """Seed the AerosNode table from a short throwaway simulation run.

    Runs one simulation (SimDuration=3, DurationUnit="UYear", single run)
    against the given AEROS project, wipes all existing AerosNode rows, and
    re-creates one row per node reported in the result. Negative ids from
    the API are stored as NULL (see convert_id_to_none_if_negative).
    """
    sim_data = {
        "projectName": project_name,
        "SchematicName": "Boiler",
        "SimSeed": 1,
        "SimDuration": 3,
        "DurationUnit": "UYear",
        "SimNumRun": 1,
    }
    results = await execute_simulation(db_session=db_session, sim_data=sim_data)
    nodes = []
    # delete old data before reseeding
    await db_session.execute(delete(AerosNode))
    for result in results["nodeResultOuts"]:
        aeros_node = AerosNode(
            node_name=result["nodeName"],
            node_type=result["nodeType"],
            node_id=convert_id_to_none_if_negative(result["nodeId"]),
            original_node_id=convert_id_to_none_if_negative(result["originalNodeId"]),
            structure_name=result["structureName"],
            schematic_name=result["schematicName"],
            schematic_id=convert_id_to_none_if_negative(result["schematicId"]),
            original_schematic_id=convert_id_to_none_if_negative(
                result["originalSchematicId"]
            ),
            ref_schematic_id=convert_id_to_none_if_negative(result["refSchematicId"]),
            # NOTE(review): key "orinalRefSchematic" differs from the
            # "orignalRefSchematicId" key used in get_or_save_node -- confirm
            # which spelling the AEROS API actually returns here.
            orignal_ref_schematic_id=convert_id_to_none_if_negative(
                result["orinalRefSchematic"]
            ),
        )
        nodes.append(aeros_node)
    db_session.add_all(nodes)
    await db_session.commit()
def convert_id_to_none_if_negative(value):
    """Normalize sentinel ids: a negative value means "no id" and becomes None."""
    if value < 0:
        return None
    return value
async def create_simulation(*, db_session: DbSession, simulation_in: SimulationInput):
    """Create and persist a new simulation row in the "running" state.

    Returns:
        The newly created AerosSimulation instance.
    """
    # Renamed from `input` / `active_simulations`: the originals shadowed the
    # builtin and the module-level registry of the same name.
    payload = simulation_in.model_dump()
    row_values = {
        "status": "running",
        "started_at": datetime.now(),
        "input": payload,
    }
    simulation = AerosSimulation(**row_values)
    db_session.add(simulation)
    await db_session.commit()
    return simulation
async def get_simulation_with_calc_result(
    *, db_session: DbSession, simulation_id: UUID, aeros_node_id: Optional[UUID] = None
):
    """Fetch a simulation with its calc results (and their nodes) eagerly loaded.

    NOTE(review): ``aeros_node_id`` is accepted but never applied as a
    filter anywhere in this function -- confirm whether per-node filtering
    was intended.
    """
    query = (
        select(AerosSimulation)
        .where(AerosSimulation.id == simulation_id)
        .options(
            selectinload(AerosSimulation.calc_results).options(
                selectinload(AerosSimulationCalcResult.aeros_node)
            )
        )
    )
    simulation = await db_session.execute(query)
    return simulation.scalar()
async def get_simulation_with_plot_result(
    *, db_session: DbSession, simulation_id: UUID
):
    """Fetch a simulation with its plot results (and each result's node) eagerly loaded."""
    stmt = select(AerosSimulation).where(AerosSimulation.id == simulation_id)
    # Eager-load plot_results and, for each, its aeros_node relationship.
    stmt = stmt.options(
        selectinload(AerosSimulation.plot_results).selectinload(
            AerosSimulationPlotResult.aeros_node
        )
    )
    executed = await db_session.execute(stmt)
    return executed.scalar()
async def get_calc_result_by(
    *, db_session: DbSession, simulation_id: UUID, aeros_node_id: Optional[UUID] = None
):
    """Return the first calc result for a simulation, optionally scoped to one node.

    Note: uses ``.scalar()``, so a single row is returned even when several match.
    """
    filters = [AerosSimulationCalcResult.aeros_simulation_id == simulation_id]
    if aeros_node_id:
        filters.append(AerosSimulationCalcResult.aeros_node_id == aeros_node_id)

    stmt = select(AerosSimulationCalcResult).where(*filters)
    executed = await db_session.execute(stmt)
    return executed.scalar()
async def get_custom_parameters(*, db_session: DbSession, simulation_id: UUID):
    """Top-20 regular-node calc results for a simulation, ordered by EAF descending."""
    stmt = (
        select(AerosSimulationCalcResult)
        .join(AerosNode, AerosNode.id == AerosSimulationCalcResult.aeros_node_id)
        .where(
            AerosSimulationCalcResult.aeros_simulation_id == simulation_id,
            AerosNode.node_type == "RegularNode",
        )
        .order_by(AerosSimulationCalcResult.eaf.desc())
        .limit(20)
        .options(selectinload(AerosSimulationCalcResult.aeros_node))
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().all()

@ -4,31 +4,10 @@ from fastapi import APIRouter, Depends
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from src.aeros_project.router import router as aeros_project_router
from src.aeros_simulation.router import router as aeros_simulation_router
from src.auth.service import JWTBearer
from src.calculation_budget_constrains.router import \
router as calculation_budget_constraint
from src.calculation_target_reliability.router import \
router as calculation_target_reliability
from src.calculation_time_constrains.router import \
router as calculation_time_constrains_router
from src.job.router import router as job_router
from src.overhaul.router import router as overhaul_router
from src.overhaul_activity.router import router as overhaul_activity_router
from src.overhaul_job.router import router as job_overhaul_router
from src.overhaul_scope.router import router as scope_router
from src.scope_equipment.router import router as scope_equipment_router
from src.scope_equipment_job.router import router as scope_equipment_job_router
# from src.overhaul_scope.router import router as scope_router
# from src.scope_equipment.router import router as scope_equipment_router
# from src.overhaul.router import router as overhaul_router
# from src.overhaul_history.router import router as overhaul_history_router
# from src.overhaul_activity.router import router as scope_equipment_activity_router
# # from src.overhaul_schedule.router import router as ovehaul_schedule_router
# from src.scope_equipment_part.router import router as scope_equipment_part_router
# from src.calculation_target_reliability.router import router as calculation_target_reliability
#
# from src.master_activity.router import router as activity_router
from src.dashboard_model.router import router as dashboard_model_router
class ErrorMessage(BaseModel):
@ -59,80 +38,16 @@ def healthcheck():
authenticated_api_router = APIRouter(
dependencies=[Depends(JWTBearer())],
)
# overhaul data
authenticated_api_router.include_router(
overhaul_router, prefix="/overhauls", tags=["overhaul"]
)
authenticated_api_router.include_router(job_router, prefix="/jobs", tags=["job"])
# # Overhaul session data
authenticated_api_router.include_router(
scope_router, prefix="/overhaul-session", tags=["overhaul-session"]
dashboard_model_router, prefix="/dashboard_model"
)
authenticated_api_router.include_router(
scope_equipment_router, prefix="/scope-equipments", tags=["scope_equipment"]
)
aeros_routes = APIRouter(prefix="/aeros")
authenticated_api_router.include_router(
overhaul_activity_router, prefix="/overhaul-activity", tags=["activity"]
)
authenticated_api_router.include_router(
scope_equipment_job_router,
prefix="/scope-equipment-jobs",
tags=["scope_equipment", "job"],
)
authenticated_api_router.include_router(
job_overhaul_router, prefix="/overhaul-jobs", tags=["job", "overhaul"]
)
# authenticated_api_router.include_router(
# overhaul_history_router, prefix="/overhaul-history", tags=["overhaul_history"]
# )
# authenticated_api_router.include_router(
# scope_equipment_activity_router, prefix="/equipment-activities", tags=["scope_equipment_activities"]
# )
# authenticated_api_router.include_router(
# activity_router, prefix="/activities", tags=["activities"]
# )
# authenticated_api_router.include_router(
# scope_equipment_part_router, prefix="/equipment-parts", tags=["scope_equipment_parts"]
# )
# authenticated_api_router.include_router(
# ovehaul_schedule_router, prefix="/overhaul-schedules", tags=["overhaul_schedules"]
# )
# calculation
calculation_router = APIRouter(prefix="/calculation", tags=["calculations"])
# Time constrains
calculation_router.include_router(
calculation_time_constrains_router,
prefix="/time-constraint",
tags=["calculation", "time_constraint"],
)
# Target reliability
calculation_router.include_router(
calculation_target_reliability,
prefix="/target-reliability",
tags=["calculation", "target_reliability"],
)
# # Budget Constrain
calculation_router.include_router(
calculation_budget_constraint,
prefix="/budget-constraint",
tags=["calculation", "budget_constraint"],
)
aeros_routes.include_router(aeros_simulation_router, prefix="/simulation")
aeros_routes.include_router(aeros_project_router, prefix="/project")
authenticated_api_router.include_router(calculation_router)
authenticated_api_router.include_router(aeros_routes)
api_router.include_router(authenticated_api_router)

@ -1,31 +0,0 @@
from typing import Dict, List, Optional
from fastapi import APIRouter, HTTPException, status
from fastapi.params import Query
from src.database.core import DbSession
from src.models import StandardResponse
from .service import get_all_budget_constrains
router = APIRouter()
@router.get("/{session_id}", response_model=StandardResponse[Dict])
async def get_target_reliability(
    db_session: DbSession,
    session_id: str,
    cost_threshold: float = Query(100),
):
    """Return the budget-constrained equipment selection for a session.

    ``results`` holds equipment whose cumulative cost stays under
    ``cost_threshold``; ``consequence`` holds the excluded remainder.
    (Docstring fixed: the original was a copy-pasted "Get all scope
    pagination."; local ``consequesce`` typo also corrected.)
    """
    included, consequence = await get_all_budget_constrains(
        db_session=db_session, session_id=session_id, cost_threshold=cost_threshold
    )

    return StandardResponse(
        data={
            "results": included,
            "consequence": consequence,
        },
        message="Data retrieved successfully",
    )

@ -1,71 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.models import DefultBase, Pagination
class OverhaulBase(BaseModel):
    """Common base for overhaul response models (no shared fields yet)."""

    pass
class OverhaulCriticalParts(OverhaulBase):
    """Response payload carrying the list of critical part names."""

    criticalParts: List[str] = Field(..., description="List of critical parts")
class OverhaulSchedules(OverhaulBase):
    """Response payload carrying the list of overhaul schedule entries."""

    schedules: List[Dict[str, Any]] = Field(..., description="List of schedules")
class OverhaulSystemComponents(OverhaulBase):
    """Response payload mapping system component names to their status data."""

    systemComponents: Dict[str, Any] = Field(
        ..., description="List of system components"
    )
class OverhaulRead(OverhaulBase):
    """Full overhaul dashboard payload (example shape in the comment below)."""

    overview: Dict[str, Any]  # totals plus next-schedule summary
    criticalParts: List[str]
    schedules: List[Dict[str, Any]]
    systemComponents: Dict[str, Any]  # keyed by component name (e.g. boiler, turbine)
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,133 +0,0 @@
import random
from typing import Optional
from sqlalchemy import Delete, Select
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.scope_equipment.model import ScopeEquipment
from src.scope_equipment.service import get_by_scope_name
from src.overhaul_activity.service import get_all_by_session_id
# async def get_all_budget_constrains(
# *, db_session: DbSession, session_id: str, cost_threshold: float = 100000000
# ):
# Module-level cache of randomly generated EAF percentages, keyed by
# equipment id, so repeated requests return stable (dummy) numbers for the
# same equipment set.
_equipment_eaf_cache = {}

import random  # NOTE(review): redundant -- `random` is already imported at the top of this file
async def get_all_budget_constrains(
    *, db_session: DbSession, session_id: str, cost_threshold: float = 100000000
):
    """Split a session's equipment into budget-included and consequence lists.

    EAF contributions are *dummy* random percentages, generated once per set
    of equipment ids and cached at module level so repeated calls return
    stable numbers. Nominally they sum to 100%, though rounding to 2 decimal
    places may make the actual sum deviate slightly.

    Returns:
        (included, consequence) tuple, both sorted by eaf_contribution
        descending. ``included`` holds equipment added while the running cost
        total stays below ``cost_threshold``; the item that crosses the
        threshold and everything after it lands in ``consequence``.
    """
    equipments = await get_all_by_session_id(db_session=db_session, overhaul_session_id=session_id)

    # If no equipments found, return empty lists for both sides
    if not equipments:
        return [], []

    # Create or retrieve persistent EAF values
    global _equipment_eaf_cache

    equipment_ids = [equipment.id for equipment in equipments]

    # Regenerate the cached EAF values only when the equipment set changed
    if not _equipment_eaf_cache or set(equipment_ids) != set(_equipment_eaf_cache.keys()):
        total_eaf = 100.0
        remaining_items = len(equipment_ids)
        _equipment_eaf_cache.clear()

        # Ensure minimum EAF value for each equipment
        min_eaf = 1.0  # Minimum 1% for each equipment
        reserved_eaf = min_eaf * remaining_items
        distributable_eaf = total_eaf - reserved_eaf

        for eq_id in equipment_ids[:-1]:  # All except last item
            if remaining_items > 1:
                # Random value between min_eaf and the per-item share of what's left
                max_allowed = distributable_eaf / (remaining_items - 1)
                eaf = round(min_eaf + random.uniform(0, max_allowed), 2)
                _equipment_eaf_cache[eq_id] = eaf
                distributable_eaf -= (eaf - min_eaf)
                remaining_items -= 1

        # Assign remaining EAF to the last item, ensuring it's at least min_eaf
        _equipment_eaf_cache[equipment_ids[-1]] = round(distributable_eaf + min_eaf, 2)

    # One row per equipment; cost is material + service
    result = [
        {
            "id": equipment.id,
            "assetnum": equipment.assetnum,
            "location_tag": equipment.equipment.location_tag,
            "name": equipment.equipment.name,
            "total_cost": equipment.material_cost + equipment.service_cost,
            "eaf_contribution": _equipment_eaf_cache[equipment.id]
        }
        for equipment in equipments
    ]

    # Sort by EAF contribution (highest to lowest)
    result.sort(key=lambda x: x["eaf_contribution"], reverse=True)

    # Accumulate cost in EAF order; stop *before* appending the item whose
    # cost pushes the running total to/over the threshold.
    cumulative_cost = 0
    included_results = []
    for equipment in result:
        cumulative_cost += equipment["total_cost"]
        if cumulative_cost >= cost_threshold:
            break
        included_results.append(equipment)

    # Everything not included is the consequence list
    consequence_results = result[len(included_results):]

    # Re-sort both halves (already in this order from the sort above; kept for safety)
    consequence_results.sort(key=lambda x: x["eaf_contribution"], reverse=True)
    included_results.sort(key=lambda x: x["eaf_contribution"], reverse=True)

    return included_results, consequence_results
# """Get all overhaul overview with EAF values that sum to 100%."""
# # equipments = await get_by_scope_name(db_session=db_session, scope_name=scope_name)
# equipments = await get_all_by_session_id(db_session=db_session, overhaul_session_id=session_id)
# # If no equipments found, return empty list
# if not equipments:
# return []
# # Create result array of dictionaries
# result = [
# {
# "id": equipment.id,
# "assetnum": equipment.assetnum,
# "location_tag": equipment.equipment.location_tag,
# "name": equipment.equipment.name,
# "total_cost": equipment.material_cost + equipment.service_cost
# "eaf_contribution": ## Create Dummy % number, each equipment has different value
# }
# for equipment in equipments
# ]
# result.sort(key=lambda x: x["eaf_contribution"], reverse=True) #Sort from biggest contribution
# # Filter equipment up to threshold
# cumulative_cost = 0
# included_results = []
# for equipment in result:
# cumulative_cost += equipment["total_cost"]
# if cumulative_cost >= cost_threshold:
# break
# included_results.append(equipment)
# # rest equipemnt is consequence list
# consequence_results = result[len(included_results):]
# return included_results ,consequence_results

@ -1,56 +0,0 @@
from typing import Dict, List, Optional
from fastapi import APIRouter, HTTPException, status
from fastapi.params import Query
from src.database.core import DbSession
from src.models import StandardResponse
from .service import get_eaf_timeline
router = APIRouter()
# @router.get("", response_model=StandardResponse[List[Dict]])
# async def get_target_reliability(
# db_session: DbSession,
# scope_name: Optional[str] = Query(None),
# eaf_threshold: float = Query(100),
# ):
# """Get all scope pagination."""
# results = await get_all_target_reliability(
# db_session=db_session, scope_name=scope_name, eaf_threshold=eaf_threshold
# )
# return StandardResponse(
# data=results,
# message="Data retrieved successfully",
# )
@router.get("", response_model=StandardResponse[List[Dict]])
async def get_target_reliability(
    db_session: DbSession,
    oh_session_id: Optional[str] = Query(None),
    eaf_input: float = Query(0.5),
    duration: int = Query(8000),
):
    """Return the hourly EAF timeline for one overhaul session.

    ``eaf_input`` is compared against thresholds in the service layer to
    decide how far the timeline extends; ``duration`` is the overhaul
    length in hours. (Docstring fixed: the original was a copy-pasted
    "Get all scope pagination.")

    Raises:
        HTTPException: 400 when ``oh_session_id`` is not supplied.
    """
    if not oh_session_id:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="oh_session_id is required",
        )

    results = await get_eaf_timeline(
        db_session=db_session,
        oh_session_id=oh_session_id,
        eaf_input=eaf_input,
        oh_duration=duration
    )

    return StandardResponse(
        data=results,
        message="Data retrieved successfully",
    )

@ -1,71 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.models import DefultBase, Pagination
class OverhaulBase(BaseModel):
    """Common base for overhaul response models (no shared fields yet)."""

    pass
class OverhaulCriticalParts(OverhaulBase):
    """Response payload carrying the list of critical part names."""

    criticalParts: List[str] = Field(..., description="List of critical parts")
class OverhaulSchedules(OverhaulBase):
    """Response payload carrying the list of overhaul schedule entries."""

    schedules: List[Dict[str, Any]] = Field(..., description="List of schedules")
class OverhaulSystemComponents(OverhaulBase):
    """Response payload mapping system component names to their status data."""

    systemComponents: Dict[str, Any] = Field(
        ..., description="List of system components"
    )
class OverhaulRead(OverhaulBase):
    """Full overhaul dashboard payload (example shape in the comment below)."""

    overview: Dict[str, Any]  # totals plus next-schedule summary
    criticalParts: List[str]
    schedules: List[Dict[str, Any]]
    systemComponents: Dict[str, Any]  # keyed by component name (e.g. boiler, turbine)
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,275 +0,0 @@
from typing import Optional
from sqlalchemy import Delete, Select
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.scope_equipment.model import ScopeEquipment
from src.scope_equipment.service import get_by_scope_name
from src.scope_equipment_job.service import get_equipment_level_by_no
from datetime import datetime, timedelta
import random
from typing import List
from .utils import generate_down_periods
from src.overhaul_scope.service import get as get_overhaul
from bisect import bisect_left
from collections import defaultdict
# async def get_all_target_reliability(
# *, db_session: DbSession, scope_name: str, eaf_threshold: float = 100.0
# ):
# """Get all overhaul overview with EAF values that sum to 100%, aggregated by system."""
# equipments = await get_by_scope_name(db_session=db_session, scope_name=scope_name)
# equipment_system = await get_equipment_level_by_no(db_session=db_session, level=1)
# equipment_subsystem = await get_equipment_level_by_no(
# db_session=db_session, level=2
# )
# # If no equipments found, return empty list
# if not equipments:
# return []
# import random
# n = len(equipments)
# base_value = 100 / n # Even distribution as base
# # Generate EAF values with ±30% variation from base
# eaf_values = [
# base_value + random.uniform(-0.3 * base_value, 0.3 * base_value)
# for _ in range(n)
# ]
# # Normalize to ensure sum is 100
# total = sum(eaf_values)
# eaf_values = [(v * 100 / total) for v in eaf_values]
# # Create result array of dictionaries
# result = [
# {
# "id": equipment.id,
# "assetnum": equipment.assetnum,
# "location_tag": equipment.master_equipment.location_tag,
# "name": equipment.master_equipment.name,
# "parent_id": equipment.master_equipment.parent_id, # Add parent_id to identify the system
# "eaf": round(eaf, 4), # Add EAF value
# }
# for equipment, eaf in zip(equipments, eaf_values)
# ]
# # Group equipment by system
# sub_system = {
# subsystem.id: subsystem.parent_id for subsystem in equipment_subsystem
# }
# systems = {
# system.id: {"name": system.name, "total_eaf": 0, "equipments": []}
# for system in equipment_system
# }
# for equipment in result:
# if equipment["parent_id"] in sub_system:
# systems[sub_system[equipment["parent_id"]]]["equipments"].append(equipment)
# systems[sub_system[equipment["parent_id"]]]["total_eaf"] += equipment["eaf"]
# # Convert the systems dictionary to a list of aggregated results
# aggregated_result = [
# {
# "system_id": system_id,
# "system_name": system_data["name"],
# "total_eaf": round(system_data["total_eaf"], 4),
# "equipments": system_data["equipments"],
# }
# for system_id, system_data in systems.items()
# ]
# # Sort the aggregated result by total_eaf in descending order
# aggregated_result.sort(key=lambda x: x["total_eaf"], reverse=True)
# # Filter systems up to the threshold
# cumulative_eaf = 0
# filtered_aggregated_result = []
# for system in aggregated_result:
# cumulative_eaf += system["total_eaf"]
# filtered_aggregated_result.append(system)
# if cumulative_eaf >= eaf_threshold:
# break
# return filtered_aggregated_result
# async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str, oh_duration = 8000) -> List[dict]:
# """
# Generate a timeline of EAF values based on input parameters.
# Args:
# eaf_input (float): EAF value to check against thresholds
# oh_session_id (str): OH session identifier
# Returns:
# set[dict]: Set of dictionaries containing dates and their EAF values
# """
# # Define EAF thresholds
# MIN_EAF = 30
# MAX_EAF = 80
# #Get OH session
# oh_session = await get_overhaul(db_session=db_session, overhaul_session_id=oh_session_id)
# # Dummy OH session dates
# oh_session_start = oh_session.start_date
# oh_session_end = oh_session_start + timedelta(hours=oh_duration)
# # Initialize result set
# results = []
# # Determine date range based on EAF input
# if MIN_EAF <= eaf_input <= MAX_EAF:
# start_date = oh_session_start
# end_date = oh_session_end
# elif eaf_input < MIN_EAF:
# # If below minimum, extend end date by 2 months weeks
# start_date = oh_session_start
# end_date = oh_session_end + timedelta(days=360)
# else: # eaf_input > MAX_EAF
# # If above maximum, reduce end date by 1 month
# start_date = oh_session_start
# end_date = oh_session_end - timedelta(days=180)
# total_hours = (end_date - start_date).total_seconds() / 3600
# # Generate random down periods
# results = []
# # Generate down periods for each EAF scenario
# down_periods = {
# 'eaf1': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
# 'eaf2': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
# 'eaf3': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90),
# 'eaf4': generate_down_periods(start_date, end_date, 5, min_duration=30, max_duration=90)
# }
# # Define EAF values for downtime periods
# eaf_downtime_values = {
# 'eaf1': 0.8,
# 'eaf2': 0.65,
# 'eaf3': 0.35,
# 'eaf4': 0
# }
# # Generate daily entries
# current_time = start_date
# while current_time <= end_date:
# time_str = current_time.strftime('%Y-%m-%d %H:00:00')
# # Initialize dictionary for this hour with default values (system up)
# hourly_entry = {
# 'date': time_str,
# 'eaf1_value': 1.0,
# 'eaf2_value': 0.75,
# 'eaf3_value': 0.6,
# 'eaf4_value': 0.3
# }
# # Check each EAF scenario
# for eaf_key in down_periods:
# # Check if current hour is in any down period for this EAF
# for period_start, period_end in down_periods[eaf_key]:
# if period_start <= current_time <= period_end:
# hourly_entry[f'{eaf_key}_value'] = eaf_downtime_values[eaf_key]
# break
# results.append(hourly_entry)
# current_time += timedelta(hours=1)
# return results
async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str, oh_duration: int = 8000) -> List[dict]:
    """Generate an hourly timeline of (dummy) EAF values for an OH session.

    Args:
        db_session: Active database session.
        eaf_input: EAF value compared against MIN/MAX thresholds to decide
            how far the timeline extends.
        oh_session_id: Overhaul session identifier.
        oh_duration: Nominal overhaul duration in hours.

    Returns:
        One dict per hour with 'date' plus eaf1..eaf4 values; each scenario
        drops to its downtime value during randomly generated down periods.
    """
    MIN_EAF = 30
    MAX_EAF = 80

    oh_session = await get_overhaul(db_session=db_session, overhaul_session_id=oh_session_id)
    # ISO round-trip normalizes a `date` start into a midnight `datetime`
    # (no-op when start_date is already a datetime) -- presumably the intent;
    # TODO confirm start_date's actual type.
    oh_session_start = datetime.fromisoformat(oh_session.start_date.isoformat())

    # In-range EAF keeps the nominal window; low EAF extends it by 360 days;
    # high EAF shortens it by 180 days (which can make the window empty for
    # short durations -- the loop then yields no rows).
    if MIN_EAF <= eaf_input <= MAX_EAF:
        end_date = oh_session_start + timedelta(hours=oh_duration)
    elif eaf_input < MIN_EAF:
        end_date = oh_session_start + timedelta(hours=oh_duration, days=360)
    else:  # eaf_input > MAX_EAF
        end_date = oh_session_start + timedelta(hours=oh_duration) - timedelta(days=180)

    # Default EAF values when each scenario's system is up
    default_values = {
        'eaf1_value': 1.0,
        'eaf2_value': 0.75,
        'eaf3_value': 0.6,
        'eaf4_value': 0.3
    }
    # EAF values during downtime
    downtime_values = {
        'eaf1': 0.8,
        'eaf2': 0.65,
        'eaf3': 0.35,
        'eaf4': 0
    }

    # Random down periods per scenario, sorted by start for the sweep below
    all_down_periods = {}
    for eaf_key in ['eaf1', 'eaf2', 'eaf3', 'eaf4']:
        periods = generate_down_periods(oh_session_start, end_date, 5, min_duration=30, max_duration=90)
        all_down_periods[eaf_key] = sorted(periods, key=lambda x: x[0])

    # Collapse period boundaries into a time -> {scenario: new value} map
    state_changes = defaultdict(dict)
    for eaf_key, periods in all_down_periods.items():
        for start, end in periods:
            state_changes[start][eaf_key] = downtime_values[eaf_key]
            state_changes[end + timedelta(hours=1)][eaf_key] = default_values[f'{eaf_key}_value']

    change_times = sorted(state_changes.keys())

    results = []
    current_values = default_values.copy()
    current_time = oh_session_start
    idx = 0
    while current_time <= end_date:
        # BUGFIX: consume *all* change points that are due, not just one per
        # hour. The original `if` applied at most one change per step, so any
        # clustering of boundaries would lag by whole hours.
        while idx < len(change_times) and current_time >= change_times[idx]:
            for eaf_key, value in state_changes[change_times[idx]].items():
                current_values[f'{eaf_key}_value'] = value
            idx += 1

        results.append({
            'date': current_time.strftime('%Y-%m-%d %H:00:00'),
            **current_values
        })
        current_time += timedelta(hours=1)

    return results

@ -1,54 +0,0 @@
from datetime import datetime, timedelta
import random
from typing import List, Optional
def generate_down_periods(start_date: datetime, end_date: datetime,
num_periods: Optional[int] = None, min_duration: int = 3,
max_duration: int = 7) -> list[tuple[datetime, datetime]]:
"""
Generate random system down periods within a date range.
Args:
start_date (datetime): Start date of the overall period
end_date (datetime): End date of the overall period
num_periods (int, optional): Number of down periods to generate.
If None, generates 1-3 periods randomly
min_duration (int): Minimum duration of each down period in days
max_duration (int): Maximum duration of each down period in days
Returns:
list[tuple[datetime, datetime]]: List of (start_date, end_date) tuples
for each down period
"""
if num_periods is None:
num_periods = random.randint(1, 3)
total_days = (end_date - start_date).days
down_periods = []
# Generate random down periods
for _ in range(num_periods):
# Random duration for this period
duration = random.randint(min_duration, max_duration)
# Ensure we don't exceed the total date range
latest_possible_start = total_days - duration
if latest_possible_start < 0:
continue
# Random start day within available range
start_day = random.randint(0, latest_possible_start)
period_start = start_date + timedelta(days=start_day)
period_end = period_start + timedelta(days=duration)
# Check for overlaps with existing periods
overlaps = any(
(p_start <= period_end and period_start <= p_end)
for p_start, p_end in down_periods
)
if not overlaps:
down_periods.append((period_start, period_end))
return sorted(down_periods)

@ -1,125 +0,0 @@
from typing import Optional
from uuid import UUID
import numpy as np
from fastapi import HTTPException, status
from sqlalchemy import Select, func, select
from sqlalchemy.orm import joinedload
from src.auth.service import Token
from src.database.core import DbSession
from src.overhaul_scope.service import get_all
from src.scope_equipment.model import ScopeEquipment
from src.scope_equipment.service import get_by_assetnum
from src.workorder.model import MasterWorkOrder
from .schema import (CalculationTimeConstrainsParametersCreate,
CalculationTimeConstrainsParametersRead,
CalculationTimeConstrainsParametersRetrive,
CalculationTimeConstrainsRead)
from .service import (create_calculation_result_service, create_param_and_data,
get_avg_cost_by_asset,
get_calculation_by_reference_and_parameter,
get_calculation_data_by_id, get_calculation_result,
get_corrective_cost_time_chart,
get_overhaul_cost_by_time_chart)
async def get_create_calculation_parameters(
    *, db_session: DbSession, calculation_id: Optional[str] = None
):
    """Return parameter defaults for a new calculation, or the stored
    parameters of an existing one.

    Args:
        db_session: Async SQLAlchemy session.
        calculation_id: When given, load that calculation's saved parameters
            (404 if unknown). When None, derive per-scope average failure
            costs from historical work orders.
    """
    if calculation_id is not None:
        calculation = await get_calculation_data_by_id(
            calculation_id=calculation_id, db_session=db_session
        )
        if not calculation:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="A data with this id does not exist.",
            )
        # NOTE(review): `reference` is typed Optional[List[ReferenceLinkBase]]
        # in the schema but receives the whole ORM object here — confirm.
        return CalculationTimeConstrainsParametersRead(
            costPerFailure=calculation.parameter.avg_failure_cost,
            overhaulCost=calculation.parameter.overhaul_cost,
            reference=calculation,
        )
    # Average work-order cost per scope (LEFT JOIN so scopes without work
    # orders still appear, with a NULL average).
    stmt = (
        select(
            ScopeEquipment.scope_id,
            func.avg(MasterWorkOrder.total_cost_max).label("average_cost"),
        )
        .outerjoin(MasterWorkOrder, ScopeEquipment.assetnum == MasterWorkOrder.assetnum)
        .group_by(ScopeEquipment.scope_id)
        .order_by(ScopeEquipment.scope_id)
    )
    results = await db_session.execute(stmt)
    costFailure = results.all()
    scopes = await get_all(db_session=db_session)
    # Map scope id -> human-readable scope name.
    avaiableScopes = {scope.id: scope.scope_name for scope in scopes}
    # Re-key the averages by scope name for the response payload.
    costFailurePerScope = {
        avaiableScopes.get(costPerFailure[0]): costPerFailure[1]
        for costPerFailure in costFailure
    }
    return CalculationTimeConstrainsParametersRetrive(
        costPerFailure=costFailurePerScope,
        availableScopes=avaiableScopes.values(),
        recommendedScope="A",
        # historicalData={
        #     "averageOverhaulCost": 10000000,
        #     "lastCalculation": {
        #         "id": "calc_122",
        #         "date": "2024-10-15",
        #         "scope": "B",
        #     },
        # },
    )
async def create_calculation(
    *,
    token: str,
    db_session: DbSession,
    calculation_time_constrains_in: CalculationTimeConstrainsParametersCreate,
    created_by: str
):
    """Persist the calculation parameters/data, then compute and return results."""
    # Store the parameter record plus its calculation-data row first.
    stored = await create_param_and_data(
        db_session=db_session,
        calculation_param_in=calculation_time_constrains_in,
        created_by=created_by,
    )
    # Run the cost simulation against the freshly stored calculation data.
    return await create_calculation_result_service(
        db_session=db_session, calculation=stored, token=token
    )
async def get_or_create_scope_equipment_calculation(
    *,
    db_session: DbSession,
    scope_calculation_id,
    calculation_time_constrains_in: Optional[CalculationTimeConstrainsParametersCreate]
):
    """Load an existing calculation by id and return it as a read schema.

    Raises:
        HTTPException: 404 when the id is unknown.

    NOTE(review): despite the name, nothing is created here and
    `calculation_time_constrains_in` is never used — confirm whether a
    create-on-miss path was intended. Also, CalculationTimeConstrainsRead
    declares a required `scope` field that is not passed below — verify.
    """
    scope_calculation = await get_calculation_data_by_id(
        db_session=db_session, calculation_id=scope_calculation_id
    )
    if not scope_calculation:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    # Check if calculation already exist
    return CalculationTimeConstrainsRead(
        id=scope_calculation.id,
        reference=scope_calculation.overhaul_session_id,
        results=scope_calculation.results,
        optimum_oh=scope_calculation.optimum_oh_day,
        equipment_results=scope_calculation.equipment_results,
    )

@ -1,157 +0,0 @@
from enum import Enum
from typing import List, Optional, Union
from sqlalchemy import (JSON, UUID, Boolean, Column, Float, ForeignKey,
Integer, Numeric, String)
from sqlalchemy.orm import relationship
from src.database.core import Base, DbSession
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin, UUIDMixin
class OverhaulReferenceType(str, Enum):
    """Kind of entity an overhaul calculation references."""
    SCOPE = "SCOPE"
    ASSET = "ASSET"
class CalculationParam(Base, DefaultMixin, IdentityMixin):
    """Input parameters a calculation was run with."""
    __tablename__ = "oh_ms_calculation_param"
    # Average corrective cost of a single failure.
    avg_failure_cost = Column(Float, nullable=False)
    # Total overhaul budget to amortize across the equipment set.
    overhaul_cost = Column(Float, nullable=False)
    # Relationships
    calculation_data = relationship("CalculationData", back_populates="parameter")
    results = relationship("CalculationResult", back_populates="parameter")
    # @classmethod
    # async def create_with_references(
    #     cls,
    #     db: DbSession,
    #     avg_failure_cost: float,
    #     overhaul_cost: float,
    #     created_by: str,
    #     # list of {"reference_type": OverhaulReferenceType, "reference_id": str}
    #     references: List[dict]
    # ):
    #     # Create parameter
    #     param = cls(
    #         avg_failure_cost=avg_failure_cost,
    #         overhaul_cost=overhaul_cost,
    #         created_by=created_by
    #     )
    #     db.add(param)
    #     await db.flush()  # Flush to get the param.id
    #     # Create reference links
    #     for ref in references:
    #         reference_link = ReferenceLink(
    #             parameter_id=param.id,
    #             overhaul_reference_type=ref["reference_type"],
    #             reference_id=ref["reference_id"]
    #         )
    #         db.add(reference_link)
    #     await db.commit()
    #     await db.refresh(param)
    #     return param
class CalculationData(Base, DefaultMixin, IdentityMixin):
    """One calculation run, tied to an overhaul session and its parameters."""
    __tablename__ = "oh_tr_calculation_data"
    parameter_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_ms_calculation_param.id"), nullable=True
    )
    overhaul_session_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_ms_overhaul_scope.id")
    )
    # 1-based index of the cheapest overhaul point; filled in after results
    # are computed.
    optimum_oh_day = Column(Integer, nullable=True)
    session = relationship("OverhaulScope", lazy="raise")
    parameter = relationship("CalculationParam", back_populates="calculation_data")
    equipment_results = relationship(
        "CalculationEquipmentResult", lazy="raise", viewonly=True
    )
    results = relationship("CalculationResult", lazy="raise", viewonly=True)
    @classmethod
    async def create_with_param(
        cls,
        overhaul_session_id: str,
        db: DbSession,
        avg_failure_cost: Optional[float],
        overhaul_cost: Optional[float],
        created_by: str,
        params_id: Optional[UUID],
    ):
        """Create a calculation row, first creating its parameter row when
        params_id is not supplied. Commits and refreshes before returning."""
        if not params_id:
            # Create Params
            params = CalculationParam(
                avg_failure_cost=avg_failure_cost,
                overhaul_cost=overhaul_cost,
                created_by=created_by,
            )
            db.add(params)
            await db.flush()
            params_id = params.id
        calculation_data = cls(
            overhaul_session_id=overhaul_session_id,
            created_by=created_by,
            parameter_id=params_id,
        )
        db.add(calculation_data)
        await db.commit()
        await db.refresh(calculation_data)
        return calculation_data
class CalculationResult(Base, DefaultMixin):
    """Aggregated cost/failure totals for one day of a calculation."""
    __tablename__ = "oh_tr_calculation_result"
    parameter_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_ms_calculation_param.id"), nullable=False
    )
    calculation_data_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_tr_calculation_data.id"), nullable=False
    )
    # 1-based day index within the simulated window.
    day = Column(Integer, nullable=False)
    corrective_cost = Column(Float, nullable=False)
    overhaul_cost = Column(Float, nullable=False)
    num_failures = Column(Integer, nullable=False)
    parameter = relationship("CalculationParam", back_populates="results")
    reference_link = relationship("CalculationData")
class CalculationEquipmentResult(Base, DefaultMixin):
    """Per-equipment cost/failure series produced by one calculation."""
    __tablename__ = "oh_tr_calculation_equipment_result"
    # JSON arrays with one entry per simulated period, aligned by index.
    corrective_costs = Column(JSON, nullable=False)
    overhaul_costs = Column(JSON, nullable=False)
    daily_failures = Column(JSON, nullable=False)
    assetnum = Column(String(255), nullable=False)
    material_cost = Column(Float, nullable=False)
    service_cost = Column(Float, nullable=False)
    calculation_data_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_tr_calculation_data.id"), nullable=True
    )
    # Cheapest overhaul period for this equipment alone (1-based).
    optimum_day = Column(Integer, default=1)
    # Whether this equipment contributes to the calculation's totals.
    is_included = Column(Boolean, default=True)
    master_equipment = relationship(
        "MasterEquipment",
        lazy="joined",
        primaryjoin="and_(CalculationEquipmentResult.assetnum == foreign(MasterEquipment.assetnum))",
        uselist=False,  # Add this if it's a one-to-one relationship
    )

@ -1,146 +0,0 @@
from typing import List, Optional, Union
from fastapi import APIRouter
from fastapi.params import Query
from src.auth.service import CurrentUser, Token
from src.database.core import DbSession
from src.models import StandardResponse
from .flows import (create_calculation, get_create_calculation_parameters,
get_or_create_scope_equipment_calculation)
from .schema import (CalculationResultsRead,
CalculationSelectedEquipmentUpdate,
CalculationTimeConstrainsCreate,
CalculationTimeConstrainsParametersCreate,
CalculationTimeConstrainsParametersRead,
CalculationTimeConstrainsParametersRetrive,
CalculationTimeConstrainsRead, EquipmentResult)
from .service import (bulk_update_equipment, get_calculation_result,
get_calculation_result_by_day, get_calculation_by_assetnum)
router = APIRouter()
@router.post(
    "", response_model=StandardResponse[Union[str, CalculationTimeConstrainsRead]]
)
async def create_calculation_time_constrains(
    token: Token,
    db_session: DbSession,
    current_user: CurrentUser,
    calculation_time_constrains_in: CalculationTimeConstrainsParametersCreate,
    scope_calculation_id: Optional[str] = Query(None),
    with_results: Optional[int] = Query(0),
):
    """Create a time-constraint calculation, or fetch an existing one.

    When scope_calculation_id is supplied the stored calculation is loaded
    instead of creating a new one. Unless with_results is truthy, only the
    calculation id (as a string) is returned.
    """
    if scope_calculation_id:
        results = await get_or_create_scope_equipment_calculation(
            db_session=db_session,
            scope_calculation_id=scope_calculation_id,
            calculation_time_constrains_in=calculation_time_constrains_in,
        )
    else:
        results = await create_calculation(
            token=token,
            db_session=db_session,
            calculation_time_constrains_in=calculation_time_constrains_in,
            created_by=current_user.name,
        )
    # Collapse the payload to just the id unless the caller asked for results.
    if not with_results:
        results = str(results.id)
    return StandardResponse(data=results, message="Data created successfully")
@router.get(
    "/parameters",
    response_model=StandardResponse[
        Union[
            CalculationTimeConstrainsParametersRetrive,
            CalculationTimeConstrainsParametersRead,
        ]
    ],
)
async def get_calculation_parameters(
    db_session: DbSession, calculation_id: Optional[str] = Query(default=None)
):
    """Return suggested defaults, or a stored calculation's parameters when
    calculation_id is given."""
    parameters = await get_create_calculation_parameters(
        db_session=db_session, calculation_id=calculation_id
    )
    return StandardResponse(
        data=parameters,
        message="Data retrieved successfully",
    )
@router.get(
    "/{calculation_id}", response_model=StandardResponse[CalculationTimeConstrainsRead]
)
async def get_calculation_results(db_session: DbSession, calculation_id):
    """Return the aggregated results of one calculation (404 when unknown)."""
    results = await get_calculation_result(
        db_session=db_session, calculation_id=calculation_id
    )
    return StandardResponse(
        data=results,
        message="Data retrieved successfully",
    )
@router.get(
    "/{calculation_id}/{assetnum}", response_model=StandardResponse[EquipmentResult]
)
async def get_calculation_per_equipment(db_session: DbSession, calculation_id, assetnum):
    """Return one equipment's cost/failure series within a calculation."""
    results = await get_calculation_by_assetnum(
        db_session=db_session, assetnum=assetnum, calculation_id=calculation_id
    )
    return StandardResponse(
        data=results,
        message="Data retrieved successfully",
    )
@router.post(
    "/{calculation_id}/simulation",
    response_model=StandardResponse[CalculationResultsRead],
)
async def get_simulation_result(
    db_session: DbSession,
    calculation_id,
    calculation_simuation_in: CalculationTimeConstrainsCreate,
):
    """Return the stored result row for the requested overhaul interval day."""
    simulation_result = await get_calculation_result_by_day(
        db_session=db_session,
        calculation_id=calculation_id,
        simulation_day=calculation_simuation_in.intervalDays,
    )
    return StandardResponse(
        data=simulation_result, message="Data retrieved successfully"
    )
@router.put("/{calculation_id}", response_model=StandardResponse[List[str]])
async def update_selected_equipment(
    db_session: DbSession,
    calculation_id,
    calculation_time_constrains_in: List[CalculationSelectedEquipmentUpdate],
):
    """Bulk-toggle which equipment is included in a calculation's totals.

    Returns the list of assetnums that were targeted by the update."""
    results = await bulk_update_equipment(
        db=db_session,
        selected_equipments=calculation_time_constrains_in,
        calculation_data_id=calculation_id,
    )
    return StandardResponse(
        data=results,
        message="Data retrieved successfully",
    )

@ -1,94 +0,0 @@
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Dict, List, Optional, Union
from uuid import UUID
from pydantic import Field
from src.models import DefultBase
from src.scope_equipment.schema import MasterEquipmentBase
class CalculationTimeConstrainsBase(DefultBase):
    """Shared base for all time-constraint calculation schemas."""
    pass
class ReferenceLinkBase(DefultBase):
    """Link between a parameter set and the scope/asset it references."""
    reference_id: str = Field(..., description="Reference ID")
    overhaul_reference_type: str = Field(..., description="Overhaul reference type")
class CalculationTimeConstrainsParametersRetrive(CalculationTimeConstrainsBase):
    """Suggested parameter defaults shown before a calculation exists."""
    # type: ignore
    costPerFailure: Union[dict, float] = Field(..., description="Cost per failure")
    availableScopes: List[str] = Field(..., description="Available scopes")
    recommendedScope: str = Field(..., description="Recommended scope")
    # historicalData: Dict[str, Any] = Field(..., description="Historical data")
class CalculationTimeConstrainsParametersRead(CalculationTimeConstrainsBase):
    """Saved parameters of an existing calculation."""
    costPerFailure: Union[dict, float] = Field(..., description="Cost per failure")
    overhaulCost: Optional[float] = Field(None, description="Overhaul cost")
    reference: Optional[List[ReferenceLinkBase]] = Field(None, description="Reference")
class CalculationTimeConstrainsParametersCreate(CalculationTimeConstrainsBase):
    """Payload for creating a calculation's parameter set."""
    overhaulCost: Optional[float] = Field(0, description="Overhaul cost")
    ohSessionId: Optional[UUID] = Field(None, description="Scope OH")
    costPerFailure: Optional[float] = Field(0, description="Cost per failure")
# class CalculationTimeConstrainsCreate(CalculationTimeConstrainsBase):
# overhaulCost: float = Field(..., description="Overhaul cost")
# scopeOH: str = Field(..., description="Scope OH")
# costPerFailure: float = Field(..., description="Cost per failure")
# metadata: Dict[str, Any] = Field(..., description="Metadata")
class CalculationResultsRead(CalculationTimeConstrainsBase):
    """One aggregated cost sample (one period) of a calculation."""
    day: int
    corrective_cost: float
    overhaul_cost: float
    num_failures: int
class OptimumResult(CalculationTimeConstrainsBase):
    """Costs and failure count at the optimum overhaul point."""
    overhaul_cost: float
    corrective_cost: float
    num_failures: int
    days: int
class EquipmentResult(CalculationTimeConstrainsBase):
    """Per-equipment cost/failure series plus its own optimum day."""
    id: UUID
    corrective_costs: List[float]
    overhaul_costs: List[float]
    daily_failures: List[float]
    assetnum: str
    material_cost: float
    service_cost: float
    optimum_day: int  # Added optimum result for each equipment
    is_included: bool
    master_equipment: Optional[MasterEquipmentBase] = Field(None)
class CalculationTimeConstrainsRead(CalculationTimeConstrainsBase):
    """Full calculation payload: totals, per-equipment series and optimum."""
    id: UUID
    reference: UUID
    scope: str
    results: List[CalculationResultsRead]
    equipment_results: List[EquipmentResult]
    optimum_oh: Any
class CalculationTimeConstrainsCreate(CalculationTimeConstrainsBase):
    """Simulation request: the overhaul interval (in days) to evaluate."""
    intervalDays: int
class CalculationTimeConstrainsSimulationRead(CalculationTimeConstrainsBase):
    """Wrapper for a single simulated cost sample."""
    simulation: CalculationResultsRead
class CalculationSelectedEquipmentUpdate(CalculationTimeConstrainsBase):
    """Toggle whether one asset is included in the calculation totals."""
    is_included: bool
    assetnum: str

@ -1,606 +0,0 @@
import datetime
from typing import Coroutine, List, Optional, Tuple
from uuid import UUID
import numpy as np
import requests
from fastapi import HTTPException, status
from sqlalchemy import and_, case, func, select, update
from sqlalchemy.orm import joinedload
from src.database.core import DbSession
from src.overhaul_activity.service import get_all_by_session_id
from src.overhaul_scope.service import get as get_scope
from src.utils import get_latest_numOfFail
from src.workorder.model import MasterWorkOrder
from .model import (CalculationData, CalculationEquipmentResult,
CalculationResult)
from .schema import (CalculationResultsRead,
CalculationSelectedEquipmentUpdate,
CalculationTimeConstrainsParametersCreate,
CalculationTimeConstrainsRead, OptimumResult)
from .utils import get_months_between
# def get_overhaul_cost_by_time_chart(
# overhaul_cost: float, days: int, numEquipments: int, decay_base: float = 1.01
# ) -> np.ndarray:
# if overhaul_cost < 0:
# raise ValueError("Overhaul cost cannot be negative")
# if days <= 0:
# raise ValueError("Days must be positive")
# hours = days * 24
# rate = np.arange(1, hours + 1)
# cost_per_equipment = overhaul_cost / numEquipments
# results = cost_per_equipment - ((cost_per_equipment / hours) * rate)
# return results
# def get_overhaul_cost_by_time_chart(overhaul_cost: float, days: int, numEquipments: int, decay_base: float = 1.1) -> np.ndarray:
# if overhaul_cost < 0:
# raise ValueError("Overhaul cost cannot be negative")
# if days <= 0:
# raise ValueError("Days must be positive")
# exponents = np.arange(0, days)
# cost_per_equipment = overhaul_cost / numEquipments
# # Introduce randomness by multiplying with a random factor
# random_factors = np.random.normal(1.0, 0.1, numEquipments) # Mean 1.0, Std Dev 0.1
# results = np.array([cost_per_equipment * factor / (decay_base ** exponents) for factor in random_factors])
# results = np.where(np.isfinite(results), results, 0)
# return results
# async def get_corrective_cost_time_chart(
# material_cost: float, service_cost: float, location_tag: str, token, max_days: int
# ) -> Tuple[np.ndarray, np.ndarray]:
# start_date = datetime.datetime(2025, 1, 1)
# end_date = start_date + datetime.timedelta(days=max_days)
# url = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/{start_date.strftime('%Y-%m-%d')}/{end_date.strftime('%Y-%m-%d')}"
# try:
# response = requests.get(
# url,
# headers={
# "Content-Type": "application/json",
# "Authorization": f"Bearer {token}",
# },
# )
# data = response.json()
# ## Get latest data fromdata_today
# # latest_num_of_fail:float = get_latest_numOfFail(location_tag=location_tag, token=token)
# latest_num = data["data"][-1]["num_fail"]
# if not latest_num:
# latest_num = 1
# # Create a complete date range for 2024
# start_date = datetime.datetime(2025, 1, 1)
# date_range = [start_date + datetime.timedelta(days=x) for x in range(max_days)]
# # Create a dictionary of existing data
# data_dict = {
# datetime.datetime.strptime(item["date"], "%d %b %Y"): item["num_fail"]
# for item in data["data"]
# }
# # Fill in missing dates with nearest available value
# complete_data = []
# last_known_value = 0 # Default value if no data is available
# not_full_data = []
# for date in date_range:
# if date in data_dict:
# if data_dict[date] is not None:
# last_known_value = data_dict[date]
# complete_data.append(last_known_value)
# else:
# complete_data.append(0)
# # Convert to numpy array
# daily_failure = np.array(complete_data)
# hourly_failure = np.repeat(daily_failure, 24) / 24
# # failure_counts = np.cumsum(daily_failure)
# # Calculate corrective costs
# cost_per_failure = (material_cost + service_cost) / latest_num
# if cost_per_failure == 0:
# raise ValueError("Cost per failure cannot be zero")
# corrective_costs = hourly_failure * cost_per_failure
# return corrective_costs, hourly_failure
# except Exception as e:
# print(f"Error fetching or processing data: {str(e)}")
# raise
async def get_corrective_cost_time_chart(
    material_cost: float,
    service_cost: float,
    location_tag: str,
    token,
    start_date: datetime.datetime,
    end_date: datetime.datetime
) -> Tuple[np.ndarray, np.ndarray]:
    """Fetch a failure forecast for one location and turn it into monthly
    corrective-cost and failure-count series.

    Args:
        material_cost: Material cost attributed to this equipment.
        service_cost: Service cost attributed to this equipment.
        location_tag: Location identifier used by the reliability service.
        token: Bearer token forwarded to the reliability service.
        start_date: First day of the window to cover.
        end_date: Last day of the window to cover.

    Returns:
        (corrective_costs, monthly_failure): one entry per calendar month
        between start_date and end_date, in chronological order.

    Raises:
        Whatever the HTTP call or response parsing raises; errors are
        logged and re-raised unchanged.
    """
    url = f"http://192.168.1.82:8000/reliability/main/number-of-failures/{location_tag}/{start_date.strftime('%Y-%m-%d')}/{end_date.strftime('%Y-%m-%d')}"
    try:
        response = requests.get(
            url,
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {token}",
            },
        )
        data = response.json()
        # Scale factor: the most recent failure count. Guard against a
        # zero/None value so the division below cannot blow up.
        latest_num = data["data"][-1]["num_fail"]
        if not latest_num:
            latest_num = 1
        # Index the service's daily samples by parsed date.
        data_dict = {
            datetime.datetime.strptime(item["date"], "%d %b %Y"): item["num_fail"]
            for item in data["data"]
        }
        # Initialize every month in the requested window with 0.
        # (Bug fix: previously start_date was overwritten with a hard-coded
        # 2025-01-01 here, silently ignoring the caller's window; the
        # unused daily date_range has also been removed.)
        monthly_data = {}
        current_date = start_date.replace(day=1)
        while current_date <= end_date:
            monthly_data[current_date] = 0
            # Move to the first day of the next month.
            if current_date.month == 12:
                current_date = datetime.datetime(current_date.year + 1, 1, 1)
            else:
                current_date = datetime.datetime(current_date.year, current_date.month + 1, 1)
        # Collapse daily samples to one value per month, keeping the
        # maximum seen (i.e. the last/highest reading of that month).
        for date in data_dict.keys():
            month_key = datetime.datetime(date.year, date.month, 1)
            if month_key in monthly_data and data_dict[date] is not None:
                monthly_data[month_key] = max(monthly_data[month_key], data_dict[date])
        # Chronological list of monthly failure counts.
        complete_data = [monthly_data[month] for month in sorted(monthly_data.keys())]
        monthly_failure = np.array(complete_data)
        # Spread the combined cost over the observed failures.
        cost_per_failure = (material_cost + service_cost) / latest_num
        if cost_per_failure == 0:
            raise ValueError("Cost per failure cannot be zero")
        corrective_costs = monthly_failure * cost_per_failure
        return corrective_costs, monthly_failure
    except Exception as e:
        print(f"Error fetching or processing data: {str(e)}")
        raise
def get_overhaul_cost_by_time_chart(
    overhaul_cost: float, months_num: int, numEquipments: int, decay_base: float = 1.01
) -> np.ndarray:
    """Spread the per-equipment overhaul cost over time as a 1/t decay curve.

    Args:
        overhaul_cost: Total overhaul budget (must be non-negative).
        months_num: Number of periods to produce (must be positive).
        numEquipments: Equipment count the budget is split across.
        decay_base: Unused; kept for interface compatibility.

    Returns:
        Array of length months_num where entry t-1 equals
        (overhaul_cost / numEquipments) / t.
    """
    if overhaul_cost < 0:
        raise ValueError("Overhaul cost cannot be negative")
    if months_num <= 0:
        raise ValueError("months_num must be positive")
    per_equipment = overhaul_cost / numEquipments
    # Amortize: cost attributed to period t is the per-equipment cost / t.
    return per_equipment / np.arange(1, months_num + 1)
# def get_corrective_cost_time_chart(material_cost: float, service_cost: float, days: int, numEquipments: int) -> Tuple[np.ndarray, np.ndarray]:
# day_points = np.arange(0, days)
# # Parameters for failure rate
# base_rate = 0.04 # Base failure rate per day
# acceleration = 0.7 # How quickly failure rate increases
# grace_period = 49 # Days before failures start increasing significantly
# # Calculate daily failure rate using sigmoid function
# daily_failure_rate = base_rate / (1 + np.exp(-acceleration * (day_points - grace_period)/days))
# # Introduce randomness in the failure rate
# random_noise = np.random.normal(0.0, 0.05, (numEquipments, days)) # Mean 0.0, Std Dev 0.05
# daily_failure_rate = daily_failure_rate + random_noise
# daily_failure_rate = np.clip(daily_failure_rate, 0, None) # Ensure failure rate is non-negative
# # Calculate cumulative failures
# failure_counts = np.cumsum(daily_failure_rate)
# # Calculate corrective costs based on cumulative failures and combined costs
# cost_per_failure = material_cost + service_cost
# corrective_costs = failure_counts * cost_per_failure
# return corrective_costs, daily_failure_rate
async def create_param_and_data(
    *,
    db_session: DbSession,
    calculation_param_in: CalculationTimeConstrainsParametersCreate,
    created_by: str,
    parameter_id: Optional[UUID] = None,
):
    """Persist a parameter set (unless parameter_id is given) and its
    calculation-data row for the referenced overhaul session.

    Raises:
        HTTPException: 400 when no overhaul session id was supplied.
    """
    if calculation_param_in.ohSessionId is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="overhaul_session_id is required",
        )
    calculationData = await CalculationData.create_with_param(
        db=db_session,
        overhaul_session_id=calculation_param_in.ohSessionId,
        avg_failure_cost=calculation_param_in.costPerFailure,
        overhaul_cost=calculation_param_in.overhaulCost,
        created_by=created_by,
        params_id=parameter_id,
    )
    return calculationData
async def get_calculation_result(db_session: DbSession, calculation_id: str):
    """Re-aggregate a stored calculation's per-equipment series into totals.

    Sums corrective cost, overhaul cost and failure count per period across
    the included equipment and returns the full read schema.

    NOTE(review): this window ends 2026-12-31 while result generation uses
    2027-12-31 — confirm the shorter range is intentional; the indexing
    below assumes each equipment array has at least months_num entries.
    """
    start_date = datetime.datetime(2025, 1, 1)
    end_date = datetime.datetime(2026, 12, 31)
    months_num = get_months_between(start_date, end_date)
    scope_calculation = await get_calculation_data_by_id(
        db_session=db_session, calculation_id=calculation_id
    )
    if not scope_calculation:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    scope_overhaul = await get_scope(
        db_session=db_session, overhaul_session_id=scope_calculation.overhaul_session_id
    )
    if not scope_overhaul:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    calculation_results = []
    for i in range(months_num):
        result = {
            "overhaul_cost": 0,
            "corrective_cost": 0,
            "num_failures": 0,
            "day": i + 1,
        }
        ## Add risk Cost
        # risk cost = ((DownTime1 * MW Loss1) + (DownTime2 * MW Loss2) + ... + (DownTimeN * MW LossN)) * electricity price (Efficiency HL App)
        # Sum this period across every equipment still included in totals.
        for eq in scope_calculation.equipment_results:
            if not eq.is_included:
                continue
            result["corrective_cost"] += float(eq.corrective_costs[i])
            result["overhaul_cost"] += float(eq.overhaul_costs[i])
            result["num_failures"] += int(eq.daily_failures[i])
        calculation_results.append(CalculationResultsRead(**result))
    # Check if calculation already exist
    return CalculationTimeConstrainsRead(
        id=scope_calculation.id,
        reference=scope_calculation.overhaul_session_id,
        scope=scope_overhaul.type,
        results=calculation_results,
        optimum_oh=scope_calculation.optimum_oh_day,
        equipment_results=scope_calculation.equipment_results,
    )
async def get_calculation_data_by_id(
    db_session: DbSession, calculation_id
) -> CalculationData:
    """Load one CalculationData with its equipment results and parameter
    eagerly joined; returns None when the id is unknown."""
    stmt = (
        select(CalculationData)
        .filter(CalculationData.id == calculation_id)
        .options(
            joinedload(CalculationData.equipment_results),
            joinedload(CalculationData.parameter),
        )
    )
    result = await db_session.execute(stmt)
    # unique() is required when joinedload fans out over a collection.
    return result.unique().scalar()
async def get_calculation_by_assetnum(
    *, db_session: DbSession, assetnum: str, calculation_id: str
):
    """Load the per-equipment result row for one asset in one calculation."""
    # Multiple criteria passed to a single where() are ANDed together.
    query = select(CalculationEquipmentResult).where(
        CalculationEquipmentResult.assetnum == assetnum,
        CalculationEquipmentResult.calculation_data_id == calculation_id,
    )
    return (await db_session.execute(query)).scalar()
# async def create_calculation_result_service(db_session: DbSession, calculation_id: UUID, costPerFailure: Optional[float] = None):
# days = 360
# calculation = await get_calculation_data_by_id(db_session=db_session, calculation_id=calculation_id)
# # reference = await get_by_assetnum(db_session=db_session, assetnum=calculation.reference_id) if calculation.overhaul_reference_type == OverhaulReferenceType.ASSET else await get(db_session=db_session, scope_id=calculation.reference_id)
# # Multiple Eequipment
# equipments_scope = get_all_by_session_id(db_session=db_session, overhaul_session_id=calculation.overhaul_session_id)
# # Parameter
# overhaulCost = calculation.parameter.overhaul_cost
# costPerFailure = costPerFailure if costPerFailure else calculation.parameter.avg_failure_cost
# overhaul_cost_points = get_overhaul_cost_by_time_chart(
# overhaulCost, days=days)
# for eq in equipments_scope:
# corrective_cost_points, dailyNumberOfFailure = get_corrective_cost_time_chart(
# costPerFailure, days)
# total_cost = overhaul_cost_points + corrective_cost_points
# optimumOHIndex = np.argmin(total_cost)
# numbersOfFailure = sum(dailyNumberOfFailure[:optimumOHIndex])
# optimum = {
# "overhaulCost": float(overhaul_cost_points[optimumOHIndex]),
# "correctiveCost": float(corrective_cost_points[optimumOHIndex]),
# "numOfFailures": int(numbersOfFailure),
# "days": int(optimumOHIndex+1)
# }
# calculation_results = []
# for i in range(days):
# result = CalculationResult(
# parameter_id=calculation.parameter_id,
# calculation_data_id=calculation.id,
# day=(i + 1),
# corrective_cost=float(corrective_cost_points[i]),
# overhaul_cost=float(overhaul_cost_points[i]),
# num_failures=int(dailyNumberOfFailure[i]),
# )
# calculation_results.append(result)
# calculation.optimum_oh_day = int(optimumOHIndex+1)
# db_session.add_all(calculation_results)
# await db_session.commit()
# return CalculationTimeConstrainsRead(
# id=calculation.id,
# name=reference.scope_name if hasattr(
# reference, "scope_name") else reference.master_equipment.name,
# reference=reference.assetnum if hasattr(
# reference, "assetnum") else reference.scope_name,
# results=calculation_results,
# optimumOh=optimum
# )
async def create_calculation_result_service(
    db_session: DbSession, calculation: CalculationData, token: str
) -> CalculationTimeConstrainsRead:
    """Compute and persist per-equipment cost curves plus the overall optimum.

    For each equipment in the calculation's overhaul session: fetch its
    failure forecast, derive monthly corrective/overhaul cost series, store
    a CalculationEquipmentResult, and finally record on the calculation the
    period index where total cost (corrective + overhaul) is lowest.
    Commits the session before returning.
    """
    # Fixed simulation window; one sample per calendar month.
    start_date = datetime.datetime(2025, 1, 1)
    end_date = datetime.datetime(2027, 12, 31)
    months_num = get_months_between(start_date, end_date)
    # Get all equipment for this calculation session
    equipments = await get_all_by_session_id(
        db_session=db_session, overhaul_session_id=calculation.overhaul_session_id
    )
    scope = await get_scope(
        db_session=db_session, overhaul_session_id=calculation.overhaul_session_id
    )
    calculation_data = await get_calculation_data_by_id(
        db_session=db_session, calculation_id=calculation.id
    )
    # Store results for each equipment
    equipment_results: List[CalculationEquipmentResult] = []
    total_corrective_costs = np.zeros(months_num)
    total_daily_failures = np.zeros(months_num)
    # Calculate for each equipment
    for eq in equipments:
        corrective_costs, daily_failures = await get_corrective_cost_time_chart(
            material_cost=eq.material_cost,
            service_cost=eq.service_cost,
            token=token,
            location_tag=eq.equipment.location_tag,
            start_date=start_date,
            end_date=end_date
        )
        # NOTE(review): identical on every iteration and also read after the
        # loop (so the code assumes equipments is non-empty) — candidate for
        # hoisting; confirm before refactoring.
        overhaul_cost_points = get_overhaul_cost_by_time_chart(
            calculation_data.parameter.overhaul_cost,
            months_num=months_num,
            numEquipments=len(equipments),
        )
        # Calculate individual equipment optimum points
        equipment_total_cost = corrective_costs + overhaul_cost_points
        equipment_optimum_index = np.argmin(equipment_total_cost)
        # NOTE(review): computed but never used — confirm before removing.
        equipment_failure_sum = sum(daily_failures[:equipment_optimum_index])
        equipment_results.append(
            CalculationEquipmentResult(
                corrective_costs=corrective_costs.tolist(),
                overhaul_costs=overhaul_cost_points.tolist(),
                daily_failures=daily_failures.tolist(),
                assetnum=eq.assetnum,
                material_cost=eq.material_cost,
                service_cost=eq.service_cost,
                optimum_day=int(equipment_optimum_index + 1),
                calculation_data_id=calculation.id,
                master_equipment=eq.equipment,
            )
        )
        # Add to totals
        total_corrective_costs += corrective_costs
        total_daily_failures += daily_failures
    db_session.add_all(equipment_results)
    # Calculate optimum points using total costs
    total_cost = total_corrective_costs + overhaul_cost_points
    optimum_oh_index = np.argmin(total_cost)
    numbers_of_failure = sum(total_daily_failures[:optimum_oh_index])
    optimum = OptimumResult(
        overhaul_cost=float(overhaul_cost_points[optimum_oh_index]),
        corrective_cost=float(total_corrective_costs[optimum_oh_index]),
        num_failures=int(numbers_of_failure),
        days=int(optimum_oh_index + 1),
    )
    # # Create calculation results for database
    # calculation_results = []
    # for i in range(days):
    #     result = CalculationResult(
    #         parameter_id=calculation.parameter_id,
    #         calculation_data_id=calculation.id,
    #         day=(i + 1),
    #         corrective_cost=float(total_corrective_costs[i]),
    #         overhaul_cost=float(overhaul_cost_points[i]),
    #         num_failures=int(total_daily_failures[i]),
    #     )
    #     calculation_results.append(result)
    # Update calculation with optimum day
    calculation.optimum_oh_day = optimum.days
    await db_session.commit()
    # Return results including individual equipment data
    return CalculationTimeConstrainsRead(
        id=calculation.id,
        reference=calculation.overhaul_session_id,
        scope=scope.type,
        results=[],
        optimum_oh=optimum,
        equipment_results=equipment_results,
    )
async def get_calculation_by_reference_and_parameter(
    *, db_session: DbSession, calculation_reference_id, parameter_id
):
    """Find a calculation by its reference id and parameter id.

    NOTE(review): the visible CalculationData model does not define
    reference_id (only overhaul_session_id / parameter_id) — confirm this
    attribute exists before relying on this query.
    """
    stmt = select(CalculationData).filter(
        and_(
            CalculationData.reference_id == calculation_reference_id,
            CalculationData.parameter_id == parameter_id,
        )
    )
    result = await db_session.execute(stmt)
    return result.scalar()
async def get_calculation_result_by_day(
    *, db_session: DbSession, calculation_id, simulation_day
):
    """Fetch the stored per-day result row for one calculation and day."""
    criteria = and_(
        CalculationResult.day == simulation_day,
        CalculationResult.calculation_data_id == calculation_id,
    )
    outcome = await db_session.execute(select(CalculationResult).filter(criteria))
    return outcome.scalar()
async def get_avg_cost_by_asset(*, db_session: DbSession, assetnum: str):
    """Average MasterWorkOrder.total_cost_max for one asset, or None when
    the asset has no work orders."""
    stmt = select(func.avg(MasterWorkOrder.total_cost_max).label("average_cost")).where(
        MasterWorkOrder.assetnum == assetnum
    )
    result = await db_session.execute(stmt)
    return result.scalar_one_or_none()
async def bulk_update_equipment(
    *,
    db: DbSession,
    selected_equipments: List[CalculationSelectedEquipmentUpdate],
    calculation_data_id: UUID,
):
    """Bulk-toggle `is_included` for many equipment rows of one calculation.

    A single UPDATE with a CASE expression writes every row in one round
    trip. Returns the list of targeted assetnums.
    """
    # assetnum -> desired is_included flag
    inclusion_by_asset = {
        equipment.assetnum: equipment.is_included for equipment in selected_equipments
    }
    target_assetnums = list(inclusion_by_asset)

    # One (condition, value) branch per asset for the CASE expression.
    case_branches = [
        (CalculationEquipmentResult.assetnum == asset, flag)
        for asset, flag in inclusion_by_asset.items()
    ]

    update_stmt = (
        update(CalculationEquipmentResult)
        .where(CalculationEquipmentResult.calculation_data_id == calculation_data_id)
        .where(CalculationEquipmentResult.assetnum.in_(target_assetnums))
        .values({"is_included": case(*case_branches)})
    )
    await db.execute(update_stmt)
    await db.commit()
    return target_assetnums

@ -1,9 +0,0 @@
import datetime
def get_months_between(start_date: datetime.datetime, end_date: datetime.datetime) -> int:
    """Return the number of calendar months spanned, inclusive of both ends.

    Jan 2024 -> Mar 2024 yields 3; two dates in the same month yield 1.
    """
    year_span = end_date.year - start_date.year
    month_span = end_date.month - start_date.month
    # +1 so both the start month and the end month are counted.
    return year_span * 12 + month_span + 1

@ -74,3 +74,4 @@ MAXIMO_BASE_URL = config("MAXIMO_BASE_URL", default="http://example.com")
MAXIMO_API_KEY = config("MAXIMO_API_KEY", default="keys")
AUTH_SERVICE_API = config("AUTH_SERVICE_API", default="http://192.168.1.82:8000/auth")
AEROS_BASE_URL = config("AEROS_BASE_URL", default="http://20.198.228.3")

@ -0,0 +1,26 @@
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, HTTPException, Query, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .service import get_model_data
router = APIRouter()
@router.get("", response_model=StandardResponse[dict])
async def get_dashboard_model_data(
db_session: DbSession,
simulation_id: Optional[UUID] = Query(None),
):
result = await get_model_data(db_session=db_session, simulation_id=simulation_id)
return StandardResponse(
data=result,
message="Data retrieved successfully",
)

@ -0,0 +1,43 @@
# from datetime import datetime
# from typing import List, Optional
# from uuid import UUID
# from pydantic import Field
# from src.models import DefultBase, Pagination
# from src.overhaul_scope.schema import ScopeRead
# from src.scope_equipment_job.schema import ScopeEquipmentJobRead
# from src.job.schema import ActivityMasterRead
# class OverhaulScheduleBase(DefultBase):
# pass
# class OverhaulScheduleCreate(OverhaulScheduleBase):
# year: int
# plan_duration: Optional[int] = Field(None)
# planned_outage: Optional[int] = Field(None)
# actual_shutdown: Optional[int] = Field(None)
# start: datetime
# finish: datetime
# remark: Optional[str] = Field(None)
# class OverhaulScheduleUpdate(OverhaulScheduleBase):
# start: datetime
# finish: datetime
# class OverhaulScheduleRead(OverhaulScheduleBase):
# id: UUID
# year: int
# plan_duration: Optional[int]
# planned_outage: Optional[int]
# actual_shutdown: Optional[int]
# start: datetime
# finish: datetime
# remark: Optional[str]
# class OverhaulSchedulePagination(Pagination):
# items: List[OverhaulScheduleRead] = []

@ -0,0 +1,40 @@
from typing import Optional
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy import select
from sqlalchemy.orm import selectinload
from src.aeros_simulation.model import AerosSimulation
from src.aeros_simulation.service import (
get_calc_result_by,
get_simulation_by_id,
get_simulation_node_by,
)
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
async def get_model_data(*, db_session: DbSession, simulation_id: Optional[UUID]):
    """Compute availability KPIs for a completed simulation's "Main" node.

    Returns a dict with `availability`, `EFOR` and `EAF` ratios derived from
    the Main node's calculation result.
    """
    simulation = await get_simulation_by_id(
        db_session=db_session, simulation_id=simulation_id, is_completed=True
    )
    main_node = await get_simulation_node_by(db_session=db_session, node_name="Main")
    main_calc_data = await get_calc_result_by(
        db_session=db_session, simulation_id=simulation.id, aeros_node_id=main_node.id
    )
    # Total time period
    total_time = main_calc_data.total_uptime + main_calc_data.total_downtime
    # NOTE(review): the divisions below raise ZeroDivisionError when
    # total_time or ideal_production is 0 — confirm upstream guarantees
    # a completed simulation always has non-zero totals.
    # Availability Factor (same as your first formula - this one is correct)
    availability = main_calc_data.total_uptime / total_time
    # Equivalent Forced Outage Rate (EFOR)
    EFOR = main_calc_data.total_downtime / total_time
    EAF = main_calc_data.production / main_calc_data.ideal_production
    return {"availability": availability, "EFOR": EFOR, "EAF": EAF}

@ -9,8 +9,7 @@ from pydantic import BaseModel
from sqlalchemy import create_engine, inspect
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import (DeclarativeBase, Session, object_session,
sessionmaker)
from sqlalchemy.orm import DeclarativeBase, Session, object_session, sessionmaker
from sqlalchemy.sql.expression import true
from sqlalchemy_utils import get_mapper
from starlette.requests import Request

@ -1,7 +1,7 @@
from enum import StrEnum
class OptimumOHEnum(StrEnum):
class RBDEnum(StrEnum):
"""
A custom Enum class that extends StrEnum.
@ -19,6 +19,6 @@ class OptimumOHEnum(StrEnum):
pass # No additional implementation needed
class ResponseStatus(OptimumOHEnum):
class ResponseStatus(RBDEnum):
    """Status labels used in API response envelopes."""

    SUCCESS = "success"
    ERROR = "error"

@ -10,8 +10,7 @@ from fastapi.responses import JSONResponse
from pydantic import BaseModel
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from sqlalchemy.exc import (DataError, DBAPIError, IntegrityError,
SQLAlchemyError)
from sqlalchemy.exc import DataError, DBAPIError, IntegrityError, SQLAlchemyError
from src.enums import ResponseStatus

@ -1,45 +0,0 @@
from sqlalchemy import UUID, Column, Float, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
from src.workorder.model import MasterWorkOrder
class MasterActivitytask(Base, DefaultMixin):
    """One task belonging to a master overhaul job (table oh_ms_job_task)."""

    __tablename__ = "oh_ms_job_task"

    # Human-readable task description.
    description = Column(String, nullable=False)
    # Overhaul type code — presumably which OH scope this task applies to;
    # TODO confirm semantics.
    oh_type = Column(String, nullable=False)
    # Owning job; cascades on delete of the parent oh_ms_job row.
    job_id = Column(
        UUID(as_uuid=True),
        ForeignKey("oh_ms_job.id", ondelete="cascade"),
        nullable=False,
    )
class MasterActivity(Base, DefaultMixin):
    """Master overhaul job (table oh_ms_job) grouping a set of tasks."""

    __tablename__ = "oh_ms_job"

    workscope = Column(String, nullable=True)
    system = Column(String, nullable=True)
    subsystem = Column(String, nullable=True)
    # Child tasks, eagerly loaded with a second SELECT per query.
    tasks = relationship(
        "MasterActivitytask",
        lazy="selectin",
    )
    # details = relationship(
    #     "MasterActivityDetail",
    #     lazy="raise",
    #     primaryjoin="and_(MasterActivity.id == foreign(MasterActivityDetail.activity_id))",
    # )
# class MasterActivityDetail(Base, DefaultMixin):
#     __tablename__ = "oh_ms_activity_detail"
#     name = Column(String, nullable=False)
#     activity_id = Column(UUID(as_uuid=True))

@ -1,84 +0,0 @@
from fastapi import APIRouter, HTTPException, Query, status
from src.database.service import (CommonParameters, DbSession,
search_filter_sort_paginate)
from src.models import StandardResponse
from .schema import (ActivityMaster, ActivityMasterCreate,
ActivityMasterPagination)
from .service import create, delete, get, get_all, update
router = APIRouter()
@router.get("", response_model=StandardResponse[ActivityMasterPagination])
async def get_activities(common: CommonParameters):
"""Get all scope activity pagination."""
# return
data = await get_all(common=common)
return StandardResponse(
data=data,
message="Data retrieved successfully",
)
@router.post("", response_model=StandardResponse[ActivityMasterCreate])
async def create_activity(db_session: DbSession, activity_in: ActivityMasterCreate):
activity = await create(db_session=db_session, activty_in=activity_in)
return StandardResponse(data=activity, message="Data created successfully")
@router.get("/{activity_id}", response_model=StandardResponse[ActivityMaster])
async def get_activity(db_session: DbSession, activity_id: str):
    """Fetch one master activity by id; 404 when it does not exist.

    Fix: the path previously declared `{scope_equipment_activity_id}` while
    the handler expected `activity_id`, so FastAPI treated `activity_id` as a
    required query parameter and the path value was never read.
    """
    activity = await get(db_session=db_session, activity_id=activity_id)
    if not activity:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(data=activity, message="Data retrieved successfully")
@router.put("/{activity_id}", response_model=StandardResponse[ActivityMaster])
async def update_scope(
    db_session: DbSession, activity_in: ActivityMasterCreate, activity_id: str
):
    """Update a master activity by id; 404 when it does not exist.

    Fix: the path previously declared `{scope_equipment_activity_id}` while
    the handler expected `activity_id`, so the id was never read from the URL.
    """
    activity = await get(db_session=db_session, activity_id=activity_id)
    if not activity:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(
        data=await update(
            db_session=db_session, activity=activity, activity_in=activity_in
        ),
        message="Data updated successfully",
    )
@router.delete("/{activity_id}", response_model=StandardResponse[ActivityMaster])
async def delete_scope(db_session: DbSession, activity_id: str):
    """Delete a master activity by id; 404 when it does not exist.

    Fix: the path previously declared `{scope_equipment_activity_id}` while
    the handler expected `activity_id`, so the id was never read from the URL.
    """
    activity = await get(db_session=db_session, activity_id=activity_id)
    if not activity:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A data with this id does not exist."}],
        )
    await delete(db_session=db_session, activity_id=activity_id)
    return StandardResponse(message="Data deleted successfully", data=activity)

@ -1,75 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.models import DefultBase, Pagination
class ActivityMaster(DefultBase):
    """Base schema for master-activity payloads; adds no fields of its own."""

    pass
class ActivityMasterDetail(DefultBase):
    """Schema for a single named activity detail entry."""

    name: str
class ActivityMasterCreate(ActivityMaster):
    """Payload for creating a master activity."""

    description: str
class ActivityMasterTasks(DefultBase):
    """Schema for one task attached to a master activity."""

    description: str
    # Overhaul type code — presumably mirrors MasterActivitytask.oh_type.
    oh_type: str
class ActivityMasterRead(ActivityMaster):
    """Read schema for a master activity, including its tasks."""

    id: UUID
    workscope: str
    system: str
    subsystem: str
    tasks: List[ActivityMasterTasks]
class ActivityMasterPagination(Pagination):
    """Paginated envelope of ActivityMasterRead items."""

    items: List[ActivityMasterRead] = []
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,59 +0,0 @@
from typing import Optional
from sqlalchemy import Delete, Select
from sqlalchemy.orm import joinedload, selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from .model import MasterActivity
from .schema import ActivityMaster, ActivityMasterCreate
async def get(*, db_session: DbSession, activity_id: str) -> Optional[ActivityMaster]:
    """Look up a master activity by primary key; None when absent."""
    return await db_session.get(MasterActivity, activity_id)
async def get_all(common: CommonParameters):
    """Return a paginated, filtered, sorted listing of master activities."""
    base_query = Select(MasterActivity)
    return await search_filter_sort_paginate(model=base_query, **common)
async def create(*, db_session: DbSession, activty_in: ActivityMasterCreate):
    """Persist a new master activity built from its create schema.

    Note: the keyword is spelled `activty_in` (sic) to match existing callers.
    """
    new_activity = MasterActivity(**activty_in.model_dump())
    db_session.add(new_activity)
    await db_session.commit()
    return new_activity
async def update(
    *,
    db_session: DbSession,
    activity: MasterActivity,
    activity_in: ActivityMasterCreate
):
    """Apply the caller-supplied fields of `activity_in` onto `activity`.

    Only non-default fields are written, so partial updates do not clobber
    existing values. Commits the session and returns the updated entity.

    Fix: the original dumped the model twice and tested membership per field;
    iterating `model_dump(exclude_defaults=True)` directly is equivalent
    (its keys are a subset of the full dump) and does the work once.
    """
    for field, value in activity_in.model_dump(exclude_defaults=True).items():
        setattr(activity, field, value)
    await db_session.commit()
    return activity
async def delete(*, db_session: DbSession, activity_id: str):
    """Deletes a master activity by id and commits."""
    # NOTE(review): db_session.get returns None for an unknown id, and
    # delete(None) would raise — callers (the router) check existence first.
    activity = await db_session.get(MasterActivity, activity_id)
    await db_session.delete(activity)
    await db_session.commit()

@ -1,12 +1,12 @@
import logging
from src.config import LOG_LEVEL
from src.enums import OptimumOHEnum
from src.enums import RBDEnum
LOG_FORMAT_DEBUG = "%(levelname)s:%(message)s:%(pathname)s:%(funcName)s:%(lineno)d"
class LogLevels(OptimumOHEnum):
class LogLevels(RBDEnum):
info = "INFO"
warn = "WARN"
error = "ERROR"

@ -14,8 +14,7 @@ from slowapi.errors import RateLimitExceeded
from sqlalchemy import inspect
from sqlalchemy.ext.asyncio import async_scoped_session
from sqlalchemy.orm import scoped_session
from starlette.middleware.base import (BaseHTTPMiddleware,
RequestResponseEndpoint)
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.middleware.gzip import GZipMiddleware
from starlette.requests import Request
from starlette.responses import FileResponse, Response, StreamingResponse
@ -42,7 +41,7 @@ app = FastAPI(
exception_handlers=exception_handlers,
openapi_url="",
title="LCCA API",
description="Welcome to LCCA's API documentation!",
description="Welcome to RBD's API documentation!",
version="0.1.0",
)
app.state.limiter = limiter

@ -1,154 +0,0 @@
from datetime import datetime, timedelta
from typing import Any, Dict
import httpx
from fastapi import HTTPException
from starlette.config import Config
from src.config import MAXIMO_API_KEY, MAXIMO_BASE_URL
class MaximoDataMapper:
    """
    Helper that maps a raw MAXIMO API response onto our data structure.
    The field names below are placeholders; update them per the real
    MAXIMO API documentation.
    """

    def __init__(self, maximo_data: Dict[Any, Any]):
        self.data = maximo_data

    def _required_field(self, key: str, error_message: str) -> Any:
        """Fetch a field that must be present and truthy, else ValueError."""
        value = self.data.get(key)
        if not value:
            raise ValueError(error_message)
        return value

    def get_start_date(self) -> datetime:
        """Parse the overhaul start date (placeholder field `scheduleStart`)."""
        raw = self._required_field("scheduleStart", "Start date not found in MAXIMO data")
        return datetime.fromisoformat(raw)

    def get_end_date(self) -> datetime:
        """Parse the overhaul end date (placeholder field `scheduleEnd`)."""
        raw = self._required_field("scheduleEnd", "End date not found in MAXIMO data")
        return datetime.fromisoformat(raw)

    def get_maximo_id(self) -> str:
        """Return the MAXIMO work-order identifier as a string."""
        return str(self._required_field("workOrderId", "MAXIMO ID not found in response"))

    def get_status(self) -> str:
        """Return the upper-cased status field ('' when absent)."""
        return self.data.get("status", "").upper()

    def get_total_cost(self) -> float:
        """Return the total cost as a float (0.0 when absent)."""
        return float(self.data.get("totalCost", 0))

    def get_scope_name(self) -> str:
        """Return the scope/location name, defaulting to 'A'."""
        return self.data.get("location", "A")
class MaximoService:
    """Thin client for fetching overhaul data from MAXIMO.

    Currently returns canned mock data; the real HTTP call lives in
    `_fetch_recent_overhaul_from_api` until the integration is wired up.
    """

    def __init__(self):
        # TODO: Update these settings based on actual MAXIMO API configuration
        self.base_url = MAXIMO_BASE_URL
        self.api_key = MAXIMO_API_KEY

    async def get_recent_overhaul(self) -> dict:
        """
        Fetch most recent overhaul from MAXIMO.

        NOTE: returns mock data for now. Switch to
        `return await self._fetch_recent_overhaul_from_api()` once the real
        endpoint is available.

        Fix: the original left the entire HTTP implementation as unreachable
        dead code after this return; it now lives in a private method.
        """
        current_date = datetime.now()
        schedule_start = current_date + timedelta(days=30)  # Starting in 30 days
        schedule_end = schedule_start + timedelta(days=90)  # 90 day overhaul period
        return {
            "scheduleStart": schedule_start.isoformat(),
            "scheduleEnd": schedule_end.isoformat(),
            "workOrderId": "WO-2024-12345",
            "status": "PLAN",  # Common Maximo statuses: SCHEDULED, INPRG, COMP, CLOSE
            "totalCost": 10000000.00,
            "description": "Annual Turbine Overhaul",
            "priority": 1,
            "location": "A",
            "assetDetails": [
                {
                    "assetnum": "ASSET001",
                    "description": "Gas Turbine",
                    "status": "OPERATING",
                },
                {
                    "assetnum": "ASSET002",
                    "description": "Steam Turbine",
                    "status": "OPERATING",
                },
            ],
            "workType": "OH",  # OH for Overhaul
            "createdBy": "MAXADMIN",
            "createdDate": (current_date - timedelta(days=10)).isoformat(),
            "lastModifiedBy": "MAXADMIN",
            "lastModifiedDate": current_date.isoformat(),
        }

    async def _fetch_recent_overhaul_from_api(self) -> dict:
        """Live MAXIMO call (previously unreachable); kept for future use."""
        async with httpx.AsyncClient() as client:
            try:
                # TODO: Update endpoint and parameters based on actual MAXIMO API
                response = await client.get(
                    f"{self.base_url}/your-endpoint-here",
                    headers={
                        "Authorization": f"Bearer {self.api_key}",
                        # Add any other required headers
                    },
                    params={
                        # Update these parameters based on actual MAXIMO API
                        "orderBy": "-scheduleEnd",  # Example parameter
                        "limit": 1,
                    },
                )
                if response.status_code != 200:
                    raise HTTPException(
                        status_code=response.status_code,
                        detail=f"MAXIMO API error: {response.text}",
                    )
                data = response.json()
                if not data:
                    raise HTTPException(
                        status_code=404, detail="No recent overhaul found"
                    )
                # TODO: Update this based on actual MAXIMO response structure
                return data[0] if isinstance(data, list) else data
            except httpx.RequestError as e:
                raise HTTPException(
                    status_code=503, detail=f"Failed to connect to MAXIMO: {str(e)}"
                )

@ -1,67 +0,0 @@
from typing import List
from fastapi import APIRouter, HTTPException, status
from src.database.core import DbSession
from src.models import StandardResponse
from src.overhaul.service import (get_overhaul_critical_parts,
get_overhaul_overview,
get_overhaul_schedules,
get_overhaul_system_components)
from src.overhaul_scope.schema import ScopeRead
from .schema import (OverhaulCriticalParts, OverhaulRead,
OverhaulSystemComponents)
router = APIRouter()
@router.get("", response_model=StandardResponse[OverhaulRead])
async def get_overhaul(db_session: DbSession):
"""Get all scope pagination."""
overview = await get_overhaul_overview(db_session=db_session)
schedules = await get_overhaul_schedules(db_session=db_session)
criticalParts = get_overhaul_critical_parts()
systemComponents = get_overhaul_system_components()
return StandardResponse(
data=OverhaulRead(
overview=overview,
schedules=schedules,
criticalParts=criticalParts,
systemComponents=systemComponents,
),
message="Data retrieved successfully",
)
@router.get("/schedules", response_model=StandardResponse[List[ScopeRead]])
async def get_schedules():
"""Get all overhaul schedules."""
schedules = get_overhaul_schedules()
return StandardResponse(
data=schedules,
message="Data retrieved successfully",
)
@router.get("/critical-parts", response_model=StandardResponse[OverhaulCriticalParts])
async def get_critical_parts():
"""Get all overhaul critical parts."""
criticalParts = get_overhaul_critical_parts()
return StandardResponse(
data=OverhaulCriticalParts(criticalParts=criticalParts),
message="Data retrieved successfully",
)
@router.get(
    "/system-components", response_model=StandardResponse[OverhaulSystemComponents]
)
async def get_system_components():
    """Return the metrics map of major overhaul system components."""
    components = get_overhaul_system_components()
    return StandardResponse(
        message="Data retrieved successfully",
        data=OverhaulSystemComponents(systemComponents=components),
    )

@ -1,72 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.models import DefultBase, Pagination
from src.overhaul_scope.schema import ScopeRead
class OverhaulBase(BaseModel):
    """Base schema for overhaul dashboard payloads; no shared fields."""

    pass
class OverhaulCriticalParts(OverhaulBase):
    """Wrapper schema holding the critical-part names."""

    criticalParts: List[str] = Field(..., description="List of critical parts")
class OverhaulSchedules(OverhaulBase):
    """Wrapper schema holding raw schedule dicts."""

    schedules: List[Dict[str, Any]] = Field(..., description="List of schedules")
class OverhaulSystemComponents(OverhaulBase):
    """Wrapper schema holding the component-name -> metrics mapping."""

    systemComponents: Dict[str, Any] = Field(
        ..., description="List of system components"
    )
class OverhaulRead(OverhaulBase):
    """Full dashboard payload combining all four overhaul sections."""

    overview: Dict[str, Any]
    criticalParts: List[str]
    schedules: List[ScopeRead]
    systemComponents: Dict[str, Any]
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,151 +0,0 @@
from typing import Optional
from sqlalchemy import Delete, Select
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.overhaul_scope.model import OverhaulScope
from src.overhaul_scope.service import get_all as get_all_session
from src.overhaul_scope.service import get_overview_overhaul
async def get_overhaul_overview(db_session: DbSession):
    """Delegate to the scope service for the overhaul overview block."""
    overview = await get_overview_overhaul(db_session=db_session)
    return overview
def get_overhaul_critical_parts():
    """Return the (currently hard-coded) list of critical part names."""
    critical_parts = [
        "Boiler feed pump",
        "Boiler reheater system",
        "Drum Level (Right) Root Valve A",
        "BCP A Discharge Valve",
        "BFPT A EXH Press HI Root VLV",
    ]
    return critical_parts
async def get_overhaul_schedules(*, db_session: DbSession):
    """Return every OverhaulScope row, used as the schedule listing."""
    rows = await db_session.execute(Select(OverhaulScope))
    return rows.scalars().all()
def get_overhaul_system_components():
    """Get all overhaul system components with dummy data."""
    # (component, efficiency, work_hours, reliability) placeholder rows.
    component_rows = [
        ("HPT", "92%", "1200", "96%"),
        ("IPT", "91%", "1100", "95%"),
        ("LPT", "90%", "1000", "94%"),
        ("EG", "88%", "950", "93%"),
        ("boiler", "90%", "1000", "95%"),
        ("HPH1", "89%", "1050", "94%"),
        ("HPH2", "88%", "1020", "93%"),
        ("HPH3", "87%", "1010", "92%"),
        ("HPH5", "86%", "980", "91%"),
        ("HPH6", "85%", "970", "90%"),
        ("HPH7", "84%", "960", "89%"),
        ("Condensor", "83%", "940", "88%"),
        ("Deaerator", "82%", "930", "87%"),
    ]
    return {
        name: {
            "efficiency": efficiency,
            "work_hours": work_hours,
            "reliability": reliability,
        }
        for name, efficiency, work_hours, reliability in component_rows
    }
# async def get(*, db_session: DbSession, scope_id: str) -> Optional[Scope]:
# """Returns a document based on the given document id."""
# query = Select(Scope).filter(Scope.id == scope_id)
# result = await db_session.execute(query)
# return result.scalars().one_or_none()
# async def get_all(*, db_session: DbSession):
# """Returns all documents."""
# query = Select(Scope)
# result = await db_session.execute(query)
# return result.scalars().all()
# async def create(*, db_session: DbSession, scope_id: ScopeCreate):
# """Creates a new document."""
# scope = Scope(**scope_id.model_dump())
# db_session.add(scope)
# await db_session.commit()
# return scope
# async def update(*, db_session: DbSession, scope: Scope, scope_id: ScopeUpdate):
# """Updates a document."""
# data = scope_id.model_dump()
# update_data = scope_id.model_dump(exclude_defaults=True)
# for field in data:
# if field in update_data:
# setattr(scope, field, update_data[field])
# await db_session.commit()
# return scope
# async def delete(*, db_session: DbSession, scope_id: str):
# """Deletes a document."""
# query = Delete(Scope).where(Scope.id == scope_id)
# await db_session.execute(query)
# await db_session.commit()

@ -1,31 +0,0 @@
from sqlalchemy import UUID, Column, Float, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
from src.workorder.model import MasterWorkOrder
class OverhaulActivity(Base, DefaultMixin):
    """Per-asset activity row within an overhaul scope
    (table oh_tr_overhaul_activity)."""

    __tablename__ = "oh_tr_overhaul_activity"

    # Asset number; joined to MasterEquipment via the relationship below.
    assetnum = Column(String, nullable=True)
    overhaul_scope_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_ms_overhaul_scope.id"), nullable=False
    )
    material_cost = Column(Float, nullable=False, default=0)
    service_cost = Column(Float, nullable=False, default=0)
    # Workflow status; new rows default to "pending".
    status = Column(String, nullable=False, default="pending")

    # lazy="raise" forces callers to eager-load explicitly (see the
    # joinedload(...) options used in the service layer).
    equipment = relationship(
        "MasterEquipment",
        lazy="raise",
        primaryjoin="and_(OverhaulActivity.assetnum == foreign(MasterEquipment.assetnum))",
        uselist=False,  # Add this if it's a one-to-one relationship
    )
    overhaul_scope = relationship(
        "OverhaulScope",
        lazy="raise",
    )

@ -1,118 +0,0 @@
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, HTTPException, Query, status
from src.database.service import (CommonParameters, DbSession,
search_filter_sort_paginate)
from src.models import StandardResponse
from .schema import (OverhaulActivityCreate, OverhaulActivityPagination,
OverhaulActivityRead, OverhaulActivityUpdate)
from .service import create, delete, get, get_all, update
router = APIRouter()
@router.get(
    "/{overhaul_session}", response_model=StandardResponse[OverhaulActivityPagination]
)
async def get_scope_equipments(
    common: CommonParameters,
    overhaul_session: str,
    assetnum: Optional[str] = Query(None),
    scope_name: Optional[str] = Query(None),
):
    """List a session's overhaul activities, optionally filtered."""
    page = await get_all(
        common=common,
        assetnum=assetnum,
        scope_name=scope_name,
        overhaul_session_id=overhaul_session,
    )
    return StandardResponse(
        message="Data retrieved successfully",
        data=page,
    )
@router.post("/{overhaul_session}", response_model=StandardResponse[List[str]])
async def create_overhaul_equipment(
db_session: DbSession,
overhaul_activty_in: OverhaulActivityCreate,
overhaul_session: str,
):
activity = await create(
db_session=db_session,
overhaul_activty_in=overhaul_activty_in,
overhaul_session_id=overhaul_session,
)
return StandardResponse(data=activity, message="Data created successfully")
@router.get(
    "/{overhaul_session}/{assetnum}",
    response_model=StandardResponse[OverhaulActivityRead],
)
async def get_overhaul_equipment(
    db_session: DbSession, assetnum: str, overhaul_session
):
    """Fetch one overhaul activity by asset number within a session.

    Raises 404 when no matching activity exists.
    """
    equipment = await get(
        db_session=db_session, assetnum=assetnum, overhaul_session_id=overhaul_session
    )
    if not equipment:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(data=equipment, message="Data retrieved successfully")
@router.put(
    "/{overhaul_session}/{assetnum}",
    response_model=StandardResponse[OverhaulActivityRead],
)
async def update_scope(
    db_session: DbSession,
    scope_equipment_activity_in: OverhaulActivityUpdate,
    assetnum: str,
):
    """Update an overhaul activity's costs by asset number (404 when absent).

    NOTE(review): the route declares `{overhaul_session}` but the handler
    neither accepts it nor passes it to `get`, so the lookup matches the
    first activity for this assetnum across ALL sessions — confirm intent.
    """
    activity = await get(db_session=db_session, assetnum=assetnum)
    if not activity:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(
        data=await update(
            db_session=db_session,
            activity=activity,
            scope_equipment_activity_in=scope_equipment_activity_in,
        ),
        message="Data updated successfully",
    )
@router.delete(
    "/{overhaul_session}/{assetnum}",
    response_model=StandardResponse[OverhaulActivityRead],
)
async def delete_scope(db_session: DbSession, assetnum: str):
    """Delete an overhaul activity by asset number (404 when absent).

    NOTE(review): as in update_scope, `{overhaul_session}` from the path is
    never used, so deletion is not scoped to a session — confirm intent.
    """
    activity = await get(db_session=db_session, assetnum=assetnum)
    if not activity:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A data with this id does not exist."}],
        )
    await delete(db_session=db_session, assetnum=assetnum)
    return StandardResponse(message="Data deleted successfully", data=activity)

@ -1,35 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import Field
from src.models import DefultBase, Pagination
from src.scope_equipment.schema import MasterEquipmentRead
class OverhaulActivityBase(DefultBase):
    """Base schema for overhaul-activity payloads; adds no fields."""

    pass
class OverhaulActivityCreate(OverhaulActivityBase):
    """Bulk-create payload: register many assets under one scope name."""

    assetnums: List[str]
    scope_name: str
class OverhaulActivityUpdate(OverhaulActivityBase):
    """Cost-update payload; both costs default to 0."""

    material_cost: Optional[float] = Field(0)
    service_cost: Optional[float] = Field(0)
class OverhaulActivityRead(OverhaulActivityBase):
    """Read schema for one activity, with its joined equipment record."""

    id: UUID
    material_cost: Optional[float] = Field(0)
    service_cost: Optional[float] = Field(0)
    assetnum: str = Field(..., description="Assetnum is required")
    status: str
    equipment: MasterEquipmentRead
class OverhaulActivityPagination(Pagination):
    """Paginated envelope of OverhaulActivityRead items."""

    items: List[OverhaulActivityRead] = []

@ -1,193 +0,0 @@
import asyncio
from typing import List, Optional
from uuid import UUID
from sqlalchemy import Delete, Select, func, select
from sqlalchemy import update as sqlUpdate
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import joinedload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.overhaul_activity.utils import get_material_cost, get_service_cost
from src.overhaul_scope.model import OverhaulScope
from src.overhaul_scope.service import get as get_session
from .model import OverhaulActivity
from .schema import (OverhaulActivityCreate, OverhaulActivityRead,
OverhaulActivityUpdate)
async def get(
    *, db_session: DbSession, assetnum: str, overhaul_session_id: Optional[UUID] = None
) -> Optional[OverhaulActivityRead]:
    """Fetch one overhaul activity by assetnum, optionally scoped to a session,
    with its equipment eagerly joined. Returns None when absent."""
    stmt = (
        Select(OverhaulActivity)
        .where(OverhaulActivity.assetnum == assetnum)
        .options(joinedload(OverhaulActivity.equipment))
    )
    if overhaul_session_id:
        stmt = stmt.filter(OverhaulActivity.overhaul_scope_id == overhaul_session_id)
    row = await db_session.execute(stmt)
    return row.scalar()
async def get_all(
    *,
    common: CommonParameters,
    overhaul_session_id: UUID,
    assetnum: Optional[str] = None,
    scope_name: Optional[str] = None
):
    """Paginated listing of a session's activities with equipment loaded.

    Optional filters narrow by assetnum and scope name; both also eager-load
    the parent overhaul scope.
    """
    query = (
        Select(OverhaulActivity)
        .where(OverhaulActivity.overhaul_scope_id == overhaul_session_id)
        .options(joinedload(OverhaulActivity.equipment))
    )
    if assetnum:
        query = query.filter(OverhaulActivity.assetnum == assetnum).options(
            joinedload(OverhaulActivity.overhaul_scope)
        )
    if scope_name:
        # NOTE(review): the OverhaulActivity model (as defined elsewhere in
        # this codebase) shows no `scope_name` column — this filter looks like
        # it would raise AttributeError when scope_name is provided; verify.
        query = query.filter(OverhaulActivity.scope_name == scope_name).options(
            joinedload(OverhaulActivity.overhaul_scope)
        )
    results = await search_filter_sort_paginate(model=query, **common)
    return results
async def get_all_by_session_id(*, db_session: DbSession, overhaul_session_id):
    """Return every overhaul activity of one session, equipment eager-loaded."""
    stmt = (
        Select(OverhaulActivity)
        .where(OverhaulActivity.overhaul_scope_id == overhaul_session_id)
        .options(joinedload(OverhaulActivity.equipment))
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().all()
# async def create(*, db_session: DbSession, overhaul_activty_in: OverhaulActivityCreate, overhaul_session_id: UUID):
# # Check if the combination of assetnum and activity_id already exists
# existing_equipment_query = (
# Select(OverhaulActivity)
# .where(
# OverhaulActivity.assetnum == overhaul_activty_in.assetnum,
# OverhaulActivity.overhaul_scope_id == overhaul_session_id
# )
# )
# result = await db_session.execute(existing_equipment_query)
# existing_activity = result.scalar_one_or_none()
# # If the combination exists, raise an exception or return the existing activity
# if existing_activity:
# raise ValueError("This assetnum already exist.")
# activity = OverhaulActivity(
# **overhaul_activty_in.model_dump(),
# overhaul_scope_id=overhaul_session_id)
# db_session.add(activity)
# await db_session.commit()
# # Refresh and load relationships using joinedload
# query = (
# Select(OverhaulActivity)
# .options(joinedload(OverhaulActivity.equipment))
# .where(OverhaulActivity.id == activity.id)
# )
# result = await db_session.execute(query)
# activity_with_relationship = result.scalar_one()
# return activity_with_relationship
async def create(
    *,
    db_session: DbSession,
    overhaul_activty_in: OverhaulActivityCreate,
    overhaul_session_id: UUID
):
    """Bulk-attach asset numbers to an overhaul session.

    Inserts one OverhaulActivity per assetnum (duplicates skipped via
    ON CONFLICT DO NOTHING), then re-spreads the per-equipment material and
    service costs across *all* activities of the session.

    Returns the submitted list of assetnums (not the created rows).
    """
    assetnums = overhaul_activty_in.assetnums
    if not assetnums:
        # Nothing to insert; avoid touching the database at all.
        return []
    # Load the session and the current activity count (two sequential awaits).
    session = await get_session(
        db_session=db_session, overhaul_session_id=overhaul_session_id
    )
    equipment_count = await db_session.scalar(
        select(func.count())
        .select_from(OverhaulActivity)
        .where(OverhaulActivity.overhaul_scope_id == overhaul_session_id)
    )
    # Per-equipment cost assuming every submitted assetnum is new.
    # NOTE(review): if some assetnums collide and are skipped by the
    # ON CONFLICT below, this divisor is too large and the cost too small —
    # confirm that is acceptable.
    total_equipment = equipment_count + len(assetnums)
    material_cost = get_material_cost(
        scope=session.type, total_equipment=total_equipment
    )
    service_cost = get_service_cost(scope=session.type, total_equipment=total_equipment)
    # One multi-row INSERT for all submitted assetnums.
    stmt = insert(OverhaulActivity).values(
        [
            {
                "assetnum": assetnum,
                "overhaul_scope_id": overhaul_session_id,
                "material_cost": material_cost,
                "service_cost": service_cost,
            }
            for assetnum in assetnums
        ]
    )
    # Skip rows whose (assetnum, overhaul_scope_id) pair already exists.
    stmt = stmt.on_conflict_do_nothing(index_elements=["assetnum", "overhaul_scope_id"])
    await db_session.execute(stmt)
    # Apply the recomputed cost to the session's pre-existing rows too.
    await db_session.execute(
        sqlUpdate(OverhaulActivity)
        .where(OverhaulActivity.overhaul_scope_id == overhaul_session_id)
        .values(material_cost=material_cost, service_cost=service_cost)
    )
    await db_session.commit()
    return assetnums
async def update(
    *,
    db_session: DbSession,
    activity: OverhaulActivity,
    overhaul_activity_in: OverhaulActivityUpdate
):
    """Copy only the explicitly-provided payload fields onto ``activity``."""
    provided = overhaul_activity_in.model_dump(exclude_defaults=True)
    for name in overhaul_activity_in.model_dump():
        if name in provided:
            setattr(activity, name, provided[name])
    await db_session.commit()
    return activity
async def delete(*, db_session: DbSession, overhaul_activity_id: str):
    """Delete an overhaul activity by primary key.

    Bug fix: when no row matched, the old code called
    ``db_session.delete(None)`` which raises an unrelated ORM error; now a
    missing id is a no-op.
    """
    activity = await db_session.get(OverhaulActivity, overhaul_activity_id)
    if activity is None:
        return
    await db_session.delete(activity)
    await db_session.commit()

@ -1,35 +0,0 @@
from decimal import Decimal, getcontext
def get_material_cost(scope, total_equipment):
    """Material cost per equipment item for an overhaul scope.

    The fixed total material budget (one figure for scope "B", another for
    every other scope) is divided evenly across ``total_equipment`` items.

    Args:
        scope: Scope type string; "B" selects the larger budget.
        total_equipment: Number of equipment items sharing the budget.

    Returns:
        float: budget / total_equipment, or 0.0 when total_equipment is
        zero/falsy (guards against ZeroDivisionError).
    """
    # Full Decimal precision so the large budgets divide without rounding.
    getcontext().prec = 28
    if not total_equipment:
        return 0.0
    # Fix: collapsed the duplicated if/else return structure and removed the
    # unreachable trailing `return float(0)` of the original.
    budget = Decimal("365539731101") if scope == "B" else Decimal("8565468127")
    return float(budget / Decimal(str(total_equipment)))
def get_service_cost(scope, total_equipment):
    """Service cost per equipment item for an overhaul scope.

    Mirrors get_material_cost with the service budgets: scope "B" uses one
    fixed total, every other scope another, split evenly per equipment.

    Returns:
        float: budget / total_equipment, or 0.0 when total_equipment is
        zero/falsy (guards against ZeroDivisionError).
    """
    # Full Decimal precision so the large budgets divide without rounding.
    getcontext().prec = 28
    if not total_equipment:
        return 0.0
    # Fix: collapsed the duplicated branches and removed the unreachable
    # trailing `return float(0)` of the original.
    budget = Decimal("36405830225") if scope == "B" else Decimal("36000000000")
    return float(budget / Decimal(str(total_equipment)))

@ -1,29 +0,0 @@
from sqlalchemy import (UUID, Column, DateTime, Float, ForeignKey, Integer,
String)
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
class OverhaulJob(Base, DefaultMixin):
    """Join row linking an overhaul activity to a scoped equipment job."""

    __tablename__ = "oh_tr_overhaul_job"
    # Owning overhaul activity (required).
    overhaul_activity_id = Column(
        UUID(as_uuid=True), ForeignKey("oh_tr_overhaul_activity.id"), nullable=False
    )
    # Linked job; DB-level cascade removes this row when the job is deleted.
    scope_equipment_job_id = Column(
        UUID(as_uuid=True),
        ForeignKey("oh_ms_scope_equipment_job.id", ondelete="cascade"),
        nullable=False,
    )
    notes = Column(String, nullable=True)
    # Workflow state; new rows start as "pending".
    status = Column(String, nullable=True, default="pending")
    # lazy="raise" forces callers to eager-load these relationships explicitly.
    scope_equipment_job = relationship(
        "ScopeEquipmentJob", lazy="raise", back_populates="overhaul_jobs"
    )
    overhaul_activity = relationship("OverhaulActivity", lazy="raise")

@ -1,91 +0,0 @@
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .schema import (OverhaulJobBase, OverhaulJobCreate, OverhaulJobPagination,
OverhaulJobRead)
from .service import create, delete, get_all
router = APIRouter()
@router.get(
    "/{overhaul_equipment_id}", response_model=StandardResponse[OverhaulJobPagination]
)
async def get_jobs(common: CommonParameters, overhaul_equipment_id: str):
    """Paginated overhaul jobs for one overhaul equipment (activity)."""
    page = await get_all(common=common, overhaul_equipment_id=overhaul_equipment_id)
    return StandardResponse(data=page, message="Data retrieved successfully")
@router.post("/{overhaul_equipment_id}", response_model=StandardResponse[None])
async def create_overhaul_equipment_jobs(
    db_session: DbSession, overhaul_equipment_id, overhaul_job_in: OverhaulJobCreate
):
    """Attach the submitted job ids to an overhaul equipment."""
    await create(
        db_session=db_session,
        overhaul_equipment_id=overhaul_equipment_id,
        overhaul_job_in=overhaul_job_in,
    )
    return StandardResponse(data=None, message="Data created successfully")
@router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
async def delete_overhaul_equipment_job(db_session: DbSession, overhaul_job_id):
    """Remove a single overhaul job by id."""
    await delete(db_session=db_session, overhaul_job_id=overhaul_job_id)
    return StandardResponse(data=None, message="Data deleted successfully")
# @router.post("", response_model=StandardResponse[List[str]])
# async def create_scope(db_session: DbSession, scope_in: OverhaulJobCreate):
# overhaul_job = await create(db_session=db_session, scope_in=scope_in)
# return StandardResponse(data=overhaul_job, message="Data created successfully")
# @router.put("/{scope_id}", response_model=StandardResponse[ScopeRead])
# async def update_scope(db_session: DbSession, scope_id: str, scope_in: ScopeUpdate, current_user: CurrentUser):
# scope = await get(db_session=db_session, scope_id=scope_id)
# if not scope:
# raise HTTPException(
# status_code=status.HTTP_404_NOT_FOUND,
# detail="A data with this id does not exist.",
# )
# return StandardResponse(data=await update(db_session=db_session, scope=scope, scope_in=scope_in), message="Data updated successfully")
# @router.delete("/{scope_id}", response_model=StandardResponse[ScopeRead])
# async def delete_scope(db_session: DbSession, scope_id: str):
# scope = await get(db_session=db_session, scope_id=scope_id)
# if not scope:
# raise HTTPException(
# status_code=status.HTTP_404_NOT_FOUND,
# detail=[{"msg": "A data with this id does not exist."}],
# )
# await delete(db_session=db_session, scope_id=scope_id)
# return StandardResponse(message="Data deleted successfully", data=scope)

@ -1,37 +0,0 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import DefultBase, Pagination
from src.overhaul_scope.schema import ScopeRead
from src.scope_equipment_job.schema import ScopeEquipmentJobRead
class OverhaulJobBase(DefultBase):
    """Shared base for overhaul-job schemas (no common fields yet)."""

    pass
class OverhaulJobCreate(OverhaulJobBase):
    """Payload: scope-equipment job ids to attach to an activity."""

    job_ids: Optional[List[UUID]] = []
class OverhaulJobUpdate(OverhaulJobBase):
    """Update payload placeholder; no editable fields defined yet."""

    pass
class OverhaulActivity(DefultBase):
    """Slim read model of an overhaul activity with its parent scope."""

    id: UUID
    overhaul_scope_id: UUID
    overhaul_scope: ScopeRead
class OverhaulJobRead(OverhaulJobBase):
    """Read model: the job row plus its linked job and activity."""

    id: UUID
    scope_equipment_job: ScopeEquipmentJobRead
    overhaul_activity: OverhaulActivity
class OverhaulJobPagination(Pagination):
    """Paginated container of OverhaulJobRead items."""

    items: List[OverhaulJobRead] = []

@ -1,115 +0,0 @@
from typing import Optional
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, func
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
from .model import OverhaulJob
from .schema import OverhaulJobCreate
async def get_all(*, common, overhaul_equipment_id: str):
    """Paginated overhaul jobs of one activity, related rows eager-loaded."""
    stmt = (
        Select(OverhaulJob)
        .where(OverhaulJob.overhaul_activity_id == overhaul_equipment_id)
        .options(
            selectinload(OverhaulJob.scope_equipment_job),
            selectinload(OverhaulJob.overhaul_activity),
        )
    )
    return await search_filter_sort_paginate(model=stmt, **common)
async def create(
    *, db_session: DbSession, overhaul_equipment_id, overhaul_job_in: OverhaulJobCreate
):
    """Link each requested scope-equipment job to an overhaul activity.

    Fix: removed a dead SELECT whose result (``equipment``) was executed on
    every call but never used.

    Args:
        overhaul_equipment_id: Id of the owning overhaul activity.
        overhaul_job_in: Payload carrying the job ids to link.

    Returns:
        The submitted job ids.

    Raises:
        ValueError: when ``overhaul_equipment_id`` is falsy.
    """
    if not overhaul_equipment_id:
        raise ValueError("assetnum parameter is required")
    overhaul_jobs = [
        OverhaulJob(
            overhaul_activity_id=overhaul_equipment_id, scope_equipment_job_id=job_id
        )
        for job_id in overhaul_job_in.job_ids
    ]
    db_session.add_all(overhaul_jobs)
    await db_session.commit()
    return overhaul_job_in.job_ids
async def delete(
    *,
    db_session: DbSession,
    overhaul_job_id: str,
) -> bool:
    """Delete an overhaul job row by id.

    Fix: dropped the unused ``as e`` exception binding.

    Args:
        db_session: Database session.
        overhaul_job_id: Id of the overhaul job to delete.

    Returns:
        bool: True when the row existed and was deleted.

    Raises:
        HTTPException: 404 when no row matches ``overhaul_job_id``.
    """
    try:
        scope_job = await db_session.get(OverhaulJob, overhaul_job_id)
        if not scope_job:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="A data with this id does not exist.",
            )
        await db_session.delete(scope_job)
        await db_session.commit()
        return True
    except Exception:
        # Roll back the failed transaction, then surface the original error
        # (this also re-raises the 404 above unchanged).
        await db_session.rollback()
        raise
# async def update(*, db_session: DbSession, scope: OverhaulScope, scope_in: ScopeUpdate):
# """Updates a document."""
# data = scope_in.model_dump()
# update_data = scope_in.model_dump(exclude_defaults=True)
# for field in data:
# if field in update_data:
# setattr(scope, field, update_data[field])
# await db_session.commit()
# return scope
# async def delete(*, db_session: DbSession, scope_id: str):
# """Deletes a document."""
# query = Delete(OverhaulScope).where(OverhaulScope.id == scope_id)
# await db_session.execute(query)
# await db_session.commit()

@ -1,18 +0,0 @@
from sqlalchemy import Column, DateTime, Float, Integer, String
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
class OverhaulScope(Base, DefaultMixin):
    """An overhaul session: scope type, schedule window, crew and status."""

    __tablename__ = "oh_ms_overhaul_scope"
    # Scope type/name; code elsewhere branches on the value "B".
    type = Column(String, nullable=True)
    start_date = Column(DateTime(timezone=True))
    end_date = Column(DateTime(timezone=True))
    # NOTE(review): presumably the overhaul duration in days — confirm units.
    duration_oh = Column(Integer, nullable=True)
    crew_number = Column(Integer, nullable=True, default=1)
    status = Column(String, nullable=False, default="upcoming")
    # Activities (equipment) assigned to this session; loaded eagerly.
    activity_equipments = relationship("OverhaulActivity", lazy="selectin")

@ -1,81 +0,0 @@
from typing import Optional
from fastapi import APIRouter, HTTPException, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.models import StandardResponse
from .model import OverhaulScope
from .schema import ScopeCreate, ScopePagination, ScopeRead, ScopeUpdate
from .service import create, delete, get, get_all, update
router = APIRouter()
@router.get("", response_model=StandardResponse[ScopePagination])
async def get_scopes(common: CommonParameters, scope_name: Optional[str] = None):
    """Paginated overhaul scopes, optionally filtered by scope type."""
    page = await get_all(common=common, scope_name=scope_name)
    return StandardResponse(data=page, message="Data retrieved successfully")
@router.get("/{overhaul_session_id}", response_model=StandardResponse[ScopeRead])
async def get_scope(db_session: DbSession, overhaul_session_id: str):
    """Fetch one overhaul scope by id; 404 when it does not exist."""
    scope = await get(db_session=db_session, overhaul_session_id=overhaul_session_id)
    if scope is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(data=scope, message="Data retrieved successfully")
@router.post("", response_model=StandardResponse[ScopeRead])
async def create_scope(db_session: DbSession, scope_in: ScopeCreate):
    """Create a new overhaul scope (session)."""
    created = await create(db_session=db_session, scope_in=scope_in)
    return StandardResponse(data=created, message="Data created successfully")
@router.put("/{scope_id}", response_model=StandardResponse[ScopeRead])
async def update_scope(
    db_session: DbSession,
    scope_id: str,
    scope_in: ScopeUpdate,
    current_user: CurrentUser,
):
    """Update an overhaul scope by id; 404 when it does not exist.

    Bug fix: the service-layer ``get`` only accepts the keyword
    ``overhaul_session_id``; the previous call passed ``scope_id=`` which
    raised TypeError on every request.
    """
    scope = await get(db_session=db_session, overhaul_session_id=scope_id)
    if not scope:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(
        data=await update(db_session=db_session, scope=scope, scope_in=scope_in),
        message="Data updated successfully",
    )
@router.delete("/{scope_id}", response_model=StandardResponse[ScopeRead])
async def delete_scope(db_session: DbSession, scope_id: str):
    """Delete an overhaul scope by id, returning the deleted row.

    Bug fix: the service-layer ``get`` only accepts the keyword
    ``overhaul_session_id``; the previous call passed ``scope_id=`` which
    raised TypeError on every request.
    """
    scope = await get(db_session=db_session, overhaul_session_id=scope_id)
    if not scope:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A data with this id does not exist."}],
        )
    await delete(db_session=db_session, scope_id=scope_id)
    return StandardResponse(message="Data deleted successfully", data=scope)

@ -1,33 +0,0 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import DefultBase, Pagination
class ScopeBase(DefultBase):
    """Common overhaul-scope fields shared by create/update/read schemas."""

    # NOTE(review): presumably the overhaul duration in days — confirm units.
    duration_oh: Optional[int] = Field(None, title="Duration OH")
    crew_number: Optional[int] = Field(1, title="Crew")
    status: Optional[str] = Field("Upcoming")
    # Scope type/name; code elsewhere branches on the value "B".
    type: str
class ScopeCreate(ScopeBase):
    """Create payload: start date required, end date optional."""

    start_date: datetime
    end_date: Optional[datetime] = Field(None)
class ScopeUpdate(ScopeBase):
    """Update payload; same editable fields as ScopeBase."""

    pass
class ScopeRead(ScopeBase):
    """Read model of an overhaul scope row."""

    id: UUID
    start_date: datetime
    end_date: Optional[datetime]
class ScopePagination(Pagination):
    """Paginated container of ScopeRead items."""

    items: List[ScopeRead] = []

@ -1,162 +0,0 @@
from typing import Optional
from sqlalchemy import Delete, Select, func
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
from src.overhaul_activity.model import OverhaulActivity
from src.scope_equipment.service import get_by_scope_name
from src.utils import time_now
from .model import OverhaulScope
from .schema import ScopeCreate, ScopeUpdate
from .utils import get_material_cost, get_service_cost
async def get(
    *, db_session: DbSession, overhaul_session_id: str
) -> Optional[OverhaulScope]:
    """Look up one overhaul scope by primary key; None when absent."""
    stmt = Select(OverhaulScope).where(OverhaulScope.id == overhaul_session_id)
    return (await db_session.execute(stmt)).scalars().one_or_none()
async def get_all(*, common, scope_name: Optional[str] = None):
    """Paginated overhaul scopes, optionally filtered by scope type."""
    stmt = Select(OverhaulScope)
    if scope_name:
        stmt = stmt.where(OverhaulScope.type == scope_name)
    return await search_filter_sort_paginate(model=stmt, **common)
async def create(*, db_session: DbSession, scope_in: ScopeCreate):
    """Create an overhaul session and seed it with the scope's equipment.

    Every equipment already assigned to the scope name becomes an
    OverhaulActivity carrying an even share of the material/service budgets.
    """
    overhaul_session = OverhaulScope(**scope_in.model_dump())
    db_session.add(overhaul_session)
    # Need to flush to get the id
    await db_session.flush()
    scope_name = scope_in.type
    equipments = await get_by_scope_name(db_session=db_session, scope_name=scope_name)
    # Split the fixed budgets evenly across the scope's equipment; both
    # helpers return 0.0 when there is no equipment.
    material_cost = get_material_cost(
        scope=overhaul_session.type, total_equipment=len(equipments)
    )
    service_cost = get_service_cost(
        scope=overhaul_session.type, total_equipment=len(equipments)
    )
    scope_equipments = [
        OverhaulActivity(
            assetnum=equipment.assetnum,
            overhaul_scope_id=overhaul_session.id,
            material_cost=material_cost,
            service_cost=service_cost,
        )
        for equipment in equipments
    ]
    if scope_equipments:  # Only add if there are items
        db_session.add_all(scope_equipments)
    await db_session.commit()
    return overhaul_session
async def update(*, db_session: DbSession, scope: OverhaulScope, scope_in: ScopeUpdate):
    """Copy only explicitly-provided payload fields onto ``scope``."""
    provided = scope_in.model_dump(exclude_defaults=True)
    for name in scope_in.model_dump():
        if name in provided:
            setattr(scope, name, provided[name])
    await db_session.commit()
    return scope
async def delete(*, db_session: DbSession, scope_id: str):
    """Remove an overhaul scope row by id (no error when absent)."""
    await db_session.execute(Delete(OverhaulScope).where(OverhaulScope.id == scope_id))
    await db_session.commit()
def _overhaul_payload(overhaul, equipment_count, remaining_days):
    """Serialize one OverhaulScope row plus its activity count for the overview."""
    return {
        "id": overhaul.id,
        "type": overhaul.type,
        "start_date": overhaul.start_date,
        "end_date": overhaul.end_date,
        "duration_oh": overhaul.duration_oh,
        "crew_number": overhaul.crew_number,
        "remaining_days": remaining_days,
        "equipment_count": equipment_count,
    }


async def get_overview_overhaul(*, db_session: DbSession):
    """Return the ongoing overhaul if any, otherwise the next upcoming one.

    Returns:
        dict: ``{"status": "Ongoing"|"Upcoming"|"no_overhaul",
        "overhaul": payload-or-None}``.
    """
    current_date = time_now().date()
    # Ongoing: a scope whose [start_date, end_date] window contains today,
    # counting its activities in the same query.
    ongoing_query = (
        Select(OverhaulScope, func.count(OverhaulActivity.id).label("equipment_count"))
        .outerjoin(OverhaulScope.activity_equipments)
        .where(
            OverhaulScope.start_date <= current_date,
            OverhaulScope.end_date >= current_date,
        )
        .group_by(OverhaulScope.id)
    )
    row = (await db_session.execute(ongoing_query)).first()
    if row:
        ongoing_overhaul, equipment_count = row
        # Bug fix: end_date comes back as a datetime while current_date is a
        # date; subtracting them directly raises TypeError, so compare dates.
        remaining = (ongoing_overhaul.end_date.date() - current_date).days
        return {
            "status": "Ongoing",
            "overhaul": _overhaul_payload(ongoing_overhaul, equipment_count, remaining),
        }
    # Upcoming: the earliest scope starting after today.
    upcoming_query = (
        Select(OverhaulScope, func.count(OverhaulActivity.id).label("equipment_count"))
        .outerjoin(OverhaulScope.activity_equipments)
        .where(
            OverhaulScope.start_date > current_date,
        )
        .group_by(OverhaulScope.id)
        .order_by(OverhaulScope.start_date)
    )
    row = (await db_session.execute(upcoming_query)).first()
    if row:
        upcoming_overhaul, equipment_count = row
        days_until = (upcoming_overhaul.start_date.date() - current_date).days
        return {
            "status": "Upcoming",
            "overhaul": _overhaul_payload(
                upcoming_overhaul, equipment_count, days_until
            ),
        }
    return {"status": "no_overhaul", "overhaul": None}

@ -1,35 +0,0 @@
from decimal import Decimal, getcontext
def get_material_cost(scope, total_equipment):
    """Material cost per equipment item for an overhaul scope.

    The fixed total material budget (one figure for scope "B", another for
    every other scope) is divided evenly across ``total_equipment`` items.

    Returns:
        float: budget / total_equipment, or 0.0 when total_equipment is
        zero/falsy (guards against ZeroDivisionError).
    """
    # Full Decimal precision so the large budgets divide without rounding.
    getcontext().prec = 28
    if not total_equipment:
        return 0.0
    # Fix: collapsed the duplicated if/else return structure and removed the
    # unreachable trailing `return float(0)` of the original.
    budget = Decimal("365539731101") if scope == "B" else Decimal("8565468127")
    return float(budget / Decimal(str(total_equipment)))
def get_service_cost(scope, total_equipment):
    """Service cost per equipment item for an overhaul scope.

    Mirrors get_material_cost with the service budgets: scope "B" uses one
    fixed total, every other scope another, split evenly per equipment.

    Returns:
        float: budget / total_equipment, or 0.0 when total_equipment is
        zero/falsy (guards against ZeroDivisionError).
    """
    # Full Decimal precision so the large budgets divide without rounding.
    getcontext().prec = 28
    if not total_equipment:
        return 0.0
    # Fix: collapsed the duplicated branches and removed the unreachable
    # trailing `return float(0)` of the original.
    budget = Decimal("36405830225") if scope == "B" else Decimal("36000000000")
    return float(budget / Decimal(str(total_equipment)))

@ -1,6 +0,0 @@
from src.enums import OptimumOHEnum
class ScopeEquipmentType(OptimumOHEnum):
    """How an equipment is attached to an overhaul scope."""

    TEMP = "Temporary"  # expected to be removed again after the session
    PERM = "Permanent"

@ -1,45 +0,0 @@
from sqlalchemy import UUID, Column, Date, Float, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
from src.workorder.model import MasterWorkOrder
class ScopeEquipment(Base, DefaultMixin):
    """Assignment of one asset to an overhaul scope (by scope name)."""

    __tablename__ = "oh_ms_scope_equipment"
    assetnum = Column(String, nullable=True)
    # Scope name this equipment belongs to (string key, not a FK).
    scope_overhaul = Column(String, nullable=False)
    # "Permanent" or "Temporary"; see ScopeEquipmentType.
    type = Column(String, nullable=False, default="Permanent")
    removal_date = Column(Date, nullable=True)
    assigned_date = Column(Date, nullable=True)
    # Joined on assetnum only (no FK); lazy="raise" forces explicit loading.
    master_equipment = relationship(
        "MasterEquipment",
        lazy="raise",
        primaryjoin="and_(ScopeEquipment.assetnum == foreign(MasterEquipment.assetnum))",
        uselist=False,  # Add this if it's a one-to-one relationship
    )
class MasterEquipment(Base, DefaultMixin):
    """Master record of an equipment in the plant hierarchy."""

    __tablename__ = "ms_equipment_master"
    # Parent equipment in the hierarchy (no FK constraint declared here).
    parent_id = Column(UUID(as_uuid=True), nullable=True)
    assetnum = Column(String, nullable=True)
    system_tag = Column(String, nullable=True)
    location_tag = Column(String, nullable=True)
    name = Column(String, nullable=True)
    equipment_tree_id = Column(
        UUID(as_uuid=True), ForeignKey("ms_equipment_tree.id"), nullable=True
    )
    equipment_tree = relationship("MasterEquipmentTree", backref="master_equipments")
class MasterEquipmentTree(Base, DefaultMixin):
    """Node of the equipment hierarchy; only the depth level is stored here."""

    __tablename__ = "ms_equipment_tree"
    level_no = Column(Integer)

@ -1,86 +0,0 @@
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status
from fastapi.params import Query
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.models import StandardResponse
from .model import ScopeEquipment
from .schema import (MasterEquipmentPagination, ScopeEquipmentCreate,
ScopeEquipmentPagination, ScopeEquipmentRead,
ScopeEquipmentUpdate)
from .service import (create, delete, get_all, get_all_master_equipment,
get_by_assetnum, update)
router = APIRouter()
@router.get("", response_model=StandardResponse[ScopeEquipmentPagination])
async def get_scope_equipments(common: CommonParameters, scope_name: str = Query(None)):
    """Paginated scope equipment, optionally filtered by scope name."""
    page = await get_all(common=common, scope_name=scope_name)
    return StandardResponse(data=page, message="Data retrieved successfully")
@router.get(
    "/available/{scope_name}",
    response_model=StandardResponse[MasterEquipmentPagination],
)
async def get_master_equipment(common: CommonParameters, scope_name: str):
    """Master equipment not yet assigned to the given scope."""
    page = await get_all_master_equipment(common=common, scope_name=scope_name)
    return StandardResponse(data=page, message="Data retrieved successfully")
@router.post("", response_model=StandardResponse[List[str]])
async def create_scope_equipment(
    db_session: DbSession, scope_equipment_in: ScopeEquipmentCreate
):
    """Assign the submitted assetnums to a scope; echoes the assetnums back."""
    assigned = await create(
        db_session=db_session, scope_equipment_in=scope_equipment_in
    )
    return StandardResponse(data=assigned, message="Data created successfully")
@router.put("/{assetnum}", response_model=StandardResponse[ScopeEquipmentRead])
async def update_scope_equipment(
    db_session: DbSession, assetnum: str, scope__equipment_in: ScopeEquipmentUpdate
):
    """Update one scope-equipment row addressed by asset number.

    Bug fix: the service-layer ``update`` expects the keyword
    ``scope_equipment_in`` (single underscore); the previous call passed
    ``scope__equipment_in=`` which raised TypeError on every request.
    """
    scope_equipment = await get_by_assetnum(db_session=db_session, assetnum=assetnum)
    if not scope_equipment:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    return StandardResponse(
        data=await update(
            db_session=db_session,
            scope_equipment=scope_equipment,
            scope_equipment_in=scope__equipment_in,
        ),
        message="Data updated successfully",
    )
@router.delete("/{assetnum}", response_model=StandardResponse[None])
async def delete_scope_equipment(db_session: DbSession, assetnum: str):
    """Unassign an equipment from its scope by asset number; 404 if absent."""
    row = await get_by_assetnum(db_session=db_session, assetnum=assetnum)
    if row is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A data with this id does not exist."}],
        )
    await delete(db_session=db_session, assetnum=assetnum)
    return StandardResponse(message="Data deleted successfully", data=None)

@ -1,51 +0,0 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import Field, computed_field, field_validator, validator
from src.models import DefultBase, Pagination
from src.overhaul_scope.schema import ScopeRead
from .enum import ScopeEquipmentType
class MasterEquipmentBase(DefultBase):
    """Minimal master-equipment fields embedded in other read models."""

    name: Optional[str] = Field(None, title="Name")
    location_tag: Optional[str] = Field(None, title="Location Tag")
class ScopeEquipmentBase(DefultBase):
    """Shared base: the scope (by name) an equipment is assigned to."""

    scope_overhaul: Optional[str] = Field(None, title="Scope ID")
class ScopeEquipmentCreate(DefultBase):
    """Create payload: assign a batch of assetnums to one scope."""

    assetnums: List[str]
    scope_name: str
    # Only meaningful for Temporary assignments; the service derives a
    # default from the next overhaul session when omitted.
    removal_date: Optional[datetime] = Field(None)
    type: Optional[str] = Field(ScopeEquipmentType.PERM)
class ScopeEquipmentUpdate(ScopeEquipmentBase):
    """Update payload: scope and/or asset number."""

    assetnum: Optional[str] = Field(None, title="Asset Number")
class ScopeEquipmentRead(ScopeEquipmentBase):
    """Read model of a scope-equipment row with its master equipment."""

    id: UUID
    assetnum: str
    assigned_date: datetime
    master_equipment: Optional[MasterEquipmentBase] = Field(None)
class ScopeEquipmentPagination(Pagination):
    """Paginated container of ScopeEquipmentRead items."""

    items: List[ScopeEquipmentRead] = []
class MasterEquipmentRead(DefultBase):
    """Read model for listing available (unassigned) master equipment."""

    assetnum: Optional[str] = Field(None, title="Asset Number")
    location_tag: str
    name: str
class MasterEquipmentPagination(Pagination):
    """Paginated container of MasterEquipmentRead items."""

    items: List[MasterEquipmentRead] = []

@ -1,202 +0,0 @@
from datetime import datetime, timedelta
from typing import Optional, Union
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, and_, desc, func, not_, or_
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.overhaul_scope.model import OverhaulScope
from src.scope_equipment.enum import ScopeEquipmentType
from src.workorder.model import MasterWorkOrder
from .model import MasterEquipment, MasterEquipmentTree, ScopeEquipment
from .schema import ScopeEquipmentCreate, ScopeEquipmentUpdate
async def get_by_assetnum(*, db_session: DbSession, assetnum: str):
    """One scope-equipment row for ``assetnum`` (None when absent)."""
    stmt = (
        Select(ScopeEquipment)
        .where(ScopeEquipment.assetnum == assetnum)
        .options(selectinload(ScopeEquipment.master_equipment))
    )
    rows = await db_session.execute(stmt)
    return rows.unique().scalars().one_or_none()
async def get_all(*, common, scope_name: str = None):
    """Paginated scope equipment, newest first, optionally by scope name."""
    stmt = (
        Select(ScopeEquipment)
        .options(selectinload(ScopeEquipment.master_equipment))
        .order_by(desc(ScopeEquipment.created_at))
    )
    if scope_name:
        stmt = stmt.where(ScopeEquipment.scope_overhaul == scope_name)
    return await search_filter_sort_paginate(model=stmt, **common)
async def create(*, db_session: DbSession, scope_equipment_in: ScopeEquipmentCreate):
    """Assign a batch of assetnums to an overhaul scope.

    For Temporary assignments the removal date defaults to the end of the
    ongoing/next overhaul session of that scope (or now+30 days when none is
    scheduled). Duplicate (assetnum, scope) pairs are skipped via
    ON CONFLICT DO NOTHING. Returns the submitted assetnums.
    """
    assetnums = scope_equipment_in.assetnums
    results = []
    removal_date = scope_equipment_in.removal_date
    if scope_equipment_in.type == ScopeEquipmentType.TEMP:
        # Search for the next or ongoing overhaul session for the given scope.
        # NOTE(review): datetime.now() is naive while the OverhaulScope date
        # columns are timezone-aware — confirm the comparison semantics.
        stmt = (
            Select(OverhaulScope.end_date)
            .where(
                OverhaulScope.type == scope_equipment_in.scope_name,
                (OverhaulScope.start_date <= datetime.now())
                & (OverhaulScope.end_date >= datetime.now())  # Ongoing
                | (OverhaulScope.start_date > datetime.now()),  # Upcoming
            )
            .order_by(OverhaulScope.start_date.asc())
            .limit(1)
        )
        result = await db_session.execute(stmt)
        removal_date = result.scalar_one_or_none()
        # If no overhaul found, set a default removal date or handle the error
        if removal_date is None:
            # Fall back to a fixed grace period when no session is scheduled.
            removal_date = datetime.now() + timedelta(
                days=30
            )  # Example: 30 days from now
    for assetnum in assetnums:
        stmt = insert(ScopeEquipment).values(
            assetnum=assetnum,
            scope_overhaul=scope_equipment_in.scope_name,
            type=scope_equipment_in.type,
            removal_date=removal_date,
        )
        # Existing (assetnum, scope) pairs are silently left untouched.
        stmt = stmt.on_conflict_do_nothing(
            index_elements=["assetnum", "scope_overhaul"]
        )
        await db_session.execute(stmt)
        results.append(assetnum)
    await db_session.commit()
    return results
async def update(
    *,
    db_session: DbSession,
    scope_equipment: ScopeEquipment,
    scope_equipment_in: ScopeEquipmentUpdate
):
    """Copy only explicitly-provided payload fields onto the row."""
    provided = scope_equipment_in.model_dump(exclude_defaults=True)
    for name in scope_equipment_in.model_dump():
        if name in provided:
            setattr(scope_equipment, name, provided[name])
    await db_session.commit()
    return scope_equipment
async def delete(*, db_session: DbSession, assetnum: str):
    """Delete scope-equipment rows for ``assetnum``; returns the assetnum."""
    await db_session.execute(
        Delete(ScopeEquipment).where(ScopeEquipment.assetnum == assetnum)
    )
    await db_session.commit()
    return assetnum
# query = Select(ScopeEquipment).filter(
# ScopeEquipment.id == scope_equipment_id)
# scope_equipment = await db_session.execute(query)
# scope_equipment: ScopeEquipment = scope_equipment.scalars().one_or_none()
# if not scope_equipment:
# raise HTTPException(
# status_code=status.HTTP_404_NOT_FOUND,
# detail="A data with this id does not exist.",
# )
# if not scope_equipment.scope_id:
# await db_session.delete(scope_equipment)
# else:
# if scope_equipment.current_scope_id == scope_equipment.scope_id:
# await db_session.delete(scope_equipment)
# else:
# scope_equipment.current_scope_id = scope_equipment.scope_id
# await db_session.commit()
async def get_by_scope_name(
    *, db_session: DbSession, scope_name: Optional[str]
) -> Optional[ScopeEquipment]:
    """Return scope-equipment rows, filtered to ``scope_name`` when given.

    NOTE(review): despite the annotation this returns a *list* of
    ScopeEquipment rows (``scalars().all()``), not a single row — callers
    (e.g. get_all_master_equipment) already iterate it.
    """
    query = Select(ScopeEquipment).options(
        selectinload(ScopeEquipment.master_equipment)
    )
    if scope_name:
        query = query.filter(ScopeEquipment.scope_overhaul == scope_name)
    result = await db_session.execute(query)
    return result.scalars().all()
# async def get_exculed_scope_name(*, db_session: DbSession, scope_name: Union[str, list]) -> Optional[ScopeEquipment]:
# scope = await get_scope_by_name_service(db_session=db_session, scope_name=scope_name)
# query = Select(ScopeEquipment)
# if scope:
# query = query.filter(ScopeEquipment.current_scope_id != scope.id)
# else:
# query = query.filter(ScopeEquipment.current_scope_id != None)
# result = await db_session.execute(query)
# return result.scalars().all()
async def get_all_master_equipment(*, common: CommonParameters, scope_name):
    """Paginated master equipment not yet assigned to ``scope_name``."""
    assigned = await get_by_scope_name(
        db_session=common.get("db_session"), scope_name=scope_name
    )
    taken = [row.assetnum for row in assigned]
    stmt = Select(MasterEquipment).where(MasterEquipment.assetnum.is_not(None))
    # Only exclude when there is something to exclude.
    if taken:
        stmt = stmt.where(MasterEquipment.assetnum.not_in(taken))
    return await search_filter_sort_paginate(model=stmt, **common)
async def get_equipment_level_by_no(*, db_session: DbSession, level: int):
    """Return every MasterEquipment whose equipment-tree entry sits at *level*."""
    stmt = (
        Select(MasterEquipment)
        .join(MasterEquipment.equipment_tree)
        .where(MasterEquipmentTree.level_no == level)
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().all()

@ -1,20 +0,0 @@
from sqlalchemy import UUID, Column, Float, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
from src.workorder.model import MasterWorkOrder
class ScopeEquipmentJob(Base, DefaultMixin):
    """Link row assigning one master job to one equipment asset."""

    __tablename__ = "oh_ms_scope_equipment_job"
    # Asset identifier of the equipment this job is scoped to (no FK here).
    assetnum = Column(String, nullable=False)
    job_id = Column(UUID(as_uuid=True), ForeignKey("oh_ms_job.id", ondelete="cascade"))
    # NOTE(review): relationship targets "MasterActivity" while the FK points at
    # oh_ms_job — presumably MasterActivity maps that table; confirm.
    job = relationship("MasterActivity", lazy="selectin")
    overhaul_jobs = relationship(
        "OverhaulJob", back_populates="scope_equipment_job", lazy="selectin"
    )

@ -1,51 +0,0 @@
from typing import Dict, List
from fastapi import APIRouter, HTTPException, Query, status
from src.database.service import (CommonParameters, DbSession,
search_filter_sort_paginate)
from src.models import StandardResponse
from .schema import ScopeEquipmentJobCreate, ScopeEquipmentJobPagination
from .service import create, delete, get_all
router = APIRouter()
@router.get("/{assetnum}", response_model=StandardResponse[ScopeEquipmentJobPagination])
async def get_scope_equipment_jobs(
    db_session: DbSession, assetnum, common: CommonParameters
):
    """Return a paginated list of jobs scoped to the given equipment asset."""
    data = await get_all(db_session=db_session, assetnum=assetnum, common=common)
    return StandardResponse(
        data=data,
        message="Data retrieved successfully",
    )
@router.post("/{assetnum}", response_model=StandardResponse[None])
async def create_scope_equipment_jobs(
    db_session: DbSession, assetnum, scope_job_in: ScopeEquipmentJobCreate
):
    """Attach the submitted job ids to the given equipment asset."""
    await create(db_session=db_session, assetnum=assetnum, scope_job_in=scope_job_in)
    return StandardResponse(
        data=None,
        message="Data created successfully",
    )
@router.delete("/{scope_job_id}", response_model=StandardResponse[None])
async def delete_scope_equipment_job(db_session: DbSession, scope_job_id):
    """Delete one scope-equipment job by id (service raises 404 if absent)."""
    await delete(db_session=db_session, scope_job_id=scope_job_id)
    return StandardResponse(
        data=None,
        message="Data deleted successfully",
    )

@ -1,81 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.job.schema import ActivityMasterRead
from src.models import DefultBase, Pagination
from src.overhaul_scope.schema import ScopeRead
class ScopeEquipmentJobBase(DefultBase):
    """Shared fields for scope-equipment-job schemas."""

    assetnum: Optional[str] = Field(None, description="Assetnum is required")
class ScopeEquipmentJobCreate(ScopeEquipmentJobBase):
    """Create payload: master-job ids to attach to an equipment asset."""

    job_ids: Optional[List[UUID]] = []
class ScopeEquipmentJobUpdate(ScopeEquipmentJobBase):
    """Update payload for a scope-equipment job."""

    name: Optional[str] = Field(None)
    # Fixed: was ``Optional[str] = Field(0)`` — a string annotation with an
    # integer default. Cost is numeric (the sibling Create schemas in this
    # codebase use ``Optional[float]``), so annotate it as a float.
    cost: Optional[float] = Field(0)
class OverhaulActivity(DefultBase):
    """Nested read schema: overhaul activity with its parent scope."""

    id: UUID
    overhaul_scope: ScopeRead
class OverhaulJob(DefultBase):
    """Nested read schema: overhaul job with its parent activity."""

    id: UUID
    overhaul_activity: OverhaulActivity
class ScopeEquipmentJobRead(ScopeEquipmentJobBase):
    """Read schema: a scoped job plus the overhaul jobs derived from it."""

    id: UUID
    job: ActivityMasterRead
    overhaul_jobs: List[OverhaulJob] = []
class ScopeEquipmentJobPagination(Pagination):
    """Paginated envelope of ScopeEquipmentJobRead items."""

    items: List[ScopeEquipmentJobRead] = []
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,130 +0,0 @@
import random
from typing import Optional
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, and_
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.overhaul_activity.model import OverhaulActivity
from src.overhaul_job.model import OverhaulJob
from src.scope_equipment.model import MasterEquipment, MasterEquipmentTree
from src.scope_equipment.service import get_equipment_level_by_no
from .model import ScopeEquipmentJob
from .schema import ScopeEquipmentJobCreate
# async def get(*, db_session: DbSession, scope_equipment_activity_id: str) -> Optional[ScopeEquipmentActivity]:
# """Returns a document based on the given document id."""
# result = await db_session.get(ScopeEquipmentActivity, scope_equipment_activity_id)
# return result
async def get_all(db_session: DbSession, assetnum: Optional[str], common):
    """Paginate the ScopeEquipmentJob rows attached to one equipment asset.

    Eagerly loads each row's master job and the chain
    overhaul_jobs -> overhaul_activity -> overhaul_scope.
    """
    if not assetnum:
        raise ValueError("assetnum parameter is required")

    # The parent equipment is fetched only to validate that the asset exists.
    parent: MasterEquipment = await db_session.scalar(
        Select(MasterEquipment).where(MasterEquipment.assetnum == assetnum)
    )
    if not parent:
        raise ValueError(f"No equipment found with assetnum: {assetnum}")

    stmt = (
        Select(ScopeEquipmentJob)
        .where(ScopeEquipmentJob.assetnum == assetnum)
        .options(
            selectinload(ScopeEquipmentJob.job),
            selectinload(ScopeEquipmentJob.overhaul_jobs)
            .selectinload(OverhaulJob.overhaul_activity)
            .selectinload(OverhaulActivity.overhaul_scope),
        )
    )
    return await search_filter_sort_paginate(model=stmt, **common)
async def create(
    *, db_session: DbSession, assetnum, scope_job_in: ScopeEquipmentJobCreate
):
    """Attach the given master jobs to the equipment asset *assetnum*.

    Args:
        db_session: Database session.
        assetnum: Asset identifier of the target equipment.
        scope_job_in: Payload carrying the ``job_ids`` to attach.

    Raises:
        ValueError: If *assetnum* is empty or no equipment matches it.
    """
    if not assetnum:
        raise ValueError("assetnum parameter is required")

    # Validate that the parent equipment exists before inserting links.
    equipment_stmt = Select(MasterEquipment).where(MasterEquipment.assetnum == assetnum)
    equipment: MasterEquipment = await db_session.scalar(equipment_stmt)
    if not equipment:
        raise ValueError(f"No equipment found with assetnum: {assetnum}")

    # One link row per submitted job id (comprehension replaces append loop).
    scope_jobs = [
        ScopeEquipmentJob(assetnum=assetnum, job_id=job_id)
        for job_id in scope_job_in.job_ids
    ]
    db_session.add_all(scope_jobs)
    await db_session.commit()
    return
# async def update(*, db_session: DbSession, activity: ScopeEquipmentActivity, scope_equipment_activty_in: ScopeEquipmentActivityUpdate):
# """Updates a document."""
# data = scope_equipment_activty_in.model_dump()
# update_data = scope_equipment_activty_in.model_dump(exclude_defaults=True)
# for field in data:
# if field in update_data:
# setattr(activity, field, update_data[field])
# await db_session.commit()
# return activity
async def delete(
    *,
    db_session: DbSession,
    scope_job_id: int,
) -> bool:
    """Delete a ScopeEquipmentJob by primary key.

    Args:
        db_session: Database session.
        scope_job_id: ID of the scope job to delete.

    Returns:
        bool: True when the row was deleted.

    Raises:
        HTTPException: 404 when no row with *scope_job_id* exists.
    """
    # Existence check sits OUTSIDE the try block: the previous version rolled
    # back the session on the 404 path even though nothing had been modified.
    scope_job = await db_session.get(ScopeEquipmentJob, scope_job_id)
    if not scope_job:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="A data with this id does not exist.",
        )
    try:
        await db_session.delete(scope_job)
        await db_session.commit()
        return True
    except Exception:
        # Leave the session usable for the caller, then propagate the error.
        await db_session.rollback()
        raise

@ -1,21 +0,0 @@
from sqlalchemy import UUID, Column, Float, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
from src.workorder.model import MasterWorkOrder
class ScopeEquipmentPart(Base, DefaultMixin):
    """A part tracked against one equipment asset during overhaul scoping."""

    __tablename__ = "oh_tr_scope_equipment_part"
    assetnum = Column(String, nullable=False)
    # On-hand quantity for this part.
    stock = Column(Integer, nullable=False, default=0)
    # Joined purely on assetnum (no FK); lazy="raise" forces callers to eager-load.
    master_equipments = relationship(
        "MasterEquipment",
        lazy="raise",
        primaryjoin="and_(ScopeEquipmentPart.assetnum == foreign(MasterEquipment.assetnum))",
        uselist=False,
    )

@ -1,26 +0,0 @@
from typing import Dict, List
from fastapi import APIRouter, HTTPException, Query, status
from src.database.service import (CommonParameters, DbSession,
search_filter_sort_paginate)
from src.models import StandardResponse
from .schema import (ScopeEquipmentActivityCreate,
ScopeEquipmentActivityPagination,
ScopeEquipmentActivityRead, ScopeEquipmentActivityUpdate)
from .service import get_all
router = APIRouter()
@router.get("/{assetnum}", response_model=StandardResponse[List[Dict]])
async def get_scope_equipment_parts(db_session: DbSession, assetnum):
    """Return part rows for the given equipment asset.

    NOTE(review): the backing service currently returns generated dummy data.
    """
    data = await get_all(db_session=db_session, assetnum=assetnum)
    return StandardResponse(
        data=data,
        message="Data retrieved successfully",
    )

@ -1,69 +0,0 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
from src.models import DefultBase, Pagination
class ScopeEquipmentActivityBase(DefultBase):
    """Shared fields for scope-equipment-activity schemas."""

    assetnum: str = Field(..., description="Assetnum is required")
class ScopeEquipmentActivityCreate(ScopeEquipmentActivityBase):
    """Create payload for a scope-equipment activity."""

    name: str
    cost: Optional[float] = Field(0)
class ScopeEquipmentActivityUpdate(ScopeEquipmentActivityBase):
    """Update payload for a scope-equipment activity."""

    name: Optional[str] = Field(None)
    # Fixed: was ``Optional[str] = Field(0)`` — a string annotation with an
    # integer default, inconsistent with ScopeEquipmentActivityCreate which
    # declares cost as ``Optional[float]``.
    cost: Optional[float] = Field(0)
class ScopeEquipmentActivityRead(ScopeEquipmentActivityBase):
    """Read schema for a scope-equipment activity."""

    name: str
    cost: float
class ScopeEquipmentActivityPagination(Pagination):
    """Paginated envelope of ScopeEquipmentActivityRead items."""

    items: List[ScopeEquipmentActivityRead] = []
# {
# "overview": {
# "totalEquipment": 30,
# "nextSchedule": {
# "date": "2025-01-12",
# "Overhaul": "B",
# "equipmentCount": 30
# }
# },
# "criticalParts": [
# "Boiler feed pump",
# "Boiler reheater system",
# "Drum Level (Right) Root Valve A",
# "BCP A Discharge Valve",
# "BFPT A EXH Press HI Root VLV"
# ],
# "schedules": [
# {
# "date": "2025-01-12",
# "Overhaul": "B",
# "status": "upcoming"
# }
# // ... other scheduled overhauls
# ],
# "systemComponents": {
# "boiler": {
# "status": "operational",
# "lastOverhaul": "2024-06-15"
# },
# "turbine": {
# "hpt": { "status": "operational" },
# "ipt": { "status": "operational" },
# "lpt": { "status": "operational" }
# }
# // ... other major components
# }
# }

@ -1,104 +0,0 @@
import random
from typing import Optional
from sqlalchemy import Delete, Select, and_
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters, search_filter_sort_paginate
from src.scope_equipment.model import MasterEquipment, MasterEquipmentTree
from src.scope_equipment.service import get_equipment_level_by_no
from .model import ScopeEquipmentPart
from .schema import ScopeEquipmentActivityCreate, ScopeEquipmentActivityUpdate
# async def get(*, db_session: DbSession, scope_equipment_activity_id: str) -> Optional[ScopeEquipmentActivity]:
# """Returns a document based on the given document id."""
# result = await db_session.get(ScopeEquipmentActivity, scope_equipment_activity_id)
# return result
def create_dummy_parts(assetnum: str, count: int = 5):
    """Build *count* dummy part dicts for *assetnum* with random stock.

    Args:
        assetnum (str): Base asset number used to derive each part's number.
        count (int): How many parts to generate (default 5).

    Returns:
        list[dict]: Dicts with keys ``assetnum`` ("<assetnum>_PART_<i>") and
        ``stock`` (random int in [1, 100]).
    """
    return [
        {"assetnum": f"{assetnum}_PART_{idx}", "stock": random.randint(1, 100)}
        for idx in range(1, count + 1)
    ]
async def get_all(db_session: DbSession, assetnum: Optional[str]):
    """Return part rows for *assetnum*.

    NOTE(review): placeholder implementation — returns randomly generated
    dummy data; the commented-out block below is the intended real query.
    """
    # Example usage
    dummy_parts = create_dummy_parts(assetnum, count=10)
    # if not assetnum:
    #     raise ValueError("assetnum parameter is required")
    # db_session: DbSession = common.get("db_session")
    # # First get the parent equipment
    # equipment_stmt = Select(MasterEquipment).where(
    #     MasterEquipment.assetnum == assetnum)
    # equipment: MasterEquipment = await db_session.scalar(equipment_stmt)
    # if not equipment:
    #     raise ValueError(f"No equipment found with assetnum: {assetnum}")
    # # Build query for parts
    # stmt = (
    #     Select(ScopeEquipmentPart)
    #     .join(ScopeEquipmentPart.master_equipments)
    #     .join(MasterEquipment.equipment_tree)
    #     .where(
    #         and_(
    #             MasterEquipment.parent_id == equipment.id,
    #             MasterEquipmentTree.level_no == 4
    #         )
    #     ).options(selectinload(ScopeEquipmentPart.master_equipments))
    # )
    # results = await search_filter_sort_paginate(model=stmt, **common)
    return dummy_parts
# async def create(*, db_session: DbSession, scope_equipment_activty_in: ScopeEquipmentActivityCreate):
# activity = ScopeEquipmentActivity(
# **scope_equipment_activty_in.model_dump())
# db_session.add(activity)
# await db_session.commit()
# return activity
# async def update(*, db_session: DbSession, activity: ScopeEquipmentActivity, scope_equipment_activty_in: ScopeEquipmentActivityUpdate):
# """Updates a document."""
# data = scope_equipment_activty_in.model_dump()
# update_data = scope_equipment_activty_in.model_dump(exclude_defaults=True)
# for field in data:
# if field in update_data:
# setattr(activity, field, update_data[field])
# await db_session.commit()
# return activity
# async def delete(*, db_session: DbSession, scope_equipment_activity_id: str):
# """Deletes a document."""
# activity = await db_session.get(ScopeEquipmentActivity, scope_equipment_activity_id)
# await db_session.delete(activity)
# await db_session.commit()

@ -1,20 +0,0 @@
from sqlalchemy import UUID, Column, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from src.database.core import Base
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
class MasterWorkOrder(Base, DefaultMixin):
    """Master work-order row, linkable to scope equipment by asset number."""

    __tablename__ = "oh_wo_master"
    assetnum = Column(String, nullable=True)
    worktype = Column(String, nullable=True)
    workgroup = Column(String, nullable=True)
    total_cost_max = Column(Float, nullable=True)
    # Joined purely on assetnum (no FK); lazy="raise" forces explicit eager loads.
    scope_equipments = relationship(
        "ScopeEquipment",
        lazy="raise",
        primaryjoin="and_(MasterWorkOrder.assetnum == foreign(ScopeEquipment.assetnum))",
    )

@ -1,8 +1,14 @@
import uuid
from datetime import datetime
from factory import (LazyAttribute, LazyFunction, SelfAttribute, Sequence,
SubFactory, post_generation)
from factory import (
LazyAttribute,
LazyFunction,
SelfAttribute,
Sequence,
SubFactory,
post_generation,
)
from factory.alchemy import SQLAlchemyModelFactory
from factory.fuzzy import FuzzyChoice, FuzzyDateTime, FuzzyInteger, FuzzyText
from faker import Faker

Loading…
Cancel
Save