Compare commits
1 Commits
main
...
feature/re
| Author | SHA1 | Date |
|---|---|---|
|
|
aa106a5a85 | 5 months ago |
@ -1,96 +1,107 @@
|
|||||||
pipeline {
|
pipeline {
|
||||||
agent any
|
agent any
|
||||||
|
|
||||||
environment {
|
environment {
|
||||||
|
// Replace with your Docker Hub username/organization
|
||||||
DOCKER_HUB_USERNAME = 'aimodocker'
|
DOCKER_HUB_USERNAME = 'aimodocker'
|
||||||
// This creates DOCKER_AUTH_USR and DOCKER_AUTH_PSW
|
// Use credentials for Docker Hub
|
||||||
DOCKER_AUTH = credentials('aimodocker')
|
DOCKER_CREDENTIALS = credentials('aimodocker')
|
||||||
|
// Replace with your image name
|
||||||
IMAGE_NAME = 'oh-service'
|
IMAGE_NAME = 'oh-service'
|
||||||
SERVICE_NAME = 'ahm-app'
|
// Replace with your docker compose service name
|
||||||
|
SERVICE_NAME = 'oh-app'
|
||||||
SECURITY_PREFIX = 'security'
|
// Variable for Git commit hash
|
||||||
|
GIT_COMMIT_HASH = ''
|
||||||
|
|
||||||
// Initialize variables to be updated in script blocks
|
// Replace with the SSH credentials for development server
|
||||||
GIT_COMMIT_HASH = ""
|
// SSH_CREDENTIALS = credentials('backend-server-digitaltwin')
|
||||||
IMAGE_TAG = ""
|
// SSH_CREDENTIALS_USR = 'aimo'
|
||||||
SECONDARY_TAG = ""
|
// SSH_SERVER_IP = '192.168.1.82'
|
||||||
}
|
}
|
||||||
|
|
||||||
stages {
|
stages {
|
||||||
stage('Checkout & Setup') {
|
stage('Checkout') {
|
||||||
steps {
|
steps {
|
||||||
script {
|
script {
|
||||||
|
// Checkout and get git commit hash
|
||||||
checkout scm
|
checkout scm
|
||||||
GIT_COMMIT_HASH = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
|
def commitHash = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
|
||||||
|
GIT_COMMIT_HASH = commitHash
|
||||||
// Use env.BRANCH_NAME or logic to handle detached HEAD if necessary
|
echo "Git commit hash: ${GIT_COMMIT_HASH}"
|
||||||
def branch = env.BRANCH_NAME ?: 'unknown'
|
|
||||||
echo "Current Branch: ${branch}"
|
|
||||||
|
|
||||||
if (branch == 'main') {
|
|
||||||
IMAGE_TAG = GIT_COMMIT_HASH
|
|
||||||
SECONDARY_TAG = 'latest'
|
|
||||||
} else if (branch == 'oh_security') {
|
|
||||||
IMAGE_TAG = "${SECURITY_PREFIX}-${GIT_COMMIT_HASH}"
|
|
||||||
SECONDARY_TAG = "${SECURITY_PREFIX}-latest"
|
|
||||||
} else {
|
|
||||||
IMAGE_TAG = "temp-${GIT_COMMIT_HASH}"
|
|
||||||
SECONDARY_TAG = "" // Ensure it's empty for other branches
|
|
||||||
}
|
|
||||||
|
|
||||||
echo "Primary Tag: ${IMAGE_TAG}"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stage('Docker Login') {
|
stage('Docker Login') {
|
||||||
steps {
|
steps {
|
||||||
// Fixed variable names based on the 'DOCKER_AUTH' environment key
|
sh '''
|
||||||
sh "echo ${DOCKER_AUTH_PSW} | docker login -u ${DOCKER_AUTH_USR} --password-stdin"
|
echo ${DOCKER_CREDENTIALS_PSW} | docker login -u ${DOCKER_CREDENTIALS_USR} --password-stdin
|
||||||
|
'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stage('Build & Tag') {
|
stage('Build Docker Image') {
|
||||||
steps {
|
steps {
|
||||||
script {
|
script {
|
||||||
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
|
// Build with commit hash tag
|
||||||
sh "docker build -t ${fullImageName}:${IMAGE_TAG} ."
|
sh """
|
||||||
|
docker build -t ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest .
|
||||||
if (SECONDARY_TAG) {
|
docker tag ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
|
||||||
sh "docker tag ${fullImageName}:${IMAGE_TAG} ${fullImageName}:${SECONDARY_TAG}"
|
"""
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stage('Push to Docker Hub') {
|
stage('Push to Docker Hub') {
|
||||||
steps {
|
steps {
|
||||||
script {
|
sh """
|
||||||
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
|
# Push both tags
|
||||||
sh "docker push ${fullImageName}:${IMAGE_TAG}"
|
docker push ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
|
||||||
|
docker push ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest
|
||||||
if (SECONDARY_TAG) {
|
"""
|
||||||
sh "docker push ${fullImageName}:${SECONDARY_TAG}"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// stage('Deploy') {
|
||||||
|
// steps {
|
||||||
|
// script {
|
||||||
|
// sshagent(credentials: ['backend-server-digitaltwin']) {
|
||||||
|
// sh """
|
||||||
|
// ssh -o StrictHostKeyChecking=no -p 12558 aimo@0.tcp.ap.ngrok.io '
|
||||||
|
// cd ~/digital-twin/Docker
|
||||||
|
// sudo docker compose pull ${SERVICE_NAME}
|
||||||
|
// sudo docker compose up -d ${SERVICE_NAME}
|
||||||
|
// '
|
||||||
|
// """
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
|
|
||||||
post {
|
post {
|
||||||
always {
|
always {
|
||||||
|
// Clean up
|
||||||
|
sh 'docker logout'
|
||||||
|
|
||||||
|
// Clean up local images
|
||||||
script {
|
script {
|
||||||
sh 'docker logout'
|
try {
|
||||||
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
|
sh """
|
||||||
// Clean up images to save agent disk space
|
# Push both tags
|
||||||
sh "docker rmi ${fullImageName}:${IMAGE_TAG} || true"
|
docker rmi ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
|
||||||
if (SECONDARY_TAG) {
|
docker rmi ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest
|
||||||
sh "docker rmi ${fullImageName}:${SECONDARY_TAG} || true"
|
"""
|
||||||
|
} catch (err) {
|
||||||
|
echo "Failed to clean up images: ${err}"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
success {
|
success {
|
||||||
echo "Successfully processed ${env.BRANCH_NAME}."
|
echo "Successfully built, pushed, and deployed Docker image with tags: latest and ${GIT_COMMIT_HASH}"
|
||||||
|
}
|
||||||
|
failure {
|
||||||
|
echo 'Failed to build/push/deploy Docker image!'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1,44 +0,0 @@
|
|||||||
# Unit Testing Guide - be-optimumoh
|
|
||||||
|
|
||||||
This document provides instructions on how to set up and run unit tests for the **be-optimumoh** project.
|
|
||||||
|
|
||||||
## 1. Preparation
|
|
||||||
|
|
||||||
### Install Dependencies
|
|
||||||
Ensure you have all dependencies installed. This project uses `poetry`.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Install dependencies
|
|
||||||
poetry install
|
|
||||||
```
|
|
||||||
|
|
||||||
## 2. Configuration
|
|
||||||
|
|
||||||
### Pytest Configuration
|
|
||||||
Ensure the `pytest.ini` file in the root directory points to the `unit` test folder:
|
|
||||||
|
|
||||||
```ini
|
|
||||||
[pytest]
|
|
||||||
testpaths = tests/unit
|
|
||||||
python_files = test_*.py
|
|
||||||
asyncio_mode = auto
|
|
||||||
```
|
|
||||||
|
|
||||||
## 3. Running Tests
|
|
||||||
|
|
||||||
### Run Unit Tests
|
|
||||||
To run all unit tests in the project:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
poetry run pytest tests/unit
|
|
||||||
```
|
|
||||||
|
|
||||||
### Run Specific Unit Test File
|
|
||||||
```bash
|
|
||||||
poetry run pytest tests/unit/test_specific_feature.py
|
|
||||||
```
|
|
||||||
|
|
||||||
## 4. Best Practices
|
|
||||||
|
|
||||||
- **Isolation**: Unit tests should be isolated from external services. Use mocking for APIs and databases.
|
|
||||||
- **Async Testing**: Use `@pytest.mark.asyncio` for asynchronous test functions.
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
[pytest]
|
|
||||||
asyncio_mode = auto
|
|
||||||
testpaths = tests/unit
|
|
||||||
python_files = test_*.py
|
|
||||||
filterwarnings =
|
|
||||||
ignore::pydantic.PydanticDeprecatedSince20
|
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,42 +0,0 @@
|
|||||||
from contextvars import ContextVar
|
|
||||||
from typing import Optional, Final
|
|
||||||
|
|
||||||
REQUEST_ID_CTX_KEY: Final[str] = "request_id"
|
|
||||||
USER_ID_CTX_KEY: Final[str] = "user_id"
|
|
||||||
USERNAME_CTX_KEY: Final[str] = "username"
|
|
||||||
ROLE_CTX_KEY: Final[str] = "role"
|
|
||||||
|
|
||||||
_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(REQUEST_ID_CTX_KEY, default=None)
|
|
||||||
_user_id_ctx_var: ContextVar[Optional[str]] = ContextVar(USER_ID_CTX_KEY, default=None)
|
|
||||||
_username_ctx_var: ContextVar[Optional[str]] = ContextVar(USERNAME_CTX_KEY, default=None)
|
|
||||||
_role_ctx_var: ContextVar[Optional[str]] = ContextVar(ROLE_CTX_KEY, default=None)
|
|
||||||
|
|
||||||
|
|
||||||
def get_request_id() -> Optional[str]:
|
|
||||||
return _request_id_ctx_var.get()
|
|
||||||
|
|
||||||
|
|
||||||
def set_request_id(request_id: str):
|
|
||||||
return _request_id_ctx_var.set(request_id)
|
|
||||||
|
|
||||||
|
|
||||||
def reset_request_id(token):
|
|
||||||
_request_id_ctx_var.reset(token)
|
|
||||||
|
|
||||||
def get_user_id() -> Optional[str]:
|
|
||||||
return _user_id_ctx_var.get()
|
|
||||||
|
|
||||||
def set_user_id(user_id: str):
|
|
||||||
return _user_id_ctx_var.set(user_id)
|
|
||||||
|
|
||||||
def get_username() -> Optional[str]:
|
|
||||||
return _username_ctx_var.get()
|
|
||||||
|
|
||||||
def set_username(username: str):
|
|
||||||
return _username_ctx_var.set(username)
|
|
||||||
|
|
||||||
def get_role() -> Optional[str]:
|
|
||||||
return _role_ctx_var.get()
|
|
||||||
|
|
||||||
def set_role(role: str):
|
|
||||||
return _role_ctx_var.set(role)
|
|
||||||
@ -1,22 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from pydantic import Field
|
|
||||||
from src.models import DefultBase
|
|
||||||
|
|
||||||
|
|
||||||
class CommonParams(DefultBase):
|
|
||||||
# This ensures no extra query params are allowed
|
|
||||||
current_user: Optional[str] = Field(None, alias="currentUser")
|
|
||||||
page: int = Field(1, gt=0, lt=2147483647)
|
|
||||||
items_per_page: int = Field(5, gt=0, le=50, multiple_of=5, alias="itemsPerPage")
|
|
||||||
query_str: Optional[str] = Field(None, alias="q")
|
|
||||||
filter_spec: Optional[str] = Field(None, alias="filter")
|
|
||||||
sort_by: List[str] = Field(default_factory=list, alias="sortBy[]")
|
|
||||||
descending: List[bool] = Field(default_factory=list, alias="descending[]")
|
|
||||||
exclude: List[str] = Field(default_factory=list, alias="exclude[]")
|
|
||||||
all_params: int = Field(0, alias="all")
|
|
||||||
|
|
||||||
# Property to mirror your original return dict's bool conversion
|
|
||||||
@property
|
|
||||||
def is_all(self) -> bool:
|
|
||||||
return bool(self.all_params)
|
|
||||||
@ -1,332 +1,79 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, Union
|
from sqlalchemy import select, func, cast, Numeric
|
||||||
from sqlalchemy import select, func, cast, Numeric, text
|
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
from sqlalchemy import and_
|
from sqlalchemy import and_
|
||||||
from sqlalchemy.sql import not_
|
from sqlalchemy.sql import not_
|
||||||
from src.maximo.model import WorkOrderData # Assuming this is where your model is
|
from src.maximo.model import WorkOrderData # Assuming this is where your model is
|
||||||
from src.database.core import CollectorDbSession, DbSession
|
from src.database.core import CollectorDbSession
|
||||||
from src.overhaul_scope.model import OverhaulScope
|
|
||||||
|
|
||||||
async def get_cm_cost_summary(collector_db: CollectorDbSession, last_oh_date:datetime, upcoming_oh_date:datetime):
|
async def get_cm_cost_summary(collector_db: CollectorDbSession, last_oh_date:datetime, upcoming_oh_date:datetime):
|
||||||
query = text("""WITH part_costs AS (
|
query = select(
|
||||||
SELECT
|
WorkOrderData.location,
|
||||||
mu.wonum,
|
(func.sum(WorkOrderData.total_cost_max).cast(Numeric) / func.count(WorkOrderData.wonum)).label('avg_cost')
|
||||||
SUM(mu.itemqty * COALESCE(inv.avgcost, po.unit_cost, 0)) AS parts_total_cost
|
).where(
|
||||||
FROM maximo_workorder_materials mu
|
and_(
|
||||||
LEFT JOIN maximo_inventory inv
|
# WorkOrderData.wo_start >= last_oh_date,
|
||||||
ON mu.itemnum = inv.itemnum
|
# WorkOrderData.wo_start <= upcoming_oh_date,
|
||||||
LEFT JOIN (
|
WorkOrderData.worktype.in_(['CM', 'EM', 'PROACTIVE']),
|
||||||
SELECT item_num, AVG(unit_cost) AS unit_cost
|
WorkOrderData.system_tag.in_(['HPB', 'AH', 'APC', 'SCR', 'CL', 'DM', 'CRH', 'ASH', 'BAD', 'DS', 'WTP',
|
||||||
FROM maximo_sparepart_pr_po_line
|
'MT', 'SUP', 'DCS', 'FF', 'EG', 'AI', 'SPS', 'EVM', 'SCW', 'KLH', 'CH',
|
||||||
GROUP BY item_num
|
'TUR', 'LOT', 'HRH', 'ESP', 'CAE', 'GMC', 'BFT', 'LSH', 'CHB', 'BSS',
|
||||||
) po
|
'LOS', 'LPB', 'SAC', 'CP', 'EHS', 'RO', 'GG', 'MS', 'CW', 'SO', 'ATT',
|
||||||
ON mu.itemnum = po.item_num
|
'AFG', 'EHB', 'RP', 'FO', 'PC', 'APE', 'AF', 'DMW', 'BRS', 'GEN', 'ABS',
|
||||||
GROUP BY mu.wonum
|
'CHA', 'TR', 'H2', 'BDW', 'LOM', 'ACR', 'AL', 'FW', 'COND', 'CCCW', 'IA',
|
||||||
),
|
'GSS', 'BOL', 'SSB', 'CO', 'OA', 'CTH-UPD', 'AS', 'DP']),
|
||||||
wo_costs AS (
|
WorkOrderData.reportdate.is_not(None),
|
||||||
SELECT
|
WorkOrderData.actstart.is_not(None),
|
||||||
w.wonum,
|
WorkOrderData.actfinish.is_not(None),
|
||||||
w.asset_location,
|
WorkOrderData.unit.in_([3, 0]),
|
||||||
(COALESCE(w.mat_cost_max, 0) + COALESCE(pc.parts_total_cost, 0)) AS total_wo_cost
|
WorkOrderData.reportdate >= datetime.strptime('2015-01-01', '%Y-%m-%d'),
|
||||||
FROM wo_staging_maximo_2 w
|
not_(WorkOrderData.wonum.like('T%'))
|
||||||
LEFT JOIN part_costs pc
|
|
||||||
ON w.wonum = pc.wonum
|
|
||||||
WHERE
|
|
||||||
w.worktype IN ('CM', 'EM', 'PROACTIVE')
|
|
||||||
AND w.asset_system IN (
|
|
||||||
'HPB','AH','APC','SCR','CL','DM','CRH','ASH','BAD','DS','WTP',
|
|
||||||
'MT','SUP','DCS','FF','EG','AI','SPS','EVM','SCW','KLH','CH',
|
|
||||||
'TUR','LOT','HRH','ESP','CAE','GMC','BFT','LSH','CHB','BSS',
|
|
||||||
'LOS','LPB','SAC','CP','EHS','RO','GG','MS','CW','SO','ATT',
|
|
||||||
'AFG','EHB','RP','FO','PC','APE','AF','DMW','BRS','GEN','ABS',
|
|
||||||
'CHA','TR','H2','BDW','LOM','ACR','AL','FW','COND','CCCW','IA',
|
|
||||||
'GSS','BOL','SSB','CO','OA','CTH-UPD','AS','DP'
|
|
||||||
)
|
)
|
||||||
AND w.reportdate IS NOT NULL
|
).group_by(
|
||||||
AND w.actstart IS NOT NULL
|
WorkOrderData.location
|
||||||
AND w.actfinish IS NOT NULL
|
).order_by(
|
||||||
AND w.asset_unit IN ('3','00')
|
func.count(WorkOrderData.wonum).desc()
|
||||||
AND w.reportdate >= '2015-01-01'
|
)
|
||||||
AND w.wonum NOT LIKE 'T%'
|
result = await collector_db.execute(query)
|
||||||
),
|
data = result.all()
|
||||||
-- find max cost per location
|
|
||||||
location_max AS (
|
|
||||||
SELECT asset_location, MAX(total_wo_cost) AS max_cost
|
|
||||||
FROM wo_costs
|
|
||||||
WHERE total_wo_cost > 0
|
|
||||||
GROUP BY asset_location
|
|
||||||
),
|
|
||||||
-- filter WO costs to only reasonable range (e.g. >0 and >=10% of max)
|
|
||||||
filtered_wo AS (
|
|
||||||
SELECT w.*
|
|
||||||
FROM wo_costs w
|
|
||||||
JOIN location_max lm ON w.asset_location = lm.asset_location
|
|
||||||
WHERE w.total_wo_cost > 0
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
asset_location,
|
|
||||||
SUM(total_wo_cost)::numeric / COUNT(wonum) AS avg_cost
|
|
||||||
FROM filtered_wo
|
|
||||||
GROUP BY asset_location
|
|
||||||
ORDER BY avg_cost DESC;
|
|
||||||
""")
|
|
||||||
results = await collector_db.execute(query)
|
|
||||||
data = []
|
|
||||||
|
|
||||||
for row in results:
|
|
||||||
data.append({
|
|
||||||
"location_tag": row.asset_location,
|
|
||||||
"avg_cost": row.avg_cost
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
item["location_tag"]: item["avg_cost"] for item in data
|
data.location: data.avg_cost for data in data
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# async def get_oh_cost_summary(collector_db: CollectorDbSession, last_oh_date:datetime, upcoming_oh_date:datetime):
|
|
||||||
# query = text("""
|
|
||||||
# WITH target_wo AS (
|
|
||||||
# -- Get work orders under a specific parent(s)
|
|
||||||
# SELECT
|
|
||||||
# wonum,
|
|
||||||
# xx_parent,
|
|
||||||
# assetnum,
|
|
||||||
# location_tag AS asset_location,
|
|
||||||
# actmatcost,
|
|
||||||
# actservcost,
|
|
||||||
# reportdate
|
|
||||||
# FROM public.wo_maxim
|
|
||||||
# WHERE xx_parent = ANY(:parent_nums)
|
|
||||||
# ),
|
|
||||||
# part_costs AS (
|
|
||||||
# -- Calculate parts cost per WO if actmatcost = 0
|
|
||||||
# SELECT
|
|
||||||
# wm.wonum,
|
|
||||||
# SUM(
|
|
||||||
# wm.itemqty *
|
|
||||||
# COALESCE(wm.inv_avgcost, po.unit_cost, 0)
|
|
||||||
# ) AS parts_total_cost
|
|
||||||
# FROM public.wo_maxim_material wm
|
|
||||||
# LEFT JOIN (
|
|
||||||
# SELECT item_num, AVG(unit_cost) AS unit_cost
|
|
||||||
# FROM public.maximo_sparepart_pr_po_line
|
|
||||||
# GROUP BY item_num
|
|
||||||
# ) po ON wm.itemnum = po.item_num
|
|
||||||
# WHERE wm.itemnum IS NOT NULL
|
|
||||||
# GROUP BY wm.wonum
|
|
||||||
# ),
|
|
||||||
# wo_costs AS (
|
|
||||||
# SELECT
|
|
||||||
# w.wonum,
|
|
||||||
# w.asset_location,
|
|
||||||
# CASE
|
|
||||||
# WHEN COALESCE(w.actmatcost, 0) > 0 THEN COALESCE(w.actmatcost, 0)
|
|
||||||
# ELSE COALESCE(pc.parts_total_cost, 0)
|
|
||||||
# END AS material_cost,
|
|
||||||
# COALESCE(w.actservcost, 0) AS service_cost
|
|
||||||
# FROM target_wo w
|
|
||||||
# LEFT JOIN part_costs pc ON w.wonum = pc.wonum
|
|
||||||
# )
|
|
||||||
# SELECT
|
|
||||||
# asset_location,
|
|
||||||
# ROUND(SUM(material_cost + service_cost)::numeric / COUNT(wonum), 2) AS avg_cost,
|
|
||||||
# COUNT(wonum) AS total_wo_count
|
|
||||||
# FROM wo_costs
|
|
||||||
# GROUP BY asset_location
|
|
||||||
# ORDER BY total_wo_count DESC;
|
|
||||||
# """)
|
|
||||||
|
|
||||||
# parent_nums = []
|
|
||||||
|
|
||||||
# result = await collector_db.execute(query, {"parent_nums": parent_nums})
|
|
||||||
# data = []
|
|
||||||
|
|
||||||
# for row in result:
|
|
||||||
# data.append({
|
|
||||||
# "location_tag": row.asset_location,
|
|
||||||
# "avg_cost": float(row.avg_cost or 0.0),
|
|
||||||
# "total_wo_count": row.total_wo_count,
|
|
||||||
# })
|
|
||||||
|
|
||||||
# return {item["location_tag"]: item["avg_cost"] for item in data}
|
|
||||||
|
|
||||||
|
|
||||||
async def get_oh_cost_summary(collector_db: CollectorDbSession, last_oh_date:datetime, upcoming_oh_date:datetime):
|
async def get_oh_cost_summary(collector_db: CollectorDbSession, last_oh_date:datetime, upcoming_oh_date:datetime):
|
||||||
# query = text("""
|
query = select(
|
||||||
# WITH part_costs AS (
|
WorkOrderData.location,
|
||||||
# SELECT
|
(func.sum(WorkOrderData.total_cost_max).cast(Numeric) / func.count(WorkOrderData.wonum)).label('avg_cost')
|
||||||
# wm.wonum,
|
).where(
|
||||||
# SUM(wm.itemqty * COALESCE(wm.inv_avgcost, po.unit_cost, 0)) AS parts_total_cost
|
and_(
|
||||||
# FROM public.wo_maxim_material wm
|
# WorkOrderData.wo_start >= last_oh_date,
|
||||||
# LEFT JOIN (
|
# WorkOrderData.wo_start <= upcoming_oh_date,
|
||||||
# SELECT item_num, AVG(unit_cost) AS unit_cost
|
WorkOrderData.worktype.in_(['OH']),
|
||||||
# FROM public.maximo_sparepart_pr_po_line
|
WorkOrderData.system_tag.in_(['HPB', 'AH', 'APC', 'SCR', 'CL', 'DM', 'CRH', 'ASH', 'BAD', 'DS', 'WTP',
|
||||||
# GROUP BY item_num
|
'MT', 'SUP', 'DCS', 'FF', 'EG', 'AI', 'SPS', 'EVM', 'SCW', 'KLH', 'CH',
|
||||||
# ) po ON wm.itemnum = po.item_num
|
'TUR', 'LOT', 'HRH', 'ESP', 'CAE', 'GMC', 'BFT', 'LSH', 'CHB', 'BSS',
|
||||||
# WHERE wm.itemnum IS NOT NULL
|
'LOS', 'LPB', 'SAC', 'CP', 'EHS', 'RO', 'GG', 'MS', 'CW', 'SO', 'ATT',
|
||||||
# GROUP BY wm.wonum
|
'AFG', 'EHB', 'RP', 'FO', 'PC', 'APE', 'AF', 'DMW', 'BRS', 'GEN', 'ABS',
|
||||||
# ),
|
'CHA', 'TR', 'H2', 'BDW', 'LOM', 'ACR', 'AL', 'FW', 'COND', 'CCCW', 'IA',
|
||||||
# wo_costs AS (
|
'GSS', 'BOL', 'SSB', 'CO', 'OA', 'CTH-UPD', 'AS', 'DP']),
|
||||||
# SELECT
|
WorkOrderData.reportdate.is_not(None),
|
||||||
# w.wonum,
|
WorkOrderData.actstart.is_not(None),
|
||||||
# w.asset_location,
|
WorkOrderData.actfinish.is_not(None),
|
||||||
# -- Use mat_cost_max if parts_total_cost = 0
|
WorkOrderData.unit.in_([3, 0]),
|
||||||
# CASE
|
WorkOrderData.reportdate >= datetime.strptime('2015-01-01', '%Y-%m-%d'),
|
||||||
# WHEN COALESCE(pc.parts_total_cost, 0) = 0 THEN COALESCE(w.mat_cost_max , 0)
|
not_(WorkOrderData.wonum.like('T%'))
|
||||||
# ELSE COALESCE(pc.parts_total_cost, 0)
|
)
|
||||||
# END AS total_wo_cost
|
).group_by(
|
||||||
# FROM wo_staging_maximo_2 w
|
WorkOrderData.location
|
||||||
# LEFT JOIN part_costs pc
|
).order_by(
|
||||||
# ON w.wonum = pc.wonum
|
func.count(WorkOrderData.wonum).desc()
|
||||||
# WHERE
|
|
||||||
# w.worktype = 'OH'
|
|
||||||
# AND w.reportdate IS NOT NULL
|
|
||||||
# AND w.actstart IS NOT NULL
|
|
||||||
# AND w.actfinish IS NOT NULL
|
|
||||||
# AND w.asset_unit IN ('3', '00')
|
|
||||||
# AND w.wonum NOT LIKE 'T%'
|
|
||||||
# )
|
|
||||||
# SELECT
|
|
||||||
# asset_location,
|
|
||||||
# AVG(total_wo_cost) AS avg_cost
|
|
||||||
# FROM wo_costs
|
|
||||||
# GROUP BY asset_location
|
|
||||||
# ORDER BY COUNT(wonum) DESC;
|
|
||||||
# """)
|
|
||||||
|
|
||||||
query = text("""
|
|
||||||
WITH part_costs AS (
|
|
||||||
SELECT
|
|
||||||
wm.wonum,
|
|
||||||
SUM(wm.itemqty * COALESCE(inv.avgcost, po.unit_cost, 0)) AS parts_total_cost
|
|
||||||
FROM public.maximo_workorder_materials wm
|
|
||||||
JOIN public.maximo_inventory AS inv on inv.itemnum = wm.itemnum
|
|
||||||
LEFT JOIN (
|
|
||||||
SELECT item_num, AVG(unit_cost) AS unit_cost
|
|
||||||
FROM public.maximo_sparepart_pr_po_line
|
|
||||||
GROUP BY item_num
|
|
||||||
) po ON wm.itemnum = po.item_num
|
|
||||||
WHERE wm.itemnum IS NOT NULL
|
|
||||||
GROUP BY wm.wonum
|
|
||||||
),
|
|
||||||
wo_costs AS (
|
|
||||||
SELECT
|
|
||||||
w.wonum,
|
|
||||||
w.asset_location,
|
|
||||||
-- Use mat_cost_max if parts_total_cost = 0
|
|
||||||
CASE
|
|
||||||
WHEN COALESCE(pc.parts_total_cost, 0) = 0 THEN COALESCE(w.mat_cost_max , 0)
|
|
||||||
ELSE COALESCE(pc.parts_total_cost, 0)
|
|
||||||
END AS total_wo_cost
|
|
||||||
FROM wo_staging_maximo_2 w
|
|
||||||
LEFT JOIN part_costs pc
|
|
||||||
ON w.wonum = pc.wonum
|
|
||||||
WHERE
|
|
||||||
w.worktype = 'OH'
|
|
||||||
AND w.reportdate IS NOT NULL
|
|
||||||
AND w.actstart IS NOT NULL
|
|
||||||
AND w.actfinish IS NOT NULL
|
|
||||||
AND w.asset_unit IN ('3', '00')
|
|
||||||
AND w.wonum NOT LIKE 'T%'
|
|
||||||
)
|
)
|
||||||
SELECT
|
|
||||||
asset_location,
|
|
||||||
AVG(total_wo_cost) AS avg_cost
|
|
||||||
FROM wo_costs
|
|
||||||
GROUP BY asset_location
|
|
||||||
ORDER BY COUNT(wonum) DESC;
|
|
||||||
""")
|
|
||||||
|
|
||||||
result = await collector_db.execute(query)
|
result = await collector_db.execute(query)
|
||||||
data = []
|
data = result.all()
|
||||||
|
|
||||||
for row in result:
|
|
||||||
data.append({
|
|
||||||
"location_tag": row.asset_location,
|
|
||||||
"avg_cost": row.avg_cost
|
|
||||||
})
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
item["location_tag"]: item["avg_cost"] for item in data
|
data.location: data.avg_cost for data in data
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
async def get_history_oh_wo(*, db_session: DbSession, collector_db_session: CollectorDbSession, oh_session_id: UUID, parent_wo_num: Optional[Union[str, list]] = None):
|
|
||||||
## Get Parent wo num from oh session table
|
|
||||||
if not parent_wo_num:
|
|
||||||
query = select(OverhaulScope.wo_parent).where(OverhaulScope.id == oh_session_id)
|
|
||||||
result = await db_session.execute(query)
|
|
||||||
parent_wo_num = result.scalar()
|
|
||||||
|
|
||||||
if not parent_wo_num:
|
|
||||||
return []
|
|
||||||
|
|
||||||
# Ensure parent_wo_num is a list and removed duplicates if any
|
|
||||||
if isinstance(parent_wo_num, str):
|
|
||||||
parent_wo_num = [parent_wo_num]
|
|
||||||
else:
|
|
||||||
parent_wo_num = list(set(parent_wo_num))
|
|
||||||
|
|
||||||
sql_query = text("""
|
|
||||||
WITH target_wos AS (
|
|
||||||
SELECT
|
|
||||||
w.wonum,
|
|
||||||
w.assetnum,
|
|
||||||
COALESCE(w.actmatcost, 0) as actmatcost,
|
|
||||||
COALESCE(w.actservcost, 0) as actservcost
|
|
||||||
FROM public.wo_maximo w
|
|
||||||
WHERE w.xx_parent = ANY(:parent_wo_num)
|
|
||||||
),
|
|
||||||
wo_tasks AS (
|
|
||||||
SELECT
|
|
||||||
t.xx_parent AS parent_wonum,
|
|
||||||
JSON_AGG(t.description) AS task_list
|
|
||||||
FROM public.wo_maximo t
|
|
||||||
JOIN target_wos tw ON t.xx_parent = tw.wonum
|
|
||||||
GROUP BY t.xx_parent
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
w.assetnum,
|
|
||||||
e.name AS equipment_name,
|
|
||||||
e.location_tag,
|
|
||||||
JSON_OBJECT_AGG(w.wonum, COALESCE(wt.task_list, '[]'::json)) AS wonum_list,
|
|
||||||
COUNT(w.wonum) AS total_wo_count,
|
|
||||||
COALESCE(SUM(w.actmatcost), 0) AS total_material_cost,
|
|
||||||
COALESCE(SUM(w.actservcost), 0) AS total_service_cost,
|
|
||||||
COALESCE(SUM(w.actmatcost + w.actservcost), 0) AS total_actual_cost
|
|
||||||
FROM target_wos w
|
|
||||||
INNER JOIN public.ms_equipment_master e
|
|
||||||
ON w.assetnum = e.assetnum
|
|
||||||
LEFT JOIN wo_tasks wt
|
|
||||||
ON w.wonum = wt.parent_wonum
|
|
||||||
GROUP BY
|
|
||||||
w.assetnum,
|
|
||||||
e.name,
|
|
||||||
e.location_tag
|
|
||||||
ORDER BY total_actual_cost DESC;
|
|
||||||
""")
|
|
||||||
|
|
||||||
results = await collector_db_session.execute(sql_query, {"parent_wo_num": parent_wo_num})
|
|
||||||
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
"assetnum": row.assetnum,
|
|
||||||
"equipment_name": row.equipment_name,
|
|
||||||
"location_tag": row.location_tag,
|
|
||||||
"wonum_list": row.wonum_list,
|
|
||||||
"total_wo_count": row.total_wo_count,
|
|
||||||
"total_material_cost": float(row.total_material_cost),
|
|
||||||
"total_service_cost": float(row.total_service_cost),
|
|
||||||
"total_actual_cost": float(row.total_actual_cost)
|
|
||||||
}
|
|
||||||
for row in results
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,426 +0,0 @@
|
|||||||
import json
|
|
||||||
import re
|
|
||||||
import logging
|
|
||||||
from collections import Counter
|
|
||||||
from fastapi import Request, HTTPException
|
|
||||||
from starlette.middleware.base import BaseHTTPMiddleware
|
|
||||||
|
|
||||||
# =========================
|
|
||||||
# Configuration
|
|
||||||
# =========================
|
|
||||||
|
|
||||||
ALLOWED_MULTI_PARAMS = {
|
|
||||||
"sortBy[]",
|
|
||||||
"descending[]",
|
|
||||||
"exclude[]",
|
|
||||||
"assetnums",
|
|
||||||
"plant_ids",
|
|
||||||
"job_ids",
|
|
||||||
}
|
|
||||||
|
|
||||||
ALLOWED_DATA_PARAMS = {
|
|
||||||
"actual_shutdown", "all_params", "analysis_metadata", "asset_contributions",
|
|
||||||
"assetnum", "assetnums", "assigned_date", "availability", "availableScopes",
|
|
||||||
"avg_cost", "birbaum", "calculation_type", "capacity_weight", "code",
|
|
||||||
"contribution", "corrective_cost", "corrective_costs", "cost", "costPerFailure",
|
|
||||||
"cost_savings_vs_planned", "cost_threshold", "cost_trend", "created_at",
|
|
||||||
"crew_number", "criticalParts", "critical_procurement_items", "current_eaf",
|
|
||||||
"current_plant_eaf", "current_stock", "current_user", "cut_hours",
|
|
||||||
"daily_failures", "data", "datetime", "day", "days", "descending",
|
|
||||||
"description", "down_time", "duration", "duration_oh", "eaf_gap",
|
|
||||||
"eaf_improvement_text", "eaf_input", "efficiency", "end_date",
|
|
||||||
"equipment_name", "equipment_results", "equipment_with_sparepart_constraints",
|
|
||||||
"exclude", "excluded_equipment", "expected_delivery_date", "filter_spec",
|
|
||||||
"finish", "fleet_statistics", "id", "improvement_impact", "included_equipment",
|
|
||||||
"included_in_optimization", "intervalDays", "is_included", "itemnum",
|
|
||||||
"items", "itemsPerPage", "items_per_page", "job", "job_ids",
|
|
||||||
"last_overhaul_date", "lead_time", "location", "location_tag", "location_tags",
|
|
||||||
"maintenance_type", "master_equipment", "material_cost", "max_interval",
|
|
||||||
"max_interval_months", "message", "month", "months_from_planned", "name",
|
|
||||||
"next_planned_overhaul", "node", "num_failures", "num_of_failures",
|
|
||||||
"ohSessionId", "oh_scope", "oh_session_id", "oh_type", "oh_types",
|
|
||||||
"optimal_analysis", "optimal_breakdown", "optimal_month", "optimal_total_cost",
|
|
||||||
"optimization_success", "optimum_analysis", "optimum_day", "optimum_oh",
|
|
||||||
"optimum_oh_day", "optimum_oh_month", "order_date", "overhaulCost",
|
|
||||||
"overhaul_activity", "overhaul_cost", "overhaul_costs",
|
|
||||||
"overhaul_reference_type", "overhaul_scope", "overhaul_scope_id", "overview",
|
|
||||||
"page", "parent", "parent_id", "plan_duration", "planned_month",
|
|
||||||
"planned_outage", "plant_level_benefit", "po_pr_id", "po_vendor_delivery_date",
|
|
||||||
"possible_plant_eaf", "priority_score", "procurement_cost", "procurement_costs",
|
|
||||||
"procurement_details", "projected_eaf_improvement", "quantity",
|
|
||||||
"quantity_required", "query_str", "recommendedScope", "recommended_reduced_outage",
|
|
||||||
"reference", "reference_id", "remark", "removal_date", "required_improvement",
|
|
||||||
"results", "schedules", "scope", "scope_calculation_id", "scope_equipment_job",
|
|
||||||
"scope_name", "scope_overhaul", "service_cost", "session", "simulation",
|
|
||||||
"simulation_id", "sort_by", "sortBy[]", "descending[]", "exclude[]",
|
|
||||||
"sparepart_id", "sparepart_impact", "sparepart_name", "sparepart_summary",
|
|
||||||
"spreadsheet_link", "start", "start_date", "status", "subsystem", "system",
|
|
||||||
"systemComponents", "target_plant_eaf", "tasks", "timing_recommendation",
|
|
||||||
"total", "totalPages", "total_cost", "total_equipment", "total_equipment_analyzed",
|
|
||||||
"total_procurement_items", "type", "unit_cost", "warning_message", "with_results",
|
|
||||||
"workscope", "workscope_group", "year", "_", "t", "timestamp",
|
|
||||||
"q", "filter", "currentUser", "risk_cost", "all", "with_results",
|
|
||||||
"eaf_threshold", "simulation_id", "scope_calculation_id", "calculation_id"
|
|
||||||
}
|
|
||||||
|
|
||||||
ALLOWED_HEADERS = {
|
|
||||||
"host",
|
|
||||||
"user-agent",
|
|
||||||
"accept",
|
|
||||||
"accept-language",
|
|
||||||
"accept-encoding",
|
|
||||||
"connection",
|
|
||||||
"upgrade-insecure-requests",
|
|
||||||
"if-modified-since",
|
|
||||||
"if-none-match",
|
|
||||||
"cache-control",
|
|
||||||
"authorization",
|
|
||||||
"content-type",
|
|
||||||
"content-length",
|
|
||||||
"origin",
|
|
||||||
"referer",
|
|
||||||
"sec-fetch-dest",
|
|
||||||
"sec-fetch-mode",
|
|
||||||
"sec-fetch-site",
|
|
||||||
"sec-fetch-user",
|
|
||||||
"sec-ch-ua",
|
|
||||||
"sec-ch-ua-mobile",
|
|
||||||
"sec-ch-ua-platform",
|
|
||||||
"pragma",
|
|
||||||
"dnt",
|
|
||||||
"priority",
|
|
||||||
"x-forwarded-for",
|
|
||||||
"x-forwarded-proto",
|
|
||||||
"x-forwarded-host",
|
|
||||||
"x-forwarded-port",
|
|
||||||
"x-real-ip",
|
|
||||||
"x-request-id",
|
|
||||||
"x-correlation-id",
|
|
||||||
"x-requested-with",
|
|
||||||
"x-csrf-token",
|
|
||||||
"x-xsrf-token",
|
|
||||||
"postman-token",
|
|
||||||
"x-forwarded-path",
|
|
||||||
"x-forwarded-prefix",
|
|
||||||
"cookie",
|
|
||||||
"x-kong-request-id"
|
|
||||||
}
|
|
||||||
|
|
||||||
MAX_QUERY_PARAMS = 50
|
|
||||||
MAX_QUERY_LENGTH = 2000
|
|
||||||
MAX_JSON_BODY_SIZE = 1024 * 500 # 500 KB
|
|
||||||
|
|
||||||
XSS_PATTERN = re.compile(
|
|
||||||
r"("
|
|
||||||
r"<(script|iframe|embed|object|svg|img|video|audio|base|link|meta|form|button|details|animate)\b|"
|
|
||||||
r"javascript\s*:|vbscript\s*:|data\s*:[^,]*base64[^,]*|data\s*:text/html|"
|
|
||||||
r"\bon[a-z]+\s*=|" # Catch-all for any 'on' event (onerror, onclick, etc.)
|
|
||||||
r"style\s*=.*expression\s*\(|" # Old IE specific
|
|
||||||
r"\b(eval|setTimeout|setInterval|Function)\s*\("
|
|
||||||
r")",
|
|
||||||
re.IGNORECASE,
|
|
||||||
)
|
|
||||||
|
|
||||||
SQLI_PATTERN = re.compile(
|
|
||||||
r"("
|
|
||||||
# 1. Keywords followed by whitespace and common SQL characters
|
|
||||||
r"\b(UNION|SELECT|INSERT|UPDATE|DELETE|DROP|ALTER|CREATE|TRUNCATE|EXEC(UTE)?|DECLARE)\b\s+[\w\*\(\']|"
|
|
||||||
|
|
||||||
# 2. Time-based attacks (more specific than just 'SLEEP')
|
|
||||||
r"\b(WAITFOR\b\s+DELAY|PG_SLEEP|SLEEP\s*\()|"
|
|
||||||
|
|
||||||
# 3. System tables/functions
|
|
||||||
r"\b(INFORMATION_SCHEMA|SYS\.|SYSOBJECTS|XP_CMDSHELL|LOAD_FILE|INTO\s+OUTFILE)\b|"
|
|
||||||
|
|
||||||
# 4. Logical Tautologies (OR 1=1) - Optimized for boundaries
|
|
||||||
r"\b(OR|AND)\b\s+['\"]?\d+['\"]?\s*=\s*['\"]?\d+|"
|
|
||||||
|
|
||||||
# 5. Comments
|
|
||||||
# Match '--' if at start or preceded by whitespace
|
|
||||||
r"(?<!\S)--|"
|
|
||||||
# Match block comments, ensuring they aren't part of mime patterns like */*
|
|
||||||
r"(?<!\*)/\*|(?<!\*)\*/(?!\*)|"
|
|
||||||
# Match '#' if at start or preceded by whitespace
|
|
||||||
r"(?<!\S)#|"
|
|
||||||
|
|
||||||
# 6. Hex / Stacked Queries
|
|
||||||
r";\s*\b(SELECT|DROP|DELETE|UPDATE|INSERT)\b"
|
|
||||||
r")",
|
|
||||||
re.IGNORECASE
|
|
||||||
)
|
|
||||||
|
|
||||||
RCE_PATTERN = re.compile(
|
|
||||||
r"("
|
|
||||||
r"\$\(.*\)|`.*`|" # Command substitution $(...) or `...`
|
|
||||||
r"[;&|]\s*(cat|ls|id|whoami|pwd|ifconfig|ip|netstat|nc|netcat|nmap|curl|wget|python|php|perl|ruby|bash|sh|cmd|powershell|pwsh|sc\s+|tasklist|taskkill|base64|sudo|crontab|ssh|ftp|tftp)|"
|
|
||||||
# Only flag naked commands if they are clearly standalone or system paths
|
|
||||||
r"\b(/etc/passwd|/etc/shadow|/etc/group|/etc/issue|/proc/self/|/windows/system32/|C:\\Windows\\)\b"
|
|
||||||
r")",
|
|
||||||
re.IGNORECASE,
|
|
||||||
)
|
|
||||||
|
|
||||||
TRAVERSAL_PATTERN = re.compile(
|
|
||||||
r"(\.\.[/\\]|%2e%2e%2f|%2e%2e/|\.\.%2f|%2e%2e%5c|%252e%252e%252f|\\00)",
|
|
||||||
re.IGNORECASE,
|
|
||||||
)
|
|
||||||
|
|
||||||
FORBIDDEN_JSON_KEYS = {"__proto__", "constructor", "prototype"}
|
|
||||||
|
|
||||||
DYNAMIC_KEYS = {
|
|
||||||
"data",
|
|
||||||
"results",
|
|
||||||
"analysis_metadata",
|
|
||||||
"asset_contributions",
|
|
||||||
"equipment_results",
|
|
||||||
"optimal_analysis",
|
|
||||||
"optimum_analysis",
|
|
||||||
"schedules",
|
|
||||||
"tasks",
|
|
||||||
"all_params",
|
|
||||||
"parameters",
|
|
||||||
"program_data"
|
|
||||||
}
|
|
||||||
|
|
||||||
log = logging.getLogger("security_logger")
|
|
||||||
|
|
||||||
|
|
||||||
def has_control_chars(value: str) -> bool:
    """Return True if *value* contains an ASCII control character other
    than newline, carriage return, or horizontal tab."""
    harmless = {"\n", "\r", "\t"}
    for ch in value:
        if ord(ch) < 32 and ch not in harmless:
            return True
    return False
|
|
||||||
|
|
||||||
def inspect_value(value: str, source: str):
    """Validate a single string against the injection-detection patterns.

    Checks, in order: XSS, SQL injection, RCE, path traversal, and finally
    disallowed control characters. On the first hit, logs the violation
    (including *source*, a human-readable origin such as "header 'x'") and
    raises HTTPException(422) with a deliberately generic detail so the
    client learns nothing about which check fired.

    Non-string values are ignored, as is the literal Accept wildcard "*/*",
    which would otherwise trip the block-comment rule in SQLI_PATTERN.
    """
    if not isinstance(value, str) or value == "*/*":
        return

    # Table-driven instead of four copy-pasted if-blocks; order preserved.
    # The label feeds the audit-log message verbatim.
    pattern_checks = (
        (XSS_PATTERN, "XSS"),
        (SQLI_PATTERN, "SQL injection"),
        (RCE_PATTERN, "RCE"),
        (TRAVERSAL_PATTERN, "Path Traversal"),
    )
    for pattern, label in pattern_checks:
        if pattern.search(value):
            log.warning(f"Security violation: Potential {label} payload detected in {source}, value: {value}")
            raise HTTPException(
                status_code=422,
                detail="Invalid request parameters",
            )

    if has_control_chars(value):
        log.warning(f"Security violation: Invalid control characters detected in {source}")
        raise HTTPException(
            status_code=422,
            detail="Invalid request parameters",
        )
|
|
||||||
def inspect_json(obj, path="body", check_whitelist=True):
    """Recursively validate a decoded JSON payload.

    Rejects forbidden (prototype-pollution) keys everywhere; rejects keys
    outside ALLOWED_DATA_PARAMS while *check_whitelist* is True; runs
    inspect_value on every string leaf. *path* is only used to build
    log messages. Raises HTTPException(422) on the first violation.
    """
    if isinstance(obj, str):
        inspect_value(obj, path)
        return
    if isinstance(obj, list):
        for idx, element in enumerate(obj):
            inspect_json(element, f"{path}[{idx}]", check_whitelist=check_whitelist)
        return
    if not isinstance(obj, dict):
        return

    for key, value in obj.items():
        if key in FORBIDDEN_JSON_KEYS:
            log.warning(f"Security violation: Forbidden JSON key detected: {path}.{key}")
            raise HTTPException(
                status_code=422,
                detail="Invalid request parameters",
            )

        if check_whitelist and key not in ALLOWED_DATA_PARAMS:
            log.warning(f"Security violation: Unknown JSON key detected: {path}.{key}")
            raise HTTPException(
                status_code=422,
                detail="Invalid request parameters",
            )

        # Whitelist enforcement stops below dynamic container keys, whose
        # children carry free-form data.
        child_whitelist = check_whitelist and key not in DYNAMIC_KEYS
        inspect_json(value, f"{path}.{key}", check_whitelist=child_whitelist)
|
|
||||||
|
|
||||||
# =========================
|
|
||||||
# Middleware
|
|
||||||
# =========================
|
|
||||||
|
|
||||||
class RequestValidationMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that rejects suspicious requests before routing.

    Validates, in order: headers (duplicates, whitelist, values), query
    string (length, count, whitelist, duplicates, values, pagination size),
    Content-Type, the query-vs-JSON-body single-source rule, and finally
    the JSON body itself. Every rejection is logged to the security logger
    and surfaces to the client as a uniform HTTP 422 so that callers cannot
    probe which rule fired.
    """

    # Headers that may legitimately appear more than once in a request.
    _REPEATABLE_HEADERS = {'accept', 'accept-encoding', 'accept-language', 'accept-charset', 'cookie'}
    # Query keys interpreted as a pagination page size.
    _PAGINATION_SIZE_KEYS = {"size", "itemsPerPage", "per_page", "limit", "items_per_page"}
    # Content types the API knows how to parse.
    _ACCEPTED_CONTENT_TYPES = ("application/json", "multipart/form-data", "application/x-www-form-urlencoded")

    @staticmethod
    def _reject(message: str):
        """Log a security violation and abort with a uniform 422."""
        log.warning(message)
        raise HTTPException(
            status_code=422,
            detail="Invalid request parameters",
        )

    async def dispatch(self, request: Request, call_next):
        # Check order matters: cheap structural checks first, body last
        # (reading the body consumes the request stream).
        self._check_headers(request)
        params = self._check_query_string(request)

        content_type = request.headers.get("content-type", "")
        self._check_content_type(content_type)
        self._check_single_source(params, content_type, request)
        await self._check_json_body(request, content_type)

        return await call_next(request)

    def _check_headers(self, request: Request):
        """Reject duplicated, non-whitelisted, or malicious headers."""
        names = [key.lower() for key, _ in request.headers.items()]

        counts = Counter(names)
        real_duplicates = [
            name for name, count in counts.items()
            if count > 1 and name not in self._REPEATABLE_HEADERS
        ]
        if real_duplicates:
            self._reject(f"Security violation: Duplicate headers detected: {real_duplicates}")

        # Browser-controlled `sec-*` fetch-metadata headers are tolerated
        # even though they are not on the whitelist.
        filtered_unknown = [
            name for name in names
            if name not in ALLOWED_HEADERS and not name.startswith('sec-')
        ]
        if filtered_unknown:
            self._reject(f"Security violation: Unknown headers detected: {filtered_unknown}")

        for key, value in request.headers.items():
            if value:
                inspect_value(value, f"header '{key}'")

    def _check_query_string(self, request: Request):
        """Validate the query string and return its (key, value) multi-items."""
        if len(request.url.query) > MAX_QUERY_LENGTH:
            self._reject("Security violation: Query string too long")

        params = request.query_params.multi_items()
        if len(params) > MAX_QUERY_PARAMS:
            self._reject("Security violation: Too many query parameters")

        unknown_params = [key for key, _ in params if key not in ALLOWED_DATA_PARAMS]
        if unknown_params:
            self._reject(f"Security violation: Unknown query parameters detected: {unknown_params}")

        counts = Counter(key for key, _ in params)
        duplicates = [
            key for key, count in counts.items()
            if count > 1 and key not in ALLOWED_MULTI_PARAMS
        ]
        if duplicates:
            self._reject(f"Security violation: Duplicate query parameters detected: {duplicates}")

        for key, value in params:
            if value:
                inspect_value(value, f"query param '{key}'")
            if key in self._PAGINATION_SIZE_KEYS and value:
                self._check_pagination_size(value)
        return params

    def _check_pagination_size(self, raw: str):
        """Enforce that a page-size value is an integer <= 50 and a multiple of 5."""
        try:
            size_val = int(raw)
        except ValueError:
            self._reject("Security violation: Pagination size invalid value")
        if size_val > 50:
            self._reject(f"Security violation: Pagination size too large ({size_val})")
        if size_val % 5 != 0:
            self._reject(f"Security violation: Pagination size not multiple of 5 ({size_val})")

    def _check_content_type(self, content_type: str):
        """Allow only content types the API knows how to parse."""
        if content_type and not content_type.startswith(self._ACCEPTED_CONTENT_TYPES):
            self._reject(f"Security violation: Unsupported Content-Type: {content_type}")

    def _check_single_source(self, params, content_type: str, request: Request):
        """Forbid mixing query parameters with a JSON body in one request.

        Body presence is approximated via Content-Length, since reading the
        body here would consume the request stream.
        """
        if not params or not content_type.startswith("application/json"):
            return
        content_length = request.headers.get("content-length")
        if content_length and int(content_length) > 0:
            self._reject("Security violation: Mixed parameters (query + JSON body)")

    async def _check_json_body(self, request: Request, content_type: str):
        """Decode and inspect a JSON body, then re-inject it for handlers."""
        if not content_type.startswith("application/json"):
            return
        body = await request.body()

        if body:
            try:
                payload = json.loads(body)
            except json.JSONDecodeError:
                self._reject("Security violation: Invalid JSON body")
            inspect_json(payload)

        # Re-inject the consumed body so downstream handlers can read it.
        # NOTE(review): relies on the private Starlette `_receive` hook —
        # confirm on Starlette upgrades.
        async def receive():
            return {"type": "http.request", "body": body}

        request._receive = receive
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
from sqlalchemy import Column, String
|
|
||||||
from src.database.core import Base
|
|
||||||
from src.models import DefaultMixin
|
|
||||||
|
|
||||||
|
|
||||||
class OverhaulGantt(Base, DefaultMixin):
    """ORM model for the overhaul monitoring-spreadsheet record.

    Stores the external spreadsheet identifier and its shareable link;
    common columns (id, timestamps, …) come from DefaultMixin.
    """

    __tablename__ = "oh_ms_monitoring_spreadsheet"

    # Both fields are optional: a row may exist before the sheet is created.
    spreadsheet_id = Column(String, nullable=True)
    spreadsheet_link = Column(String, nullable=True)
|
|
||||||
|
|
||||||
@ -1,68 +1,68 @@
|
|||||||
# import asyncio
|
import asyncio
|
||||||
# from typing import AsyncGenerator, Generator
|
from typing import AsyncGenerator, Generator
|
||||||
|
|
||||||
# import pytest
|
import pytest
|
||||||
# from httpx import AsyncClient
|
from httpx import AsyncClient
|
||||||
# from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||||
# from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
# from sqlalchemy.pool import StaticPool
|
from sqlalchemy.pool import StaticPool
|
||||||
# from sqlalchemy_utils import database_exists, drop_database
|
from sqlalchemy_utils import database_exists, drop_database
|
||||||
# from starlette.config import environ
|
from starlette.config import environ
|
||||||
# from starlette.testclient import TestClient
|
from starlette.testclient import TestClient
|
||||||
|
|
||||||
# # from src.database import Base, get_db
|
# from src.database import Base, get_db
|
||||||
# # from src.main import app
|
# from src.main import app
|
||||||
|
|
||||||
# # Test database URL
|
# Test database URL
|
||||||
# TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
|
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
|
||||||
|
|
||||||
# engine = create_async_engine(
|
engine = create_async_engine(
|
||||||
# TEST_DATABASE_URL,
|
TEST_DATABASE_URL,
|
||||||
# connect_args={"check_same_thread": False},
|
connect_args={"check_same_thread": False},
|
||||||
# poolclass=StaticPool,
|
poolclass=StaticPool,
|
||||||
# )
|
)
|
||||||
|
|
||||||
# async_session = sessionmaker(
|
async_session = sessionmaker(
|
||||||
# engine,
|
engine,
|
||||||
# class_=AsyncSession,
|
class_=AsyncSession,
|
||||||
# expire_on_commit=False,
|
expire_on_commit=False,
|
||||||
# autocommit=False,
|
autocommit=False,
|
||||||
# autoflush=False,
|
autoflush=False,
|
||||||
# )
|
)
|
||||||
|
|
||||||
|
|
||||||
# async def override_get_db() -> AsyncGenerator[AsyncSession, None]:
|
async def override_get_db() -> AsyncGenerator[AsyncSession, None]:
|
||||||
# async with async_session() as session:
|
async with async_session() as session:
|
||||||
# try:
|
try:
|
||||||
# yield session
|
yield session
|
||||||
# await session.commit()
|
await session.commit()
|
||||||
# except Exception:
|
except Exception:
|
||||||
# await session.rollback()
|
await session.rollback()
|
||||||
# raise
|
raise
|
||||||
# finally:
|
finally:
|
||||||
# await session.close()
|
await session.close()
|
||||||
|
|
||||||
|
|
||||||
# app.dependency_overrides[get_db] = override_get_db
|
app.dependency_overrides[get_db] = override_get_db
|
||||||
|
|
||||||
|
|
||||||
# @pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
# def event_loop() -> Generator:
|
def event_loop() -> Generator:
|
||||||
# loop = asyncio.get_event_loop_policy().new_event_loop()
|
loop = asyncio.get_event_loop_policy().new_event_loop()
|
||||||
# yield loop
|
yield loop
|
||||||
# loop.close()
|
loop.close()
|
||||||
|
|
||||||
|
|
||||||
# @pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
# async def setup_db() -> AsyncGenerator[None, None]:
|
async def setup_db() -> AsyncGenerator[None, None]:
|
||||||
# async with engine.begin() as conn:
|
async with engine.begin() as conn:
|
||||||
# await conn.run_sync(Base.metadata.create_all)
|
await conn.run_sync(Base.metadata.create_all)
|
||||||
# yield
|
yield
|
||||||
# async with engine.begin() as conn:
|
async with engine.begin() as conn:
|
||||||
# await conn.run_sync(Base.metadata.drop_all)
|
await conn.run_sync(Base.metadata.drop_all)
|
||||||
|
|
||||||
|
|
||||||
# @pytest.fixture
|
@pytest.fixture
|
||||||
# async def client() -> AsyncGenerator[AsyncClient, None]:
|
async def client() -> AsyncGenerator[AsyncClient, None]:
|
||||||
# async with AsyncClient(app=app, base_url="http://test") as client:
|
async with AsyncClient(app=app, base_url="http://test") as client:
|
||||||
# yield client
|
yield client
|
||||||
|
|||||||
@ -0,0 +1,3 @@
|
|||||||
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||||
|
|
||||||
|
Session = scoped_session(sessionmaker())
|
||||||
@ -0,0 +1,28 @@
|
|||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from factory import (LazyAttribute, LazyFunction, SelfAttribute, Sequence,
|
||||||
|
SubFactory, post_generation)
|
||||||
|
from factory.alchemy import SQLAlchemyModelFactory
|
||||||
|
from factory.fuzzy import FuzzyChoice, FuzzyDateTime, FuzzyInteger, FuzzyText
|
||||||
|
from faker import Faker
|
||||||
|
from faker.providers import misc
|
||||||
|
|
||||||
|
from .database import Session
|
||||||
|
|
||||||
|
# from pytz import UTC
|
||||||
|
|
||||||
|
|
||||||
|
fake = Faker()
|
||||||
|
fake.add_provider(misc)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseFactory(SQLAlchemyModelFactory):
|
||||||
|
"""Base Factory."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
"""Factory configuration."""
|
||||||
|
|
||||||
|
abstract = True
|
||||||
|
sqlalchemy_session = Session
|
||||||
|
sqlalchemy_session_persistence = "commit"
|
||||||
@ -1,44 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from src.calculation_budget_constrains.service import greedy_selection, knapsack_selection
|
|
||||||
|
|
||||||
def test_greedy_selection():
|
|
||||||
equipments = [
|
|
||||||
{"id": 1, "total_cost": 100, "priority_score": 10, "cost": 100},
|
|
||||||
{"id": 2, "total_cost": 50, "priority_score": 20, "cost": 50},
|
|
||||||
{"id": 3, "total_cost": 60, "priority_score": 15, "cost": 60},
|
|
||||||
]
|
|
||||||
budget = 120
|
|
||||||
# Items sorted by priority_score: id 2 (20), id 3 (15), id 1 (10)
|
|
||||||
# 2 (50) + 3 (60) = 110. Item 1 (100) won't fit.
|
|
||||||
selected, excluded = greedy_selection(equipments, budget)
|
|
||||||
|
|
||||||
selected_ids = [e["id"] for e in selected]
|
|
||||||
assert 2 in selected_ids
|
|
||||||
assert 3 in selected_ids
|
|
||||||
assert len(selected) == 2
|
|
||||||
assert excluded[0]["id"] == 1
|
|
||||||
|
|
||||||
def test_knapsack_selection_basic():
|
|
||||||
# Similar items but where greedy might fail if cost/value ratio is tricky
|
|
||||||
# item 1: value 10, cost 60
|
|
||||||
# item 2: value 7, cost 35
|
|
||||||
# item 3: value 7, cost 35
|
|
||||||
# budget: 70
|
|
||||||
# Greedy would take item 1 (value 10, remaining budget 10, can't take more)
|
|
||||||
# Optimal would take item 2 and 3 (value 14, remaining budget 0)
|
|
||||||
|
|
||||||
scale = 1 # No scaling for simplicity in this test
|
|
||||||
equipments = [
|
|
||||||
{"id": 1, "total_cost": 60, "priority_score": 10},
|
|
||||||
{"id": 2, "total_cost": 35, "priority_score": 7},
|
|
||||||
{"id": 3, "total_cost": 35, "priority_score": 7},
|
|
||||||
]
|
|
||||||
budget = 70
|
|
||||||
|
|
||||||
selected, excluded = knapsack_selection(equipments, budget, scale=1)
|
|
||||||
|
|
||||||
selected_ids = [e["id"] for e in selected]
|
|
||||||
assert 2 in selected_ids
|
|
||||||
assert 3 in selected_ids
|
|
||||||
assert len(selected) == 2
|
|
||||||
assert 1 not in selected_ids
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
from src.context import set_request_id, get_request_id, set_user_id, get_user_id
|
|
||||||
|
|
||||||
def test_request_id_context():
|
|
||||||
test_id = "test-request-id-123"
|
|
||||||
set_request_id(test_id)
|
|
||||||
assert get_request_id() == test_id
|
|
||||||
|
|
||||||
def test_user_id_context():
|
|
||||||
test_uid = "user-456"
|
|
||||||
set_user_id(test_uid)
|
|
||||||
assert get_user_id() == test_uid
|
|
||||||
|
|
||||||
def test_context_default_none():
|
|
||||||
assert get_request_id() is None or get_request_id() != ""
|
|
||||||
@ -1,53 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from decimal import Decimal
|
|
||||||
from src.contribution_util import prod, system_availability, get_all_components, birnbaum_importance
|
|
||||||
|
|
||||||
def test_prod():
|
|
||||||
assert prod([1, 2, 3]) == 6.0
|
|
||||||
assert prod([0.5, 0.5]) == 0.25
|
|
||||||
assert prod([]) == 1.0
|
|
||||||
|
|
||||||
def test_system_availability_series():
|
|
||||||
structure = {"series": ["A", "B"]}
|
|
||||||
availabilities = {"A": 0.9, "B": 0.8}
|
|
||||||
# 0.9 * 0.8 = 0.72
|
|
||||||
assert system_availability(structure, availabilities) == pytest.approx(0.72)
|
|
||||||
|
|
||||||
def test_system_availability_parallel():
|
|
||||||
structure = {"parallel": ["A", "B"]}
|
|
||||||
availabilities = {"A": 0.9, "B": 0.8}
|
|
||||||
# 1 - (1-0.9)*(1-0.8) = 1 - 0.1*0.2 = 1 - 0.02 = 0.98
|
|
||||||
assert system_availability(structure, availabilities) == pytest.approx(0.98)
|
|
||||||
|
|
||||||
def test_system_availability_nested():
|
|
||||||
# (A in series with (B in parallel with C))
|
|
||||||
structure = {
|
|
||||||
"series": [
|
|
||||||
"A",
|
|
||||||
{"parallel": ["B", "C"]}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
availabilities = {"A": 0.9, "B": 0.8, "C": 0.7}
|
|
||||||
# B||C = 1 - (1-0.8)*(1-0.7) = 1 - 0.2*0.3 = 0.94
|
|
||||||
# A && (B||C) = 0.9 * 0.94 = 0.846
|
|
||||||
assert system_availability(structure, availabilities) == pytest.approx(0.846)
|
|
||||||
|
|
||||||
def test_get_all_components():
|
|
||||||
structure = {
|
|
||||||
"series": [
|
|
||||||
"A",
|
|
||||||
{"parallel": ["B", "C"]}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
assert get_all_components(structure) == {"A", "B", "C"}
|
|
||||||
|
|
||||||
def test_birnbaum_importance():
|
|
||||||
structure = {"series": ["A", "B"]}
|
|
||||||
availabilities = {"A": 0.9, "B": 0.8}
|
|
||||||
# I_B(A) = A_sys(A=1) - A_sys(A=0)
|
|
||||||
# A_sys(A=1, B=0.8) = 1 * 0.8 = 0.8
|
|
||||||
# A_sys(A=0, B=0.8) = 0 * 0.8 = 0
|
|
||||||
# I_B(A) = 0.8
|
|
||||||
assert birnbaum_importance(structure, availabilities, "A") == pytest.approx(0.8)
|
|
||||||
# I_B(B) = A_sys(B=1, A=0.9) - A_sys(B=0, A=0.9) = 0.9 - 0 = 0.9
|
|
||||||
assert birnbaum_importance(structure, availabilities, "B") == pytest.approx(0.9)
|
|
||||||
@ -1,31 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from sqlalchemy.exc import IntegrityError, DataError, DBAPIError
|
|
||||||
from src.exceptions import handle_sqlalchemy_error
|
|
||||||
|
|
||||||
def test_handle_sqlalchemy_error_unique_constraint():
|
|
||||||
err = IntegrityError("Unique constraint", params=None, orig=Exception("unique constraint violation"))
|
|
||||||
msg, status = handle_sqlalchemy_error(err)
|
|
||||||
assert status == 409
|
|
||||||
assert "already exists" in msg
|
|
||||||
|
|
||||||
def test_handle_sqlalchemy_error_foreign_key():
|
|
||||||
err = IntegrityError("Foreign key constraint", params=None, orig=Exception("foreign key constraint violation"))
|
|
||||||
msg, status = handle_sqlalchemy_error(err)
|
|
||||||
assert status == 400
|
|
||||||
assert "Related record not found" in msg
|
|
||||||
|
|
||||||
def test_handle_sqlalchemy_error_data_error():
|
|
||||||
err = DataError("Invalid data", params=None, orig=None)
|
|
||||||
msg, status = handle_sqlalchemy_error(err)
|
|
||||||
assert status == 400
|
|
||||||
assert "Invalid data" in msg
|
|
||||||
|
|
||||||
def test_handle_sqlalchemy_error_generic_dbapi():
|
|
||||||
class MockError:
|
|
||||||
def __str__(self):
|
|
||||||
return "Some generic database error"
|
|
||||||
|
|
||||||
err = DBAPIError("Generic error", params=None, orig=MockError())
|
|
||||||
msg, status = handle_sqlalchemy_error(err)
|
|
||||||
assert status == 500
|
|
||||||
assert "Database error" in msg
|
|
||||||
@ -1,56 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from unittest.mock import AsyncMock, MagicMock
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from src.middleware import RequestValidationMiddleware
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_request_validation_middleware_query_length():
|
|
||||||
middleware = RequestValidationMiddleware(app=MagicMock())
|
|
||||||
request = MagicMock()
|
|
||||||
request.url.query = "a" * 2001
|
|
||||||
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
await middleware.dispatch(request, AsyncMock())
|
|
||||||
assert excinfo.value.status_code == 414
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_request_validation_middleware_too_many_params():
|
|
||||||
middleware = RequestValidationMiddleware(app=MagicMock())
|
|
||||||
request = MagicMock()
|
|
||||||
request.url.query = "a=1"
|
|
||||||
request.query_params.multi_items.return_value = [("param", "val")] * 51
|
|
||||||
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
await middleware.dispatch(request, AsyncMock())
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "Too many query parameters" in excinfo.value.detail
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_request_validation_middleware_xss_detection():
|
|
||||||
middleware = RequestValidationMiddleware(app=MagicMock())
|
|
||||||
request = MagicMock()
|
|
||||||
request.url.query = "q=<script>"
|
|
||||||
request.query_params.multi_items.return_value = [("q", "<script>")]
|
|
||||||
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
await middleware.dispatch(request, AsyncMock())
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "Potential XSS payload" in excinfo.value.detail
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_request_validation_middleware_pagination_logic():
|
|
||||||
middleware = RequestValidationMiddleware(app=MagicMock())
|
|
||||||
request = MagicMock()
|
|
||||||
request.url.query = "size=55"
|
|
||||||
request.query_params.multi_items.return_value = [("size", "55")]
|
|
||||||
request.headers = {}
|
|
||||||
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
await middleware.dispatch(request, AsyncMock())
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "cannot exceed 50" in excinfo.value.detail
|
|
||||||
|
|
||||||
request.query_params.multi_items.return_value = [("size", "7")]
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
await middleware.dispatch(request, AsyncMock())
|
|
||||||
assert "must be a multiple of 5" in excinfo.value.detail
|
|
||||||
@ -1,64 +0,0 @@
|
|||||||
import pytest
|
|
||||||
import math
|
|
||||||
from src.calculation_target_reliability.service import calculate_asset_eaf_contributions
|
|
||||||
|
|
||||||
def test_calculate_asset_eaf_contributions_basic():
|
|
||||||
# Mock plant result
|
|
||||||
plant_result = {
|
|
||||||
"total_uptime": 7000,
|
|
||||||
"total_downtime": 1000,
|
|
||||||
"eaf": 85.0
|
|
||||||
}
|
|
||||||
# total_hours = 8000
|
|
||||||
|
|
||||||
# Mock equipment results
|
|
||||||
eq_results = [
|
|
||||||
{
|
|
||||||
"aeros_node": {"node_name": "Asset1"},
|
|
||||||
"num_events": 5,
|
|
||||||
"contribution_factor": 0.5,
|
|
||||||
"contribution": 0.1, # Birnbaum
|
|
||||||
"availability": 0.9,
|
|
||||||
"total_downtime": 100
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"aeros_node": {"node_name": "Asset2"},
|
|
||||||
"num_events": 2,
|
|
||||||
"contribution_factor": 0.3,
|
|
||||||
"contribution": 0.05,
|
|
||||||
"availability": 0.95,
|
|
||||||
"total_downtime": 50
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
standard_scope = ["Asset1", "Asset2"]
|
|
||||||
eaf_gap = 2.0 # 2% gap
|
|
||||||
scheduled_outage = 500
|
|
||||||
|
|
||||||
results = calculate_asset_eaf_contributions(
|
|
||||||
plant_result, eq_results, standard_scope, eaf_gap, scheduled_outage
|
|
||||||
)
|
|
||||||
|
|
||||||
assert len(results) == 2
|
|
||||||
# Check sorting (highest birnbaum first)
|
|
||||||
assert results[0].node["node_name"] == "Asset1"
|
|
||||||
assert results[0].birbaum > results[1].birbaum
|
|
||||||
|
|
||||||
# Check that required_improvement is positive
|
|
||||||
assert results[0].required_improvement > 0
|
|
||||||
assert results[0].improvement_impact > 0
|
|
||||||
|
|
||||||
def test_calculate_asset_eaf_contributions_skipping():
|
|
||||||
plant_result = {"total_uptime": 1000, "total_downtime": 0, "eaf": 100}
|
|
||||||
eq_results = [{
|
|
||||||
"aeros_node": {"node_name": "Asset1"},
|
|
||||||
"num_events": 0,
|
|
||||||
"contribution_factor": 0.5,
|
|
||||||
"contribution": 0.1,
|
|
||||||
"availability": 1.0,
|
|
||||||
"total_downtime": 0
|
|
||||||
}]
|
|
||||||
results = calculate_asset_eaf_contributions(
|
|
||||||
plant_result, eq_results, ["Asset1"], 1.0, 0
|
|
||||||
)
|
|
||||||
assert len(results) == 0
|
|
||||||
@ -1,49 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from pydantic import ValidationError
|
|
||||||
from src.database.schema import CommonParams
|
|
||||||
from src.overhaul.schema import OverhaulCriticalParts
|
|
||||||
|
|
||||||
def test_common_params_valid():
|
|
||||||
params = CommonParams(
|
|
||||||
page=1,
|
|
||||||
itemsPerPage=10,
|
|
||||||
q="search test",
|
|
||||||
all=1
|
|
||||||
)
|
|
||||||
assert params.page == 1
|
|
||||||
assert params.items_per_page == 10
|
|
||||||
assert params.query_str == "search test"
|
|
||||||
assert params.is_all is True
|
|
||||||
|
|
||||||
def test_common_params_page_constraints():
|
|
||||||
# Test page must be > 0
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
CommonParams(page=0)
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
CommonParams(page=-1)
|
|
||||||
|
|
||||||
def test_common_params_items_per_page_constraints():
|
|
||||||
# Test items_per_page must be multiple of 5
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
CommonParams(itemsPerPage=7)
|
|
||||||
|
|
||||||
# Test items_per_page maximum
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
CommonParams(itemsPerPage=55)
|
|
||||||
|
|
||||||
# Valid multiples of 5
|
|
||||||
assert CommonParams(itemsPerPage=50).items_per_page == 50
|
|
||||||
assert CommonParams(itemsPerPage=5).items_per_page == 5
|
|
||||||
|
|
||||||
def test_overhaul_critical_parts_valid():
|
|
||||||
parts = OverhaulCriticalParts(criticalParts=["Part A", "Part B"])
|
|
||||||
assert parts.criticalParts == ["Part A", "Part B"]
|
|
||||||
|
|
||||||
def test_overhaul_critical_parts_invalid():
|
|
||||||
# criticalParts is required and must be a list
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
OverhaulCriticalParts()
|
|
||||||
|
|
||||||
with pytest.raises(ValidationError):
|
|
||||||
OverhaulCriticalParts(criticalParts="Not a list")
|
|
||||||
@ -1,58 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from src.middleware import (
|
|
||||||
inspect_value,
|
|
||||||
inspect_json,
|
|
||||||
has_control_chars,
|
|
||||||
XSS_PATTERN,
|
|
||||||
SQLI_PATTERN
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_xss_patterns():
|
|
||||||
# Test common XSS payloads in be-optimumoh
|
|
||||||
payloads = [
|
|
||||||
"<script>",
|
|
||||||
"javascript:",
|
|
||||||
"onerror=",
|
|
||||||
"onload=",
|
|
||||||
"<svg",
|
|
||||||
"<img"
|
|
||||||
]
|
|
||||||
for payload in payloads:
|
|
||||||
assert XSS_PATTERN.search(payload) is not None
|
|
||||||
|
|
||||||
def test_sqli_patterns():
|
|
||||||
# Test common SQLi payloads in be-optimumoh
|
|
||||||
payloads = [
|
|
||||||
"UNION",
|
|
||||||
"SELECT",
|
|
||||||
"INSERT",
|
|
||||||
"DELETE",
|
|
||||||
"DROP",
|
|
||||||
"--",
|
|
||||||
"OR 1=1"
|
|
||||||
]
|
|
||||||
for payload in payloads:
|
|
||||||
assert SQLI_PATTERN.search(payload) is not None
|
|
||||||
|
|
||||||
def test_inspect_value_raises():
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
inspect_value("<script>", "source")
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "Potential XSS payload" in excinfo.value.detail
|
|
||||||
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
inspect_value("UNION SELECT", "source")
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "Potential SQL injection" in excinfo.value.detail
|
|
||||||
|
|
||||||
def test_inspect_json_raises():
|
|
||||||
with pytest.raises(HTTPException) as excinfo:
|
|
||||||
inspect_json({"__proto__": "polluted"})
|
|
||||||
assert excinfo.value.status_code == 400
|
|
||||||
assert "Forbidden JSON key" in excinfo.value.detail
|
|
||||||
|
|
||||||
def test_has_control_chars():
|
|
||||||
assert has_control_chars("normal string") is False
|
|
||||||
assert has_control_chars("string with \x00 null") is True
|
|
||||||
assert has_control_chars("string with \n newline") is False
|
|
||||||
@ -1,33 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from src.calculation_target_reliability.utils import generate_down_periods
|
|
||||||
|
|
||||||
def test_generate_down_periods_count():
|
|
||||||
start = datetime(2025, 1, 1)
|
|
||||||
end = datetime(2025, 1, 31)
|
|
||||||
# Test fixed number of periods
|
|
||||||
periods = generate_down_periods(start, end, num_periods=5)
|
|
||||||
# It attempts to generate 5, but might be fewer due to overlaps
|
|
||||||
assert len(periods) <= 5
|
|
||||||
|
|
||||||
# Check they are within range
|
|
||||||
for p_start, p_end in periods:
|
|
||||||
assert p_start >= start
|
|
||||||
assert p_end <= end
|
|
||||||
assert p_start < p_end
|
|
||||||
|
|
||||||
def test_generate_down_periods_no_overlap():
|
|
||||||
start = datetime(2025, 1, 1)
|
|
||||||
end = datetime(2025, 1, 31)
|
|
||||||
periods = generate_down_periods(start, end, num_periods=10)
|
|
||||||
|
|
||||||
# Sort and check gaps
|
|
||||||
for i in range(len(periods) - 1):
|
|
||||||
assert periods[i][1] <= periods[i+1][0]
|
|
||||||
|
|
||||||
def test_generate_down_periods_too_small_range():
|
|
||||||
start = datetime(2025, 1, 1)
|
|
||||||
end = datetime(2025, 1, 2)
|
|
||||||
# Requesting 5 days duration in 1 day range
|
|
||||||
periods = generate_down_periods(start, end, num_periods=1, min_duration=5)
|
|
||||||
assert len(periods) == 0
|
|
||||||
@ -1,36 +0,0 @@
|
|||||||
import pytest
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from src.utils import parse_relative_expression, parse_date_string
|
|
||||||
|
|
||||||
def test_parse_relative_expression_days():
|
|
||||||
# Test T, T+n, T-n
|
|
||||||
result = parse_relative_expression("T")
|
|
||||||
assert result is not None
|
|
||||||
assert isinstance(result, datetime)
|
|
||||||
|
|
||||||
result_plus = parse_relative_expression("T+5")
|
|
||||||
assert result_plus is not None
|
|
||||||
|
|
||||||
result_minus = parse_relative_expression("T-3")
|
|
||||||
assert result_minus is not None
|
|
||||||
|
|
||||||
def test_parse_relative_expression_invalid():
|
|
||||||
assert parse_relative_expression("abc") is None
|
|
||||||
assert parse_relative_expression("123") is None
|
|
||||||
assert parse_relative_expression("T++1") is None
|
|
||||||
|
|
||||||
def test_parse_date_string_formats():
|
|
||||||
# Test various ISO and common formats
|
|
||||||
dt = parse_date_string("2024-11-08")
|
|
||||||
assert dt.year == 2024
|
|
||||||
assert dt.month == 11
|
|
||||||
assert dt.day == 8
|
|
||||||
|
|
||||||
dt = parse_date_string("08-11-2024")
|
|
||||||
assert dt.year == 2024
|
|
||||||
assert dt.month == 11
|
|
||||||
assert dt.day == 8
|
|
||||||
|
|
||||||
def test_parse_date_string_invalid():
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
parse_date_string("invalid-date")
|
|
||||||
Loading…
Reference in New Issue