Compare commits

..

No commits in common. 'main' and 'rest-api' have entirely different histories.

@ -39,6 +39,9 @@ COPY --from=builder /app/.venv /app/.venv
# Copy application files
COPY . /app/
# Delete Tests for production
RUN rm -rf /app/tests/
# Add custom shell configuration to root's .bashrc
RUN echo "# Custom configurations added by Dockerfile" >> /root/.bashrc && \
echo "export APP_PATH=/app" >> /root/.bashrc && \

132
Jenkinsfile vendored

@ -2,104 +2,106 @@ pipeline {
agent any
environment {
// Replace with your Docker Hub username/organization
DOCKER_HUB_USERNAME = 'aimodocker'
// This creates DOCKER_AUTH_USR and DOCKER_AUTH_PSW
// Use credentials for Docker Hub
DOCKER_CREDENTIALS = credentials('aimodocker')
// Replace with your image name
IMAGE_NAME = 'lcca-service'
SERVICE_NAME = 'ahm-app'
// Replace with your docker compose service name
SERVICE_NAME = 'lcca-app'
// Variable for Git commit hash
GIT_COMMIT_HASH = ''
SECURITY_PREFIX = 'security'
// Initialize variables to be updated in script blocks
GIT_COMMIT_HASH = ""
IMAGE_TAG = ""
SECONDARY_TAG = ""
// Replace with the SSH credentials for development server
// SSH_CREDENTIALS = credentials('backend-server-digitaltwin')
// SSH_CREDENTIALS_USR = 'aimo'
// SSH_SERVER_IP = '192.168.1.82'
}
stages {
stage('Checkout & Setup') {
stage('Checkout') {
steps {
script {
// Checkout and get git commit hash
checkout scm
GIT_COMMIT_HASH = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
// Use env.BRANCH_NAME or logic to handle detached HEAD if necessary
def branch = env.BRANCH_NAME ?: 'unknown'
echo "Current Branch: ${branch}"
if (branch == 'main') {
IMAGE_TAG = GIT_COMMIT_HASH
SECONDARY_TAG = 'latest'
} else if (branch == 'lcca_security') {
IMAGE_TAG = "${SECURITY_PREFIX}-${GIT_COMMIT_HASH}"
SECONDARY_TAG = "${SECURITY_PREFIX}-latest"
} else {
IMAGE_TAG = "temp-${GIT_COMMIT_HASH}"
SECONDARY_TAG = "" // Ensure it's empty for other branches
}
echo "Primary Tag: ${IMAGE_TAG}"
def commitHash = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
GIT_COMMIT_HASH = commitHash
echo "Git commit hash: ${GIT_COMMIT_HASH}"
}
}
}
// stage('Run Unit Tests') {
// steps {
// sh 'poetry run pytest tests/unit'
// }
// }
// stage('Run E2E Tests') {
// steps {
// sh 'poetry run pytest tests/e2e'
// }
// }
stage('Build & Tag') {
stage('Docker Login') {
steps {
script {
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
sh "docker build -t ${fullImageName}:${IMAGE_TAG} ."
if (SECONDARY_TAG) {
sh "docker tag ${fullImageName}:${IMAGE_TAG} ${fullImageName}:${SECONDARY_TAG}"
}
}
sh '''
echo ${DOCKER_CREDENTIALS_PSW} | docker login -u ${DOCKER_CREDENTIALS_USR} --password-stdin
'''
}
}
stage('Docker Login & Push') {
stage('Build Docker Image') {
steps {
script {
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
withCredentials([usernamePassword(credentialsId: 'aimodocker', passwordVariable: 'DOCKER_PSW', usernameVariable: 'DOCKER_USR')]) {
// Use single quotes to prevent Groovy from interpolating the secret in logs
sh 'echo $DOCKER_PSW | docker login -u $DOCKER_USR --password-stdin'
sh "docker push ${fullImageName}:${IMAGE_TAG}"
if (SECONDARY_TAG) {
sh "docker push ${fullImageName}:${SECONDARY_TAG}"
// Build with commit hash tag
sh """
docker build -t ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest .
docker tag ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
"""
}
}
}
stage('Push to Docker Hub') {
steps {
sh """
# Push both tags
docker push ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
docker push ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest
"""
}
}
// stage('Deploy') {
// steps {
// script {
// sshagent(credentials: ['backend-server-digitaltwin']) {
// sh """
// ssh -o StrictHostKeyChecking=no -p 12558 aimo@0.tcp.ap.ngrok.io '
// cd ~/digital-twin/Docker
// sudo docker compose pull ${SERVICE_NAME}
// sudo docker compose up -d ${SERVICE_NAME}
// '
// """
// }
// }
// }
// }
}
post {
always {
script {
// Clean up
sh 'docker logout'
def fullImageName = "${DOCKER_HUB_USERNAME}/${IMAGE_NAME}"
// Clean up images to save agent disk space
sh "docker rmi ${fullImageName}:${IMAGE_TAG} || true"
if (SECONDARY_TAG) {
sh "docker rmi ${fullImageName}:${SECONDARY_TAG} || true"
// Clean up local images
script {
try {
sh """
# Remove both tags
docker rmi ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:${GIT_COMMIT_HASH}
docker rmi ${DOCKER_HUB_USERNAME}/${IMAGE_NAME}:latest
"""
} catch (err) {
echo "Failed to clean up images: ${err}"
}
}
}
success {
echo "Successfully processed ${env.BRANCH_NAME}."
echo "Successfully built and pushed Docker image with tags: latest and ${GIT_COMMIT_HASH}"
}
failure {
echo 'Failed to build/push/deploy Docker image!'
}
}
}

@ -1,44 +0,0 @@
# Unit Testing Guide - be-lcca
This document provides instructions on how to set up and run unit tests for the **be-lcca** project.
## 1. Preparation
### Install Dependencies
Ensure you have all dependencies installed. This project uses `poetry`.
```bash
# Install dependencies
poetry install
```
## 2. Configuration
### Pytest Configuration
Ensure the `pytest.ini` file in the root directory points to the `unit` test folder:
```ini
[pytest]
testpaths = tests/unit
python_files = test_*.py
asyncio_mode = auto
```
## 3. Running Tests
### Run Unit Tests
To run all unit tests in the project:
```bash
poetry run pytest tests/unit
```
### Run Specific Unit Test File
```bash
poetry run pytest tests/unit/test_specific_feature.py
```
## 4. Best Practices
- **Isolation**: Ensure tests do not rely on a live database; use local data structures or mock objects.
- **Factory Boy**: Use factories for creating complex models in your tests.

@ -1,88 +0,0 @@
# Panduan Menjalankan Script Testing di BE LCCA Digital Twin
Proyek ini menggunakan **Pytest** sebagai framework pengujian. Infrastruktur testing terletak di direktori `tests/` dan dikonfigurasi untuk menangani sifat asynchronous dari aplikasi FastAPI serta isolasi database.
---
## **1. Persiapan Lingkungan (Environment Setup)**
Pastikan Anda berada di root direktori proyek dan environment sudah siap.
### **Opsi A: Menggunakan Virtual Environment (Direkomendasikan)**
Aktifkan `venv` sebelum menjalankan perintah apapun:
```bash
python -m venv venv
source venv/bin/activate
pip install poetry
poetry install
```
### **Opsi B: Menggunakan Poetry**
Jika Anda lebih suka menggunakan Poetry secara langsung tanpa aktivasi manual:
```bash
poetry run pytest
```
---
## **2. Menjalankan Pengujian**
| Tujuan | Perintah |
| :--- | :--- |
| **Jalankan Unit Tests** | `pytest tests/unit` |
| **Jalankan E2E Tests** | `pytest tests/e2e` |
| **Jalankan semua test** | `pytest` |
| **Tampilkan statement print** | `pytest -s` |
| **Berhenti di kegagalan pertama** | `pytest -x` |
| **Jalankan file spesifik** | `pytest tests/unit/test_example.py` |
> **Catatan**: Verbose output (`-v`) sudah aktif secara default di konfigurasi `pyproject.toml`.
---
## **3. Peringatan Penting (Caution for E2E Tests)**
⚠️ **PENTING**: Saat menjalankan pengujian **End-to-End (E2E)**, pastikan Anda menggunakan **Testing Database**.
* **JANGAN PERNAH** menjalankan E2E tests menggunakan database **Production** atau **Development**.
* Pengujian E2E seringkali melakukan operasi manipulasi data (create, update, delete) dan pembersihan database secara otomatis yang dapat mengakibatkan **kehilangan data permanen**.
* Selalu gunakan database terpisah (misalnya PostgreSQL instance khusus testing atau SQLite) yang aman untuk dihapus isinya sewaktu-waktu.
---
## **4. Gambaran Infrastruktur Testing**
Direktori `tests/` berisi beberapa utility script yang memudahkan proses testing:
* **`conftest.py`**: Berisi fixture global. Sudah terkonfigurasi dengan:
* `client`: `AsyncClient` untuk simulasi request API ke aplikasi FastAPI Anda.
* `setup_db`: Secara otomatis membuat dan menghapus database test (SQLite in-memory) untuk setiap sesi pengujian.
* **`factories.py`**: Menggunakan `factory-boy` untuk menghasilkan mock data untuk model Anda.
* **`database.py`**: Mengonfigurasi session database untuk kebutuhan pengujian.
---
## **5. Menulis Test Pertama Anda**
Agar `pytest` mengenali sebuah file sebagai test, file tersebut harus dinamai dengan format `test_*.py` atau `*_test.py`.
**Contoh (`tests/test_api.py`):**
```python
import pytest
@pytest.mark.asyncio
async def test_api_status(client):
"""Contoh pengujian menggunakan fixture 'client' dari conftest.py"""
response = await client.get("/")
assert response.status_code == 200
```
---
## **6. Tips Troubleshooting**
* **Masalah Module Path**: Jika Anda menemui error `ModuleNotFoundError`, jalankan test dengan menambahkan direktori saat ini ke `PYTHONPATH`:
```bash
export PYTHONPATH=$PYTHONPATH:.
pytest
```
* **Menjalankan Test yang Gagal Saja**: Untuk menghemat waktu, jalankan hanya test yang gagal pada sesi sebelumnya:
```bash
pytest --lf
```

@ -1,59 +0,0 @@
# Updated Equipment Acquisition & Simulation Algorithm
This document outlines the refactored logic for equipment acquisition cost calculation and simulation forecasting, implemented in February 2026.
## 1. Timeline Definitions
The simulation follows a strict temporal alignment to ensure consistency across the fleet:
| Parameter | Value | Description |
| :--- | :--- | :--- |
| **Base Year** | `2015` | The target year for all "Value of Money" (Net Present Value) calculations. |
| **Forecasting Start** | `2015` | The year from which future predictions and Economic Life reports begin. |
| **Calculation Start** | `2014` | The technical sequence start ($seq = 0$) used to establish an initial state. |
---
## 2. Capital Cost Adjustment (Value of Money)
To account for the time value of money, both the **Initial Acquisition Cost** and the **Replacement Cost** are normalized to the **2015 Base Year** using the project's inflation rate.
### 2.1 Adjustment Formula
The value of any cost $V$ at a specific $Year$ is adjusted to its equivalent value in $2015$ using the following formula:
$$V_{2015} = \frac{V_{Year}}{(1 + r)^{(Year - 2015)}}$$
Where:
- $V_{2015}$ = Adjusted value in 2015 terms.
- $V_{Year}$ = Raw cost recorded in the database or Maximo.
- $r$ = Inflation rate (from `lcc_ms_master`, defaults to $0.05$ if undefined).
- $Year$ = The year the cost was recorded ($Y_{acq}$ or $Y_{replace}$).
### 2.2 Total Acquisition Cost
The total capital cost $C_{total}$ stored in the master data is the sum of the adjusted initial cost and the adjusted first detected replacement cost:
$$C_{total} = \frac{C_{initial}}{(1+r)^{(Y_{acq} - 2015)}} + \frac{C_{replace}}{(1+r)^{(Y_{replace} - 2015)}}$$
---
## 3. Maintenance Cost Suppression Logic
A specific business rule is applied to prevent "double counting" or distorted maintenance records during major equipment replacement years:
### 3.1 Replacement Year Rule
In the **first year** where a `replace_cost > 0` is detected in Maximo ($Y_{replace}$):
- All **Material Costs** are set to $0.0$.
- All **Labor Costs** (and labor hours) are set to $0.0$.
### 3.2 Logic Rationale
The replacement cost is treated as a capital expenditure (CAPEX) that restarts the equipment's life cycle. Standard maintenance (OPEX) for that specific year is ignored because the replacement action supersedes regular repair tasks.
---
## 4. Implementation Reference
The logic is primarily contained in:
- `src/equipment/service.py`: `check_and_update_acquisition_data()` (Cost adjustments).
- `src/modules/equipment/insert_actual_data.py`: `query_data()` (Timeline and cost suppression).

184
poetry.lock generated

@ -1,20 +1,4 @@
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]]
name = "aiosqlite"
version = "0.22.1"
description = "asyncio bridge to the standard sqlite3 module"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb"},
{file = "aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650"},
]
[package.extras]
dev = ["attribution (==1.8.0)", "black (==25.11.0)", "build (>=1.2)", "coverage[toml] (==7.10.7)", "flake8 (==7.3.0)", "flake8-bugbear (==24.12.12)", "flit (==3.12.0)", "mypy (==1.19.0)", "ufmt (==2.8.0)", "usort (==1.0.8.post1)"]
docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.2)"]
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "annotated-types"
@ -22,7 +6,6 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@ -34,7 +17,6 @@ version = "4.8.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"},
{file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"},
@ -48,7 +30,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
trio = ["trio (>=0.26.1)"]
[[package]]
@ -57,8 +39,6 @@ version = "5.0.1"
description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version == \"3.10\""
files = [
{file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
{file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
@ -70,7 +50,6 @@ version = "0.30.0"
description = "An asyncio PostgreSQL driver"
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
files = [
{file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"},
{file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"},
@ -128,21 +107,8 @@ async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""}
[package.extras]
docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""]
test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""]
[[package]]
name = "backports-asyncio-runner"
version = "1.2.0"
description = "Backport of asyncio.Runner, a context manager that controls event loop life cycle."
optional = false
python-versions = "<3.11,>=3.8"
groups = ["main"]
markers = "python_version == \"3.10\""
files = [
{file = "backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"},
{file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"},
]
gssauth = ["gssapi", "sspilib"]
test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"]
[[package]]
name = "certifi"
@ -150,7 +116,6 @@ version = "2025.1.31"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
@ -162,7 +127,6 @@ version = "3.4.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
@ -264,7 +228,6 @@ version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
{file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
@ -279,8 +242,6 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
markers = "platform_system == \"Windows\" or sys_platform == \"win32\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@ -292,7 +253,6 @@ version = "1.3.1"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"},
{file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"},
@ -366,7 +326,6 @@ version = "0.12.1"
description = "Composable style cycles"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
@ -382,7 +341,6 @@ version = "1.2.18"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
groups = ["main"]
files = [
{file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"},
{file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"},
@ -392,7 +350,7 @@ files = [
wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"]
[[package]]
name = "dnspython"
@ -400,7 +358,6 @@ version = "2.7.0"
description = "DNS toolkit"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
{file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
@ -421,7 +378,6 @@ version = "2.2.0"
description = "A robust email address syntax and deliverability validation library."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
{file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
@ -437,7 +393,6 @@ version = "2.0.0"
description = "An implementation of lxml.xmlfile for the standard library"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"},
{file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"},
@ -449,8 +404,6 @@ version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version == \"3.10\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@ -465,7 +418,6 @@ version = "3.3.1"
description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca"},
{file = "factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0"},
@ -484,7 +436,6 @@ version = "30.10.0"
description = "Faker is a Python package that generates fake data for you."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "Faker-30.10.0-py3-none-any.whl", hash = "sha256:5f05ee92ddf0e1736d95dca41b2a16ee06d987b736fa4ddecdb047abf2e9024b"},
{file = "faker-30.10.0.tar.gz", hash = "sha256:c2e627d3becec67f7a45400d3670018b5abb3f0728b7dfaa06c135b7df1ce3fb"},
@ -500,7 +451,6 @@ version = "0.115.8"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"},
{file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"},
@ -527,7 +477,6 @@ version = "0.0.7"
description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4"},
{file = "fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e"},
@ -547,7 +496,6 @@ version = "4.56.0"
description = "Tools to manipulate font files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"},
{file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"},
@ -602,18 +550,18 @@ files = [
]
[package.extras]
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
graphite = ["lz4 (>=1.7.4.2)"]
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
interpolatable = ["munkres", "pycairo", "scipy"]
lxml = ["lxml (>=4.0)"]
pathops = ["skia-pathops (>=0.5.0)"]
plot = ["matplotlib"]
repacker = ["uharfbuzz (>=0.23.0)"]
symfont = ["sympy"]
type1 = ["xattr ; sys_platform == \"darwin\""]
type1 = ["xattr"]
ufo = ["fs (>=2.2.0,<3)"]
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
unicode = ["unicodedata2 (>=15.1.0)"]
woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
[[package]]
name = "greenlet"
@ -621,8 +569,6 @@ version = "3.1.1"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
files = [
{file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
@ -709,7 +655,6 @@ version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@ -721,7 +666,6 @@ version = "1.0.7"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
{file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
@ -743,7 +687,6 @@ version = "0.6.4"
description = "A collection of framework independent HTTP protocol utils."
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
files = [
{file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"},
{file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"},
@ -799,7 +742,6 @@ version = "0.27.2"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
{file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
@ -813,7 +755,7 @@ idna = "*"
sniffio = "*"
[package.extras]
brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
brotli = ["brotli", "brotlicffi"]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
@ -825,7 +767,6 @@ version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@ -840,7 +781,6 @@ version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@ -852,7 +792,6 @@ version = "3.1.5"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
{file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
@ -870,7 +809,6 @@ version = "1.4.2"
description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
@ -882,7 +820,6 @@ version = "1.4.8"
description = "A fast implementation of the Cassowary constraint solver"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"},
{file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"},
@ -972,7 +909,6 @@ version = "4.0.1"
description = "Rate limiting utilities"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "limits-4.0.1-py3-none-any.whl", hash = "sha256:67667e669f570cf7be4e2c2bc52f763b3f93bdf66ea945584360bc1a3f251901"},
{file = "limits-4.0.1.tar.gz", hash = "sha256:a54f5c058dfc965319ae3ee78faf222294659e371b46d22cd7456761f7e46d5a"},
@ -984,9 +920,9 @@ packaging = ">=21,<25"
typing-extensions = "*"
[package.extras]
all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1) ; python_version < \"3.11\"", "emcache (>=1) ; python_version >= \"3.11\" and python_version < \"3.13.0\"", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"]
all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1)", "emcache (>=1)", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"]
async-etcd = ["aetcd"]
async-memcached = ["emcache (>=0.6.1) ; python_version < \"3.11\"", "emcache (>=1) ; python_version >= \"3.11\" and python_version < \"3.13.0\""]
async-memcached = ["emcache (>=0.6.1)", "emcache (>=1)"]
async-mongodb = ["motor (>=3,<4)"]
async-redis = ["coredis (>=3.4.0,<5)"]
etcd = ["etcd3"]
@ -1001,7 +937,6 @@ version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
@ -1026,7 +961,6 @@ version = "3.0.2"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
@ -1097,7 +1031,6 @@ version = "3.10.0"
description = "Python plotting package"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"},
{file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"},
@ -1155,7 +1088,6 @@ version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
@ -1167,7 +1099,6 @@ version = "2.2.3"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"},
{file = "numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"},
@ -1232,7 +1163,6 @@ version = "1.0.0"
description = "Simple financial functions"
optional = false
python-versions = ">=3.5"
groups = ["main"]
files = [
{file = "numpy-financial-1.0.0.tar.gz", hash = "sha256:f84341bc62b2485d5604a73d5fac7e91975b4b9cd5f4a5a9cf608902ea00cb40"},
{file = "numpy_financial-1.0.0-py3-none-any.whl", hash = "sha256:bae534b357516f12258862d1f0181d911032d0467f215bfcd1c264b4da579047"},
@ -1247,7 +1177,6 @@ version = "3.1.5"
description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"},
{file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"},
@ -1262,7 +1191,6 @@ version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
@ -1274,7 +1202,6 @@ version = "2.2.3"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
{file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
@ -1361,7 +1288,6 @@ version = "1.0.1"
description = "A Python package for describing statistical models and for building design matrices."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c"},
{file = "patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4"},
@ -1379,7 +1305,6 @@ version = "11.1.0"
description = "Python Imaging Library (Fork)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"},
{file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"},
@ -1459,7 +1384,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline
fpx = ["olefile"]
mic = ["olefile"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"]
typing = ["typing-extensions ; python_version < \"3.10\""]
typing = ["typing-extensions"]
xmp = ["defusedxml"]
[[package]]
@ -1468,7 +1393,6 @@ version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@ -1484,7 +1408,6 @@ version = "2.9.10"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
@ -1533,7 +1456,6 @@ files = [
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
@ -1562,7 +1484,6 @@ version = "2.10.6"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
@ -1575,7 +1496,7 @@ typing-extensions = ">=4.12.2"
[package.extras]
email = ["email-validator (>=2.0.0)"]
timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
timezone = ["tzdata"]
[[package]]
name = "pydantic-core"
@ -1583,7 +1504,6 @@ version = "2.27.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
@ -1696,7 +1616,6 @@ version = "2.19.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
{file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
@ -1711,7 +1630,6 @@ version = "3.2.1"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"},
{file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"},
@ -1726,7 +1644,6 @@ version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
{file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
@ -1743,34 +1660,12 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "1.3.0"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"},
{file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"},
]
[package.dependencies]
backports-asyncio-runner = {version = ">=1.1,<2", markers = "python_version < \"3.11\""}
pytest = ">=8.2,<10"
typing-extensions = {version = ">=4.12", markers = "python_version < \"3.13\""}
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@ -1785,7 +1680,6 @@ version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@ -1800,7 +1694,6 @@ version = "0.0.20"
description = "A streaming multipart parser for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"},
{file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"},
@ -1812,7 +1705,6 @@ version = "2024.2"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
{file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
@ -1824,7 +1716,6 @@ version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
@ -1887,7 +1778,6 @@ version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@ -1909,7 +1799,6 @@ version = "13.9.4"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
files = [
{file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
{file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
@ -1929,7 +1818,6 @@ version = "0.13.2"
description = "Rich toolkit for building command-line applications"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61"},
{file = "rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3"},
@ -1946,7 +1834,6 @@ version = "1.6.1"
description = "A set of python modules for machine learning and data mining"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"},
{file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"},
@ -2001,7 +1888,6 @@ version = "1.15.1"
description = "Fundamental algorithms for scientific computing in Python"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "scipy-1.15.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:c64ded12dcab08afff9e805a67ff4480f5e69993310e093434b10e85dc9d43e1"},
{file = "scipy-1.15.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5b190b935e7db569960b48840e5bef71dc513314cc4e79a1b7d14664f57fd4ff"},
@ -2051,7 +1937,7 @@ numpy = ">=1.23.5,<2.5"
[package.extras]
dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
name = "shellingham"
@ -2059,7 +1945,6 @@ version = "1.5.4"
description = "Tool to Detect Surrounding Shell"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
@ -2071,7 +1956,6 @@ version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
@ -2083,7 +1967,6 @@ version = "0.1.9"
description = "A rate limiting extension for Starlette and Fastapi"
optional = false
python-versions = ">=3.7,<4.0"
groups = ["main"]
files = [
{file = "slowapi-0.1.9-py3-none-any.whl", hash = "sha256:cfad116cfb84ad9d763ee155c1e5c5cbf00b0d47399a769b227865f5df576e36"},
{file = "slowapi-0.1.9.tar.gz", hash = "sha256:639192d0f1ca01b1c6d95bf6c71d794c3a9ee189855337b4821f7f457dddad77"},
@ -2101,7 +1984,6 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@ -2113,7 +1995,6 @@ version = "2.0.37"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"},
{file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"},
@ -2209,7 +2090,6 @@ version = "0.13.0"
description = "A library to filter SQLAlchemy queries."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "sqlalchemy-filters-0.13.0.tar.gz", hash = "sha256:40f2daead93c4db2409cf5e5abf67a420179f9e5c1df5c15fa1b474f6533b105"},
{file = "sqlalchemy_filters-0.13.0-py3-none-any.whl", hash = "sha256:aa4595b90d152eb76fa312a3e03d5d675f0c2e16762751f340f5449468689d9a"},
@ -2230,7 +2110,6 @@ version = "0.41.2"
description = "Various utility functions for SQLAlchemy."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"},
{file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"},
@ -2248,8 +2127,8 @@ intervals = ["intervals (>=0.7.1)"]
password = ["passlib (>=1.6,<2.0)"]
pendulum = ["pendulum (>=2.0.5)"]
phone = ["phonenumbers (>=5.9.2)"]
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo ; python_version < \"3.9\"", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo ; python_version < \"3.9\"", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
timezone = ["python-dateutil"]
url = ["furl (>=0.4.1)"]
@ -2259,7 +2138,6 @@ version = "0.45.3"
description = "The little ASGI library that shines."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"},
{file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"},
@ -2277,7 +2155,6 @@ version = "0.14.4"
description = "Statistical computations and models for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "statsmodels-0.14.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a62f1fc9086e4b7ee789a6f66b3c0fc82dd8de1edda1522d30901a0aa45e42b"},
{file = "statsmodels-0.14.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46ac7ddefac0c9b7b607eed1d47d11e26fe92a1bc1f4d9af48aeed4e21e87981"},
@ -2320,7 +2197,7 @@ scipy = ">=1.8,<1.9.2 || >1.9.2"
[package.extras]
build = ["cython (>=3.0.10)"]
develop = ["colorama", "cython (>=3.0.10)", "cython (>=3.0.10,<4)", "flake8", "isort", "joblib", "matplotlib (>=3)", "pytest (>=7.3.0,<8)", "pytest-cov", "pytest-randomly", "pytest-xdist", "pywinpty ; os_name == \"nt\"", "setuptools-scm[toml] (>=8.0,<9.0)"]
develop = ["colorama", "cython (>=3.0.10)", "cython (>=3.0.10,<4)", "flake8", "isort", "joblib", "matplotlib (>=3)", "pytest (>=7.3.0,<8)", "pytest-cov", "pytest-randomly", "pytest-xdist", "pywinpty", "setuptools-scm[toml] (>=8.0,<9.0)"]
docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "numpydoc", "pandas-datareader", "sphinx"]
[[package]]
@ -2329,7 +2206,6 @@ version = "3.5.0"
description = "threadpoolctl"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"},
{file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"},
@ -2341,8 +2217,6 @@ version = "2.2.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version == \"3.10\""
files = [
{file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
{file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
@ -2384,7 +2258,6 @@ version = "0.15.1"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"},
{file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"},
@ -2402,7 +2275,6 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@ -2414,7 +2286,6 @@ version = "2025.1"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
groups = ["main"]
files = [
{file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"},
{file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"},
@ -2426,14 +2297,13 @@ version = "2.3.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
{file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]
[package.extras]
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@ -2444,7 +2314,6 @@ version = "0.32.1"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"},
{file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"},
@ -2458,12 +2327,12 @@ httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standar
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
[package.extras]
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "uvloop"
@ -2471,8 +2340,6 @@ version = "0.21.0"
description = "Fast implementation of asyncio event loop on top of libuv"
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\""
files = [
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"},
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"},
@ -2524,7 +2391,6 @@ version = "1.0.4"
description = "Simple, modern and high performance file watching and code reload in python."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"},
{file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"},
@ -2608,7 +2474,6 @@ version = "14.2"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"},
{file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"},
@ -2687,7 +2552,6 @@ version = "1.17.2"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"},
{file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"},
@ -2771,6 +2635,6 @@ files = [
]
[metadata]
lock-version = "2.1"
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "a67faa975147cf6652ac87a3767b499a2c0e344cb4d3f7c47d0526e81fe2bbd0"
content-hash = "8d70f1df8b24fbd51e128ed36fbf43c4ccfdcd3b7dbd1f0f718870cab0c4d568"

@ -29,12 +29,6 @@ pandas = "^2.2.3"
numpy-financial = "^1.0.0"
numpy = "^2.2.3"
statsmodels = "^0.14.4"
pytest-asyncio = "^1.3.0"
aiosqlite = "^0.22.1"
[tool.pytest.ini_options]
asyncio_default_fixture_loop_scope = "function"
addopts = "-v"
[build-system]

@ -1,4 +0,0 @@
[pytest]
testpaths = tests/unit
python_files = test_*.py
asyncio_mode = auto

@ -33,23 +33,6 @@ async def get_yeardatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[AcquisitionCostDataPagination])
async def get_yeardatas_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all acquisition_cost_data for export."""
common["all"] = True
get_acquisition_cost_data = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=get_acquisition_cost_data,
message="All Acquisition Cost Data retrieved successfully",
)
@router.get("/{acquisition_cost_data_id}", response_model=StandardResponse[AcquisitionCostDataRead])
async def get_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
@ -73,7 +56,7 @@ async def create_acquisition_cost_data(
return StandardResponse(data=acquisition_cost_data, message="Data created successfully")
@router.post("/update/{acquisition_cost_data_id}", response_model=StandardResponse[AcquisitionCostDataRead])
@router.put("/{acquisition_cost_data_id}", response_model=StandardResponse[AcquisitionCostDataRead])
async def update_acquisition_cost_data(
db_session: DbSession,
acquisition_cost_data_id: str,
@ -97,7 +80,7 @@ async def update_acquisition_cost_data(
)
@router.post("/delete/{acquisition_cost_data_id}", response_model=StandardResponse[AcquisitionCostDataRead])
@router.delete("/{acquisition_cost_data_id}", response_model=StandardResponse[AcquisitionCostDataRead])
async def delete_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
acquisition_cost_data = await get(db_session=db_session, acquisition_cost_data_id=acquisition_cost_data_id)

@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination
class AcquisitionCostDataBase(DefaultBase):
category_no: Optional[str] = Field(None)
name: Optional[str] = Field(None)
cost_unit_3_n_4: Optional[float] = Field(None)
cost_unit_3: Optional[float] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
category_no: Optional[str] = Field(None, nullable=True)
name: Optional[str] = Field(None, nullable=True)
cost_unit_3_n_4: Optional[float] = Field(None, nullable=True)
cost_unit_3: Optional[float] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class AcquisitionCostDataCreate(AcquisitionCostDataBase):
@ -34,4 +34,13 @@ class AcquisitionCostDataPagination(Pagination):
class ListQueryParams(CommonParams):
pass
items_per_page: Optional[int] = Field(
default=5,
ge=1,
le=1000,
description="Number of items per page"
)
search: Optional[str] = Field(
default=None,
description="Search keyword"
)

@ -28,17 +28,6 @@ class JWTBearer(HTTPBearer):
)
request.state.user = user_info
from src.context import set_user_id, set_username, set_role
if hasattr(user_info, "user_id"):
set_user_id(str(user_info.user_id))
if hasattr(user_info, "username"):
set_username(user_info.username)
elif hasattr(user_info, "name"):
set_username(user_info.name)
if hasattr(user_info, "role"):
set_role(user_info.role)
return user_info
else:
raise HTTPException(status_code=403, detail="Invalid authorization code.")
@ -57,7 +46,7 @@ class JWTBearer(HTTPBearer):
return UserBase(**user_data["data"])
except Exception as e:
logging.error(f"Token verification error: {str(e)}")
print(f"Token verification error: {str(e)}")
return None
@ -78,76 +67,3 @@ async def get_token(request: Request):
CurrentUser = Annotated[UserBase, Depends(get_current_user)]
Token = Annotated[str, Depends(get_token)]
import httpx
import logging
from typing import Dict, Any
log = logging.getLogger(__name__)
AUTH_NOTIFY_ENDPOINT = f"{config.AUTH_SERVICE_API}/admin/notify-limit"
async def notify_admin_on_rate_limit(
endpoint_name: str,
ip_address: str,
method: str = "POST",
cooldown: int = 900,
timeout: int = 5
) -> Dict[str, Any]:
"""
Kirim notifikasi ke admin via be-auth service ketika rate limit terlampaui.
Async version - gunakan di async context.
"""
payload = {
"endpoint_name": endpoint_name,
"ip_address": ip_address,
"method": method,
"cooldown": cooldown,
}
try:
async with httpx.AsyncClient(timeout=timeout) as client:
response = await client.post(AUTH_NOTIFY_ENDPOINT, json=payload)
response.raise_for_status()
result = response.json()
log.info(f"Notifikasi admin sent | Endpoint: {endpoint_name}")
return result
except Exception as e:
log.error(f"Error notifying admin: {str(e)}")
return {"status": False, "message": str(e), "data": payload}
def notify_admin_on_rate_limit_sync(
endpoint_name: str,
ip_address: str,
method: str = "POST",
cooldown: int = 900,
timeout: int = 5
) -> Dict[str, Any]:
"""
Kirim notifikasi ke admin via be-auth service.
Sync version - gunakan di exception handler atau sync context.
RECOMMENDED untuk use case ini.
"""
payload = {
"endpoint_name": endpoint_name,
"ip_address": ip_address,
"method": method,
"cooldown": cooldown,
}
try:
response = httpx.post(AUTH_NOTIFY_ENDPOINT, json=payload, timeout=timeout)
response.raise_for_status()
result = response.json()
log.info(f"Notifikasi admin sent | Endpoint: {endpoint_name}")
return result
except Exception as e:
log.error(f"Error notifying admin: {str(e)}")
return {"status": False, "message": str(e), "data": payload}

@ -51,7 +51,7 @@ def get_config():
config = get_config()
LOG_LEVEL = config("LOG_LEVEL", default="INFO")
LOG_LEVEL = config("LOG_LEVEL", default=logging.WARNING)
ENV = config("ENV", default="local")
PORT = config("PORT", cast=int, default=8000)
HOST = config("HOST", default="localhost")

@ -2,18 +2,8 @@ from contextvars import ContextVar
from typing import Optional, Final
REQUEST_ID_CTX_KEY: Final[str] = "request_id"
USER_ID_CTX_KEY: Final[str] = "user_id"
USERNAME_CTX_KEY: Final[str] = "username"
ROLE_CTX_KEY: Final[str] = "role"
_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(
REQUEST_ID_CTX_KEY, default=None)
_user_id_ctx_var: ContextVar[Optional[str]] = ContextVar(
USER_ID_CTX_KEY, default=None)
_username_ctx_var: ContextVar[Optional[str]] = ContextVar(
USERNAME_CTX_KEY, default=None)
_role_ctx_var: ContextVar[Optional[str]] = ContextVar(
ROLE_CTX_KEY, default=None)
def get_request_id() -> Optional[str]:
@ -26,27 +16,3 @@ def set_request_id(request_id: str):
def reset_request_id(token):
_request_id_ctx_var.reset(token)
def get_user_id() -> Optional[str]:
return _user_id_ctx_var.get()
def set_user_id(user_id: str):
return _user_id_ctx_var.set(user_id)
def get_username() -> Optional[str]:
return _username_ctx_var.get()
def set_username(username: str):
return _username_ctx_var.set(username)
def get_role() -> Optional[str]:
return _role_ctx_var.get()
def set_role(role: str):
return _role_ctx_var.set(role)

@ -2,8 +2,8 @@
from starlette.requests import Request
from sqlalchemy_utils import get_mapper
from sqlalchemy.sql.expression import true
from sqlalchemy.orm import object_session, sessionmaker, Session, declarative_base
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import object_session, sessionmaker, Session
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy import create_engine, inspect
from pydantic import BaseModel
from fastapi import Depends

@ -1,5 +1,5 @@
import logging
from typing import Annotated, List, Optional
from typing import Annotated, List
from sqlalchemy import desc, func, or_, Select
from sqlalchemy_filters import apply_pagination
@ -18,11 +18,9 @@ QueryStr = constr(pattern=r"^[ -~]+$", min_length=1)
def common_parameters(
db_session: DbSession, # type: ignore
current_user: Optional[str] = Query(None, alias="currentUser"), # type: ignore
current_user_snake: Optional[str] = Query(None, alias="current_user"), # type: ignore
current_user: QueryStr = Query(None, alias="currentUser"), # type: ignore
page: int = Query(1, gt=0, lt=2147483647),
items_per_page: Optional[int] = Query(None, alias="items_per_page", gt=-2, lt=2147483647),
items_per_page_camel: Optional[int] = Query(None, alias="itemsPerPage", gt=-2, lt=2147483647),
items_per_page: int = Query(5, alias="itemsPerPage", gt=-2, lt=2147483647),
query_str: QueryStr = Query(None, alias="q"), # type: ignore
filter_spec: QueryStr = Query(None, alias="filter"), # type: ignore
sort_by: List[str] = Query([], alias="sortBy[]"),
@ -30,23 +28,15 @@ def common_parameters(
all: int = Query(0),
# role: QueryStr = Depends(get_current_role),
):
# Support both snake_case and camelCase for pagination size
final_items_per_page = items_per_page_camel if items_per_page_camel is not None else (
items_per_page if items_per_page is not None else 5
)
# Support both snake_case and camelCase for current user
final_current_user = current_user or current_user_snake
return {
"db_session": db_session,
"page": page,
"items_per_page": final_items_per_page,
"items_per_page": items_per_page,
"query_str": query_str,
"filter_spec": filter_spec,
"sort_by": sort_by,
"descending": descending,
"current_user": final_current_user,
"current_user": current_user,
# "role": role,
"all": bool(all),
}
@ -144,7 +134,7 @@ async def search_filter_sort_paginate(
# Get total count
count_query = Select(func.count()).select_from(query.subquery())
total = await db_session.scalar(count_query)
if all or items_per_page == -1:
if all:
result = await db_session.execute(query)
items = _extract_result_items(result)
return {

@ -62,23 +62,6 @@ async def get_equipments(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[EquipmentPagination])
async def get_equipments_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all equipment for export."""
common["all"] = True
equipment_data = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=equipment_data,
message="All Equipment Data retrieved successfully",
)
@router.get("/maximo/{assetnum}", response_model=StandardResponse[List[dict]])
@ -92,7 +75,7 @@ async def get_maximo_record_by_assetnum(db_session: CollectorDbSession, assetnum
)
@router.get("/simulate/{assetnum}")
async def simulate_equipment(db_session: DbSession, assetnum: str, token: Token):
async def simulate_equipment(db_session: DbSession, assetnum: str):
"""Stream progress events while running the simulation (prediksi + EAC).
This endpoint returns Server-Sent Events (SSE). Each event's `data` is
@ -115,7 +98,7 @@ async def simulate_equipment(db_session: DbSession, assetnum: str, token: Token)
yield f"data: {json.dumps({'status':'started','step':'prediksi','message':'Menghitung prediksi'})}\n\n"
try:
prediksi = await prediksi_main(assetnum=assetnum, token=token)
prediksi = await prediksi_main(assetnum=assetnum)
except Exception as exc:
# send error event and stop
yield f"data: {json.dumps({'status':'error','step':'prediksi','message':str(exc)})}\n\n"
@ -157,7 +140,7 @@ async def simulate_equipment(db_session: DbSession, assetnum: str, token: Token)
@router.get("/simulate-all")
async def simulate_all_equipment(db_session: DbSession, token: Token):
async def simulate_all_equipment(db_session: DbSession):
"""Run simulation (prediksi + EAC) for ALL equipment.
Returns SSE stream of progress.
"""
@ -167,34 +150,34 @@ async def simulate_all_equipment(db_session: DbSession, token: Token):
try:
assetnums = await get_all_assetnums(db_session=db_session)
except Exception as e:
yield f"data: {json.dumps({'status':'error', 'message': f'Failed to fetch assetnums: {str(e)}'})}\n\n"
yield f"data: {json.dumps({'status':'error', 'message': f'Failed to fetch assetnums: {str(e)}'})}\\n\\n"
return
total = len(assetnums)
yield f"data: {json.dumps({'status':'started', 'message': f'Simulasi dimulai untuk {total} asset'})}\n\n"
yield f"data: {json.dumps({'status':'started', 'message': f'Simulasi dimulai untuk {total} asset'})}\\n\\n"
success_count = 0
error_count = 0
for idx, assetnum in enumerate(assetnums, start=1):
yield f"data: {json.dumps({'status':'working', 'step':f'Proses {idx}/{total}', 'assetnum': assetnum})}\n\n"
yield f"data: {json.dumps({'status':'working', 'step':f'Proses {idx}/{total}', 'assetnum': assetnum})}\\n\\n"
try:
# Update acquisition year and target year
await update_initial_simulation_data(db_session=db_session, assetnum=assetnum)
# Prediksi
await prediksi_main(assetnum=assetnum, token=token)
await prediksi_main(assetnum=assetnum)
# EAC
eac = Eac()
eac.hitung_eac_equipment(assetnum=assetnum)
success_count += 1
except Exception as e:
error_count += 1
yield f"data: {json.dumps({'status':'partial_error', 'assetnum': assetnum, 'message': str(e)})}\n\n"
yield f"data: {json.dumps({'status':'partial_error', 'assetnum': assetnum, 'message': str(e)})}\\n\\n"
continue
yield f"data: {json.dumps({'status':'done', 'message':f'Simulasi selesai. Success: {success_count}, Errors: {error_count}'})}\n\n"
yield f"data: {json.dumps({'status':'done', 'message':f'Simulasi selesai. Success: {success_count}, Errors: {error_count}'})}\\n\\n"
headers = {
"Content-type": "text/event-stream",
@ -228,18 +211,6 @@ async def get_calculated_top_10_replacement_priorities(db_session: DbSession, co
message="Top 10 Replacement Priorities Data retrieved successfully",
)
@router.get(
"/top-10-replacement-priorities-export-all",
response_model=StandardResponse[EquipmentTop10Pagination],
)
async def get_calculated_top_10_replacement_priorities_all(db_session: DbSession, common: CommonParameters):
common["all"] = True
equipment_data = await get_top_10_replacement_priorities(db_session=db_session, common=common)
return StandardResponse(
data=equipment_data,
message="All Replacement Priorities Data retrieved successfully",
)
@router.get(
"/top-10-economic-life",
response_model=StandardResponse[EquipmentTop10Pagination],
@ -253,18 +224,6 @@ async def get_calculated_top_10_economic_life(db_session: DbSession, common: Com
message="Top 10 Economic Life Data retrieved successfully",
)
@router.get(
"/top-10-economic-life-export-all",
response_model=StandardResponse[EquipmentTop10Pagination],
)
async def get_calculated_top_10_economic_life_all(db_session: DbSession, common: CommonParameters):
common["all"] = True
equipment_data = await get_top_10_economic_life(db_session=db_session, common=common)
return StandardResponse(
data=equipment_data,
message="All Economic Life Data retrieved successfully",
)
@router.get("/tree", response_model=StandardResponse[EquipmentRead])
async def get_equipment_tree():
@ -340,7 +299,7 @@ async def create_equipment(
return StandardResponse(data=equipment, message="Data created successfully")
@router.post("/update/{assetnum}", response_model=StandardResponse[EquipmentDataMaster])
@router.put("/{assetnum}", response_model=StandardResponse[EquipmentDataMaster])
async def update_equipment(
db_session: DbSession,
assetnum: str,
@ -368,7 +327,7 @@ async def update_equipment(
)
@router.post("/delete/{equipment_id}", response_model=StandardResponse[EquipmentDataMaster])
@router.delete("/{equipment_id}", response_model=StandardResponse[EquipmentDataMaster])
async def delete_equipment(db_session: DbSession, equipment_id: str):
equipment = await get_by_id(db_session=db_session, equipment_id=equipment_id)

@ -8,95 +8,87 @@ from src.models import CommonParams, DefaultBase, Pagination
MAX_PRICE = 1_000_000_000_000_000 # thousands of trillion
class EquipmentBase(DefaultBase):
assetnum: Optional[str] = Field(None)
acquisition_year: Optional[int] = Field(None)
acquisition_cost: Optional[float] = Field(None, le=MAX_PRICE)
capital_cost_record_time: Optional[int] = Field(None)
design_life: Optional[int] = Field(None)
forecasting_start_year: Optional[int] = Field(None)
forecasting_target_year: Optional[int] = Field(None)
manhours_rate: Optional[float] = Field(None)
harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE)
minimum_eac_seq: Optional[int] = Field(None)
minimum_eac_year: Optional[int] = Field(None)
minimum_eac: Optional[float] = Field(None, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None)
efdh_equivalent_forced_derated_hours: Optional[float] = Field(None)
foh_forced_outage_hours: Optional[float] = Field(None)
category_no: Optional[str] = Field(None)
proportion: Optional[float] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
assetnum: Optional[str] = Field(None, nullable=True)
acquisition_year: Optional[int] = Field(None, nullable=True)
acquisition_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
capital_cost_record_time: Optional[int] = Field(None, nullable=True)
design_life: Optional[int] = Field(None, nullable=True)
forecasting_start_year: Optional[int] = Field(None, nullable=True)
forecasting_target_year: Optional[int] = Field(None, nullable=True)
manhours_rate: Optional[float] = Field(None, nullable=True)
harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_eac_seq: Optional[int] = Field(None, nullable=True)
minimum_eac_year: Optional[int] = Field(None, nullable=True)
minimum_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None, nullable=True)
efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, nullable=True)
foh_forced_outage_hours: Optional[float] = Field(None, nullable=True)
category_no: Optional[str] = Field(None, nullable=True)
proportion: Optional[float] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class EquipmentMasterBase(DefaultBase):
id: Optional[UUID] = Field(None)
name: Optional[str] = Field(None)
parent_id: Optional[UUID] = Field(None)
equipment_tree_id: Optional[UUID] = Field(None)
category_id: Optional[UUID] = Field(None)
system_tag: Optional[str] = Field(None)
assetnum: Optional[str] = Field(None)
location_tag: Optional[str] = Field(None)
image_name: Optional[str] = Field(None)
description: Optional[str] = Field(None)
location_tag: Optional[str] = Field(None, nullable=True)
assetnum: Optional[str] = Field(None, nullable=True)
name: Optional[str] = Field(None, nullable=True)
class MasterBase(DefaultBase):
assetnum: Optional[str] = Field(None)
tahun: Optional[int] = Field(None)
seq: Optional[int] = Field(None)
is_actual: Optional[float] = Field(None)
raw_cm_interval: Optional[float] = Field(None)
raw_cm_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_cm_labor_time: Optional[float] = Field(None)
raw_cm_labor_human: Optional[float] = Field(None)
raw_pm_interval: Optional[float] = Field(None)
raw_pm_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_pm_labor_time: Optional[float] = Field(None)
raw_pm_labor_human: Optional[float] = Field(None)
raw_predictive_interval: Optional[float] = Field(None)
raw_predictive_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_predictive_labor_time: Optional[float] = Field(None)
raw_predictive_labor_human: Optional[float] = Field(None)
raw_oh_interval: Optional[float] = Field(None)
raw_oh_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_oh_labor_time: Optional[float] = Field(None)
raw_oh_labor_human: Optional[float] = Field(None)
raw_project_task_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_loss_output_MW: Optional[float] = Field(None)
raw_loss_output_price: Optional[float] = Field(None, le=MAX_PRICE)
raw_operational_cost: Optional[float] = Field(None, le=MAX_PRICE)
raw_maintenance_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_cm_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_cm_labor_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_pm_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_pm_labor_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_predictive_labor_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_oh_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_oh_labor_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_project_material_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_lost_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_operation_cost: Optional[float] = Field(None, le=MAX_PRICE)
rc_maintenance_cost: Optional[float] = Field(None, le=MAX_PRICE)
asset_criticality: Optional[float] = Field(None)
rc_total_cost: Optional[float] = Field(None, le=MAX_PRICE)
eac_npv: Optional[float] = Field(None, le=MAX_PRICE)
eac_annual_mnt_cost: Optional[float] = Field(None, le=MAX_PRICE)
eac_annual_acq_cost: Optional[float] = Field(None, le=MAX_PRICE)
eac_disposal_cost: Optional[float] = Field(None, le=MAX_PRICE)
eac_eac: Optional[float] = Field(None, le=MAX_PRICE)
efdh_equivalent_forced_derated_hours: Optional[float] = Field(None)
foh_forced_outage_hours: Optional[float] = Field(None)
category_no: Optional[str] = Field(None)
proportion: Optional[float] = Field(None)
assetnum: Optional[str] = Field(None, nullable=True)
tahun: Optional[int] = Field(None, nullable=True)
seq: Optional[int] = Field(None, nullable=True)
is_actual: Optional[float] = Field(None, nullable=True)
raw_cm_interval: Optional[float] = Field(None, nullable=True)
raw_cm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_cm_labor_time: Optional[float] = Field(None, nullable=True)
raw_cm_labor_human: Optional[float] = Field(None, nullable=True)
raw_pm_interval: Optional[float] = Field(None, nullable=True)
raw_pm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_pm_labor_time: Optional[float] = Field(None, nullable=True)
raw_pm_labor_human: Optional[float] = Field(None, nullable=True)
raw_predictive_interval: Optional[float] = Field(None, nullable=True)
raw_predictive_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_predictive_labor_time: Optional[float] = Field(None, nullable=True)
raw_predictive_labor_human: Optional[float] = Field(None, nullable=True)
raw_oh_interval: Optional[float] = Field(None, nullable=True)
raw_oh_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_oh_labor_time: Optional[float] = Field(None, nullable=True)
raw_oh_labor_human: Optional[float] = Field(None, nullable=True)
raw_project_task_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_loss_output_MW: Optional[float] = Field(None, nullable=True)
raw_loss_output_price: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_operational_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
raw_maintenance_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_cm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_cm_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_pm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_pm_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_predictive_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_oh_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_oh_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_project_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_lost_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_operation_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
rc_maintenance_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
asset_criticality: Optional[float] = Field(None, nullable=True)
rc_total_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_annual_mnt_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_annual_acq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_disposal_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, nullable=True)
foh_forced_outage_hours: Optional[float] = Field(None, nullable=True)
category_no: Optional[str] = Field(None, nullable=True)
proportion: Optional[float] = Field(None, nullable=True)
class HistoricalBase(MasterBase):
acquisition_year_ref: Optional[str] = Field(None)
acquisition_year_ref: Optional[str] = Field(None, nullable=True)
class EquipmentCreate(EquipmentBase):
pass
@ -107,54 +99,54 @@ class EquipmentUpdate(EquipmentBase):
class ReplacementBase(DefaultBase):
"""Schema for replacement history (from lcc_ms_equipment_historical_data)."""
acquisition_year: Optional[int] = Field(None, ge=1900, le=9999)
acquisition_cost: Optional[float] = Field(None, le=MAX_PRICE)
acquisition_year_ref: Optional[str] = Field(None)
created_at: Optional[datetime] = Field(None)
acquisition_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
acquisition_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
acquisition_year_ref: Optional[str] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
class EquipmentRead(DefaultBase):
equipment_master_record: EquipmentMasterBase
equipment_data: EquipmentBase
chart_data: List[MasterBase]
min_eac_value: Optional[float] = Field(None, le=MAX_PRICE)
min_seq: Optional[float] = Field(None)
min_eac_year: Optional[float] = Field(None)
last_actual_year: Optional[int] = Field(None, ge=1900, le=9999)
maximo_data: Optional[List[dict]] = Field(None)
joined_maximo: Optional[List[dict]] = Field(None)
min_eac_disposal_cost: Optional[float] = Field(None, le=MAX_PRICE)
historical_data: Optional[List[HistoricalBase]] = Field(None)
replacement_data: Optional[List[ReplacementBase]] = Field(None)
min_eac_value: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
min_seq: Optional[float] = Field(None, nullable=True)
min_eac_year: Optional[float] = Field(None, nullable=True)
last_actual_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
maximo_data: Optional[List[dict]] = Field(None, nullable=True)
joined_maximo: Optional[List[dict]] = Field(None, nullable=True)
min_eac_disposal_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
historical_data: Optional[List[HistoricalBase]] = Field(None, nullable=True)
replacement_data: Optional[List[ReplacementBase]] = Field(None, nullable=True)
class EquipmentTop10(EquipmentBase):
id: UUID
equipment_master: EquipmentMasterBase
forecasting_target_year: Optional[int] = Field(None, ge=1900, le=9999)
minimum_eac_seq: Optional[int] = Field(None)
minimum_eac_year: Optional[int] = Field(None)
minimum_eac: Optional[float] = Field(None, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None)
harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE)
remaining_life: Optional[int] = Field(None)
forecasting_target_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
minimum_eac_seq: Optional[int] = Field(None, nullable=True)
minimum_eac_year: Optional[int] = Field(None, nullable=True)
minimum_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None, nullable=True)
harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
remaining_life: Optional[int] = Field(None, nullable=True)
class EquipmentTop10Pagination(Pagination):
items: List[EquipmentTop10] = []
class EquipmentDataMaster(EquipmentBase):
id: UUID
equipment_master: Optional[EquipmentMasterBase] = Field(None)
forecasting_target_year: Optional[int] = Field(None, ge=1900, le=9999)
minimum_eac_seq: Optional[int] = Field(None, ge=0)
minimum_eac_year: Optional[int] = Field(None, ge=1900, le=9999)
minimum_eac: Optional[float] = Field(None, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None)
harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE)
equipment_master: Optional[EquipmentMasterBase] = Field(None, nullable=True)
forecasting_target_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
minimum_eac_seq: Optional[int] = Field(None, nullable=True, ge=0)
minimum_eac_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
minimum_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
minimum_is_actual: Optional[int] = Field(None, nullable=True)
harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
# class EquipmentTop10EconomicLife(DefaultBase):
# equipment: EquipmentDataMaster
@ -170,5 +162,15 @@ class CountRemainingLifeResponse(DefaultBase):
critical: int
class ListQueryParams(CommonParams):
pass
items_per_page: Optional[int] = Field(
default=5,
ge=1,
le=1000,
description="Number of items per page",
alias="itemsPerPage"
)
search: Optional[str] = Field(
default=None,
description="Search keyword"
)

@ -301,19 +301,11 @@ async def get_master_by_assetnum(
None,
)
# Historical data query - filter to only one reference (prioritize oldest acquisition year)
oldest_ref_subquery = (
Select(EquipmentHistoricalTransactionRecords.acquisition_year_ref)
.filter(EquipmentHistoricalTransactionRecords.assetnum == assetnum)
.order_by(EquipmentHistoricalTransactionRecords.acquisition_year_ref.asc())
.limit(1)
.scalar_subquery()
)
# Historical data query
historical_query = (
Select(EquipmentHistoricalTransactionRecords)
.filter(EquipmentHistoricalTransactionRecords.assetnum == assetnum)
.filter(EquipmentHistoricalTransactionRecords.acquisition_year_ref == oldest_ref_subquery)
.join(EquipmentHistoricalTransactionRecords.equipment)
.filter(Equipment.assetnum == assetnum)
.order_by(EquipmentHistoricalTransactionRecords.tahun.asc())
)
historical_result = await db_session.execute(historical_query)
@ -670,9 +662,9 @@ async def delete(*, db_session: DbSession, equipment_id: str):
async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str) -> bool:
"""
Check if acquisition cost in Maximo differs from local DB.
Updates master acquisition_cost (initial + replacement) and sets forecasting_start_year to 2015.
Returns True if master record was updated, False otherwise.
Check if acquisition year/cost in Maximo differs from local DB.
If changed, archive history, delete transaction data, update master, and return True.
Otherwise return False.
"""
conn = get_production_connection()
first_year = None
@ -680,7 +672,7 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str
if conn:
try:
cursor = conn.cursor()
# Query the oldest year from wo_maximo to detect the original replacement cost
# Query the first year from wo_maximo
query = """
select DATE_PART('year', a.reportdate) AS year, a.asset_replacecost AS cost
from wo_maximo a
@ -697,7 +689,7 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str
cursor.close()
conn.close()
except Exception as e:
print(f"Error fetching replacement data for {assetnum}: {e}")
print(f"Error fetching acquisition year for {assetnum}: {e}")
if conn:
try:
conn.close()
@ -706,6 +698,7 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str
updates_performed = False
if first_year:
# Fetch equipment to update
eq = await get_by_assetnum(db_session=db_session, assetnum=assetnum)
if eq:
@ -714,64 +707,104 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str
current_acq = eq.acquisition_year
current_life = eq.design_life
current_target = eq.forecasting_target_year
current_acq_cost = eq.acquisition_cost
# If current_target is logically "default", we update it.
# If user changed it to something else, we might want to preserve it
# if it currently holds the default value (based on old acq year).
is_valid_default = False
if current_acq and current_life and current_target:
is_valid_default = current_target == (current_acq + current_life)
# Fetch inflation rate from lcc_ms_master for value-of-money adjustment
inflation_rate = 0.05 # Default fallback
try:
rate_query = text("SELECT value_num / 100.0 FROM lcc_ms_master WHERE name = 'inflation_rate'")
rate_result = (await db_session.execute(rate_query)).scalar()
if rate_result is not None:
inflation_rate = float(rate_result)
except Exception as e:
print(f"Warning: Could not fetch inflation_rate for {assetnum}: {e}")
# Calculate initial cost from category/proportion (base acquisition cost)
initial_cost = 0.0
if eq.category_no and eq.proportion:
_, aggregated_cost = await fetch_acquisition_cost_with_rollup(
db_session=db_session, base_category_no=eq.category_no
)
if aggregated_cost:
initial_cost = (eq.proportion * 0.01) * aggregated_cost
# Adjust initial cost to 2015 value (Base Year)
# Formula: Value_2015 = Value_Year / (1 + rate)^(Year - 2015)
adj_initial_cost = initial_cost
if current_acq and current_acq != 2015:
adj_initial_cost = initial_cost / ((1 + inflation_rate) ** (current_acq - 2015))
# Adjust replace cost to 2015 value (Base Year)
adj_replace_cost = (first_cost or 0.0)
if first_year and first_year != 2015:
adj_replace_cost = (first_cost or 0.0) / ((1 + inflation_rate) ** (first_year - 2015))
# Total cost is adjusted initial cost plus the adjusted replacement cost
total_cost = adj_initial_cost + adj_replace_cost
change_cost = (eq.acquisition_cost != total_cost)
# Requirement: forecasting_start_year always starts from 2015
change_start = (eq.forecasting_start_year != 2015)
# Note: acquisition_year itself is no longer updated as per requirements.
if change_cost or change_start:
if change_cost:
print(
f"Acquisition cost update for {assetnum}: {eq.acquisition_cost} -> {total_cost} "
f"(Adj. Initial: {adj_initial_cost} + Adj. Replacement: {adj_replace_cost} | Rate: {inflation_rate})"
)
eq.acquisition_cost = total_cost
if change_start:
print(f"Aligning forecasting_start_year to 2015 for {assetnum}")
eq.forecasting_start_year = 2015
# If target was default, we update it to 2015 + design_life
# Check for changes
change_year = (eq.acquisition_year != first_year)
change_cost = (first_cost is not None and eq.acquisition_cost != first_cost)
if change_year or change_cost:
print(f"Acquisition change detected for {assetnum}: Year {current_acq}->{first_year}, Cost {current_acq_cost}->{first_cost}. Archiving history.")
acq_year_ref = f"{current_acq}_{current_target}"
# --- ARCHIVE HISTORICAL DATA ---
# 1. Copy old equipment master data to history
history_ms_query = text("""
INSERT INTO lcc_ms_equipment_historical_data (
id, assetnum, acquisition_year, acquisition_cost, capital_cost_record_time, design_life,
forecasting_start_year, forecasting_target_year, manhours_rate, created_at, created_by,
updated_at, updated_by, min_eac_info, harga_saat_ini, minimum_eac_seq, minimum_eac_year,
minimum_eac, minimum_npv, minimum_pmt, minimum_pmt_aq_cost, minimum_is_actual,
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, category_no, proportion,
acquisition_year_ref
)
SELECT
uuid_generate_v4(), assetnum, acquisition_year, acquisition_cost, capital_cost_record_time, design_life,
forecasting_start_year, forecasting_target_year, manhours_rate, created_at, created_by,
updated_at, updated_by, min_eac_info, harga_saat_ini, minimum_eac_seq, minimum_eac_year,
minimum_eac, minimum_npv, minimum_pmt, minimum_pmt_aq_cost, minimum_is_actual,
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, category_no, proportion,
:acq_year_ref
FROM lcc_ms_equipment_data
WHERE assetnum = :assetnum
""")
await db_session.execute(history_ms_query, {"acq_year_ref": acq_year_ref, "assetnum": assetnum})
# 2. Copy old transaction data to lcc_equipment_historical_tr_data
# Format: {acquisition_year}_{forecasting_target_year}
history_tr_query = text("""
INSERT INTO lcc_equipment_historical_tr_data (
id, assetnum, tahun, seq, is_actual,
raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human,
raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human,
raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human,
raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human,
raw_project_task_material_cost, "raw_loss_output_MW", raw_loss_output_price,
raw_operational_cost, raw_maintenance_cost,
rc_cm_material_cost, rc_cm_labor_cost,
rc_pm_material_cost, rc_pm_labor_cost,
rc_oh_material_cost, rc_oh_labor_cost,
rc_predictive_labor_cost,
rc_project_material_cost, rc_lost_cost, rc_operation_cost, rc_maintenance_cost,
rc_total_cost,
eac_npv, eac_annual_mnt_cost, eac_annual_acq_cost, eac_disposal_cost, eac_eac,
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours,
created_by, created_at, acquisition_year_ref
)
SELECT
uuid_generate_v4(), assetnum, tahun, seq, is_actual,
raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human,
raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human,
raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human,
raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human,
raw_project_task_material_cost, "raw_loss_output_MW", raw_loss_output_price,
raw_operational_cost, raw_maintenance_cost,
rc_cm_material_cost, rc_cm_labor_cost,
rc_pm_material_cost, rc_pm_labor_cost,
rc_oh_material_cost, rc_oh_labor_cost,
rc_predictive_labor_cost,
rc_project_material_cost, rc_lost_cost, rc_operation_cost, rc_maintenance_cost,
rc_total_cost,
eac_npv, eac_annual_mnt_cost, eac_annual_acq_cost, eac_disposal_cost, eac_eac,
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours,
created_by, NOW(), :acq_year_ref
FROM lcc_equipment_tr_data
WHERE assetnum = :assetnum
""")
await db_session.execute(history_tr_query, {"acq_year_ref": acq_year_ref, "assetnum": assetnum})
# 3. Delete old data
del_query = text("DELETE FROM lcc_equipment_tr_data WHERE assetnum = :assetnum")
await db_session.execute(del_query, {"assetnum": assetnum})
# Update Equipment Master
if first_cost is not None and eq.acquisition_cost != first_cost:
eq.acquisition_cost = first_cost
if eq.acquisition_year != first_year:
eq.acquisition_year = first_year
if is_valid_default and current_life:
eq.forecasting_target_year = 2015 + current_life
eq.forecasting_target_year = first_year + current_life
await db_session.commit()
updates_performed = True

@ -31,8 +31,7 @@ class EquipmentMaster(Base, DefaultMixin):
system_tag = Column(String, nullable=True)
assetnum = Column(String, nullable=True)
location_tag = Column(String, nullable=True)
image_name = Column(String, nullable=True)
description = Column(String, nullable=True)
# Relationship definitions
# Define both sides of the relationship
# parent = relationship(

@ -28,21 +28,6 @@ async def get_all_equipment_master_tree(
data=equipment_masters, message="Data retrieved successfully"
)
@router.get("/export-all", response_model=StandardResponse[EquipmentMasterPaginated])
async def get_all_equipment_master_tree_export_all(
    db_session: DbSession,
    common: CommonParameters,
):
    """Return every equipment-master record for export, bypassing pagination."""
    # Ask the shared query helper for the full result set instead of one page.
    common["all"] = True
    full_listing = await get_all_master(db_session=db_session, common=common)
    return StandardResponse(
        data=full_listing,
        message="All Equipment Master Data retrieved successfully",
    )
@router.get(
"/{equipment_master_id}", response_model=StandardResponse[EquipmentMasterRead]

@ -7,10 +7,10 @@ from src.models import CommonParams, DefaultBase, Pagination
class EquipmentMasterBase(DefaultBase):
parent_id: Optional[UUID] = Field(None)
name: Optional[str] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
parent_id: Optional[UUID] = Field(None, nullable=True)
name: Optional[str] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
class EquipmentMasterCreate(EquipmentMasterBase):
@ -18,8 +18,8 @@ class EquipmentMasterCreate(EquipmentMasterBase):
class EquipmentTree(DefaultBase):
level_no: Optional[int] = Field(None)
name: Optional[str] = Field(None)
level_no: Optional[int] = Field(None, nullable=True)
name: Optional[str] = Field(None, nullable=True)
class EquipmentMasterUpdate(EquipmentMasterBase):
@ -31,12 +31,12 @@ EquipmentMasterReadRef = ForwardRef("EquipmentMasterRead")
class EquipmentMasterRead(EquipmentMasterBase):
id: UUID
equipment_tree_id: Optional[UUID] = Field(None)
equipment_tree_id: Optional[UUID] = Field(None, nullable=True)
equipment_tree: EquipmentTree
category_id: Optional[UUID] = Field(None)
system_tag: Optional[str] = Field(None)
assetnum: Optional[str] = Field(None)
location_tag: Optional[str] = Field(None)
category_id: Optional[UUID] = Field(None, nullable=True)
system_tag: Optional[str] = Field(None, nullable=True)
assetnum: Optional[str] = Field(None, nullable=True)
location_tag: Optional[str] = Field(None, nullable=True)
children: List[EquipmentMasterReadRef] # type: ignore
@ -46,4 +46,5 @@ class EquipmentMasterPaginated(Pagination):
class EquipmentMasterQuery(CommonParams):
parent_id : Optional[str] = None
items_per_page : Optional[int] = 5
search : Optional[str] = None

@ -7,7 +7,6 @@ from fastapi.responses import JSONResponse
from pydantic import BaseModel
from src.enums import ResponseStatus
from src.auth.service import notify_admin_on_rate_limit_sync
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
@ -97,93 +96,35 @@ def handle_exception(request: Request, exc: Exception):
"""
Global exception handler for Fastapi application.
"""
import uuid
error_id = str(uuid.uuid1())
request_info = get_request_context(request)
# Store error_id in request.state for middleware/logging
request.state.error_id = error_id
if isinstance(exc, RateLimitExceeded):
# Kirim notifikasi ke admin
notify_admin_on_rate_limit_sync(
endpoint_name=request_info["endpoint"],
ip_address=request_info["remote_addr"],
method=request_info["method"],
)
logging.warning(
f"Rate limit exceeded | Error ID: {error_id}",
extra={
"error_id": error_id,
"error_category": "rate_limit",
"request": request_info,
"detail": str(exc.description) if hasattr(exc, "description") else str(exc),
},
)
return JSONResponse(
status_code=429,
content={
"data": None,
"message": "Rate limit exceeded",
"status": ResponseStatus.ERROR,
"error_id": error_id
}
)
if isinstance(exc, RequestValidationError):
logging.warning(
f"Validation error occurred | Error ID: {error_id}",
extra={
"error_id": error_id,
"error_category": "validation",
"errors": exc.errors(),
"request": request_info,
},
)
return JSONResponse(
status_code=422,
content={
"data": exc.errors(),
"message": "Validation Error",
"status": ResponseStatus.ERROR,
"error_id": error_id
},
)
_rate_limit_exceeded_handler(request, exc)
if isinstance(exc, HTTPException):
logging.error(
f"HTTP exception occurred | Error ID: {error_id}",
extra={
"error_id": error_id,
"error_category": "http",
"status_code": exc.status_code,
"detail": exc.detail if hasattr(exc, "detail") else str(exc),
"request": request_info,
},
f"HTTP exception | Code: {exc.status_code} | Error: {exc.detail} | Request: {request_info}",
extra={"error_category": "http"},
)
return JSONResponse(
status_code=exc.status_code,
content={
"data": None,
"message": str(exc.detail) if hasattr(exc, "detail") else str(exc),
"message": str(exc.detail),
"status": ResponseStatus.ERROR,
"error_id": error_id
},
"errors": [
ErrorDetail(
message=str(exc.detail)
).model_dump()
]
}
)
if isinstance(exc, SQLAlchemyError):
error_message, status_code = handle_sqlalchemy_error(exc)
logging.error(
f"Database error occurred | Error ID: {error_id}",
extra={
"error_id": error_id,
"error_category": "database",
"error_message": error_message,
"request": request_info,
"exception": str(exc),
},
f"Database Error | Error: {str(error_message)} | Request: {request_info}",
extra={"error_category": "database"},
)
return JSONResponse(
@ -192,28 +133,42 @@ def handle_exception(request: Request, exc: Exception):
"data": None,
"message": error_message,
"status": ResponseStatus.ERROR,
"error_id": error_id
},
"errors": [
ErrorDetail(
message=error_message
).model_dump()
]
}
)
# Log unexpected errors
error_message = f"{exc.__class__.__name__}: {str(exc)}"
error_traceback = exc.__traceback__
# Get file and line info if available
if error_traceback:
tb = error_traceback
while tb.tb_next:
tb = tb.tb_next
file_name = tb.tb_frame.f_code.co_filename
line_num = tb.tb_lineno
error_message = f"{error_message}\nFile {file_name}, line {line_num}"
logging.error(
f"Unexpected error occurred | Error ID: {error_id}",
extra={
"error_id": error_id,
"error_category": "unexpected",
"error_message": str(exc),
"request": request_info,
},
exc_info=True,
f"Unexpected Error | Error: {error_message} | Request: {request_info}",
extra={"error_category": "unexpected"},
)
return JSONResponse(
status_code=500,
content={
"data": None,
"message": "An unexpected error occurred",
"message": error_message,
"status": ResponseStatus.ERROR,
"error_id": error_id
},
"errors": [
ErrorDetail(
message=error_message
).model_dump()
]
}
)

@ -11,17 +11,6 @@ from src.enums import OptimumOHEnum
LOG_FORMAT_DEBUG = "%(levelname)s:%(message)s:%(pathname)s:%(funcName)s:%(lineno)d"
# ANSI Color Codes
RESET = "\033[0m"
COLORS = {
"DEBUG": "\033[36m", # Cyan
"INFO": "\033[32m", # Green
"WARNING": "\033[33m", # Yellow
"WARN": "\033[33m", # Yellow
"ERROR": "\033[31m", # Red
"CRITICAL": "\033[1;31m", # Bold Red
}
class LogLevels(OptimumOHEnum):
info = "INFO"
@ -35,44 +24,27 @@ class JSONFormatter(logging.Formatter):
Custom formatter to output logs in JSON format.
"""
def format(self, record):
from src.context import get_request_id, get_user_id, get_username, get_role
from src.context import get_request_id
request_id = None
user_id = None
username = None
role = None
request_id = None
try:
request_id = get_request_id()
user_id = get_user_id()
username = get_username()
role = get_role()
except Exception:
pass
# Standard fields from requirements
log_record = {
"timestamp": datetime.datetime.fromtimestamp(record.created).strftime("%Y-%m-%d %H:%M:%S"),
"timestamp": datetime.datetime.fromtimestamp(record.created).astimezone().isoformat(),
"level": record.levelname,
"name": record.name,
"message": record.getMessage(),
"logger": record.name,
"module": record.module,
"funcName": record.funcName,
"lineno": record.lineno,
"pid": os.getpid(),
"request_id": request_id,
}
# Add Context information if available
if user_id:
log_record["user_id"] = user_id
if username:
log_record["username"] = username
if role:
log_record["role"] = role
if request_id:
log_record["request_id"] = request_id
# Add Error context if available
if hasattr(record, "error_id"):
log_record["error_id"] = record.error_id
elif "error_id" in record.__dict__:
log_record["error_id"] = record.error_id
# Capture exception info if available
if record.exc_info:
@ -83,25 +55,18 @@ class JSONFormatter(logging.Formatter):
log_record["stack_trace"] = self.formatStack(record.stack_info)
# Add any extra attributes passed to the log call
# We skip standard attributes to avoid duplication
standard_attrs = {
"args", "asctime", "created", "exc_info", "exc_text", "filename",
"funcName", "levelname", "levelno", "lineno", "module", "msecs",
"message", "msg", "name", "pathname", "process", "processName",
"relativeCreated", "stack_info", "thread", "threadName", "error_id",
"color_message", "request", "scope"
"relativeCreated", "stack_info", "thread", "threadName"
}
for key, value in record.__dict__.items():
if key not in standard_attrs and not key.startswith("_"):
if key not in standard_attrs:
log_record[key] = value
log_json = json.dumps(log_record)
# Apply color if the output is a terminal
if sys.stdout.isatty():
level_color = COLORS.get(record.levelname, "")
return f"{level_color}{log_json}{RESET}"
return log_json
return json.dumps(log_record)
def configure_logging():
@ -134,19 +99,11 @@ def configure_logging():
root_logger.addHandler(handler)
# Reconfigure uvicorn loggers to use our JSON formatter
for logger_name in ["uvicorn", "uvicorn.error", "fastapi"]:
for logger_name in ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]:
logger = logging.getLogger(logger_name)
logger.handlers = []
logger.propagate = True
# Disable uvicorn access logs as we handle request logging in our middleware
access_logger = logging.getLogger("uvicorn.access")
access_logger.handlers = []
access_logger.propagate = False
# set uvicorn access log level to warning
logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
# sometimes the slack client can be too verbose
logging.getLogger("slack_sdk.web.base_client").setLevel(logging.CRITICAL)

@ -7,14 +7,12 @@ from typing import Optional, Final
from fastapi import FastAPI, HTTPException, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from pydantic import ValidationError
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from sqlalchemy import inspect
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import scoped_session
from sqlalchemy.ext.asyncio import async_scoped_session
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
@ -40,18 +38,17 @@ log = logging.getLogger(__name__)
# we configure the logging level and format
configure_logging()
# we define the exception handlers
exception_handlers = {Exception: handle_exception}
# we create the ASGI for the app
app = FastAPI(openapi_url="", title="LCCA API",
app = FastAPI(exception_handlers=exception_handlers, openapi_url="", title="LCCA API",
description="Welcome to LCCA's API documentation!",
version="0.1.0")
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(GZipMiddleware, minimum_size=2000)
# we define the exception handlers
app.add_exception_handler(Exception, handle_exception)
app.add_exception_handler(HTTPException, handle_exception)
app.add_exception_handler(RequestValidationError, handle_exception)
app.add_exception_handler(RateLimitExceeded, handle_exception)
app.add_exception_handler(SQLAlchemyError, handle_exception)
from src.context import set_request_id, reset_request_id, get_request_id
@ -68,74 +65,12 @@ async def db_session_middleware(request: Request, call_next):
try:
start_time = time.time()
session = async_scoped_session(async_session, scopefunc=get_request_id)
request.state.db = session()
collector_session = async_scoped_session(collector_async_session, scopefunc=get_request_id)
request.state.collector_db = collector_session()
response = await call_next(request)
process_time = (time.time() - start_time) * 1000
from src.context import get_username, get_role, get_user_id, set_user_id, set_username, set_role
# Pull from context or fallback to request.state.user
username = get_username()
role = get_role()
user_id = get_user_id()
user_obj = getattr(request.state, "user", None)
if user_obj:
# UserBase in this project
u_id = getattr(user_obj, "user_id", None)
u_name = getattr(user_obj, "name", None) or getattr(user_obj, "username", None)
u_role = getattr(user_obj, "role", None)
if not user_id and u_id:
user_id = str(u_id)
set_user_id(user_id)
if not username and u_name:
username = u_name
set_username(username)
if not role and u_role:
role = u_role
set_role(role)
user_info_str = ""
if username:
user_info_str = f" | User: {username}"
if role:
user_info_str += f" ({role})"
log.info(
f"HTTP {request.method} {request.url.path} completed in {round(process_time, 2)}ms{user_info_str}",
extra={
"method": request.method,
"path": request.url.path,
"status_code": response.status_code,
"duration_ms": round(process_time, 2),
"user_id": user_id,
"role": role,
},
)
except Exception as e:
# Generate an error_id here if it hasn't been generated yet
error_id = getattr(request.state, "error_id", None)
if not error_id:
import uuid
error_id = str(uuid.uuid1())
request.state.error_id = error_id
log.error(
f"Request failed | Error ID: {error_id}",
extra={
"method": request.method,
"path": request.url.path,
"error": str(e),
"error_id": error_id,
},
exc_info=True,
)
raise e from None
finally:
await request.state.db.close()

@ -32,23 +32,6 @@ async def get_yeardatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[ManpowerCostPagination])
async def get_yeardatas_export_all(
    db_session: DbSession,
    common: CommonParameters,
):
    """Get all manpower_cost_data for export."""
    # Disable pagination so the export contains the complete data set.
    common["all"] = True
    export_rows = await get_all(
        db_session=db_session,
        items_per_page=-1,
        common=common,
    )
    return StandardResponse(
        data=export_rows,
        message="All Manpower Cost Data retrieved successfully",
    )
@router.get("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def get_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
@ -72,7 +55,7 @@ async def create_acquisition_cost_data(
return StandardResponse(data=acquisition_cost_data, message="Data created successfully")
@router.post("/update/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
@router.put("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def update_acquisition_cost_data(
db_session: DbSession,
acquisition_cost_data_id: str,
@ -96,7 +79,7 @@ async def update_acquisition_cost_data(
)
@router.post("/delete/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
@router.delete("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def delete_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
acquisition_cost_data = await get(db_session=db_session, acquisition_cost_data_id=acquisition_cost_data_id)

@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination
class ManpowerCostBase(DefaultBase):
staff_job_level: str = Field(...)
salary_per_month_idr: float = Field(...)
salary_per_day_idr: float = Field(...)
salary_per_hour_idr: float = Field(...)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
staff_job_level: str = Field(..., nullable=False)
salary_per_month_idr: float = Field(..., nullable=False)
salary_per_day_idr: float = Field(..., nullable=False)
salary_per_hour_idr: float = Field(..., nullable=False)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class ManpowerCostCreate(ManpowerCostBase):
@ -34,4 +34,5 @@ class ManpowerCostPagination(Pagination):
class QueryParams(CommonParams):
pass
items_per_page: Optional[int] = Field(5)
search: Optional[str] = Field(None)

@ -32,23 +32,6 @@ async def get_yeardatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[ManpowerCostPagination])
async def get_yeardatas_export_all(
    db_session: DbSession,
    common: CommonParameters,
):
    """Get all manpower_master_data for export."""
    # Flag the shared query helper to return everything, not a single page.
    common["all"] = True
    all_records = await get_all(
        db_session=db_session,
        items_per_page=-1,
        common=common,
    )
    return StandardResponse(
        data=all_records,
        message="All Manpower Master Data retrieved successfully",
    )
@router.get("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def get_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
@ -72,7 +55,7 @@ async def create_acquisition_cost_data(
return StandardResponse(data=acquisition_cost_data, message="Data created successfully")
@router.post("/update/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
@router.put("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def update_acquisition_cost_data(
db_session: DbSession,
acquisition_cost_data_id: str,
@ -96,7 +79,7 @@ async def update_acquisition_cost_data(
)
@router.post("/delete/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
@router.delete("/{acquisition_cost_data_id}", response_model=StandardResponse[ManpowerCostRead])
async def delete_acquisition_cost_data(db_session: DbSession, acquisition_cost_data_id: str):
acquisition_cost_data = await get(db_session=db_session, acquisition_cost_data_id=acquisition_cost_data_id)

@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination
class ManpowerCostBase(DefaultBase):
staff_job_level: str = Field(...)
salary_per_month_idr: float = Field(...)
salary_per_day_idr: float = Field(...)
salary_per_hour_idr: float = Field(...)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
staff_job_level: str = Field(..., nullable=False)
salary_per_month_idr: float = Field(..., nullable=False)
salary_per_day_idr: float = Field(..., nullable=False)
salary_per_hour_idr: float = Field(..., nullable=False)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class ManpowerCostCreate(ManpowerCostBase):
@ -33,4 +33,5 @@ class ManpowerCostPagination(Pagination):
items: List[ManpowerCostRead] = []
class QueryParams(CommonParams):
items_per_page: Optional[int] = Field(5)
search: Optional[str] = Field(None)

@ -1,9 +1,9 @@
from typing import Annotated, Optional, List
from fastapi import APIRouter, HTTPException, status, Query, Depends
from fastapi import APIRouter, HTTPException, status, Query
from sqlalchemy import Select
from .schema import QueryParams
from src.manpower_cost.schema import QueryParams
from .model import MasterData
from .schema import (
MasterDataPagination,
@ -25,7 +25,7 @@ router = APIRouter()
async def get_masterdatas(
db_session: DbSession,
common: CommonParameters,
params: Annotated[QueryParams, Depends()],
params: Annotated[QueryParams, Query()],
):
"""Get all documents."""
# return
@ -40,23 +40,6 @@ async def get_masterdatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[MasterDataPagination])
async def get_masterdatas_export_all(
    db_session: DbSession,
    common: CommonParameters,
):
    """Get all documents for export."""
    # Request the unpaginated result set from the shared service layer.
    common["all"] = True
    exported = await get_all(
        db_session=db_session,
        items_per_page=-1,
        common=common,
    )
    return StandardResponse(
        data=exported,
        message="All Master Data retrieved successfully",
    )
@router.get("/{masterdata_id}", response_model=StandardResponse[MasterDataRead])
async def get_masterdata(db_session: DbSession, masterdata_id: str):
@ -80,7 +63,7 @@ async def create_masterdata(
return StandardResponse(data=masterdata, message="Data created successfully")
@router.post("/update/bulk", response_model=StandardResponse[List[MasterDataRead]])
@router.put("/bulk", response_model=StandardResponse[List[MasterDataRead]])
async def update_masterdata(
db_session: DbSession,
data: BulkMasterDataUpdate,
@ -113,7 +96,7 @@ async def update_masterdata(
message="Data updated successfully",
)
@router.post("/update/{masterdata_id}", response_model=StandardResponse[MasterDataRead])
@router.put("/{masterdata_id}", response_model=StandardResponse[MasterDataRead])
async def update_masterdata(
db_session: DbSession,
masterdata_id: str,
@ -137,7 +120,7 @@ async def update_masterdata(
)
@router.post("/delete/{masterdata_id}", response_model=StandardResponse[MasterDataRead])
@router.delete("/{masterdata_id}", response_model=StandardResponse[MasterDataRead])
async def delete_masterdata(db_session: DbSession, masterdata_id: str):
masterdata = await get(db_session=db_session, masterdata_id=masterdata_id)

@ -2,8 +2,8 @@ from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import BaseModel, Field, model_validator
from src.models import CommonParams, DefaultBase, Pagination
from pydantic import BaseModel, Field
from src.models import DefaultBase, Pagination
from src.auth.service import CurrentUser
@ -11,29 +11,29 @@ class MasterdataBase(DefaultBase):
# discount_rate: Optional[float]
# inflation_rate: Optional[float]
# manhours_rate: Optional[float]
name: Optional[str] = Field(None)
description: Optional[str] = Field(None)
unit_of_measurement: Optional[str] = Field(None)
name: Optional[str] = Field(None, nullable=True)
description: Optional[str] = Field(None, nullable=True)
unit_of_measurement: Optional[str] = Field(None, nullable=True)
value_num: Optional[float] = Field(
None, le=1_000_000_000_000_000 # 1 quadrillion
None, nullable=True, le=1_000_000_000_000_000 # 1 quadrillion
)
value_str: Optional[str] = Field(None)
seq: Optional[int] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
value_str: Optional[str] = Field(None, nullable=True)
seq: Optional[int] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class MasterDataCreate(MasterdataBase):
name: str = Field(...)
description: str = Field(...)
unit_of_measurement: str = Field(...)
name: str = Field(..., nullable=True)
description: str = Field(..., nullable=True)
unit_of_measurement: str = Field(..., nullable=True)
value_num: float = Field(
..., le=1_000_000_000_000_000 # 1 quadrillion
..., nullable=True, le=1_000_000_000_000_000 # 1 quadrillion
)
value_str: str = Field(None)
seq: int = Field(None)
value_str: str = Field(None, nullable=True)
seq: int = Field(None, nullable=True)
class MasterDataUpdate(MasterdataBase):
@ -52,5 +52,13 @@ class MasterDataPagination(Pagination):
items: List[MasterDataRead] = []
class QueryParams(CommonParams):
pass
class QueryParams(BaseModel):
    """List-query parameters: page size (minimum 1) and an optional search term."""

    # Default page size of 5; values below 1 are rejected by validation.
    items_per_page: Optional[int] = Field(default=5, ge=1, description="Items per page")
    # Free-text filter; None means no search filtering.
    search: Optional[str] = Field(default=None, description="Search keyword")

@ -66,7 +66,7 @@ async def create_masterdata_simulation(
return StandardResponse(data=masterdata, message="Data created successfully")
@router.post("/update/bulk", response_model=StandardResponse[List[MasterDataSimulationRead]])
@router.put("/bulk", response_model=StandardResponse[List[MasterDataSimulationRead]])
async def bulk_update_masterdata_simulation(
db_session: DbSession,
data: BulkMasterDataSimulationUpdate,
@ -105,7 +105,7 @@ async def bulk_update_masterdata_simulation(
)
@router.post("/update/{masterdata_id}", response_model=StandardResponse[MasterDataSimulationRead])
@router.put("/{masterdata_id}", response_model=StandardResponse[MasterDataSimulationRead])
async def update_masterdata_simulation(
db_session: DbSession,
masterdata_id: str,
@ -132,7 +132,7 @@ async def update_masterdata_simulation(
)
@router.post("/delete/{masterdata_id}", response_model=StandardResponse[MasterDataSimulationRead])
@router.delete("/{masterdata_id}", response_model=StandardResponse[MasterDataSimulationRead])
async def delete_masterdata_simulation(db_session: DbSession, masterdata_id: str):
masterdata = await get(db_session=db_session, masterdata_id=masterdata_id)

@ -8,19 +8,19 @@ from src.models import CommonParams, DefaultBase, Pagination
class MasterDataSimulationBase(MasterdataBase):
simulation_id: Optional[UUID] = Field(None)
simulation_id: Optional[UUID] = Field(None, nullable=True)
class MasterDataSimulationCreate(MasterDataSimulationBase):
simulation_id: UUID = Field(...)
name: str = Field(...)
description: str = Field(...)
unit_of_measurement: str = Field(...)
simulation_id: UUID = Field(..., nullable=False)
name: str = Field(..., nullable=True)
description: str = Field(..., nullable=True)
unit_of_measurement: str = Field(..., nullable=True)
value_num: float = Field(
..., le=1_000_000_000_000_000
..., nullable=True, le=1_000_000_000_000_000
)
value_str: str = Field(...)
seq: int = Field(...)
value_str: str = Field(..., nullable=True)
seq: int = Field(..., nullable=True)
class MasterDataSimulationUpdate(MasterDataSimulationBase):
@ -28,7 +28,7 @@ class MasterDataSimulationUpdate(MasterDataSimulationBase):
class BulkMasterDataSimulationUpdate(DefaultBase):
simulation_id: UUID = Field(...)
simulation_id: UUID = Field(..., nullable=False)
updates: List[dict]
@ -45,3 +45,9 @@ class QueryParams(CommonParams):
...,
description="Simulation identifier",
)
items_per_page: Optional[int] = Field(
5,
ge=1,
description="Items per page"
)
search: Optional[str] = Field(None)

@ -14,70 +14,17 @@ ALLOWED_MULTI_PARAMS = {
"exclude[]",
}
# Whitelist of ALL allowed query parameter names across the application.
# Any param NOT in this set will be rejected.
ALLOWED_QUERY_PARAMS = {
# CommonParameters (from database/service.py common_parameters)
"currentUser",
"page",
"itemsPerPage",
"q",
"filter",
"sortBy[]",
"descending[]",
"all",
# ListQueryParams / QueryParams used across routers
"items_per_page",
"search",
# equipment_master specific
"parent_id",
# masterdata_simulations / plant_transaction_data_simulations specific
"simulation_id",
# exclude
"exclude[]",
}
# Query params that are ONLY allowed for "write" operations (read operations use ALLOWED_QUERY_PARAMS).
# For GET/POST/PUT/etc, whitelisting still applies.
WRITE_METHOD_ALLOWED_PARAMS = {
# Only auth/session params are allowed in query for write methods.
# Data values (like simulation_id) must be in the JSON body for these methods.
"currentUser",
}
MAX_QUERY_PARAMS = 50
MAX_QUERY_LENGTH = 2000
MAX_JSON_BODY_SIZE = 1024 * 100 # 100 KB
XSS_PATTERN = re.compile(
r"(<script|<iframe|<embed|<object|<svg|<img|<video|<audio|<base|<link|<meta|<form|<button|"
r"javascript:|vbscript:|data:text/html|onerror\s*=|onload\s*=|onmouseover\s*=|onfocus\s*=|"
r"onclick\s*=|onscroll\s*=|ondblclick\s*=|onkeydown\s*=|onkeypress\s*=|onkeyup\s*=|"
r"onloadstart\s*=|onpageshow\s*=|onresize\s*=|onunload\s*=|style\s*=\s*['\"].*expression\s*\(|"
r"eval\s*\(|setTimeout\s*\(|setInterval\s*\(|Function\s*\()",
re.IGNORECASE,
)
# Very targeted patterns. Avoid catastrophic regex nonsense.
XSS_PATTERN_STR = r"(<script|</script|javascript:|onerror\s*=|onload\s*=|<svg|<img)"
XSS_PATTERN = re.compile(XSS_PATTERN_STR, re.IGNORECASE)
SQLI_PATTERN = re.compile(
r"(\bUNION\b|\bSELECT\b|\bINSERT\b|\bUPDATE\b|\bDELETE\b|\bDROP\b|\bALTER\b|\bCREATE\b|\bTRUNCATE\b|"
r"\bEXEC\b|\bEXECUTE\b|\bDECLARE\b|\bWAITFOR\b|\bDELAY\b|\bGROUP\b\s+\bBY\b|\bHAVING\b|\bORDER\b\s+\bBY\b|"
r"\bINFORMATION_SCHEMA\b|\bSYS\b\.|\bSYSOBJECTS\b|\bPG_SLEEP\b|\bSLEEP\b\(|--|/\*|\*/|#|\bOR\b\s+['\"]?\d+['\"]?\s*=\s*['\"]?\d+|"
r"\bAND\b\s+['\"]?\d+['\"]?\s*=\s*['\"]?\d+|"
r"\bXP_CMDSHELL\b|\bLOAD_FILE\b|\bINTO\s+OUTFILE\b)",
re.IGNORECASE,
)
SQLI_PATTERN_STR = r"(\bUNION\b|\bSELECT\b|\bINSERT\b|\bDELETE\b|\bDROP\b|--|\bOR\b\s+1=1)"
SQLI_PATTERN = re.compile(SQLI_PATTERN_STR, re.IGNORECASE)
RCE_PATTERN = re.compile(
r"(\$\(|`.*`|[;&|]\s*(cat|ls|id|whoami|pwd|ifconfig|ip|netstat|nc|netcat|nmap|curl|wget|python|php|perl|ruby|bash|sh|cmd|powershell|pwsh|sc\s+|tasklist|taskkill|base64|sudo|crontab|ssh|ftp|tftp)|"
r"\b(cat|ls|id|whoami|pwd|ifconfig|ip|netstat|nc|netcat|nmap|curl|wget|python|php|perl|ruby|bash|sh|cmd|powershell|pwsh|base64|sudo|crontab)\b|"
r"/etc/passwd|/etc/shadow|/etc/group|/etc/issue|/proc/self/|/windows/system32/|C:\\Windows\\)",
re.IGNORECASE,
)
TRAVERSAL_PATTERN = re.compile(
r"(\.\./|\.\.\\|%2e%2e%2f|%2e%2e/|\.\.%2f|%2e%2e%5c)",
re.IGNORECASE,
)
# JSON prototype pollution keys
FORBIDDEN_JSON_KEYS = {"__proto__", "constructor", "prototype"}
@ -93,31 +40,19 @@ def has_control_chars(value: str) -> bool:
def inspect_value(value: str, source: str):
if XSS_PATTERN.search(value):
raise HTTPException(
status_code=422,
status_code=400,
detail=f"Potential XSS payload detected in {source}",
)
if SQLI_PATTERN.search(value):
raise HTTPException(
status_code=422,
status_code=400,
detail=f"Potential SQL injection payload detected in {source}",
)
if RCE_PATTERN.search(value):
raise HTTPException(
status_code=422,
detail=f"Potential RCE payload detected in {source}",
)
if TRAVERSAL_PATTERN.search(value):
raise HTTPException(
status_code=422,
detail=f"Potential Path Traversal payload detected in {source}",
)
if has_control_chars(value):
raise HTTPException(
status_code=422,
status_code=400,
detail=f"Invalid control characters detected in {source}",
)
@ -127,7 +62,7 @@ def inspect_json(obj, path="body"):
for key, value in obj.items():
if key in FORBIDDEN_JSON_KEYS:
raise HTTPException(
status_code=422,
status_code=400,
detail=f"Forbidden JSON key detected: {path}.{key}",
)
inspect_json(value, f"{path}.{key}")
@ -157,28 +92,12 @@ class RequestValidationMiddleware(BaseHTTPMiddleware):
if len(params) > MAX_QUERY_PARAMS:
raise HTTPException(
status_code=422,
status_code=400,
detail="Too many query parameters",
)
# -------------------------
# 2. Query param whitelist
# -------------------------
# For GET, we allow data parameters like page, search, etc.
# For POST, PUT, DELETE, PATCH, we ONLY allow auth/session params.
active_whitelist = ALLOWED_QUERY_PARAMS if request.method == "GET" else WRITE_METHOD_ALLOWED_PARAMS
unknown_params = [
key for key, _ in params if key not in active_whitelist
]
if unknown_params:
raise HTTPException(
status_code=422,
detail=f"Unknown query parameters are not allowed for {request.method} request: {unknown_params}",
)
# -------------------------
# 3. Duplicate parameters
# 2. Duplicate parameters
# -------------------------
counter = Counter(key for key, _ in params)
duplicates = [
@ -188,77 +107,21 @@ class RequestValidationMiddleware(BaseHTTPMiddleware):
if duplicates:
raise HTTPException(
status_code=422,
status_code=400,
detail=f"Duplicate query parameters are not allowed: {duplicates}",
)
# -------------------------
# 4. JSON body inspection & Single source enforcement
# Ensuring data comes from ONLY one source (Query OR Body).
# -------------------------
content_type = request.headers.get("content-type", "")
has_json_header = content_type.startswith("application/json")
# Read body now so we can check if it's actually empty
body = b""
if has_json_header:
body = await request.body()
# We consider it a "JSON body" source ONLY if it's not empty and not just "{}"
has_actual_json_body = has_json_header and body and body.strip() != b"{}"
# Check for data parameters in query (anything whitelisted as 'data' but not 'session/auth')
data_params_in_query = [
key for key, _ in params
if key in ALLOWED_QUERY_PARAMS and key not in WRITE_METHOD_ALLOWED_PARAMS
]
if has_actual_json_body:
# If sending actual JSON body, we forbid any data in query string (one source only)
if data_params_in_query:
raise HTTPException(
status_code=422,
detail=f"Single source enforcement: Data received from both JSON body and query string ({data_params_in_query}). Use only one source.",
)
# Special case: GET with actual body is discouraged/forbidden
if request.method == "GET":
raise HTTPException(
status_code=422,
detail="GET requests must use query parameters, not JSON body.",
)
# -------------------------
# 5. Query param inspection
# 3. Query param inspection
# -------------------------
pagination_size_keys = {"size", "itemsPerPage", "per_page", "limit", "items_per_page"}
for key, value in params:
if value:
inspect_value(value, f"query param '{key}'")
# Pagination constraint: multiples of 5, max 50
if key in pagination_size_keys and value:
try:
size_val = int(value)
if size_val > 50:
raise HTTPException(
status_code=422,
detail=f"Pagination size '{key}' cannot exceed 50",
)
if size_val % 5 != 0:
raise HTTPException(
status_code=422,
detail=f"Pagination size '{key}' must be a multiple of 5",
)
except ValueError:
raise HTTPException(
status_code=422,
detail=f"Pagination size '{key}' must be an integer",
)
# -------------------------
# 6. Content-Type sanity
# 4. Content-Type sanity
# -------------------------
content_type = request.headers.get("content-type", "")
if content_type and not any(
content_type.startswith(t)
for t in (
@ -273,22 +136,32 @@ class RequestValidationMiddleware(BaseHTTPMiddleware):
)
# -------------------------
# 7. JSON body inspection & Re-injection
# 5. JSON body inspection
# -------------------------
if has_json_header:
if content_type.startswith("application/json"):
body = await request.body()
#if len(body) > MAX_JSON_BODY_SIZE:
# raise HTTPException(
# status_code=413,
# detail="JSON body too large",
# )
if body:
try:
payload = json.loads(body)
except json.JSONDecodeError:
raise HTTPException(
status_code=422,
status_code=400,
detail="Invalid JSON body",
)
inspect_json(payload)
# Re-inject body for downstream handlers
async def receive():
return {"type": "http.request", "body": body}
request._receive = receive # noqa: protected-access
return await call_next(request)

@ -2,7 +2,7 @@
from datetime import datetime
from typing import Generic, List, Optional, TypeVar
import uuid
from pydantic import BaseModel, Field, SecretStr, ConfigDict, model_validator
from pydantic import BaseModel, Field, SecretStr
from sqlalchemy import Column, DateTime, String, func, event
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column
@ -67,14 +67,19 @@ class DefaultMixin(TimeStampMixin, UUIDMixin):
# Pydantic Models
class DefaultBase(BaseModel):
model_config = ConfigDict(
from_attributes=True,
validate_assignment=True,
arbitrary_types_allowed=True,
str_strip_whitespace=True,
extra="forbid",
populate_by_name=True,
)
class Config:
from_attributes = True
validate_assignment = True
arbitrary_types_allowed = True
str_strip_whitespace = True
extra = "forbid"
populate_by_name=True
json_encoders = {
# custom output conversion for datetime
datetime: lambda v: v.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if v else None,
SecretStr: lambda v: v.get_secret_value() if v else None,
}
class Pagination(DefaultBase):
@ -100,29 +105,16 @@ class StandardResponse(BaseModel, Generic[T]):
class CommonParams(DefaultBase):
# This ensures no extra query params are allowed
current_user: Optional[str] = Field(None, alias="current_user")
currentUser: Optional[str] = Field(None, description="Alias for current_user")
current_user: Optional[str] = Field(None, alias="currentUser")
page: int = Field(1, gt=0, lt=2147483647)
items_per_page: int = Field(5, gt=-2, lt=2147483647, alias="items_per_page")
itemsPerPage: Optional[int] = Field(None, description="Alias for items_per_page")
items_per_page: int = Field(5, gt=-2, lt=2147483647, alias="itemsPerPage")
query_str: Optional[str] = Field(None, alias="q")
search: Optional[str] = Field(None, description="Search keyword")
filter_spec: Optional[str] = Field(None, alias="filter")
sort_by: List[str] = Field(default=[], alias="sortBy[]")
descending: List[bool] = Field(default=[], alias="descending[]")
exclude: List[str] = Field(default=[], alias="exclude[]")
sort_by: List[str] = Field(default_factory=list, alias="sortBy[]")
descending: List[bool] = Field(default_factory=list, alias="descending[]")
exclude: List[str] = Field(default_factory=list, alias="exclude[]")
all_params: int = Field(0, alias="all")
@model_validator(mode="before")
@classmethod
def resolve_aliases(cls, data: any) -> any:
if isinstance(data, dict):
if "itemsPerPage" in data and data["itemsPerPage"] is not None:
data.setdefault("items_per_page", data["itemsPerPage"])
if "currentUser" in data and data["currentUser"] is not None:
data.setdefault("current_user", data["currentUser"])
return data
# Property to mirror your original return dict's bool conversion
@property
def is_all(self) -> bool:

@ -587,7 +587,7 @@ class Prediksi:
self.refresh_token = d.get("refresh_token")
return data
except httpx.HTTPError as e:
print(f"Sign-in failed for URL {self.AUTH_APP_URL}/sign-in: {type(e).__name__} - {e}")
print(f"Sign-in failed: {e}")
# Try to sign out if sign-in failed
try:
signout_url = f"{self.AUTH_APP_URL}/sign-out"
@ -595,8 +595,14 @@ class Prediksi:
await client.get(signout_url, timeout=10.0)
print("Signed out due to sign-in failure.")
except Exception as signout_exc:
print(f"Sign-out failed for URL {self.AUTH_APP_URL}/sign-out: {type(signout_exc).__name__} - {signout_exc}")
print(f"Sign-out failed: {signout_exc}")
# Try to sign in again
try:
signin_res = await self.sign_in()
if self.access_token:
return signin_res
except Exception as signin_exc:
print(f"Sign-in failed after sign-out: {signin_exc}")
return None
async def refresh_access_token(self) -> str:
@ -674,39 +680,6 @@ class Prediksi:
print(f"HTTP error occurred: {e}")
return {}
def __get_historical_cost_per_failure(self, assetnum):
    """Return the average actual material cost per failure work order for
    *assetnum*, computed from the production work-order table (`wo_maximo`).

    Returns 0.0 when the production DB is unreachable, when no matching
    rows exist, or on any query error — this is a best-effort helper.
    """
    connection = None
    try:
        connection = get_production_connection()
        # No production connectivity -> fall back to a neutral cost of 0.0.
        if connection is None:
            return 0.0
        cursor = connection.cursor()
        # Optimized single-pass query: counts and sums in one scan.
        # 'T%' work orders are excluded from the failure count (the divisor)
        # only; %% escapes a literal % because the DB-API driver does
        # %s-style parameter substitution on this string.
        query = """
            SELECT
                SUM(a.actmatcost) / NULLIF(COUNT(CASE WHEN a.wonum NOT LIKE 'T%%' THEN 1 END), 0) as cost_failure
            FROM wo_maximo a
            WHERE (a.asset_unit = '3' OR a.asset_unit = '00')
                AND a.status IN ('COMP', 'CLOSE')
                AND a.asset_assetnum = %s
                AND a.worktype IN ('CM', 'PROACTIVE', 'EM')
                AND a.wojp8 != 'S1'
                AND (
                    a.description NOT ILIKE '%%U4%%'
                    OR (a.description ILIKE '%%U3%%' AND a.description ILIKE '%%U4%%')
                )
        """
        cursor.execute(query, (assetnum,))
        result = cursor.fetchone()
        # NULLIF makes the division yield NULL when zero failures matched;
        # treat NULL (or no row at all) as 0.0.
        cost_failure = float(result[0]) if result and result[0] is not None else 0.0
        return cost_failure
    except Exception as e:
        print(f"Error fetching historical cost per failure for {assetnum}: {e}")
        return 0.0
    finally:
        # Always release the production connection, even on error paths.
        if connection:
            connection.close()
def __get_man_hour_rate(self, staff_level: str = "Junior"):
connection = None
try:
@ -788,8 +761,7 @@ class Prediksi:
rate, max_year = self.__get_rate_and_max_year(assetnum)
man_hour_rate = self.__get_man_hour_rate() # Defaults to 'junior'
# Pre-fetch cost per failure once per asset to avoid redundant DB queries
avg_cost_per_failure = self.__get_historical_cost_per_failure(assetnum)
pmt = 0
# Prediksi untuk setiap kolom
for column in df.columns:
@ -841,32 +813,16 @@ class Prediksi:
preds_list.append(cost)
preds = np.array(preds_list, dtype=float)
elif recent_vals.empty:
avg = 0.0
preds = np.repeat(float(avg), n_future)
else:
# Use pre-fetched cost per failure
preds_list = []
for yr in future_years:
failures_data = await self._fetch_api_data(assetnum, yr)
# Interval from predicted number of failures
interval = 0.0
if isinstance(failures_data, dict):
data_list = failures_data.get("data")
if isinstance(data_list, list) and len(data_list) > 0:
first_item = data_list[0]
if isinstance(first_item, dict):
num_fail = first_item.get("num_fail")
if num_fail is not None:
try:
interval = float(num_fail)
except Exception:
interval = 0.0
# predicted_cost = predicted_failures * avg_cost_per_failure
cost = interval * avg_cost_per_failure
preds_list.append(cost)
preds = np.array(preds_list, dtype=float)
avg = pd.to_numeric(recent_vals, errors="coerce").fillna(0).mean()
avg = 0.0 if pd.isna(avg) else float(avg)
preds = np.repeat(float(avg), n_future)
else:
# kolom non-cm, gunakan nilai dari last actual year bila ada,
# Untuk kolom non-cm, gunakan nilai dari last actual year bila ada,
# jika tidak ada gunakan last available non-NA value, jika tidak ada pakai 0.0
if "is_actual" in df.columns and not df[df["is_actual"] == 1].empty:
last_actual_year_series = df[df["is_actual"] == 1]["year"]

@ -6,14 +6,6 @@ This file consolidates the core mathematical/financial formulas used across:
- `insert_actual_data.py` (aggregation formulas, man-hour conversion)
- `Prediksi.py` (future value / fv wrappers)
### Prediction Logic Summary
| Category | Logic Type | Formula Basis |
| :--- | :--- | :--- |
| **CM Labor** | **Reliability-Based** | `Failures x 3.0 x 1.0 x ManPowerRate` |
| **CM Other** | **Reliability-Based** | `Failures x CostPerFailure (from Production SQL)` |
| **PM / OH / PDM** | **Last Scenario** | `Value from Last Actual Year` (Carry Forward) |
| **Total Risk Cost** | **Aggregated** | `Sum of above + Asset Criticality Multiplier` |
Keep these functions pure and well-documented to make debugging and
comparisons easier.
"""

@ -39,6 +39,72 @@ def get_recursive_query(cursor, assetnum, worktype="CM"):
Fungsi untuk menjalankan query rekursif berdasarkan assetnum dan worktype.
worktype memiliki nilai default 'CM'.
"""
# query = f"""
# SELECT
# ROW_NUMBER() OVER (ORDER BY tbl.assetnum, tbl.year, tbl.worktype) AS seq,
# *
# FROM (
# SELECT
# a.worktype,
# a.assetnum,
# EXTRACT(YEAR FROM a.reportdate) AS year,
# COUNT(a.wonum) AS raw_corrective_failure_interval,
# SUM(a.total_cost_max) AS raw_corrective_material_cost,
# ROUND(
# SUM(
# EXTRACT(EPOCH FROM (
# a.actfinish -
# a.actstart
# ))
# ) / 3600
# , 2) AS raw_corrective_labor_time_jam,
# SUM(a.jumlah_labor) AS raw_corrective_labor_technician
# FROM
# public.wo_staging_3 AS a
# WHERE
# a.unit = '3'
# GROUP BY
# a.worktype,
# a.assetnum,
# EXTRACT(YEAR FROM a.reportdate)
# ) AS tbl
# WHERE
# tbl.worktype = '{worktype}'
# AND tbl.assetnum = '{assetnum}'
# ORDER BY
# tbl.assetnum,
# tbl.year,
# tbl.worktype
# """
# query = f"""
# select d.tahun, SUM(d.actmatcost) AS raw_corrective_material_cost, sum(d.man_hour) as man_hour_peryear from
# (
# SELECT
# a.wonum,
# a.actmatcost,
# DATE_PART('year', a.reportdate) AS tahun,
# (
# ROUND(SUM(EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600), 2)
# ) AS man_hour,
# CASE
# WHEN COUNT(b.laborcode) = 0 THEN 3
# ELSE COUNT(b.laborcode)
# END AS man_count
# FROM public.wo_maximo AS a
# LEFT JOIN public.wo_maximo_labtrans AS b
# ON b.wonum = a.wonum
# WHERE
# a.asset_unit = '3'
# AND a.worktype = '{worktype}'
# AND a.asset_assetnum = '{assetnum}'
# and a.wonum not like 'T%'
# GROUP BY
# a.wonum,
# a.actmatcost,
# DATE_PART('year', a.reportdate)
# ) as d group by d.tahun
# ;
# """
where_query = get_where_query_sql(assetnum, worktype)
query = f"""
@ -294,11 +360,48 @@ def _build_tr_row_values(
)
rc_cm_material_cost = raw_cm_material_cost_total
# rc_cm_labor_cost = (
# data_cm_row.get("raw_cm_labor_time")
# * data_cm_row.get("rc_cm_labor_human")
# * man_hour_value
# if data_cm_row
# and data_cm_row.get("rc_cm_labor_cost")
# and data_cm_row.get("rc_cm_labor_human")
# and man_hour_value is not None
# else 0
# )
rc_pm_material_cost = raw_pm_material_cost
# rc_pm_labor_cost = (
# data_pm_row.get("raw_pm_labor_time")
# * data_pm_row.get("rc_pm_labor_human")
# * man_hour_value
# if data_pm_row
# and data_pm_row.get("rc_pm_labor_cost")
# and data_pm_row.get("rc_pm_labor_human")
# and man_hour_value is not None
# else 0
# )
rc_oh_material_cost = raw_oh_material_cost
# rc_oh_labor_cost = (
# data_oh_row.get("raw_oh_labor_time")
# * data_oh_row.get("rc_oh_labor_human")
# * man_hour_value
# if data_oh_row
# and data_oh_row.get("rc_oh_labor_cost")
# and data_oh_row.get("rc_oh_labor_human")
# and man_hour_value is not None
# else 0
# )
# rc_predictive_labor_cost = (
# data_predictive_row.get("raw_predictive_labor_human") * man_hour_value
# if data_predictive_row
# and data_predictive_row.get("rc_predictive_labor_cost")
# and man_hour_value is not None
# else 0
# )
if labour_cost_lookup and year is not None:
cm_lookup = labour_cost_lookup.get("CM", {})
@ -884,14 +987,18 @@ async def query_data(target_assetnum: str = None):
print(f"Error checking acquisition data for {assetnum}: {exc}")
# Calculation start is always 2014 (forecasting start is 2015)
# Forecasting and calculation start configuration
loop_start_year = 2014
# Delete data before calculation start (2014)
cursor.execute("DELETE FROM lcc_equipment_tr_data WHERE assetnum = %s AND tahun < %s", (assetnum, loop_start_year))
forecasting_start_year_db = row.get("forecasting_start_year")
acquisition_year = row.get("acquisition_year")
forecasting_start_year = loop_start_year
if acquisition_year:
# Remove data before acquisition_year
cursor.execute("DELETE FROM lcc_equipment_tr_data WHERE assetnum = %s AND tahun < %s", (assetnum, acquisition_year))
forecasting_start_year = acquisition_year - 1
elif forecasting_start_year_db:
# If no acquisition_year but forecasting_start_year defined in DB
forecasting_start_year = forecasting_start_year_db
else:
forecasting_start_year = 2014
asset_start = datetime.now()
processed_assets += 1
@ -917,18 +1024,6 @@ async def query_data(target_assetnum: str = None):
"OH": get_labour_cost_totals(cursor_wo, assetnum, "OH"),
}
# Find first year with replace_cost > 0 in Maximo (Requirement: ignore costs in this year)
cursor_wo.execute("""
select DATE_PART('year', a.reportdate) AS year
from wo_maximo a
where a.asset_replacecost > 0
and a.asset_assetnum = %s
order by a.reportdate asc
limit 1;
""", (assetnum,))
res_rep = cursor_wo.fetchone()
first_rep_year = int(res_rep[0]) if res_rep else None
seq = 0
# Looping untuk setiap tahun
for year in range(forecasting_start_year, current_year + 1):
@ -979,23 +1074,6 @@ async def query_data(target_assetnum: str = None):
year=year,
labour_cost_lookup=labour_cost_lookup,
)
# Requirement: At the first year of the replace cost detected > 0,
# The material cost/ labor cost is ignored.
if first_rep_year and year == first_rep_year:
cost_keys = [
"raw_cm_material_cost", "raw_cm_labor_time",
"raw_pm_material_cost", "raw_pm_labor_time",
"raw_oh_material_cost", "raw_oh_labor_time",
"raw_predictive_material_cost", "raw_predictive_labor_time",
"rc_cm_material_cost", "rc_cm_labor_cost",
"rc_pm_material_cost", "rc_pm_labor_cost",
"rc_oh_material_cost", "rc_oh_labor_cost",
"rc_predictive_labor_cost"
]
for k in cost_keys:
if k in row_values:
row_values[k] = 0.0
if not data_exists:
cursor.execute(
insert_query,

@ -6,60 +6,18 @@ from src.modules.equipment.insert_actual_data import query_data
from src.modules.equipment.Prediksi import Prediksi, main as predict_run
from src.modules.equipment.Eac import Eac, main as eac_run
def format_execution_time(execution_time):
    """Render a duration in seconds as a short human-readable string.

    Uses "Hh Mm S.SSs." above an hour, "Mm S.SSs." above a minute, and
    plain "S.SS seconds." otherwise.
    """
    if execution_time >= 3600:
        total_minutes, secs = divmod(execution_time, 60)
        hrs, mins = divmod(int(total_minutes), 60)
        return f"{hrs}h {mins}m {secs:.2f}s."
    if execution_time >= 60:
        mins, secs = divmod(execution_time, 60)
        return f"{int(mins)}m {secs:.2f}s."
    return f"{execution_time:.2f} seconds."
# Alternative calling function to just predict and calculate eac without inserting actual data
async def simulate(assetnum: str = None):
    """Run the prediction and EAC steps only, skipping the actual-data refresh.

    Returns the timing summary message on success, or None when either
    step fails (errors are printed, not raised).
    """
    started = time.time()
    scope = 'for ' + assetnum if assetnum else 'for all assets'
    print(f"Starting simulation (predict + eac) {scope}...")
    # Step 1: prediction. A False result means the step was skipped/failed
    # without raising, so EAC must not run on stale data.
    try:
        pred_outcome = await predict_run(assetnum=assetnum)
        if pred_outcome is False:
            print("Prediction step failed or was skipped. Skipping EAC run.")
            return
    except Exception as exc:
        print(f"Error in predict_run: {str(exc)}")
        return
    # Step 2: EAC. eac_run may be sync or async, so await only coroutines.
    try:
        eac_outcome = eac_run(assetnum=assetnum)
        if asyncio.iscoroutine(eac_outcome):
            eac_outcome = await eac_outcome
        print("EAC run completed.")
    except Exception as exc:
        print(f"Error in eac_run: {str(exc)}")
        return
    message = f"Simulation finished in {format_execution_time(time.time() - started)}"
    print(message)
    return message
# Panggil fungsi
async def main(assetnum: str = None):
async def main():
start_time = time.time()
print(f"Starting calculation workflow {'for ' + assetnum if assetnum else 'for all assets'}...")
try:
await query_data(target_assetnum=assetnum)
await query_data()
except Exception as e:
print(f"Error in query_data: {str(e)}")
return
try:
prediction_result = await predict_run(assetnum=assetnum)
prediction_result = await predict_run()
if prediction_result is False:
print("Prediction step failed or was skipped. Skipping EAC run.")
return
@ -68,28 +26,37 @@ async def main(assetnum: str = None):
return
try:
result = eac_run(assetnum=assetnum)
result = eac_run()
if asyncio.iscoroutine(result):
result = await result
if isinstance(result, (list, tuple)):
print(f"EAC run returned {len(result)} items.")
else:
print("EAC run completed.")
except Exception as e:
print(f"Error in eac_run: {str(e)}")
return
end_time = time.time()
message = f"Script calculation finished in {format_execution_time(end_time - start_time)}"
execution_time = end_time - start_time
# format execution time into h/m/s as needed
if execution_time >= 3600:
hours = int(execution_time // 3600)
minutes = int((execution_time % 3600) // 60)
seconds = execution_time % 60
message = f"Script calculation finished in {hours}h {minutes}m {seconds:.2f}s."
elif execution_time >= 60:
minutes = int(execution_time // 60)
seconds = execution_time % 60
message = f"Script calculation finished in {minutes}m {seconds:.2f}s."
else:
message = f"Script calculation finished in {execution_time:.2f} seconds."
print(message)
return message
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Run LCCA Simulation")
parser.add_argument("mode", nargs="?", choices=["main", "simulate"], default="main", help="Mode to run: 'main' (full) or 'simulate' (no data refresh)")
parser.add_argument("--assetnum", type=str, help="Specific asset number to process")
args = parser.parse_args()
if args.mode == "simulate":
asyncio.run(simulate(assetnum=args.assetnum))
else:
asyncio.run(main(assetnum=args.assetnum))
asyncio.run(
main()
)

@ -1,4 +1,4 @@
from typing import Annotated, List, Optional
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, HTTPException, Query, status
@ -16,7 +16,6 @@ from .schema import (
PlantFSTransactionDataRead,
PlantFSTransactionDataUpdate,
PlantFSChartData,
ListQueryParams,
)
from .service import create, delete, get, get_all, update, update_fs_charts_from_matrix, get_charts
@ -29,14 +28,15 @@ router = APIRouter()
async def list_fs_transactions(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Return paginated financial statement transaction data."""
records = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
)
@ -121,8 +121,8 @@ async def create_fs_transaction(
return StandardResponse(data=record, message="Data created successfully")
@router.post(
"/update/{fs_transaction_id}",
@router.put(
"/{fs_transaction_id}",
response_model=StandardResponse[PlantFSTransactionDataRead],
)
async def update_fs_transaction(
@ -148,8 +148,8 @@ async def update_fs_transaction(
return StandardResponse(data=updated, message="Data updated successfully")
@router.post(
"/delete/{fs_transaction_id}",
@router.delete(
"/{fs_transaction_id}",
response_model=StandardResponse[PlantFSTransactionDataRead],
)
async def delete_fs_transaction(
@ -166,3 +166,8 @@ async def delete_fs_transaction(
await delete(db_session=db_session, fs_transaction_id=str(fs_transaction_id))
return StandardResponse(data=record, message="Data deleted successfully")

@ -4,70 +4,70 @@ from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
class PlantFSTransactionDataBase(DefaultBase):
fs_chart_total_revenue: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_revenue_a: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_revenue_b: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_revenue_c: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_revenue_d: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_revenue_annualized: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_fuel_cost_component_c: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_fuel_cost: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_fuel_cost_annualized: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_oem_component_bd: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_oem_bd_cost: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_oem_annualized: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_capex_component_a: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_capex_biaya_investasi_tambahan: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_capex_acquisition_cost: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_capex_annualized: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_cost_disposal_cost: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
tahun: Optional[int] = Field(None, ge=1900, le=9999)
seq: Optional[int] = Field(None, ge=0, le=9999)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class PlantFSTransactionDataCreate(PlantFSTransactionDataBase):
@ -97,14 +97,6 @@ class PlantFSTransactionChart(PlantFSTransactionDataBase):
class PlantFSChartData(DefaultBase):
items: List[PlantFSTransactionChart]
bep_year: Optional[int] = Field(None, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
class ListQueryParams(CommonParams):
search: Optional[str] = Field(
default=None,
description="Search keyword",
)
bep_year: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)

@ -1,4 +1,4 @@
from typing import Annotated, Optional
from typing import Optional
from fastapi import APIRouter, HTTPException, status, Query
from .model import PlantMasterData
@ -7,7 +7,6 @@ from .schema import (
PlantMasterDataRead,
PlantMasterDataCreate,
PlantMasterDataUpdate,
ListQueryParams,
)
from .service import get, get_all, create, update, delete
@ -23,14 +22,15 @@ router = APIRouter()
async def get_masterdatas(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Get all documents."""
# return
master_datas = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
)
return StandardResponse(
@ -38,23 +38,6 @@ async def get_masterdatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[PlantMasterDataPagination])
async def get_masterdatas_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all documents for export."""
common["all"] = True
master_datas = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=master_datas,
message="All Plant Master Data retrieved successfully",
)
@router.get("/{masterdata_id}", response_model=StandardResponse[PlantMasterDataRead])
async def get_masterdata(db_session: DbSession, masterdata_id: str):
@ -78,7 +61,7 @@ async def create_masterdata(
return StandardResponse(data=masterdata, message="Data created successfully")
@router.post("/update/{masterdata_id}", response_model=StandardResponse[PlantMasterDataRead])
@router.put("/{masterdata_id}", response_model=StandardResponse[PlantMasterDataRead])
async def update_masterdata(
db_session: DbSession,
masterdata_id: str,
@ -102,7 +85,7 @@ async def update_masterdata(
)
@router.post("/delete/{masterdata_id}", response_model=StandardResponse[PlantMasterDataRead])
@router.delete("/{masterdata_id}", response_model=StandardResponse[PlantMasterDataRead])
async def delete_masterdata(db_session: DbSession, masterdata_id: str):
masterdata = await get(db_session=db_session, masterdata_id=masterdata_id)

@ -3,76 +3,76 @@ from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
from src.auth.service import CurrentUser
MAX_NUMERIC_VALUE = 1_000_000_000_000_000 # thousands of trillion
class PlantMasterdataBase(DefaultBase):
discount_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
total_project_cost: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
umur_teknis: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
interest_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
loan_portion: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
equity_portion: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
loan: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
loan_tenor: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
principal_interest_payment: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
corporate_tax_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_project: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_equity: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
equity: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
daya_mampu_netto: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
auxiliary: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
susut_trafo: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
sfc: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_a: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_b: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_c: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_d: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
harga_bahan_bakar: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_irr: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_npv: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_irr: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_npv: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_all: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_current: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
discount_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
total_project_cost: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
umur_teknis: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
interest_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan_portion: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
equity_portion: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan_tenor: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
principal_interest_payment: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
corporate_tax_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_project: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_equity: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
equity: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
daya_mampu_netto: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
auxiliary: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
susut_trafo: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
sfc: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_a: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_b: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_c: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_d: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
harga_bahan_bakar: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_irr: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_npv: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_irr: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_npv: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_all: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_current: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class PlantMasterDataCreate(PlantMasterdataBase):
discount_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
total_project_cost: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
umur_teknis: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
interest_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
loan_portion: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
equity_portion: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
loan: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
loan_tenor: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
principal_interest_payment: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
corporate_tax_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_project: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_equity: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
equity: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
daya_mampu_netto: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
auxiliary: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
susut_trafo: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
sfc: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_a: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_b: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_c: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_d: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
harga_bahan_bakar: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_irr: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_npv: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_irr: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_npv: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_all: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_current: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE)
discount_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
total_project_cost: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
umur_teknis: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
interest_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan_portion: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
equity_portion: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
loan_tenor: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
principal_interest_payment: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
corporate_tax_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_project: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
wacc_on_equity: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
equity: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
daya_mampu_netto: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
auxiliary: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
susut_trafo: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
sfc: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_a: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_b: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_c: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
electricity_price_d: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
harga_bahan_bakar: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_irr: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_project_npv: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_irr: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_on_equity_npv: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_all: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
calc_roa_current: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE)
class PlantMasterDataUpdate(PlantMasterdataBase):
@ -85,11 +85,3 @@ class PlantMasterDataRead(PlantMasterdataBase):
class PlantMasterDataPagination(Pagination):
items: List[PlantMasterDataRead] = []
class ListQueryParams(CommonParams):
search: Optional[str] = Field(
default=None,
description="Search keyword",
)

@ -1,4 +1,4 @@
from typing import Annotated, List, Optional
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status, Query
from .model import PlantTransactionData
@ -10,7 +10,6 @@ from .schema import (
PlantTransactionDataCreate,
PlantTransactionDataUpdate,
PlantTransactionFSImport,
ListQueryParams,
)
from .service import (
get,
@ -34,13 +33,14 @@ router = APIRouter()
async def get_transaction_datas(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Get all transaction_data pagination."""
plant_transaction_data = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
)
# return
@ -49,23 +49,6 @@ async def get_transaction_datas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[PlantTransactionDataPagination])
async def get_transaction_datas_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all transaction_data for export."""
common["all"] = True
plant_transaction_data = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=plant_transaction_data,
message="All Plant Transaction Data retrieved successfully",
)
@router.get("/charts", response_model=StandardResponse[PlantChartData])
async def get_chart_data(db_session: DbSession, common: CommonParameters):
chart_data, bep_year, bep_total_lcc = await get_charts(
@ -145,8 +128,8 @@ async def create_transaction_data(
return StandardResponse(data=transaction_data, message="Data created successfully")
@router.post(
"/update/bulk", response_model=StandardResponse[List[PlantTransactionDataRead]]
@router.put(
"/bulk", response_model=StandardResponse[List[PlantTransactionDataRead]]
)
async def bulk_update_transaction_data(
db_session: DbSession,
@ -175,8 +158,8 @@ async def bulk_update_transaction_data(
message="Bulk update completed successfully",
)
@router.post(
"/update/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataRead]
@router.put(
"/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataRead]
)
async def update_transaction_data(
db_session: DbSession,
@ -208,8 +191,8 @@ async def update_transaction_data(
)
@router.post(
"/delete/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataRead]
@router.delete(
"/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataRead]
)
async def delete_transaction_data(db_session: DbSession, transaction_data_id: str):
transaction_data = await get(

@ -3,87 +3,87 @@ from typing import Any, List, Optional
from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
class PlantTransactionDataBase(DefaultBase):
tahun: Optional[int] = Field(None, ge=1900, le=9999)
is_actual: Optional[int] = Field(None, ge=0, le=1)
seq: Optional[int] = Field(None, ge=0, le=9999)
net_capacity_factor: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
production_bruto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
production_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
energy_sales: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fuel_consumption: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_replacement: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pm: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_acquisition: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pinjaman: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_depreciation: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_fuel: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_om: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_pm_nonmi: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_expense: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_cost_eac: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None)
total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
calc_depreciation: Optional[float] = Field(None)
calc_interest_payment: Optional[float] = Field(None)
calc_principal_payment: Optional[float] = Field(None)
calc_dept_amount: Optional[float] = Field(None)
calc2_ebitda: Optional[float] = Field(None)
calc2_earning_before_tax: Optional[float] = Field(None)
calc2_tax: Optional[float] = Field(None)
calc2_earning_after_tax: Optional[float] = Field(None)
calc2_nopat: Optional[float] = Field(None)
calc3_interest_after_tax: Optional[float] = Field(None)
calc3_free_cash_flow_on_project: Optional[float] = Field(None)
calc3_discounted_fcf_on_project: Optional[float] = Field(None)
calc4_principal_repayment: Optional[float] = Field(None)
calc4_free_cash_flow_on_equity: Optional[float] = Field(None)
calc4_discounted_fcf_on_equity: Optional[float] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
net_capacity_factor: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
production_bruto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
production_netto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
energy_sales: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fuel_consumption: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_replacement: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pm: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_acquisition: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pinjaman: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_depreciation: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_fuel: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_om: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_pm_nonmi: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_expense: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_cost_eac: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, nullable=True)
total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
calc_depreciation: Optional[float] = Field(None, nullable=True)
calc_interest_payment: Optional[float] = Field(None, nullable=True)
calc_principal_payment: Optional[float] = Field(None, nullable=True)
calc_dept_amount: Optional[float] = Field(None, nullable=True)
calc2_ebitda: Optional[float] = Field(None, nullable=True)
calc2_earning_before_tax: Optional[float] = Field(None, nullable=True)
calc2_tax: Optional[float] = Field(None, nullable=True)
calc2_earning_after_tax: Optional[float] = Field(None, nullable=True)
calc2_nopat: Optional[float] = Field(None, nullable=True)
calc3_interest_after_tax: Optional[float] = Field(None, nullable=True)
calc3_free_cash_flow_on_project: Optional[float] = Field(None, nullable=True)
calc3_discounted_fcf_on_project: Optional[float] = Field(None, nullable=True)
calc4_principal_repayment: Optional[float] = Field(None, nullable=True)
calc4_free_cash_flow_on_equity: Optional[float] = Field(None, nullable=True)
calc4_discounted_fcf_on_equity: Optional[float] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
class PlantTransactionChart(PlantTransactionDataBase):
@ -93,14 +93,14 @@ class PlantTransactionChart(PlantTransactionDataBase):
class PlantChartData(DefaultBase):
items: List[PlantTransactionChart]
bep_year: Optional[int] = Field(int, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(float, ge=0, le=1_000_000_000_000_000)
bep_year: Optional[int] = Field(int, nullable=True, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(float, nullable=True, ge=0, le=1_000_000_000_000_000)
class PlantTransactionFSImport(DefaultBase):
data: List[List[Optional[Any]]]
is_actual: Optional[int] = Field(None, ge=0, le=1)
seq: Optional[int] = Field(None, ge=0, le=9999)
is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
class PlantTransactionDataCreate(PlantTransactionDataBase):
@ -117,8 +117,3 @@ class PlantTransactionDataRead(PlantTransactionDataBase):
class PlantTransactionDataPagination(Pagination):
items: List[PlantTransactionDataRead] = []
class ListQueryParams(CommonParams):
pass

@ -1,4 +1,4 @@
from typing import Annotated, List, Optional
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, HTTPException, status, Query
@ -11,7 +11,6 @@ from src.plant_transaction_data_simulations.schema import (
PlantTransactionDataSimulationsCreate,
PlantTransactionDataSimulationsUpdate,
PlantTransactionFSImportSimulations,
ListQueryParams,
)
from src.plant_transaction_data_simulations.service import (
get,
@ -35,15 +34,17 @@ router = APIRouter()
async def get_transaction_datas(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
simulation_id: UUID = Query(..., description="Simulation identifier"),
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Get all transaction_data pagination."""
plant_transaction_data = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
simulation_id=params.simulation_id,
simulation_id=simulation_id,
)
# return
return StandardResponse(
@ -51,25 +52,6 @@ async def get_transaction_datas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[PlantTransactionDataSimulationsPagination])
async def get_transaction_datas_export_all(
db_session: DbSession,
common: CommonParameters,
simulation_id: UUID = Query(..., description="Simulation identifier"),
):
"""Get all transaction_data for export."""
common["all"] = True
plant_transaction_data = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
simulation_id=simulation_id,
)
return StandardResponse(
data=plant_transaction_data,
message="All Plant Transaction Data Simulations retrieved successfully",
)
@router.get("/charts", response_model=StandardResponse[PlantChartDataSimulations])
async def get_chart_data(
db_session: DbSession,
@ -156,8 +138,8 @@ async def create_transaction_data(
return StandardResponse(data=transaction_data, message="Data created successfully")
@router.post(
"/update/bulk", response_model=StandardResponse[List[PlantTransactionDataSimulationsRead]]
@router.put(
"/bulk", response_model=StandardResponse[List[PlantTransactionDataSimulationsRead]]
)
async def bulk_update_transaction_data(
db_session: DbSession,
@ -186,8 +168,8 @@ async def bulk_update_transaction_data(
message="Bulk update completed successfully",
)
@router.post(
"/update/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataSimulationsRead]
@router.put(
"/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataSimulationsRead]
)
async def update_transaction_data(
db_session: DbSession,
@ -216,8 +198,8 @@ async def update_transaction_data(
)
@router.post(
"/delete/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataSimulationsRead]
@router.delete(
"/{transaction_data_id}", response_model=StandardResponse[PlantTransactionDataSimulationsRead]
)
async def delete_transaction_data(db_session: DbSession, transaction_data_id: str):
transaction_data = await get(

@ -3,131 +3,131 @@ from typing import Any, List, Optional
from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
class PlantTransactionDataSimulationsBase(DefaultBase):
simulation_id: Optional[UUID] = Field(None)
tahun: Optional[int] = Field(None, ge=1900, le=9999)
is_actual: Optional[int] = Field(None, ge=0, le=1)
seq: Optional[int] = Field(None, ge=0, le=9999)
net_capacity_factor: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
production_bruto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
production_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
energy_sales: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fuel_consumption: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_replacement: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pm: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_acquisition: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pinjaman: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_depreciation: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_a_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_fuel: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_c_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_om: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_pm_nonmi: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_bd_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_expense: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_cost_eac: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None)
total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
calc_depreciation: Optional[float] = Field(None)
calc_interest_payment: Optional[float] = Field(None)
calc_principal_payment: Optional[float] = Field(None)
calc_dept_amount: Optional[float] = Field(None)
calc2_ebitda: Optional[float] = Field(None)
calc2_earning_before_tax: Optional[float] = Field(None)
calc2_tax: Optional[float] = Field(None)
calc2_earning_after_tax: Optional[float] = Field(None)
calc2_nopat: Optional[float] = Field(None)
calc3_interest_after_tax: Optional[float] = Field(None)
calc3_free_cash_flow_on_project: Optional[float] = Field(None)
calc3_discounted_fcf_on_project: Optional[float] = Field(None)
calc4_principal_repayment: Optional[float] = Field(None)
calc4_free_cash_flow_on_equity: Optional[float] = Field(None)
calc4_discounted_fcf_on_equity: Optional[float] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
chart_capex_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
simulation_id: Optional[UUID] = Field(None, nullable=True)
tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999)
is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
net_capacity_factor: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
production_bruto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
production_netto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
energy_sales: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fuel_consumption: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_replacement: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pm: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_acquisition: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pinjaman: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_depreciation: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_a_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_fuel: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_c_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_om: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_pm_nonmi: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_bd_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_expense: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_cost_eac: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_profit_loss: Optional[float] = Field(None, nullable=True)
total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
calc_depreciation: Optional[float] = Field(None, nullable=True)
calc_interest_payment: Optional[float] = Field(None, nullable=True)
calc_principal_payment: Optional[float] = Field(None, nullable=True)
calc_dept_amount: Optional[float] = Field(None, nullable=True)
calc2_ebitda: Optional[float] = Field(None, nullable=True)
calc2_earning_before_tax: Optional[float] = Field(None, nullable=True)
calc2_tax: Optional[float] = Field(None, nullable=True)
calc2_earning_after_tax: Optional[float] = Field(None, nullable=True)
calc2_nopat: Optional[float] = Field(None, nullable=True)
calc3_interest_after_tax: Optional[float] = Field(None, nullable=True)
calc3_free_cash_flow_on_project: Optional[float] = Field(None, nullable=True)
calc3_discounted_fcf_on_project: Optional[float] = Field(None, nullable=True)
calc4_principal_repayment: Optional[float] = Field(None, nullable=True)
calc4_free_cash_flow_on_equity: Optional[float] = Field(None, nullable=True)
calc4_discounted_fcf_on_equity: Optional[float] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_biaya_investasi_tambahan: Optional[float] = Field(
None, ge=0, le=1_000_000_000_000_000
None, nullable=True, ge=0, le=1_000_000_000_000_000
)
fs_chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
fs_chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
class PlantTransactionChartSimulations(PlantTransactionDataSimulationsBase):
tahun: Optional[int] = Field(None, ge=0, le=9999)
is_actual: Optional[int] = Field(None, ge=0, le=1)
seq: Optional[int] = Field(None, ge=0, le=9999)
tahun: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
class PlantChartDataSimulations(DefaultBase):
items: List[PlantTransactionChartSimulations]
bep_year: Optional[int] = Field(int, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(float, ge=0, le=1_000_000_000_000_000)
bep_year: Optional[int] = Field(int, nullable=True, ge=0, le=9999)
bep_total_lcc: Optional[float] = Field(float, nullable=True, ge=0, le=1_000_000_000_000_000)
class PlantTransactionFSImportSimulations(DefaultBase):
data: List[List[Optional[Any]]]
is_actual: Optional[int] = Field(None, ge=0, le=1)
seq: Optional[int] = Field(None, ge=0, le=9999)
simulation_id: UUID = Field(...)
is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1)
seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999)
simulation_id: UUID = Field(..., nullable=False)
class PlantTransactionDataSimulationsCreate(PlantTransactionDataSimulationsBase):
simulation_id: UUID = Field(...)
simulation_id: UUID = Field(..., nullable=False)
class PlantTransactionDataSimulationsUpdate(PlantTransactionDataSimulationsBase):
@ -140,11 +140,3 @@ class PlantTransactionDataSimulationsRead(PlantTransactionDataSimulationsBase):
class PlantTransactionDataSimulationsPagination(Pagination):
items: List[PlantTransactionDataSimulationsRead] = []
class ListQueryParams(CommonParams):
simulation_id: UUID = Field(
...,
description="Simulation identifier",
)

@ -1,4 +1,4 @@
from typing import Annotated, Optional
from typing import Optional
from fastapi import APIRouter, HTTPException, Query, status
@ -13,7 +13,6 @@ from src.simulations.schema import (
SimulationRead,
SimulationRunPayload,
SimulationUpdate,
ListQueryParams,
)
from src.simulations.service import create, delete, get, get_all, run_simulation, update
@ -25,33 +24,18 @@ async def get_simulations(
db_session: DbSession,
common: CommonParameters,
current_user: CurrentUser,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
simulations = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
owner=current_user.name,
)
return StandardResponse(data=simulations, message="Data retrieved successfully")
@router.get("/export-all", response_model=StandardResponse[SimulationPagination])
async def get_simulations_export_all(
db_session: DbSession,
common: CommonParameters,
current_user: CurrentUser,
):
"""Get all simulations for export."""
common["all"] = True
simulations = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
owner=current_user.name,
)
return StandardResponse(data=simulations, message="All Simulations Data retrieved successfully")
@router.get("/{simulation_id}", response_model=StandardResponse[SimulationRead])
async def get_simulation(
@ -107,7 +91,7 @@ async def run_simulation_endpoint(
)
@router.post("/update/{simulation_id}", response_model=StandardResponse[SimulationBase])
@router.put("/{simulation_id}", response_model=StandardResponse[SimulationBase])
async def update_simulation(
db_session: DbSession,
simulation_id: str,
@ -134,7 +118,7 @@ async def update_simulation(
return StandardResponse(data=updated_simulation, message="Data updated successfully")
@router.post("/delete/{simulation_id}", response_model=StandardResponse[SimulationBase])
@router.delete("/{simulation_id}", response_model=StandardResponse[SimulationBase])
async def delete_simulation(
db_session: DbSession,
simulation_id: str,

@ -4,7 +4,7 @@ from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
from src.masterdata_simulations.schema import MasterDataSimulationRead
from src.plant_transaction_data_simulations.schema import (
PlantTransactionDataSimulationsRead,
@ -13,16 +13,16 @@ from src.plant_transaction_data_simulations.schema import (
class SimulationBase(DefaultBase):
id: UUID
label: Optional[str] = Field(None)
version: Optional[int] = Field(None, ge=0, le=9_999_999_999)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
label: Optional[str] = Field(None, nullable=False)
version: Optional[int] = Field(None, nullable=True, ge=0, le=9_999_999_999)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class SimulationCreate(SimulationBase):
label: str = Field(...)
label: str = Field(..., nullable=False)
class SimulationUpdate(SimulationBase):
@ -32,10 +32,10 @@ class SimulationUpdate(SimulationBase):
class SimulationRead(SimulationBase):
id: UUID
masterdata_entries: List[MasterDataSimulationRead] = Field(
default_factory=list
default_factory=list, nullable=False
)
plant_transactions: List[PlantTransactionDataSimulationsRead] = Field(
default_factory=list
default_factory=list, nullable=False
)
@ -44,18 +44,10 @@ class SimulationPagination(Pagination):
class MasterDataOverride(DefaultBase):
name: str = Field(...)
value_num: Optional[float] = Field(None, le=1_000_000_000_000_000)
value_str: Optional[str] = Field(None)
name: str = Field(..., nullable=False)
value_num: Optional[float] = Field(None, nullable=True, le=1_000_000_000_000_000)
value_str: Optional[str] = Field(None, nullable=True)
class SimulationRunPayload(DefaultBase):
label: Optional[str] = Field(None)
label: Optional[str] = Field(None, nullable=True)
overrides: List[MasterDataOverride] = Field(default_factory=list)
class ListQueryParams(CommonParams):
search: Optional[str] = Field(
default=None,
description="Search keyword",
)

@ -1,8 +1,8 @@
from typing import Annotated, Optional
from typing import Optional
from fastapi import APIRouter, Form, HTTPException, status, Query, UploadFile, File
from .model import UploadedFileData
from src.uploaded_file.schema import UploadedFileDataCreate, UploadedFileDataUpdate, UploadedFileDataRead, UploadedFileDataPagination, ListQueryParams
from src.uploaded_file.schema import UploadedFileDataCreate, UploadedFileDataUpdate, UploadedFileDataRead, UploadedFileDataPagination
from src.uploaded_file.service import get, get_all, create, update, delete
from src.database.service import CommonParameters, search_filter_sort_paginate
@ -20,13 +20,14 @@ router = APIRouter()
async def get_uploaded_files(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Get all uploaded files pagination."""
uploaded_files = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
)
# return
@ -35,23 +36,6 @@ async def get_uploaded_files(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[UploadedFileDataPagination])
async def get_uploaded_files_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all uploaded files for export."""
common["all"] = True
uploaded_files = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=uploaded_files,
message="All Uploaded Files Data retrieved successfully",
)
@router.get("/{uploaded_file_id}", response_model=StandardResponse[UploadedFileDataRead])
async def get_uploaded_file(db_session: DbSession, uploaded_file_id: str):
@ -118,7 +102,7 @@ async def create_uploaded_file(
return StandardResponse(data=uploaded_file_obj, message="Data created successfully")
@router.post("/update/{uploaded_file_id}", response_model=StandardResponse[UploadedFileDataRead])
@router.put("/{uploaded_file_id}", response_model=StandardResponse[UploadedFileDataRead])
async def update_uploaded_file(
db_session: DbSession,
uploaded_file_id: str,
@ -142,7 +126,7 @@ async def update_uploaded_file(
)
@router.post("/delete/{uploaded_file_id}", response_model=StandardResponse[UploadedFileDataRead])
@router.delete("/{uploaded_file_id}", response_model=StandardResponse[UploadedFileDataRead])
async def delete_uploaded_file(db_session: DbSession, uploaded_file_id: str):
uploaded_file = await get(db_session=db_session, uploaded_file_id=uploaded_file_id)

@ -3,18 +3,18 @@ from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
class UploadedFileDataBase(DefaultBase):
filename: str = Field(...)
file_content: str = Field(...)
file_url: str = Field(...)
file_size: int = Field(...)
file_type: str = Field(...)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
filename: str = Field(..., nullable=False)
file_content: str = Field(..., nullable=False)
file_url: str = Field(..., nullable=False)
file_size: int = Field(..., nullable=False)
file_type: str = Field(..., nullable=False)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
class UploadedFileDataCreate(UploadedFileDataBase):
pass
@ -24,11 +24,7 @@ class UploadedFileDataUpdate(UploadedFileDataBase):
class UploadedFileDataRead(UploadedFileDataBase):
id: UUID
wlc_version: Optional[int] = Field(None)
wlc_version: Optional[int] = Field(None, nullable=False)
class UploadedFileDataPagination(Pagination):
items: List[UploadedFileDataRead] = []
class ListQueryParams(CommonParams):
pass

@ -1,8 +1,8 @@
from typing import Annotated, Optional
from typing import Optional
from fastapi import APIRouter, HTTPException, status, Query
from .model import Yeardata
from .schema import YeardataPagination, YeardataRead, YeardataCreate, YeardataUpdate, ListQueryParams
from .schema import YeardataPagination, YeardataRead, YeardataCreate, YeardataUpdate
from .service import get, get_all, create, update, delete
from src.database.service import CommonParameters, search_filter_sort_paginate
@ -17,13 +17,14 @@ router = APIRouter()
async def get_yeardatas(
db_session: DbSession,
common: CommonParameters,
params: Annotated[ListQueryParams, Query()],
items_per_page: Optional[int] = Query(5),
search: Optional[str] = Query(None),
):
"""Get all yeardata pagination."""
year_data = await get_all(
db_session=db_session,
items_per_page=params.items_per_page,
search=params.search,
items_per_page=items_per_page,
search=search,
common=common,
)
# return
@ -32,23 +33,6 @@ async def get_yeardatas(
message="Data retrieved successfully",
)
@router.get("/export-all", response_model=StandardResponse[YeardataPagination])
async def get_yeardatas_export_all(
db_session: DbSession,
common: CommonParameters,
):
"""Get all yeardata for export."""
common["all"] = True
year_data = await get_all(
db_session=db_session,
items_per_page=-1,
common=common,
)
return StandardResponse(
data=year_data,
message="All Year Data retrieved successfully",
)
@router.get("/{yeardata_id}", response_model=StandardResponse[YeardataRead])
async def get_yeardata(db_session: DbSession, yeardata_id: str):
@ -72,7 +56,7 @@ async def create_yeardata(
return StandardResponse(data=yeardata, message="Data created successfully")
@router.post("/update/{yeardata_id}", response_model=StandardResponse[YeardataRead])
@router.put("/{yeardata_id}", response_model=StandardResponse[YeardataRead])
async def update_yeardata(
db_session: DbSession,
yeardata_id: str,
@ -96,7 +80,7 @@ async def update_yeardata(
)
@router.post("/delete/{yeardata_id}", response_model=StandardResponse[YeardataRead])
@router.delete("/{yeardata_id}", response_model=StandardResponse[YeardataRead])
async def delete_yeardata(db_session: DbSession, yeardata_id: str):
yeardata = await get(db_session=db_session, yeardata_id=yeardata_id)

@ -3,29 +3,29 @@ from typing import List, Optional
from uuid import UUID
from pydantic import Field, field_validator
from src.models import CommonParams, DefaultBase, Pagination
from src.models import DefaultBase, Pagination
class YeardataBase(DefaultBase):
year: Optional[int] = Field(None, ge=1900)
rp_per_kwh: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
total_lost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
man_hour: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_ens_energy_not_served: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_bpp_system: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_bpp_pembangkit: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_dmn_daya_mampu_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_marginal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_foh_forced_outage_hours: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
asset_crit_extra_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
cf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000)
rbd_simulation_id: Optional[str] = Field(None)
created_at: Optional[datetime] = Field(None)
updated_at: Optional[datetime] = Field(None)
created_by: Optional[str] = Field(None)
updated_by: Optional[str] = Field(None)
year: Optional[int] = Field(None, nullable=True, ge=1900)
rp_per_kwh: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
total_lost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
man_hour: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_ens_energy_not_served: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_bpp_system: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_bpp_pembangkit: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_dmn_daya_mampu_netto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_marginal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_foh_forced_outage_hours: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
asset_crit_extra_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
cf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000)
rbd_simulation_id: Optional[str] = Field(None, nullable=True)
created_at: Optional[datetime] = Field(None, nullable=True)
updated_at: Optional[datetime] = Field(None, nullable=True)
created_by: Optional[str] = Field(None, nullable=True)
updated_by: Optional[str] = Field(None, nullable=True)
@field_validator(
"asset_crit_ens_energy_not_served",
@ -61,8 +61,3 @@ class YeardataRead(YeardataBase):
class YeardataPagination(Pagination):
items: List[YeardataRead] = []
class ListQueryParams(CommonParams):
pass

@ -1,111 +0,0 @@
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in <module>
raise SystemExit(pytest.console_main())
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main
code = main()
^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main
config = _prepareconfig(args, plugins)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig
config = pluginmanager.hook.pytest_cmdline_parse(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse
config = yield
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse
self.parse(args)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse
self._preparse(args, addopts=addopts)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse
self.hook.pytest_load_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests
return (yield)
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests
yield
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests
self.pluginmanager._set_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests
self._try_load_conftest(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest
self._loadconftestmodules(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules
mod = self._importconftest(
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest
mod = import_path(
^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path
importlib.import_module(module_name)
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen importlib._bootstrap>", line 1204, in _gcd_import
File "<frozen importlib._bootstrap>", line 1176, in _find_and_load
File "<frozen importlib._bootstrap>", line 1147, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 690, in _load_unlocked
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", line 184, in exec_module
exec(co, module.__dict__)
File "C:\dev\be-lcca\tests\conftest.py", line 20, in <module>
from fastapi import Request
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\__init__.py", line 7, in <module>
from .applications import FastAPI as FastAPI
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\applications.py", line 16, in <module>
from fastapi import routing
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\routing.py", line 34, in <module>
from fastapi.dependencies.models import Dependant
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\dependencies\models.py", line 5, in <module>
from fastapi.security.base import SecurityBase
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\security\__init__.py", line 1, in <module>
from .api_key import APIKeyCookie as APIKeyCookie
File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\security\api_key.py", line 6, in <module>
from starlette.requests import Request
File "C:\dev\be-lcca\venv\Lib\site-packages\starlette\requests.py", line 12, in <module>
from starlette.formparsers import FormParser, MultiPartException, MultiPartParser
File "C:\dev\be-lcca\venv\Lib\site-packages\starlette\formparsers.py", line 17, in <module>
import python_multipart as multipart
File "C:\dev\be-lcca\venv\Lib\site-packages\python_multipart\__init__.py", line 7, in <module>
from .multipart import (
File "C:\dev\be-lcca\venv\Lib\site-packages\python_multipart\multipart.py", line 115, in <module>
class MultipartState(IntEnum):
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\enum.py", line 647, in __new__
delattr(enum_class, '_singles_mask_')
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\enum.py", line 752, in __delattr__
super().__delattr__(attr)
^^^^^^^
KeyboardInterrupt

@ -1,155 +0,0 @@
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in <module>
raise SystemExit(pytest.console_main())
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main
code = main()
^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main
config = _prepareconfig(args, plugins)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig
config = pluginmanager.hook.pytest_cmdline_parse(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse
config = yield
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse
self.parse(args)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse
self._preparse(args, addopts=addopts)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse
self.hook.pytest_load_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests
return (yield)
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests
yield
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests
self.pluginmanager._set_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests
self._try_load_conftest(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest
self._loadconftestmodules(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules
mod = self._importconftest(
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest
mod = import_path(
^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path
importlib.import_module(module_name)
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen importlib._bootstrap>", line 1204, in _gcd_import
File "<frozen importlib._bootstrap>", line 1176, in _find_and_load
File "<frozen importlib._bootstrap>", line 1147, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 690, in _load_unlocked
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", line 184, in exec_module
exec(co, module.__dict__)
File "C:\dev\be-lcca\tests\conftest.py", line 22, in <module>
from src.main import app
File "C:\dev\be-lcca\src\main.py", line 33, in <module>
from src.api import api_router
File "C:\dev\be-lcca\src\api.py", line 22, in <module>
from src.simulations.router import router as simulations_router
File "C:\dev\be-lcca\src\simulations\__init__.py", line 1, in <module>
from .router import router
File "C:\dev\be-lcca\src\simulations\router.py", line 17, in <module>
from src.simulations.service import create, delete, get, get_all, run_simulation, update
File "C:\dev\be-lcca\src\simulations\service.py", line 34, in <module>
column.key for column in sa_inspect(MasterData).mapper.column_attrs if column.key != "id"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1257, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 3172, in column_attrs
return self._filter_properties(properties.ColumnProperty)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 3225, in _filter_properties
self._check_configure()
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 2401, in _check_configure
_configure_registries({self.registry}, cascade=True)
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 4213, in _configure_registries
_do_configure_registries(registries, cascade)
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 4254, in _do_configure_registries
mapper._post_configure_properties()
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 2421, in _post_configure_properties
prop.post_instrument_class(self)
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\interfaces.py", line 1113, in post_instrument_class
self.strategy.init_class_attribute(mapper)
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\strategies.py", line 254, in init_class_attribute
_register_attribute(
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\strategies.py", line 126, in _register_attribute
desc = attributes.register_attribute_impl(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\attributes.py", line 2605, in register_attribute_impl
"_Dispatch[QueryableAttribute[Any]]", manager[key].dispatch
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\event\base.py", line 465, in __get__
if hasattr(obj, "_slots_dispatch"):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\attributes.py", line 472, in __getattr__
return getattr(self.comparator, key)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1332, in __getattr__
return self._fallback_getattr(key)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 472, in _fallback_getattr
return getattr(self.__clause_element__(), key)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1319, in oneshot
result = fn(*args, **kw)
^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 439, in _memoized_method___clause_element__
return self._orm_annotate_column(self.prop.columns[0])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 425, in _orm_annotate_column
return col._annotate(annotations)._set_propagate_attrs(
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 129, in _annotate
return Annotated._as_annotated_instance(self, values) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 277, in _as_annotated_instance
return cls(element, values)
^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\elements.py", line 5313, in __init__
Annotated.__init__(self, element, values)
File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 289, in __init__
self.__dict__ = element.__dict__.copy()
^^^^^^^^^^^^^^^^^^^^^^^
KeyboardInterrupt

@ -1,38 +0,0 @@
C:\dev\be-lcca\venv\Lib\site-packages\pytest_asyncio\plugin.py:247: PytestDeprecationWarning: The configuration option "asyncio_default_fixture_loop_scope" is unset.
The event loop scope for asynchronous fixtures will default to the fixture caching scope. Future versions of pytest-asyncio will default the loop scope for asynchronous fixtures to function scope. Set the default fixture loop scope explicitly in order to avoid unexpected behavior in the future. Valid fixture loop scopes are: "function", "class", "module", "package", "session"
warnings.warn(PytestDeprecationWarning(_DEFAULT_FIXTURE_LOOP_SCOPE_UNSET))
============================= test session starts =============================
platform win32 -- Python 3.11.9, pytest-8.3.4, pluggy-1.5.0 -- C:\dev\be-lcca\venv\Scripts\python.exe
cachedir: .pytest_cache
rootdir: C:\dev\be-lcca
configfile: pyproject.toml
plugins: anyio-4.8.0, Faker-30.10.0, asyncio-1.3.0
asyncio: mode=Mode.STRICT, debug=False, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function
collecting ... collected 1 item
tests/test_healthcheck.py::test_healthcheck PASSED [100%]
============================== warnings summary ===============================
venv\Lib\site-packages\pydantic\_internal\_config.py:295
C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_config.py:295: PydanticDeprecatedSince20: Support for class-based `config` is deprecated, use ConfigDict instead. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/
warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
venv\Lib\site-packages\pydantic\fields.py:1042: 473 warnings
C:\dev\be-lcca\venv\Lib\site-packages\pydantic\fields.py:1042: PydanticDeprecatedSince20: Using extra keyword arguments on `Field` is deprecated and will be removed. Use `json_schema_extra` instead. (Extra keys: 'nullable'). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/
warn(
venv\Lib\site-packages\pydantic\_internal\_generate_schema.py:297: 115 warnings
C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py:297: PydanticDeprecatedSince20: `json_encoders` is deprecated. See https://docs.pydantic.dev/2.10/concepts/serialization/#custom-serializers for alternatives. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/
warnings.warn(
src\database\core.py:115
C:\dev\be-lcca\src\database\core.py:115: MovedIn20Warning: The ``declarative_base()`` function is now available as sqlalchemy.orm.declarative_base(). (deprecated since: 2.0) (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)
Base = declarative_base(cls=CustomBase)
tests/test_healthcheck.py::test_healthcheck
C:\dev\be-lcca\venv\Lib\site-packages\httpx\_client.py:1437: DeprecationWarning: The 'app' shortcut is now deprecated. Use the explicit style 'transport=ASGITransport(app=...)' instead.
warnings.warn(message, DeprecationWarning)
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
======================= 1 passed, 591 warnings in 0.95s =======================

@ -1,141 +0,0 @@
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in <module>
raise SystemExit(pytest.console_main())
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main
code = main()
^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main
config = _prepareconfig(args, plugins)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig
config = pluginmanager.hook.pytest_cmdline_parse(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse
config = yield
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse
self.parse(args)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse
self._preparse(args, addopts=addopts)
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse
self.hook.pytest_load_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests
return (yield)
^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall
teardown.throw(exception) # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests
yield
File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests
self.pluginmanager._set_initial_conftests(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests
self._try_load_conftest(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest
self._loadconftestmodules(
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules
mod = self._importconftest(
^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest
mod = import_path(
^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path
importlib.import_module(module_name)
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen importlib._bootstrap>", line 1204, in _gcd_import
File "<frozen importlib._bootstrap>", line 1176, in _find_and_load
File "<frozen importlib._bootstrap>", line 1147, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 690, in _load_unlocked
File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", line 184, in exec_module
exec(co, module.__dict__)
File "C:\dev\be-lcca\tests\conftest.py", line 22, in <module>
from src.main import app
File "C:\dev\be-lcca\src\main.py", line 33, in <module>
from src.api import api_router
File "C:\dev\be-lcca\src\api.py", line 18, in <module>
from src.acquisition_cost.router import router as acquisition_data_router
File "C:\dev\be-lcca\src\acquisition_cost\router.py", line 6, in <module>
from src.acquisition_cost.schema import AcquisitionCostDataPagination, AcquisitionCostDataRead, AcquisitionCostDataCreate, AcquisitionCostDataUpdate, ListQueryParams
File "C:\dev\be-lcca\src\acquisition_cost\schema.py", line 20, in <module>
class AcquisitionCostDataCreate(AcquisitionCostDataBase):
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 224, in __new__
complete_model_class(
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 602, in complete_model_class
schema = cls.__get_pydantic_core_schema__(cls, handler)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\main.py", line 702, in __get_pydantic_core_schema__
return handler(source)
^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 879, in _generate_schema_inner
return self._model_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in _model_schema
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in <dictcomp>
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1071, in _generate_md_field_schema
common_field = self._common_field_schema(name, field_info, decorators)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1263, in _common_field_schema
schema = self._apply_annotations(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2056, in _apply_annotations
schema = get_inner_schema(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2040, in inner_handler
metadata_js_function = _extract_get_pydantic_json_schema(obj, schema)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2403, in _extract_get_pydantic_json_schema
return _extract_get_pydantic_json_schema(tp.__origin__, schema)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2402, in _extract_get_pydantic_json_schema
if hasattr(tp, '__origin__') and not _typing_extra.is_annotated(tp):
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\typing.py", line 470, in __getattr__
def __getattr__(self, item):
KeyboardInterrupt

@ -1,51 +0,0 @@
C:\dev\be-lcca\venv\Lib\site-packages\pytest_asyncio\plugin.py:247: PytestDeprecationWarning: The configuration option "asyncio_default_fixture_loop_scope" is unset.
The event loop scope for asynchronous fixtures will default to the fixture caching scope. Future versions of pytest-asyncio will default the loop scope for asynchronous fixtures to function scope. Set the default fixture loop scope explicitly in order to avoid unexpected behavior in the future. Valid fixture loop scopes are: "function", "class", "module", "package", "session"
warnings.warn(PytestDeprecationWarning(_DEFAULT_FIXTURE_LOOP_SCOPE_UNSET))
============================= test session starts =============================
platform win32 -- Python 3.11.9, pytest-8.3.4, pluggy-1.5.0 -- C:\dev\be-lcca\venv\Scripts\python.exe
cachedir: .pytest_cache
rootdir: C:\dev\be-lcca
configfile: pytest.ini
plugins: anyio-4.8.0, Faker-30.10.0, asyncio-1.3.0
asyncio: mode=Mode.AUTO, debug=False, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function
collecting ... collected 0 items / 2 errors
=================================== ERRORS ====================================
____________ ERROR collecting tests/unit/test_masterdata_logic.py _____________
tests\unit\test_masterdata_logic.py:2: in <module>
from src.masterdata.service import calculate_pmt
src\masterdata\service.py:6: in <module>
from src.database.service import search_filter_sort_paginate
src\database\service.py:7: in <module>
from .core import DbSession
src\database\core.py:19: in <module>
from src.config import SQLALCHEMY_DATABASE_URI, COLLECTOR_URI
src\config.py:99: in <module>
DEV_USERNAME = config("DEV_USERNAME")
venv\Lib\site-packages\starlette\config.py:90: in __call__
return self.get(key, cast, default)
venv\Lib\site-packages\starlette\config.py:107: in get
raise KeyError(f"Config '{key}' is missing, and has no default.")
E KeyError: "Config 'DEV_USERNAME' is missing, and has no default."
___________ ERROR collecting tests/unit/test_masterdata_service.py ____________
tests\unit\test_masterdata_service.py:3: in <module>
from src.masterdata.service import create, get
src\masterdata\service.py:6: in <module>
from src.database.service import search_filter_sort_paginate
src\database\service.py:7: in <module>
from .core import DbSession
src\database\core.py:19: in <module>
from src.config import SQLALCHEMY_DATABASE_URI, COLLECTOR_URI
src\config.py:99: in <module>
DEV_USERNAME = config("DEV_USERNAME")
venv\Lib\site-packages\starlette\config.py:90: in __call__
return self.get(key, cast, default)
venv\Lib\site-packages\starlette\config.py:107: in get
raise KeyError(f"Config '{key}' is missing, and has no default.")
E KeyError: "Config 'DEV_USERNAME' is missing, and has no default."
=========================== short test summary info ===========================
ERROR tests/unit/test_masterdata_logic.py - KeyError: "Config 'DEV_USERNAME' ...
ERROR tests/unit/test_masterdata_service.py - KeyError: "Config 'DEV_USERNAME...
!!!!!!!!!!!!!!!!!!! Interrupted: 2 errors during collection !!!!!!!!!!!!!!!!!!!
============================== 2 errors in 0.67s ==============================

@ -1,115 +1,69 @@
import os

# Dummy credentials so importing src.config during test collection does not
# raise (e.g. KeyError: "Config 'DEV_USERNAME' is missing").  Must run before
# any src.* import below.
os.environ.update(
    {
        "DATABASE_HOSTNAME": "localhost",
        "DATABASE_CREDENTIAL_USER": "test",
        "DATABASE_CREDENTIAL_PASSWORD": "test",
        "COLLECTOR_CREDENTIAL_USER": "test",
        "COLLECTOR_CREDENTIAL_PASSWORD": "test",
        "DEV_USERNAME": "test",
        "DEV_PASSWORD": "test",
    }
)
# import asyncio
# from typing import AsyncGenerator, Generator
# import pytest
# import pytest_asyncio
# from httpx import AsyncClient, ASGITransport
# from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
# from sqlalchemy.orm import sessionmaker
# from sqlalchemy.pool import StaticPool
# from fastapi import Request
import asyncio
from typing import AsyncGenerator, Generator
import pytest
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
import pytest
from sqlalchemy_utils import drop_database, database_exists
from starlette.config import environ
from starlette.testclient import TestClient
# from src.database import Base, get_db
# from src.main import app
# from src.database.core import Base, get_db, get_collector_db
# from src.auth.service import JWTBearer
# from src.auth.model import UserBase
# # Import all models to register them with Base
# import src.acquisition_cost.model
# import src.equipment.model
# import src.equipment_master.model
# import src.manpower_cost.model
# import src.manpower_master.model
# import src.masterdata.model
# import src.masterdata_simulations.model
# import src.plant_fs_transaction_data.model
# import src.plant_masterdata.model
# import src.plant_transaction_data.model
# import src.plant_transaction_data_simulations.model
# import src.simulations.model
# import src.uploaded_file.model
# import src.yeardata.model
# In-memory SQLite database (async driver) used by the test suite.
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
# # Test database URL
# TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
# check_same_thread=False allows the SQLite connection to be used from other
# threads; StaticPool keeps a single shared connection so the :memory:
# database is not discarded between sessions.
engine = create_async_engine(
    TEST_DATABASE_URL,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
# engine = create_async_engine(
# TEST_DATABASE_URL,
# connect_args={"check_same_thread": False},
# poolclass=StaticPool,
# )
# Session factory bound to the in-memory test engine.
# expire_on_commit=False keeps ORM instances readable after commit;
# autocommit/autoflush are disabled so tests control transaction boundaries.
async_session = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autocommit=False,
    autoflush=False,
)
# TestingSessionLocal = sessionmaker(
# engine,
# class_=AsyncSession,
# expire_on_commit=False,
# autocommit=False,
# autoflush=False,
# )
# def pytest_sessionfinish(session, exitstatus):
# """
# Called after whole test run finished, right before returning the exit status to the system.
# Used here to dispose of all SQLAlchemy engines to prevent hanging.
# """
# from src.database.core import engine as db_engine, collector_engine
async def override_get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency override: yield a test session.

    Commits after the request completes; rolls back and re-raises on any
    exception; always closes the session.
    """
    async with async_session() as db:
        try:
            yield db
            await db.commit()
        except Exception:
            await db.rollback()
            raise
        finally:
            await db.close()
# async def dispose_all():
# # Dispose of both test engine and production engines
# await engine.dispose()
# await db_engine.dispose()
# await collector_engine.dispose()
# try:
# loop = asyncio.get_event_loop()
# if loop.is_running():
# # If the loop is already running, we create a task
# loop.create_task(dispose_all())
# else:
# loop.run_until_complete(dispose_all())
# except Exception:
# # Fallback for environment where no loop is available or loop is closed
# try:
# asyncio.run(dispose_all())
# except Exception:
# pass
# Route the app's get_db dependency to the in-memory test session.
# NOTE(review): `app` and `get_db` are no longer imported in this file (the
# src.* imports above are commented out), so this line raises NameError at
# import time — restore the imports or drop the override.
app.dependency_overrides[get_db] = override_get_db
# # Removed custom event_loop fixture
# @pytest_asyncio.fixture(autouse=True)
# async def setup_db():
# async with engine.begin() as conn:
# await conn.run_sync(Base.metadata.create_all)
# yield
# async with engine.begin() as conn:
# await conn.run_sync(Base.metadata.drop_all)
@pytest.fixture(scope="session")
def event_loop() -> Generator:
    """Provide one event loop for the whole test session."""
    policy = asyncio.get_event_loop_policy()
    loop = policy.new_event_loop()
    yield loop
    loop.close()
# async def override_get_db(request: Request = None):
# async with TestingSessionLocal() as session:
# yield session
# app.dependency_overrides[get_db] = override_get_db
# app.dependency_overrides[get_collector_db] = override_get_db
@pytest.fixture(autouse=True)
async def setup_db() -> AsyncGenerator[None, None]:
    """Create all tables before each test and drop them afterwards.

    NOTE(review): `Base` is not imported here (the model/Base imports above
    are commented out), so this fixture fails with NameError — confirm which
    metadata object is intended and restore its import.
    """
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    yield
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
# @pytest.fixture(autouse=True)
# def mock_auth(monkeypatch):
# async def mock_call(self, request: Request):
# user = UserBase(user_id="test-id", name="test-user", role="admin")
# request.state.user = user
# return user
# monkeypatch.setattr(JWTBearer, "__call__", mock_call)
# @pytest_asyncio.fixture
# async def client() -> AsyncGenerator[AsyncClient, None]:
# async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
# yield client
@pytest.fixture
async def client() -> AsyncGenerator[AsyncClient, None]:
    """HTTP client talking to the FastAPI app over the in-process ASGI transport.

    The `AsyncClient(app=...)` shortcut was deprecated in httpx 0.27 and
    removed in 0.28; pass an explicit ASGITransport instead (matches the
    previously commented-out version of this fixture).

    NOTE(review): `app` is not imported in this file any more (the src.main
    import above is commented out) — restore it for this fixture to work.
    """
    from httpx import ASGITransport  # local import keeps this fix self-contained

    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        yield client

@ -0,0 +1,3 @@
from sqlalchemy.orm import scoped_session, sessionmaker

# Thread-local session registry shared by the test factories; the factory
# base class binds to it and commits created objects through it.
_session_factory = sessionmaker()
Session = scoped_session(_session_factory)

@ -0,0 +1,33 @@
import uuid
from datetime import datetime
from factory import (
LazyAttribute,
LazyFunction,
Sequence,
SubFactory,
post_generation,
SelfAttribute,
)
from factory.alchemy import SQLAlchemyModelFactory
from factory.fuzzy import FuzzyChoice, FuzzyDateTime, FuzzyInteger, FuzzyText
from faker import Faker
from faker.providers import misc
# from pytz import UTC
from .database import Session
# Module-wide Faker instance; the misc provider adds helpers such as
# booleans, binary blobs, and UUID-like strings for the factories below.
fake = Faker()
fake.add_provider(misc)
class BaseFactory(SQLAlchemyModelFactory):
    """Base Factory.

    Common parent for all model factories: binds them to the scoped test
    Session and persists each created object with a commit.
    """
    class Meta:
        """Factory configuration."""
        # abstract=True keeps factory_boy from treating this class itself
        # as a buildable factory; subclasses inherit the session settings.
        abstract = True
        sqlalchemy_session = Session
        sqlalchemy_session_persistence = "commit"

@ -1,24 +0,0 @@
import pytest
from src.masterdata.service import calculate_pmt
def test_calculate_pmt_zero_rate():
    """With a zero interest rate the payment is simply -PV / nper."""
    principal = 1000
    periods = 10
    assert calculate_pmt(0, periods, principal) == -100
def test_calculate_pmt_standard():
    """Loan of 1000 at 5% over 2 periods.

    PMT = -1000 * (0.05 * 1.05**2) / (1.05**2 - 1)
        = -1000 * 0.055125 / 0.1025 = -537.8048...
    """
    payment = calculate_pmt(5, 2, 1000)
    assert round(payment, 2) == -537.80
def test_calculate_pmt_percentage():
    """Rates greater than 1 are interpreted as percentages (divided by 100)."""
    as_percent = calculate_pmt(5, 10, 1000)
    as_fraction = calculate_pmt(0.05, 10, 1000)
    assert as_percent == as_fraction

@ -1,39 +0,0 @@
import pytest
from unittest.mock import AsyncMock, MagicMock
from src.masterdata.service import create, get
from src.masterdata.schema import MasterDataCreate
@pytest.mark.asyncio
async def test_create_masterdata_service():
    """create() should add the new record to the session and commit."""
    session = AsyncMock()
    session.add = MagicMock()  # Session.add is synchronous on a real session
    payload = MasterDataCreate(
        name="Test",
        description="Desc",
        unit_of_measurement="unit",
        value_num=10.0,
        seq=1,
    )
    created = await create(db_session=session, masterdata_in=payload)
    assert created.name == "Test"
    session.add.assert_called_once()
    session.commit.assert_called_once()
@pytest.mark.asyncio
async def test_get_masterdata_service():
    """get() should return the row from execute().scalars().one_or_none()."""
    session = AsyncMock()
    session.add = MagicMock()
    row = MagicMock()
    row.id = "test-id"
    # Stub the db_session.execute().scalars().one_or_none() chain.
    execute_result = MagicMock()
    execute_result.scalars.return_value.one_or_none.return_value = row
    session.execute.return_value = execute_result
    fetched = await get(db_session=session, masterdata_id="test-id")
    assert fetched.id == "test-id"
    session.execute.assert_called_once()
Loading…
Cancel
Save