diff --git a/Jenkinsfile b/Jenkinsfile index 55d7922..2559cb0 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -43,6 +43,18 @@ pipeline { } } + // stage('Run Unit Tests') { + // steps { + // sh 'poetry run pytest tests/unit' + // } + // } + + // stage('Run E2E Tests') { + // steps { + // sh 'poetry run pytest tests/e2e' + // } + // } + stage('Docker Login') { steps { // Fixed variable names based on the 'DOCKER_AUTH' environment key diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 0000000..0d5958f --- /dev/null +++ b/TESTING.md @@ -0,0 +1,44 @@ +# Unit Testing Guide - be-lcca + +This document provides instructions on how to set up and run unit tests for the **be-lcca** project. + +## 1. Preparation + +### Install Dependencies +Ensure you have all dependencies installed. This project uses `poetry`. + +```bash +# Install dependencies +poetry install +``` + +## 2. Configuration + +### Pytest Configuration +Ensure the `pytest.ini` file in the root directory points to the `unit` test folder: + +```ini +[pytest] +testpaths = tests/unit +python_files = test_*.py +asyncio_mode = auto +``` + +## 3. Running Tests + +### Run Unit Tests +To run all unit tests in the project: + +```bash +poetry run pytest tests/unit +``` + +### Run Specific Unit Test File +```bash +poetry run pytest tests/unit/test_specific_feature.py +``` + +## 4. Best Practices + +- **Isolation**: Ensure tests do not rely on a live database; use local data structures or mock objects. +- **Factory Boy**: Use factories for creating complex models in your tests. diff --git a/docs/test.md b/docs/test.md new file mode 100644 index 0000000..1f4e185 --- /dev/null +++ b/docs/test.md @@ -0,0 +1,88 @@ +# Panduan Menjalankan Script Testing di BE LCCA Digital Twin + +Proyek ini menggunakan **Pytest** sebagai framework pengujian. Infrastruktur testing terletak di direktori `tests/` dan dikonfigurasi untuk menangani sifat asynchronous dari aplikasi FastAPI serta isolasi database. + +--- + +## **1. 
Persiapan Lingkungan (Environment Setup)** +Pastikan Anda berada di root direktori proyek dan environment sudah siap. + +### **Opsi A: Menggunakan Virtual Environment (Direkomendasikan)** +Aktifkan `venv` sebelum menjalankan perintah apapun: +```bash +python -m venv venv +source venv/bin/activate +pip install poetry +poetry install +``` + +### **Opsi B: Menggunakan Poetry** +Jika Anda lebih suka menggunakan Poetry secara langsung tanpa aktivasi manual: +```bash +poetry run pytest +``` + +--- + +## **2. Menjalankan Pengujian** + +| Tujuan | Perintah | +| :--- | :--- | +| **Jalankan Unit Tests** | `pytest tests/unit` | +| **Jalankan E2E Tests** | `pytest tests/e2e` | +| **Jalankan semua test** | `pytest` | +| **Tampilkan statement print** | `pytest -s` | +| **Berhenti di kegagalan pertama** | `pytest -x` | +| **Jalankan file spesifik** | `pytest tests/unit/test_example.py` | + +> **Catatan**: Verbose output (`-v`) sudah aktif secara default di konfigurasi `pyproject.toml`. + +--- + +## **3. Peringatan Penting (Caution for E2E Tests)** + +⚠️ **PENTING**: Saat menjalankan pengujian **End-to-End (E2E)**, pastikan Anda menggunakan **Testing Database**. + +* **JANGAN PERNAH** menjalankan E2E tests menggunakan database **Production** atau **Development**. +* Pengujian E2E seringkali melakukan operasi manipulasi data (create, update, delete) dan pembersihan database secara otomatis yang dapat mengakibatkan **kehilangan data permanen**. +* Selalu gunakan database terpisah (misalnya PostgreSQL instance khusus testing atau SQLite) yang aman untuk dihapus isinya sewaktu-waktu. + +--- + +## **4. Gambaran Infrastruktur Testing** +Direktori `tests/` berisi beberapa utility script yang memudahkan proses testing: + +* **`conftest.py`**: Berisi fixture global. Sudah terkonfigurasi dengan: + * `client`: `AsyncClient` untuk simulasi request API ke aplikasi FastAPI Anda. + * `setup_db`: Secara otomatis membuat dan menghapus database test (SQLite in-memory) untuk setiap sesi pengujian. 
+* **`factories.py`**: Menggunakan `factory-boy` untuk menghasilkan mock data untuk model Anda. +* **`database.py`**: Mengonfigurasi session database untuk kebutuhan pengujian. + +--- + +## **5. Menulis Test Pertama Anda** +Agar `pytest` mengenali sebuah file sebagai test, file tersebut harus dinamai dengan format `test_*.py` atau `*_test.py`. + +**Contoh (`tests/test_api.py`):** +```python +import pytest + +@pytest.mark.asyncio +async def test_api_status(client): + """Contoh pengujian menggunakan fixture 'client' dari conftest.py""" + response = await client.get("/") + assert response.status_code == 200 +``` + +--- + +## **6. Tips Troubleshooting** +* **Masalah Module Path**: Jika Anda menemui error `ModuleNotFoundError`, jalankan test dengan menambahkan direktori saat ini ke `PYTHONPATH`: + ```bash + export PYTHONPATH=$PYTHONPATH:. + pytest + ``` +* **Menjalankan Test yang Gagal Saja**: Untuk menghemat waktu, jalankan hanya test yang gagal pada sesi sebelumnya: + ```bash + pytest --lf + ``` \ No newline at end of file diff --git a/docs/updated_acquisition_algorithm.md b/docs/updated_acquisition_algorithm.md new file mode 100644 index 0000000..72006b1 --- /dev/null +++ b/docs/updated_acquisition_algorithm.md @@ -0,0 +1,59 @@ +# Updated Equipment Acquisition & Simulation Algorithm + +This document outlines the refactored logic for equipment acquisition cost calculation and simulation forecasting, implemented in February 2026. + +## 1. Timeline Definitions + +The simulation follows a strict temporal alignment to ensure consistency across the fleet: + +| Parameter | Value | Description | +| :--- | :--- | :--- | +| **Base Year** | `2015` | The target year for all "Value of Money" (Net Present Value) calculations. | +| **Forecasting Start** | `2015` | The year from which future predictions and Economic Life reports begin. | +| **Calculation Start** | `2014` | The technical sequence start ($seq = 0$) used to establish an initial state. | + +--- + +## 2. 
Capital Cost Adjustment (Value of Money) + +To account for the time value of money, both the **Initial Acquisition Cost** and the **Replacement Cost** are normalized to the **2015 Base Year** using the project's inflation rate. + +### 2.1 Adjustment Formula + +The value of any cost $V$ at a specific $Year$ is adjusted to its equivalent value in $2015$ using the following formula: + +$$V_{2015} = \frac{V_{Year}}{(1 + r)^{(Year - 2015)}}$$ + +Where: +- $V_{2015}$ = Adjusted value in 2015 terms. +- $V_{Year}$ = Raw cost recorded in the database or Maximo. +- $r$ = Inflation rate (from `lcc_ms_master`, defaults to $0.05$ if undefined). +- $Year$ = The year the cost was recorded ($Y_{acq}$ or $Y_{replace}$). + +### 2.2 Total Acquisition Cost + +The total capital cost $C_{total}$ stored in the master data is the sum of the adjusted initial cost and the adjusted first detected replacement cost: + +$$C_{total} = \frac{C_{initial}}{(1+r)^{(Y_{acq} - 2015)}} + \frac{C_{replace}}{(1+r)^{(Y_{replace} - 2015)}}$$ + +--- + +## 3. Maintenance Cost Suppression Logic + +A specific business rule is applied to prevent "double counting" or distorted maintenance records during major equipment replacement years: + +### 3.1 Replacement Year Rule +In the **first year** where a `replace_cost > 0` is detected in Maximo ($Y_{replace}$): +- All **Material Costs** are set to $0.0$. +- All **Labor Costs** (and labor hours) are set to $0.0$. + +### 3.2 Logic Rationale +The replacement cost is treated as a capital expenditure (CAPEX) that restarts the equipment's life cycle. Standard maintenance (OPEX) for that specific year is ignored because the replacement action supersedes regular repair tasks. + +--- + +## 4. Implementation Reference + +The logic is primarily contained in: +- `src/equipment/service.py`: `check_and_update_acquisition_data()` (Cost adjustments). +- `src/modules/equipment/insert_actual_data.py`: `query_data()` (Timeline and cost suppression). 
diff --git a/poetry.lock b/poetry.lock index 5415881..75a7063 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,20 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. + +[[package]] +name = "aiosqlite" +version = "0.22.1" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb"}, + {file = "aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650"}, +] + +[package.extras] +dev = ["attribution (==1.8.0)", "black (==25.11.0)", "build (>=1.2)", "coverage[toml] (==7.10.7)", "flake8 (==7.3.0)", "flake8-bugbear (==24.12.12)", "flit (==3.12.0)", "mypy (==1.19.0)", "ufmt (==2.8.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.2)"] [[package]] name = "annotated-types" @@ -6,6 +22,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +34,7 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -30,7 +48,7 @@ 
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -39,6 +57,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -50,6 +70,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -107,8 +128,21 @@ async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""} [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy 
(>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +description = "Backport of asyncio.Runner, a context manager that controls event loop life cycle." +optional = false +python-versions = "<3.11,>=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}, + {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}, +] [[package]] name = "certifi" @@ -116,6 +150,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -127,6 +162,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -228,6 +264,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -242,6 +279,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -253,6 +292,7 @@ version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -326,6 +366,7 @@ version = "0.12.1" description = "Composable style cycles" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = 
"cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -341,6 +382,7 @@ version = "1.2.18" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] files = [ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, @@ -350,7 +392,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "dnspython" @@ -358,6 +400,7 @@ version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -378,6 +421,7 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -393,6 +437,7 @@ version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, @@ -404,6 +449,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -418,6 +465,7 @@ version = "3.3.1" description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca"}, {file = "factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0"}, @@ -436,6 +484,7 @@ version = "30.10.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Faker-30.10.0-py3-none-any.whl", hash = "sha256:5f05ee92ddf0e1736d95dca41b2a16ee06d987b736fa4ddecdb047abf2e9024b"}, {file = "faker-30.10.0.tar.gz", hash = "sha256:c2e627d3becec67f7a45400d3670018b5abb3f0728b7dfaa06c135b7df1ce3fb"}, @@ -451,6 +500,7 @@ version = "0.115.8" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, @@ -477,6 +527,7 @@ version = "0.0.7" description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4"}, {file = "fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e"}, @@ -496,6 +547,7 @@ version = "4.56.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, @@ -550,18 +602,18 @@ files = [ ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +all = 
["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] +interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] lxml = ["lxml (>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] repacker = ["uharfbuzz (>=0.23.0)"] symfont = ["sympy"] -type1 = ["xattr"] +type1 = ["xattr ; sys_platform == \"darwin\""] ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] +woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] [[package]] name = "greenlet" @@ -569,6 +621,8 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -655,6 +709,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -666,6 +721,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -687,6 +743,7 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -742,6 +799,7 @@ version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -755,7 +813,7 @@ idna = "*" sniffio = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -767,6 +825,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -781,6 +840,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -792,6 +852,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -809,6 +870,7 @@ version = "1.4.2" description = "Lightweight pipelining with Python functions" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -820,6 +882,7 @@ version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -909,6 +972,7 @@ version = "4.0.1" description = "Rate limiting utilities" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "limits-4.0.1-py3-none-any.whl", hash = "sha256:67667e669f570cf7be4e2c2bc52f763b3f93bdf66ea945584360bc1a3f251901"}, {file = "limits-4.0.1.tar.gz", hash = "sha256:a54f5c058dfc965319ae3ee78faf222294659e371b46d22cd7456761f7e46d5a"}, @@ -920,9 +984,9 @@ packaging = ">=21,<25" typing-extensions = "*" [package.extras] -all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1)", "emcache (>=1)", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"] +all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1) ; python_version < \"3.11\"", "emcache 
(>=1) ; python_version >= \"3.11\" and python_version < \"3.13.0\"", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"] async-etcd = ["aetcd"] -async-memcached = ["emcache (>=0.6.1)", "emcache (>=1)"] +async-memcached = ["emcache (>=0.6.1) ; python_version < \"3.11\"", "emcache (>=1) ; python_version >= \"3.11\" and python_version < \"3.13.0\""] async-mongodb = ["motor (>=3,<4)"] async-redis = ["coredis (>=3.4.0,<5)"] etcd = ["etcd3"] @@ -937,6 +1001,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -961,6 +1026,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1031,6 +1097,7 @@ version = "3.10.0" description = "Python plotting package" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, @@ -1088,6 +1155,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1099,6 +1167,7 @@ version = "2.2.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"}, {file = "numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"}, @@ -1163,6 +1232,7 @@ version = "1.0.0" description = "Simple financial functions" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "numpy-financial-1.0.0.tar.gz", hash = "sha256:f84341bc62b2485d5604a73d5fac7e91975b4b9cd5f4a5a9cf608902ea00cb40"}, {file = "numpy_financial-1.0.0-py3-none-any.whl", hash = 
"sha256:bae534b357516f12258862d1f0181d911032d0467f215bfcd1c264b4da579047"}, @@ -1177,6 +1247,7 @@ version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, @@ -1191,6 +1262,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1202,6 +1274,7 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -1288,6 +1361,7 @@ version = "1.0.1" description = "A Python package for describing statistical models and for building design matrices." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c"}, {file = "patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4"}, @@ -1305,6 +1379,7 @@ version = "11.1.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -1384,7 +1459,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions"] +typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] [[package]] @@ -1393,6 +1468,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1408,6 +1484,7 @@ version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = 
"sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, @@ -1456,6 +1533,7 @@ files = [ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, @@ -1484,6 +1562,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1496,7 +1575,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -1504,6 +1583,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1616,6 +1696,7 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1630,6 +1711,7 @@ version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -1644,6 +1726,7 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -1660,12 +1743,34 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +description = "Pytest support for 
asyncio" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"}, + {file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"}, +] + +[package.dependencies] +backports-asyncio-runner = {version = ">=1.1,<2", markers = "python_version < \"3.11\""} +pytest = ">=8.2,<10" +typing-extensions = {version = ">=4.12", markers = "python_version < \"3.13\""} + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1680,6 +1785,7 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -1694,6 +1800,7 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = 
"sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -1705,6 +1812,7 @@ version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -1716,6 +1824,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1778,6 +1887,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1799,6 +1909,7 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -1818,6 +1929,7 @@ version = "0.13.2" description = "Rich toolkit for building command-line applications" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61"}, {file = "rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3"}, @@ -1834,6 +1946,7 @@ version = "1.6.1" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -1888,6 +2001,7 @@ version = "1.15.1" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "scipy-1.15.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:c64ded12dcab08afff9e805a67ff4480f5e69993310e093434b10e85dc9d43e1"}, {file = "scipy-1.15.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5b190b935e7db569960b48840e5bef71dc513314cc4e79a1b7d14664f57fd4ff"}, @@ -1937,7 +2051,7 @@ numpy = ">=1.23.5,<2.5" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "shellingham" @@ -1945,6 +2059,7 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -1956,6 +2071,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1967,6 +2083,7 @@ version = "0.1.9" description = "A rate limiting extension for Starlette and Fastapi" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "slowapi-0.1.9-py3-none-any.whl", hash = "sha256:cfad116cfb84ad9d763ee155c1e5c5cbf00b0d47399a769b227865f5df576e36"}, {file = "slowapi-0.1.9.tar.gz", hash = "sha256:639192d0f1ca01b1c6d95bf6c71d794c3a9ee189855337b4821f7f457dddad77"}, @@ -1984,6 +2101,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1995,6 +2113,7 @@ version = "2.0.37" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, @@ -2090,6 +2209,7 @@ version = "0.13.0" description = "A library to filter SQLAlchemy queries." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sqlalchemy-filters-0.13.0.tar.gz", hash = "sha256:40f2daead93c4db2409cf5e5abf67a420179f9e5c1df5c15fa1b474f6533b105"}, {file = "sqlalchemy_filters-0.13.0-py3-none-any.whl", hash = "sha256:aa4595b90d152eb76fa312a3e03d5d675f0c2e16762751f340f5449468689d9a"}, @@ -2110,6 +2230,7 @@ version = "0.41.2" description = "Various utility functions for SQLAlchemy." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, @@ -2127,8 +2248,8 @@ intervals = ["intervals (>=0.7.1)"] password = ["passlib (>=1.6,<2.0)"] pendulum = ["pendulum (>=2.0.5)"] phone = ["phonenumbers (>=5.9.2)"] -test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo ; python_version < \"3.9\"", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo ; python_version < \"3.9\"", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals 
(>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] @@ -2138,6 +2259,7 @@ version = "0.45.3" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, @@ -2155,6 +2277,7 @@ version = "0.14.4" description = "Statistical computations and models for Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "statsmodels-0.14.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a62f1fc9086e4b7ee789a6f66b3c0fc82dd8de1edda1522d30901a0aa45e42b"}, {file = "statsmodels-0.14.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46ac7ddefac0c9b7b607eed1d47d11e26fe92a1bc1f4d9af48aeed4e21e87981"}, @@ -2197,7 +2320,7 @@ scipy = ">=1.8,<1.9.2 || >1.9.2" [package.extras] build = ["cython (>=3.0.10)"] -develop = ["colorama", "cython (>=3.0.10)", "cython (>=3.0.10,<4)", "flake8", "isort", "joblib", "matplotlib (>=3)", "pytest (>=7.3.0,<8)", "pytest-cov", "pytest-randomly", "pytest-xdist", "pywinpty", "setuptools-scm[toml] (>=8.0,<9.0)"] +develop = ["colorama", "cython (>=3.0.10)", "cython (>=3.0.10,<4)", "flake8", "isort", "joblib", "matplotlib (>=3)", "pytest (>=7.3.0,<8)", "pytest-cov", "pytest-randomly", "pytest-xdist", "pywinpty ; os_name == \"nt\"", "setuptools-scm[toml] (>=8.0,<9.0)"] docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "numpydoc", "pandas-datareader", "sphinx"] [[package]] @@ -2206,6 +2329,7 @@ 
version = "3.5.0" description = "threadpoolctl" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -2217,6 +2341,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2258,6 +2384,7 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -2275,6 +2402,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2286,6 +2414,7 @@ version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = 
"sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -2297,13 +2426,14 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2314,6 +2444,7 @@ version = "0.32.1" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, @@ -2327,12 +2458,12 @@ httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standar python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -2340,6 +2471,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false 
python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -2391,6 +2524,7 @@ version = "1.0.4" description = "Simple, modern and high performance file watching and code reload in python." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, @@ -2474,6 +2608,7 @@ version = "14.2" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"}, {file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"}, @@ -2552,6 +2687,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -2635,6 +2771,6 @@ files = [ ] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "8d70f1df8b24fbd51e128ed36fbf43c4ccfdcd3b7dbd1f0f718870cab0c4d568" +content-hash = "a67faa975147cf6652ac87a3767b499a2c0e344cb4d3f7c47d0526e81fe2bbd0" diff --git a/pyproject.toml b/pyproject.toml index 053e01d..294260e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,12 @@ pandas = "^2.2.3" numpy-financial = "^1.0.0" numpy = "^2.2.3" statsmodels = "^0.14.4" +pytest-asyncio = "^1.3.0" +aiosqlite = "^0.22.1" + +[tool.pytest.ini_options] +asyncio_default_fixture_loop_scope = "function" +addopts = "-v" [build-system] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..0ba4486 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +testpaths = tests/unit +python_files = test_*.py +asyncio_mode = auto diff --git a/src/__pycache__/exceptions.cpython-311.pyc b/src/__pycache__/exceptions.cpython-311.pyc index ceda2ab..8b0dca6 100644 Binary files a/src/__pycache__/exceptions.cpython-311.pyc and b/src/__pycache__/exceptions.cpython-311.pyc differ diff --git a/src/__pycache__/logging.cpython-311.pyc b/src/__pycache__/logging.cpython-311.pyc index 20fbd26..0d7eb38 100644 Binary files a/src/__pycache__/logging.cpython-311.pyc and b/src/__pycache__/logging.cpython-311.pyc differ diff --git a/src/__pycache__/main.cpython-311.pyc b/src/__pycache__/main.cpython-311.pyc index e7365ad..d00c9a4 100644 Binary files a/src/__pycache__/main.cpython-311.pyc and b/src/__pycache__/main.cpython-311.pyc differ diff --git a/src/__pycache__/models.cpython-311.pyc 
b/src/__pycache__/models.cpython-311.pyc index 09d2e86..8b5292e 100644 Binary files a/src/__pycache__/models.cpython-311.pyc and b/src/__pycache__/models.cpython-311.pyc differ diff --git a/src/acquisition_cost/schema.py b/src/acquisition_cost/schema.py index ec995a3..23dbdd8 100644 --- a/src/acquisition_cost/schema.py +++ b/src/acquisition_cost/schema.py @@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination class AcquisitionCostDataBase(DefaultBase): - category_no: Optional[str] = Field(None, nullable=True) - name: Optional[str] = Field(None, nullable=True) - cost_unit_3_n_4: Optional[float] = Field(None, nullable=True) - cost_unit_3: Optional[float] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + category_no: Optional[str] = Field(None) + name: Optional[str] = Field(None) + cost_unit_3_n_4: Optional[float] = Field(None) + cost_unit_3: Optional[float] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class AcquisitionCostDataCreate(AcquisitionCostDataBase): diff --git a/src/auth/service.py b/src/auth/service.py index dc49e59..2d06f00 100644 --- a/src/auth/service.py +++ b/src/auth/service.py @@ -28,6 +28,17 @@ class JWTBearer(HTTPBearer): ) request.state.user = user_info + + from src.context import set_user_id, set_username, set_role + if hasattr(user_info, "user_id"): + set_user_id(str(user_info.user_id)) + if hasattr(user_info, "username"): + set_username(user_info.username) + elif hasattr(user_info, "name"): + set_username(user_info.name) + if hasattr(user_info, "role"): + set_role(user_info.role) + return user_info else: raise HTTPException(status_code=403, detail="Invalid 
authorization code.") @@ -46,7 +57,7 @@ class JWTBearer(HTTPBearer): return UserBase(**user_data["data"]) except Exception as e: - print(f"Token verification error: {str(e)}") + logging.error(f"Token verification error: {str(e)}") return None diff --git a/src/config.py b/src/config.py index 03ab027..b92f09f 100644 --- a/src/config.py +++ b/src/config.py @@ -51,7 +51,7 @@ def get_config(): config = get_config() -LOG_LEVEL = config("LOG_LEVEL", default=logging.WARNING) +LOG_LEVEL = config("LOG_LEVEL", default="INFO") ENV = config("ENV", default="local") PORT = config("PORT", cast=int, default=8000) HOST = config("HOST", default="localhost") diff --git a/src/context.py b/src/context.py index 4c968a2..47e0e62 100644 --- a/src/context.py +++ b/src/context.py @@ -2,8 +2,18 @@ from contextvars import ContextVar from typing import Optional, Final REQUEST_ID_CTX_KEY: Final[str] = "request_id" +USER_ID_CTX_KEY: Final[str] = "user_id" +USERNAME_CTX_KEY: Final[str] = "username" +ROLE_CTX_KEY: Final[str] = "role" + _request_id_ctx_var: ContextVar[Optional[str]] = ContextVar( REQUEST_ID_CTX_KEY, default=None) +_user_id_ctx_var: ContextVar[Optional[str]] = ContextVar( + USER_ID_CTX_KEY, default=None) +_username_ctx_var: ContextVar[Optional[str]] = ContextVar( + USERNAME_CTX_KEY, default=None) +_role_ctx_var: ContextVar[Optional[str]] = ContextVar( + ROLE_CTX_KEY, default=None) def get_request_id() -> Optional[str]: @@ -16,3 +26,27 @@ def set_request_id(request_id: str): def reset_request_id(token): _request_id_ctx_var.reset(token) + + +def get_user_id() -> Optional[str]: + return _user_id_ctx_var.get() + + +def set_user_id(user_id: str): + return _user_id_ctx_var.set(user_id) + + +def get_username() -> Optional[str]: + return _username_ctx_var.get() + + +def set_username(username: str): + return _username_ctx_var.set(username) + + +def get_role() -> Optional[str]: + return _role_ctx_var.get() + + +def set_role(role: str): + return _role_ctx_var.set(role) diff --git 
a/src/database/__pycache__/core.cpython-311.pyc b/src/database/__pycache__/core.cpython-311.pyc index 565023f..83c2bed 100644 Binary files a/src/database/__pycache__/core.cpython-311.pyc and b/src/database/__pycache__/core.cpython-311.pyc differ diff --git a/src/database/__pycache__/service.cpython-311.pyc b/src/database/__pycache__/service.cpython-311.pyc index e095085..0166c2f 100644 Binary files a/src/database/__pycache__/service.cpython-311.pyc and b/src/database/__pycache__/service.cpython-311.pyc differ diff --git a/src/database/core.py b/src/database/core.py index 8202f9f..a0209e0 100644 --- a/src/database/core.py +++ b/src/database/core.py @@ -2,8 +2,8 @@ from starlette.requests import Request from sqlalchemy_utils import get_mapper from sqlalchemy.sql.expression import true -from sqlalchemy.orm import object_session, sessionmaker, Session -from sqlalchemy.ext.declarative import declarative_base, declared_attr +from sqlalchemy.orm import object_session, sessionmaker, Session, declarative_base +from sqlalchemy.ext.declarative import declared_attr from sqlalchemy import create_engine, inspect from pydantic import BaseModel from fastapi import Depends diff --git a/src/database/service.py b/src/database/service.py index b797248..baa61e7 100644 --- a/src/database/service.py +++ b/src/database/service.py @@ -134,7 +134,7 @@ async def search_filter_sort_paginate( # Get total count count_query = Select(func.count()).select_from(query.subquery()) total = await db_session.scalar(count_query) - if all: + if all or items_per_page == -1: result = await db_session.execute(query) items = _extract_result_items(result) return { diff --git a/src/equipment/__pycache__/router.cpython-311.pyc b/src/equipment/__pycache__/router.cpython-311.pyc index 7b7a0fb..28b9e73 100644 Binary files a/src/equipment/__pycache__/router.cpython-311.pyc and b/src/equipment/__pycache__/router.cpython-311.pyc differ diff --git a/src/equipment/__pycache__/schema.cpython-311.pyc 
b/src/equipment/__pycache__/schema.cpython-311.pyc index 13254f2..d4ce878 100644 Binary files a/src/equipment/__pycache__/schema.cpython-311.pyc and b/src/equipment/__pycache__/schema.cpython-311.pyc differ diff --git a/src/equipment/__pycache__/service.cpython-311.pyc b/src/equipment/__pycache__/service.cpython-311.pyc index 158ac2f..190e00c 100644 Binary files a/src/equipment/__pycache__/service.cpython-311.pyc and b/src/equipment/__pycache__/service.cpython-311.pyc differ diff --git a/src/equipment/router.py b/src/equipment/router.py index 6c5583f..4a49db6 100644 --- a/src/equipment/router.py +++ b/src/equipment/router.py @@ -75,7 +75,7 @@ async def get_maximo_record_by_assetnum(db_session: CollectorDbSession, assetnum ) @router.get("/simulate/{assetnum}") -async def simulate_equipment(db_session: DbSession, assetnum: str): +async def simulate_equipment(db_session: DbSession, assetnum: str, token: Token): """Stream progress events while running the simulation (prediksi + EAC). This endpoint returns Server-Sent Events (SSE). Each event's `data` is @@ -98,7 +98,7 @@ async def simulate_equipment(db_session: DbSession, assetnum: str): yield f"data: {json.dumps({'status':'started','step':'prediksi','message':'Menghitung prediksi'})}\n\n" try: - prediksi = await prediksi_main(assetnum=assetnum) + prediksi = await prediksi_main(assetnum=assetnum, token=token) except Exception as exc: # send error event and stop yield f"data: {json.dumps({'status':'error','step':'prediksi','message':str(exc)})}\n\n" @@ -140,7 +140,7 @@ async def simulate_equipment(db_session: DbSession, assetnum: str): @router.get("/simulate-all") -async def simulate_all_equipment(db_session: DbSession): +async def simulate_all_equipment(db_session: DbSession, token: Token): """Run simulation (prediksi + EAC) for ALL equipment. Returns SSE stream of progress. 
""" @@ -150,34 +150,34 @@ async def simulate_all_equipment(db_session: DbSession): try: assetnums = await get_all_assetnums(db_session=db_session) except Exception as e: - yield f"data: {json.dumps({'status':'error', 'message': f'Failed to fetch assetnums: {str(e)}'})}\\n\\n" + yield f"data: {json.dumps({'status':'error', 'message': f'Failed to fetch assetnums: {str(e)}'})}\n\n" return total = len(assetnums) - yield f"data: {json.dumps({'status':'started', 'message': f'Simulasi dimulai untuk {total} asset'})}\\n\\n" + yield f"data: {json.dumps({'status':'started', 'message': f'Simulasi dimulai untuk {total} asset'})}\n\n" success_count = 0 error_count = 0 for idx, assetnum in enumerate(assetnums, start=1): - yield f"data: {json.dumps({'status':'working', 'step':f'Proses {idx}/{total}', 'assetnum': assetnum})}\\n\\n" + yield f"data: {json.dumps({'status':'working', 'step':f'Proses {idx}/{total}', 'assetnum': assetnum})}\n\n" try: # Update acquisition year and target year await update_initial_simulation_data(db_session=db_session, assetnum=assetnum) # Prediksi - await prediksi_main(assetnum=assetnum) + await prediksi_main(assetnum=assetnum, token=token) # EAC eac = Eac() eac.hitung_eac_equipment(assetnum=assetnum) success_count += 1 except Exception as e: error_count += 1 - yield f"data: {json.dumps({'status':'partial_error', 'assetnum': assetnum, 'message': str(e)})}\\n\\n" + yield f"data: {json.dumps({'status':'partial_error', 'assetnum': assetnum, 'message': str(e)})}\n\n" continue - yield f"data: {json.dumps({'status':'done', 'message':f'Simulasi selesai. Success: {success_count}, Errors: {error_count}'})}\\n\\n" + yield f"data: {json.dumps({'status':'done', 'message':f'Simulasi selesai. 
Success: {success_count}, Errors: {error_count}'})}\n\n" headers = { "Content-type": "text/event-stream", diff --git a/src/equipment/schema.py b/src/equipment/schema.py index c6a2811..50fc188 100644 --- a/src/equipment/schema.py +++ b/src/equipment/schema.py @@ -8,87 +8,87 @@ from src.models import CommonParams, DefaultBase, Pagination MAX_PRICE = 1_000_000_000_000_000 # thousands of trillion class EquipmentBase(DefaultBase): - assetnum: Optional[str] = Field(None, nullable=True) - acquisition_year: Optional[int] = Field(None, nullable=True) - acquisition_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - capital_cost_record_time: Optional[int] = Field(None, nullable=True) - design_life: Optional[int] = Field(None, nullable=True) - forecasting_start_year: Optional[int] = Field(None, nullable=True) - forecasting_target_year: Optional[int] = Field(None, nullable=True) - manhours_rate: Optional[float] = Field(None, nullable=True) - harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_eac_seq: Optional[int] = Field(None, nullable=True) - minimum_eac_year: Optional[int] = Field(None, nullable=True) - minimum_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_is_actual: Optional[int] = Field(None, nullable=True) - efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, nullable=True) - foh_forced_outage_hours: Optional[float] = Field(None, nullable=True) - category_no: Optional[str] = Field(None, nullable=True) - proportion: Optional[float] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: 
Optional[str] = Field(None, nullable=True) + assetnum: Optional[str] = Field(None) + acquisition_year: Optional[int] = Field(None) + acquisition_cost: Optional[float] = Field(None, le=MAX_PRICE) + capital_cost_record_time: Optional[int] = Field(None) + design_life: Optional[int] = Field(None) + forecasting_start_year: Optional[int] = Field(None) + forecasting_target_year: Optional[int] = Field(None) + manhours_rate: Optional[float] = Field(None) + harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE) + minimum_eac_seq: Optional[int] = Field(None) + minimum_eac_year: Optional[int] = Field(None) + minimum_eac: Optional[float] = Field(None, le=MAX_PRICE) + minimum_npv: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE) + minimum_is_actual: Optional[int] = Field(None) + efdh_equivalent_forced_derated_hours: Optional[float] = Field(None) + foh_forced_outage_hours: Optional[float] = Field(None) + category_no: Optional[str] = Field(None) + proportion: Optional[float] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class EquipmentMasterBase(DefaultBase): - location_tag: Optional[str] = Field(None, nullable=True) - assetnum: Optional[str] = Field(None, nullable=True) - name: Optional[str] = Field(None, nullable=True) + location_tag: Optional[str] = Field(None) + assetnum: Optional[str] = Field(None) + name: Optional[str] = Field(None) class MasterBase(DefaultBase): - assetnum: Optional[str] = Field(None, nullable=True) - tahun: Optional[int] = Field(None, nullable=True) - seq: Optional[int] = Field(None, nullable=True) - is_actual: Optional[float] = Field(None, nullable=True) - raw_cm_interval: Optional[float] = Field(None, nullable=True) - raw_cm_material_cost: Optional[float] = Field(None, nullable=True, 
le=MAX_PRICE) - raw_cm_labor_time: Optional[float] = Field(None, nullable=True) - raw_cm_labor_human: Optional[float] = Field(None, nullable=True) - raw_pm_interval: Optional[float] = Field(None, nullable=True) - raw_pm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_pm_labor_time: Optional[float] = Field(None, nullable=True) - raw_pm_labor_human: Optional[float] = Field(None, nullable=True) - raw_predictive_interval: Optional[float] = Field(None, nullable=True) - raw_predictive_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_predictive_labor_time: Optional[float] = Field(None, nullable=True) - raw_predictive_labor_human: Optional[float] = Field(None, nullable=True) - raw_oh_interval: Optional[float] = Field(None, nullable=True) - raw_oh_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_oh_labor_time: Optional[float] = Field(None, nullable=True) - raw_oh_labor_human: Optional[float] = Field(None, nullable=True) - raw_project_task_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_loss_output_MW: Optional[float] = Field(None, nullable=True) - raw_loss_output_price: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_operational_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - raw_maintenance_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_cm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_cm_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_pm_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_pm_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_predictive_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_oh_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_oh_labor_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) 
- rc_project_material_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_lost_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_operation_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - rc_maintenance_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - asset_criticality: Optional[float] = Field(None, nullable=True) - rc_total_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - eac_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - eac_annual_mnt_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - eac_annual_acq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - eac_disposal_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - eac_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, nullable=True) - foh_forced_outage_hours: Optional[float] = Field(None, nullable=True) - category_no: Optional[str] = Field(None, nullable=True) - proportion: Optional[float] = Field(None, nullable=True) + assetnum: Optional[str] = Field(None) + tahun: Optional[int] = Field(None) + seq: Optional[int] = Field(None) + is_actual: Optional[float] = Field(None) + raw_cm_interval: Optional[float] = Field(None) + raw_cm_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_cm_labor_time: Optional[float] = Field(None) + raw_cm_labor_human: Optional[float] = Field(None) + raw_pm_interval: Optional[float] = Field(None) + raw_pm_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_pm_labor_time: Optional[float] = Field(None) + raw_pm_labor_human: Optional[float] = Field(None) + raw_predictive_interval: Optional[float] = Field(None) + raw_predictive_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_predictive_labor_time: Optional[float] = Field(None) + raw_predictive_labor_human: Optional[float] = Field(None) + raw_oh_interval: 
Optional[float] = Field(None) + raw_oh_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_oh_labor_time: Optional[float] = Field(None) + raw_oh_labor_human: Optional[float] = Field(None) + raw_project_task_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_loss_output_MW: Optional[float] = Field(None) + raw_loss_output_price: Optional[float] = Field(None, le=MAX_PRICE) + raw_operational_cost: Optional[float] = Field(None, le=MAX_PRICE) + raw_maintenance_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_cm_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_cm_labor_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_pm_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_pm_labor_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_predictive_labor_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_oh_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_oh_labor_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_project_material_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_lost_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_operation_cost: Optional[float] = Field(None, le=MAX_PRICE) + rc_maintenance_cost: Optional[float] = Field(None, le=MAX_PRICE) + asset_criticality: Optional[float] = Field(None) + rc_total_cost: Optional[float] = Field(None, le=MAX_PRICE) + eac_npv: Optional[float] = Field(None, le=MAX_PRICE) + eac_annual_mnt_cost: Optional[float] = Field(None, le=MAX_PRICE) + eac_annual_acq_cost: Optional[float] = Field(None, le=MAX_PRICE) + eac_disposal_cost: Optional[float] = Field(None, le=MAX_PRICE) + eac_eac: Optional[float] = Field(None, le=MAX_PRICE) + efdh_equivalent_forced_derated_hours: Optional[float] = Field(None) + foh_forced_outage_hours: Optional[float] = Field(None) + category_no: Optional[str] = Field(None) + proportion: Optional[float] = Field(None) class HistoricalBase(MasterBase): - acquisition_year_ref: Optional[str] = Field(None, 
nullable=True) + acquisition_year_ref: Optional[str] = Field(None) class EquipmentCreate(EquipmentBase): pass @@ -99,54 +99,54 @@ class EquipmentUpdate(EquipmentBase): class ReplacementBase(DefaultBase): """Schema for replacement history (from lcc_ms_equipment_historical_data).""" - acquisition_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - acquisition_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - acquisition_year_ref: Optional[str] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) + acquisition_year: Optional[int] = Field(None, ge=1900, le=9999) + acquisition_cost: Optional[float] = Field(None, le=MAX_PRICE) + acquisition_year_ref: Optional[str] = Field(None) + created_at: Optional[datetime] = Field(None) class EquipmentRead(DefaultBase): equipment_master_record: EquipmentMasterBase equipment_data: EquipmentBase chart_data: List[MasterBase] - min_eac_value: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - min_seq: Optional[float] = Field(None, nullable=True) - min_eac_year: Optional[float] = Field(None, nullable=True) - last_actual_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - maximo_data: Optional[List[dict]] = Field(None, nullable=True) - joined_maximo: Optional[List[dict]] = Field(None, nullable=True) - min_eac_disposal_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - historical_data: Optional[List[HistoricalBase]] = Field(None, nullable=True) - replacement_data: Optional[List[ReplacementBase]] = Field(None, nullable=True) + min_eac_value: Optional[float] = Field(None, le=MAX_PRICE) + min_seq: Optional[float] = Field(None) + min_eac_year: Optional[float] = Field(None) + last_actual_year: Optional[int] = Field(None, ge=1900, le=9999) + maximo_data: Optional[List[dict]] = Field(None) + joined_maximo: Optional[List[dict]] = Field(None) + min_eac_disposal_cost: Optional[float] = Field(None, le=MAX_PRICE) + historical_data: 
Optional[List[HistoricalBase]] = Field(None) + replacement_data: Optional[List[ReplacementBase]] = Field(None) class EquipmentTop10(EquipmentBase): id: UUID equipment_master: EquipmentMasterBase - forecasting_target_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - minimum_eac_seq: Optional[int] = Field(None, nullable=True) - minimum_eac_year: Optional[int] = Field(None, nullable=True) - minimum_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_is_actual: Optional[int] = Field(None, nullable=True) - harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - remaining_life: Optional[int] = Field(None, nullable=True) + forecasting_target_year: Optional[int] = Field(None, ge=1900, le=9999) + minimum_eac_seq: Optional[int] = Field(None) + minimum_eac_year: Optional[int] = Field(None) + minimum_eac: Optional[float] = Field(None, le=MAX_PRICE) + minimum_npv: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE) + minimum_is_actual: Optional[int] = Field(None) + harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE) + remaining_life: Optional[int] = Field(None) class EquipmentTop10Pagination(Pagination): items: List[EquipmentTop10] = [] class EquipmentDataMaster(EquipmentBase): id: UUID - equipment_master: Optional[EquipmentMasterBase] = Field(None, nullable=True) - forecasting_target_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - minimum_eac_seq: Optional[int] = Field(None, nullable=True, ge=0) - minimum_eac_year: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - minimum_eac: Optional[float] = Field(None, nullable=True, 
le=MAX_PRICE) - minimum_npv: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_pmt_aq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) - minimum_is_actual: Optional[int] = Field(None, nullable=True) - harga_saat_ini: Optional[float] = Field(None, nullable=True, le=MAX_PRICE) + equipment_master: Optional[EquipmentMasterBase] = Field(None) + forecasting_target_year: Optional[int] = Field(None, ge=1900, le=9999) + minimum_eac_seq: Optional[int] = Field(None, ge=0) + minimum_eac_year: Optional[int] = Field(None, ge=1900, le=9999) + minimum_eac: Optional[float] = Field(None, le=MAX_PRICE) + minimum_npv: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt: Optional[float] = Field(None, le=MAX_PRICE) + minimum_pmt_aq_cost: Optional[float] = Field(None, le=MAX_PRICE) + minimum_is_actual: Optional[int] = Field(None) + harga_saat_ini: Optional[float] = Field(None, le=MAX_PRICE) # class EquipmentTop10EconomicLife(DefaultBase): # equipment: EquipmentDataMaster diff --git a/src/equipment/service.py b/src/equipment/service.py index e3080c9..0651595 100644 --- a/src/equipment/service.py +++ b/src/equipment/service.py @@ -670,9 +670,9 @@ async def delete(*, db_session: DbSession, equipment_id: str): async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str) -> bool: """ - Check if acquisition year/cost in Maximo differs from local DB. - If changed, archive history, delete transaction data, update master, and return True. - Otherwise return False. + Check if acquisition cost in Maximo differs from local DB. + Updates master acquisition_cost (initial + replacement) and sets forecasting_start_year to 2015. + Returns True if master record was updated, False otherwise. 
""" conn = get_production_connection() first_year = None @@ -680,7 +680,7 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str if conn: try: cursor = conn.cursor() - # Query the oldest year from wo_maximo to detect the original acquisition + # Query the oldest year from wo_maximo to detect the original replacement cost query = """ select DATE_PART('year', a.reportdate) AS year, a.asset_replacecost AS cost from wo_maximo a @@ -697,7 +697,7 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str cursor.close() conn.close() except Exception as e: - print(f"Error fetching acquisition year for {assetnum}: {e}") + print(f"Error fetching replacement data for {assetnum}: {e}") if conn: try: conn.close() @@ -706,123 +706,75 @@ async def check_and_update_acquisition_data(db_session: DbSession, assetnum: str updates_performed = False - if first_year: - # Fetch equipment to update - eq = await get_by_assetnum(db_session=db_session, assetnum=assetnum) - if eq: - # Check if forecasting_target_year matches the "default" logic (acquisition + design_life) - # using the OLD acquisition year. - current_acq = eq.acquisition_year - current_life = eq.design_life - current_target = eq.forecasting_target_year - current_acq_cost = eq.acquisition_cost - - # If current_target is logically "default", we update it. - # If user changed it to something else, we might want to preserve it - # if it currently holds the default value (based on old acq year). - is_valid_default = False - if current_acq and current_life and current_target: - is_valid_default = current_target == (current_acq + current_life) - - # Check for changes - change_year = (eq.acquisition_year != first_year) - change_cost = (first_cost is not None and eq.acquisition_cost != first_cost) - - # We only archive transaction history if the acquisition year itself changed. - # This prevents redundant history entries for cost-only updates. 
- if change_year: - print(f"Acquisition year change detected for {assetnum}: {current_acq}->{first_year}. Archiving history.") - - acq_year_ref = f"{current_acq}_{current_target}" + # Fetch equipment to update + eq = await get_by_assetnum(db_session=db_session, assetnum=assetnum) + if eq: + # Check if forecasting_target_year matches the "default" logic (acquisition + design_life) + # using the OLD acquisition year. + current_acq = eq.acquisition_year + current_life = eq.design_life + current_target = eq.forecasting_target_year + + is_valid_default = False + if current_acq and current_life and current_target: + is_valid_default = current_target == (current_acq + current_life) + + # Fetch inflation rate from lcc_ms_master for value-of-money adjustment + inflation_rate = 0.05 # Default fallback + try: + rate_query = text("SELECT value_num / 100.0 FROM lcc_ms_master WHERE name = 'inflation_rate'") + rate_result = (await db_session.execute(rate_query)).scalar() + if rate_result is not None: + inflation_rate = float(rate_result) + except Exception as e: + print(f"Warning: Could not fetch inflation_rate for {assetnum}: {e}") - # --- ARCHIVE HISTORICAL DATA --- - - # Check for existing identical archive to prevent duplicates (after calculation failures/retries) - check_hist_query = text("SELECT 1 FROM lcc_ms_equipment_historical_data WHERE assetnum = :assetnum AND acquisition_year_ref = :acq_year_ref LIMIT 1") - hist_exists = (await db_session.execute(check_hist_query, {"assetnum": assetnum, "acq_year_ref": acq_year_ref})).fetchone() - - if not hist_exists: - # 1. 
Copy old equipment master data to history - history_ms_query = text(""" - INSERT INTO lcc_ms_equipment_historical_data ( - id, assetnum, acquisition_year, acquisition_cost, capital_cost_record_time, design_life, - forecasting_start_year, forecasting_target_year, manhours_rate, created_at, created_by, - updated_at, updated_by, min_eac_info, harga_saat_ini, minimum_eac_seq, minimum_eac_year, - minimum_eac, minimum_npv, minimum_pmt, minimum_pmt_aq_cost, minimum_is_actual, - efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, category_no, proportion, - acquisition_year_ref - ) - SELECT - uuid_generate_v4(), assetnum, acquisition_year, acquisition_cost, capital_cost_record_time, design_life, - forecasting_start_year, forecasting_target_year, manhours_rate, created_at, created_by, - updated_at, updated_by, min_eac_info, harga_saat_ini, minimum_eac_seq, minimum_eac_year, - minimum_eac, minimum_npv, minimum_pmt, minimum_pmt_aq_cost, minimum_is_actual, - efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, category_no, proportion, - :acq_year_ref - FROM lcc_ms_equipment_data - WHERE assetnum = :assetnum - """) - await db_session.execute(history_ms_query, {"acq_year_ref": acq_year_ref, "assetnum": assetnum}) - - # 2. 
Copy old transaction data to lcc_equipment_historical_tr_data - history_tr_query = text(""" - INSERT INTO lcc_equipment_historical_tr_data ( - id, assetnum, tahun, seq, is_actual, - raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human, - raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human, - raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human, - raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human, - raw_project_task_material_cost, "raw_loss_output_MW", raw_loss_output_price, - raw_operational_cost, raw_maintenance_cost, - rc_cm_material_cost, rc_cm_labor_cost, - rc_pm_material_cost, rc_pm_labor_cost, - rc_oh_material_cost, rc_oh_labor_cost, - rc_predictive_labor_cost, - rc_project_material_cost, rc_lost_cost, rc_operation_cost, rc_maintenance_cost, - rc_total_cost, - eac_npv, eac_annual_mnt_cost, eac_annual_acq_cost, eac_disposal_cost, eac_eac, - efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, - created_by, created_at, acquisition_year_ref - ) - SELECT - uuid_generate_v4(), assetnum, tahun, seq, is_actual, - raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human, - raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human, - raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human, - raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human, - raw_project_task_material_cost, "raw_loss_output_MW", raw_loss_output_price, - raw_operational_cost, raw_maintenance_cost, - rc_cm_material_cost, rc_cm_labor_cost, - rc_pm_material_cost, rc_pm_labor_cost, - rc_oh_material_cost, rc_oh_labor_cost, - rc_predictive_labor_cost, - rc_project_material_cost, rc_lost_cost, rc_operation_cost, rc_maintenance_cost, - rc_total_cost, - eac_npv, eac_annual_mnt_cost, eac_annual_acq_cost, eac_disposal_cost, eac_eac, - 
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours, - created_by, NOW(), :acq_year_ref - FROM lcc_equipment_tr_data - WHERE assetnum = :assetnum - """) - await db_session.execute(history_tr_query, {"acq_year_ref": acq_year_ref, "assetnum": assetnum}) - - # 3. Delete old data - del_query = text("DELETE FROM lcc_equipment_tr_data WHERE assetnum = :assetnum") - await db_session.execute(del_query, {"assetnum": assetnum}) - - # Update Equipment Master regardless of if archive was needed/skipped - if change_year or change_cost: - if first_cost is not None and eq.acquisition_cost != first_cost: - eq.acquisition_cost = first_cost - - if eq.acquisition_year != first_year: - eq.acquisition_year = first_year - eq.forecasting_start_year = first_year # Align start with acquisition - if is_valid_default and current_life: - eq.forecasting_target_year = first_year + current_life - - await db_session.commit() - updates_performed = True + # Calculate initial cost from category/proportion (base acquisition cost) + initial_cost = 0.0 + if eq.category_no and eq.proportion: + _, aggregated_cost = await fetch_acquisition_cost_with_rollup( + db_session=db_session, base_category_no=eq.category_no + ) + if aggregated_cost: + initial_cost = (eq.proportion * 0.01) * aggregated_cost + + # Adjust initial cost to 2015 value (Base Year) + # Formula: Value_2015 = Value_Year / (1 + rate)^(Year - 2015) + adj_initial_cost = initial_cost + if current_acq and current_acq != 2015: + adj_initial_cost = initial_cost / ((1 + inflation_rate) ** (current_acq - 2015)) + + # Adjust replace cost to 2015 value (Base Year) + adj_replace_cost = (first_cost or 0.0) + if first_year and first_year != 2015: + adj_replace_cost = (first_cost or 0.0) / ((1 + inflation_rate) ** (first_year - 2015)) + + # Total cost is adjusted initial cost plus the adjusted replacement cost + total_cost = adj_initial_cost + adj_replace_cost + + change_cost = (eq.acquisition_cost != total_cost) + # Requirement: 
forecasting_start_year always starts from 2015 + change_start = (eq.forecasting_start_year != 2015) + + # Note: acquisition_year itself is no longer updated as per requirements. + + if change_cost or change_start: + if change_cost: + print( + f"Acquisition cost update for {assetnum}: {eq.acquisition_cost} -> {total_cost} " + f"(Adj. Initial: {adj_initial_cost} + Adj. Replacement: {adj_replace_cost} | Rate: {inflation_rate})" + ) + eq.acquisition_cost = total_cost + + if change_start: + print(f"Aligning forecasting_start_year to 2015 for {assetnum}") + eq.forecasting_start_year = 2015 + # If target was default, we update it to 2015 + design_life + if is_valid_default and current_life: + eq.forecasting_target_year = 2015 + current_life + + await db_session.commit() + updates_performed = True return updates_performed diff --git a/src/equipment_master/__pycache__/schema.cpython-311.pyc b/src/equipment_master/__pycache__/schema.cpython-311.pyc index 1021060..ef16ea4 100644 Binary files a/src/equipment_master/__pycache__/schema.cpython-311.pyc and b/src/equipment_master/__pycache__/schema.cpython-311.pyc differ diff --git a/src/equipment_master/schema.py b/src/equipment_master/schema.py index 50f3247..ae94d76 100644 --- a/src/equipment_master/schema.py +++ b/src/equipment_master/schema.py @@ -7,10 +7,10 @@ from src.models import CommonParams, DefaultBase, Pagination class EquipmentMasterBase(DefaultBase): - parent_id: Optional[UUID] = Field(None, nullable=True) - name: Optional[str] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) + parent_id: Optional[UUID] = Field(None) + name: Optional[str] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) class EquipmentMasterCreate(EquipmentMasterBase): @@ -18,8 +18,8 @@ class EquipmentMasterCreate(EquipmentMasterBase): class EquipmentTree(DefaultBase): - level_no: 
Optional[int] = Field(None, nullable=True) - name: Optional[str] = Field(None, nullable=True) + level_no: Optional[int] = Field(None) + name: Optional[str] = Field(None) class EquipmentMasterUpdate(EquipmentMasterBase): @@ -31,12 +31,12 @@ EquipmentMasterReadRef = ForwardRef("EquipmentMasterRead") class EquipmentMasterRead(EquipmentMasterBase): id: UUID - equipment_tree_id: Optional[UUID] = Field(None, nullable=True) + equipment_tree_id: Optional[UUID] = Field(None) equipment_tree: EquipmentTree - category_id: Optional[UUID] = Field(None, nullable=True) - system_tag: Optional[str] = Field(None, nullable=True) - assetnum: Optional[str] = Field(None, nullable=True) - location_tag: Optional[str] = Field(None, nullable=True) + category_id: Optional[UUID] = Field(None) + system_tag: Optional[str] = Field(None) + assetnum: Optional[str] = Field(None) + location_tag: Optional[str] = Field(None) children: List[EquipmentMasterReadRef] # type: ignore diff --git a/src/exceptions.py b/src/exceptions.py index 8c9b589..18377cb 100644 --- a/src/exceptions.py +++ b/src/exceptions.py @@ -96,35 +96,86 @@ def handle_exception(request: Request, exc: Exception): """ Global exception handler for Fastapi application. 
""" + import uuid + error_id = str(uuid.uuid1()) request_info = get_request_context(request) + + # Store error_id in request.state for middleware/logging + request.state.error_id = error_id if isinstance(exc, RateLimitExceeded): - _rate_limit_exceeded_handler(request, exc) + logging.warning( + f"Rate limit exceeded | Error ID: {error_id}", + extra={ + "error_id": error_id, + "error_category": "rate_limit", + "request": request_info, + "detail": str(exc.description) if hasattr(exc, "description") else str(exc), + }, + ) + return JSONResponse( + status_code=429, + content={ + "data": None, + "message": "Rate limit exceeded", + "status": ResponseStatus.ERROR, + "error_id": error_id + } + ) + + if isinstance(exc, RequestValidationError): + logging.warning( + f"Validation error occurred | Error ID: {error_id}", + extra={ + "error_id": error_id, + "error_category": "validation", + "errors": exc.errors(), + "request": request_info, + }, + ) + return JSONResponse( + status_code=422, + content={ + "data": exc.errors(), + "message": "Validation Error", + "status": ResponseStatus.ERROR, + "error_id": error_id + }, + ) + if isinstance(exc, HTTPException): logging.error( - f"HTTP exception | Code: {exc.status_code} | Error: {exc.detail} | Request: {request_info}", - extra={"error_category": "http"}, + f"HTTP exception occurred | Error ID: {error_id}", + extra={ + "error_id": error_id, + "error_category": "http", + "status_code": exc.status_code, + "detail": exc.detail if hasattr(exc, "detail") else str(exc), + "request": request_info, + }, ) return JSONResponse( status_code=exc.status_code, content={ "data": None, - "message": str(exc.detail), + "message": str(exc.detail) if hasattr(exc, "detail") else str(exc), "status": ResponseStatus.ERROR, - "errors": [ - ErrorDetail( - message=str(exc.detail) - ).model_dump() - ] - } + "error_id": error_id + }, ) if isinstance(exc, SQLAlchemyError): error_message, status_code = handle_sqlalchemy_error(exc) logging.error( - f"Database Error 
| Error: {str(error_message)} | Request: {request_info}", - extra={"error_category": "database"}, + f"Database error occurred | Error ID: {error_id}", + extra={ + "error_id": error_id, + "error_category": "database", + "error_message": error_message, + "request": request_info, + "exception": str(exc), + }, ) return JSONResponse( @@ -133,42 +184,28 @@ def handle_exception(request: Request, exc: Exception): "data": None, "message": error_message, "status": ResponseStatus.ERROR, - "errors": [ - ErrorDetail( - message=error_message - ).model_dump() - ] - } + "error_id": error_id + }, ) # Log unexpected errors - error_message = f"{exc.__class__.__name__}: {str(exc)}" - error_traceback = exc.__traceback__ - - # Get file and line info if available - if error_traceback: - tb = error_traceback - while tb.tb_next: - tb = tb.tb_next - file_name = tb.tb_frame.f_code.co_filename - line_num = tb.tb_lineno - error_message = f"{error_message}\nFile {file_name}, line {line_num}" - logging.error( - f"Unexpected Error | Error: {error_message} | Request: {request_info}", - extra={"error_category": "unexpected"}, + f"Unexpected error occurred | Error ID: {error_id}", + extra={ + "error_id": error_id, + "error_category": "unexpected", + "error_message": str(exc), + "request": request_info, + }, + exc_info=True, ) - + return JSONResponse( status_code=500, content={ "data": None, - "message": error_message, + "message": "An unexpected error occurred", "status": ResponseStatus.ERROR, - "errors": [ - ErrorDetail( - message=error_message - ).model_dump() - ] - } + "error_id": error_id + }, ) diff --git a/src/logging.py b/src/logging.py index a7f31af..b9fc41c 100644 --- a/src/logging.py +++ b/src/logging.py @@ -35,29 +35,45 @@ class JSONFormatter(logging.Formatter): Custom formatter to output logs in JSON format. 
""" def format(self, record): - from src.context import get_request_id - + from src.context import get_request_id, get_user_id, get_username, get_role request_id = None + user_id = None + username = None + role = None + try: request_id = get_request_id() + user_id = get_user_id() + username = get_username() + role = get_role() except Exception: pass + # Standard fields from requirements log_record = { - "timestamp": datetime.datetime.fromtimestamp(record.created).astimezone().isoformat(), + "timestamp": datetime.datetime.fromtimestamp(record.created).strftime("%Y-%m-%d %H:%M:%S"), "level": record.levelname, + "name": record.name, "message": record.getMessage(), - "logger_name": record.name, - "location": f"{record.module}:{record.funcName}:{record.lineno}", - "module": record.module, - "funcName": record.funcName, - "lineno": record.lineno, - "pid": os.getpid(), - "request_id": request_id or "SYSTEM", # request id assigned per request or SYSTEM for system logs } - + # Add Context information if available + if user_id: + log_record["user_id"] = user_id + if username: + log_record["username"] = username + if role: + log_record["role"] = role + if request_id: + log_record["request_id"] = request_id + + # Add Error context if available + if hasattr(record, "error_id"): + log_record["error_id"] = record.error_id + elif "error_id" in record.__dict__: + log_record["error_id"] = record.error_id + # Capture exception info if available if record.exc_info: log_record["exception"] = self.formatException(record.exc_info) @@ -67,18 +83,17 @@ class JSONFormatter(logging.Formatter): log_record["stack_trace"] = self.formatStack(record.stack_info) # Add any extra attributes passed to the log call - # We skip standard and internal uvicorn/fastapi attributes to avoid duplication or mess standard_attrs = { "args", "asctime", "created", "exc_info", "exc_text", "filename", "funcName", "levelname", "levelno", "lineno", "module", "msecs", "message", "msg", "name", "pathname", "process", 
"processName", - "relativeCreated", "stack_info", "thread", "threadName", + "relativeCreated", "stack_info", "thread", "threadName", "error_id", "color_message", "request", "scope" } for key, value in record.__dict__.items(): - if key not in standard_attrs: + if key not in standard_attrs and not key.startswith("_"): log_record[key] = value - + log_json = json.dumps(log_record) # Apply color if the output is a terminal @@ -119,11 +134,19 @@ def configure_logging(): root_logger.addHandler(handler) # Reconfigure uvicorn loggers to use our JSON formatter - for logger_name in ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]: + for logger_name in ["uvicorn", "uvicorn.error", "fastapi"]: logger = logging.getLogger(logger_name) logger.handlers = [] logger.propagate = True + # Disable uvicorn access logs as we handle request logging in our middleware + access_logger = logging.getLogger("uvicorn.access") + access_logger.handlers = [] + access_logger.propagate = False + + # set uvicorn access log level to warning + logging.getLogger("uvicorn.access").setLevel(logging.WARNING) + # sometimes the slack client can be too verbose logging.getLogger("slack_sdk.web.base_client").setLevel(logging.CRITICAL) diff --git a/src/main.py b/src/main.py index b5f803b..265deee 100644 --- a/src/main.py +++ b/src/main.py @@ -7,12 +7,14 @@ from typing import Optional, Final from fastapi import FastAPI, HTTPException, status +from fastapi.exceptions import RequestValidationError from fastapi.responses import JSONResponse from pydantic import ValidationError from slowapi import _rate_limit_exceeded_handler from slowapi.errors import RateLimitExceeded from sqlalchemy import inspect +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import scoped_session from sqlalchemy.ext.asyncio import async_scoped_session from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint @@ -38,17 +40,18 @@ log = logging.getLogger(__name__) # we configure the logging level and 
format configure_logging() -# we define the exception handlers -exception_handlers = {Exception: handle_exception} - # we create the ASGI for the app -app = FastAPI(exception_handlers=exception_handlers, openapi_url="", title="LCCA API", +app = FastAPI(openapi_url="", title="LCCA API", description="Welcome to LCCA's API documentation!", version="0.1.0") app.state.limiter = limiter -app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) -app.add_middleware(GZipMiddleware, minimum_size=2000) +# we define the exception handlers +app.add_exception_handler(Exception, handle_exception) +app.add_exception_handler(HTTPException, handle_exception) +app.add_exception_handler(RequestValidationError, handle_exception) +app.add_exception_handler(RateLimitExceeded, handle_exception) +app.add_exception_handler(SQLAlchemyError, handle_exception) from src.context import set_request_id, reset_request_id, get_request_id @@ -65,15 +68,74 @@ async def db_session_middleware(request: Request, call_next): try: - log.info(f"Incoming request: {request.method} {request.url.path}") + start_time = time.time() session = async_scoped_session(async_session, scopefunc=get_request_id) request.state.db = session() collector_session = async_scoped_session(collector_async_session, scopefunc=get_request_id) request.state.collector_db = collector_session() + response = await call_next(request) - log.info(f"Request completed: {response.status_code}") + process_time = (time.time() - start_time) * 1000 + + from src.context import get_username, get_role, get_user_id, set_user_id, set_username, set_role + + # Pull from context or fallback to request.state.user + username = get_username() + role = get_role() + user_id = get_user_id() + + user_obj = getattr(request.state, "user", None) + if user_obj: + # UserBase in this project + u_id = getattr(user_obj, "user_id", None) + u_name = getattr(user_obj, "name", None) or getattr(user_obj, "username", None) + u_role = getattr(user_obj, "role", 
None) + + if not user_id and u_id: + user_id = str(u_id) + set_user_id(user_id) + if not username and u_name: + username = u_name + set_username(username) + if not role and u_role: + role = u_role + set_role(role) + + user_info_str = "" + if username: + user_info_str = f" | User: {username}" + if role: + user_info_str += f" ({role})" + + log.info( + f"HTTP {request.method} {request.url.path} completed in {round(process_time, 2)}ms{user_info_str}", + extra={ + "method": request.method, + "path": request.url.path, + "status_code": response.status_code, + "duration_ms": round(process_time, 2), + "user_id": user_id, + "role": role, + }, + ) except Exception as e: - log.error(f"Request failed: {type(e).__name__} - {str(e)}") + # Generate an error_id here if it hasn't been generated yet + error_id = getattr(request.state, "error_id", None) + if not error_id: + import uuid + error_id = str(uuid.uuid1()) + request.state.error_id = error_id + + log.error( + f"Request failed | Error ID: {error_id}", + extra={ + "method": request.method, + "path": request.url.path, + "error": str(e), + "error_id": error_id, + }, + exc_info=True, + ) raise e from None finally: await request.state.db.close() diff --git a/src/manpower_cost/schema.py b/src/manpower_cost/schema.py index 293936d..91eda64 100644 --- a/src/manpower_cost/schema.py +++ b/src/manpower_cost/schema.py @@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination class ManpowerCostBase(DefaultBase): - staff_job_level: str = Field(..., nullable=False) - salary_per_month_idr: float = Field(..., nullable=False) - salary_per_day_idr: float = Field(..., nullable=False) - salary_per_hour_idr: float = Field(..., nullable=False) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + staff_job_level: str = Field(...) 
+ salary_per_month_idr: float = Field(...) + salary_per_day_idr: float = Field(...) + salary_per_hour_idr: float = Field(...) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class ManpowerCostCreate(ManpowerCostBase): diff --git a/src/manpower_master/schema.py b/src/manpower_master/schema.py index 24aab80..c945b86 100644 --- a/src/manpower_master/schema.py +++ b/src/manpower_master/schema.py @@ -7,14 +7,14 @@ from src.models import CommonParams, DefaultBase, Pagination class ManpowerCostBase(DefaultBase): - staff_job_level: str = Field(..., nullable=False) - salary_per_month_idr: float = Field(..., nullable=False) - salary_per_day_idr: float = Field(..., nullable=False) - salary_per_hour_idr: float = Field(..., nullable=False) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + staff_job_level: str = Field(...) + salary_per_month_idr: float = Field(...) + salary_per_day_idr: float = Field(...) + salary_per_hour_idr: float = Field(...) 
+ created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class ManpowerCostCreate(ManpowerCostBase): diff --git a/src/masterdata/__pycache__/schema.cpython-311.pyc b/src/masterdata/__pycache__/schema.cpython-311.pyc index 1531dca..f6c2f5a 100644 Binary files a/src/masterdata/__pycache__/schema.cpython-311.pyc and b/src/masterdata/__pycache__/schema.cpython-311.pyc differ diff --git a/src/masterdata/schema.py b/src/masterdata/schema.py index 015cda5..b2fe898 100644 --- a/src/masterdata/schema.py +++ b/src/masterdata/schema.py @@ -11,29 +11,29 @@ class MasterdataBase(DefaultBase): # discount_rate: Optional[float] # inflation_rate: Optional[float] # manhours_rate: Optional[float] - name: Optional[str] = Field(None, nullable=True) - description: Optional[str] = Field(None, nullable=True) - unit_of_measurement: Optional[str] = Field(None, nullable=True) + name: Optional[str] = Field(None) + description: Optional[str] = Field(None) + unit_of_measurement: Optional[str] = Field(None) value_num: Optional[float] = Field( - None, nullable=True, le=1_000_000_000_000_000 # 1 quadrillion + None, le=1_000_000_000_000_000 # 1 quadrillion ) - value_str: Optional[str] = Field(None, nullable=True) - seq: Optional[int] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + value_str: Optional[str] = Field(None) + seq: Optional[int] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class MasterDataCreate(MasterdataBase): - name: str = Field(..., nullable=True) - description: str = Field(..., nullable=True) - 
unit_of_measurement: str = Field(..., nullable=True) + name: str = Field(...) + description: str = Field(...) + unit_of_measurement: str = Field(...) value_num: float = Field( - ..., nullable=True, le=1_000_000_000_000_000 # 1 quadrillion + ..., le=1_000_000_000_000_000 # 1 quadrillion ) - value_str: str = Field(None, nullable=True) - seq: int = Field(None, nullable=True) + value_str: str = Field(None) + seq: int = Field(None) class MasterDataUpdate(MasterdataBase): diff --git a/src/masterdata_simulations/schema.py b/src/masterdata_simulations/schema.py index d3e2093..b5726e0 100644 --- a/src/masterdata_simulations/schema.py +++ b/src/masterdata_simulations/schema.py @@ -8,19 +8,19 @@ from src.models import CommonParams, DefaultBase, Pagination class MasterDataSimulationBase(MasterdataBase): - simulation_id: Optional[UUID] = Field(None, nullable=True) + simulation_id: Optional[UUID] = Field(None) class MasterDataSimulationCreate(MasterDataSimulationBase): - simulation_id: UUID = Field(..., nullable=False) - name: str = Field(..., nullable=True) - description: str = Field(..., nullable=True) - unit_of_measurement: str = Field(..., nullable=True) + simulation_id: UUID = Field(...) + name: str = Field(...) + description: str = Field(...) + unit_of_measurement: str = Field(...) value_num: float = Field( - ..., nullable=True, le=1_000_000_000_000_000 + ..., le=1_000_000_000_000_000 ) - value_str: str = Field(..., nullable=True) - seq: int = Field(..., nullable=True) + value_str: str = Field(...) + seq: int = Field(...) class MasterDataSimulationUpdate(MasterDataSimulationBase): @@ -28,7 +28,7 @@ class MasterDataSimulationUpdate(MasterDataSimulationBase): class BulkMasterDataSimulationUpdate(DefaultBase): - simulation_id: UUID = Field(..., nullable=False) + simulation_id: UUID = Field(...) 
updates: List[dict] diff --git a/src/middleware.py b/src/middleware.py index 3127020..5599a59 100644 --- a/src/middleware.py +++ b/src/middleware.py @@ -18,13 +18,35 @@ MAX_QUERY_PARAMS = 50 MAX_QUERY_LENGTH = 2000 MAX_JSON_BODY_SIZE = 1024 * 100 # 100 KB -# Very targeted patterns. Avoid catastrophic regex nonsense. -XSS_PATTERN_STR = r"( 50: + raise HTTPException(status_code=400, detail=f"Pagination size '{key}' cannot exceed 50") + if size_val % 5 != 0: + raise HTTPException(status_code=400, detail=f"Pagination size '{key}' must be a multiple of 5") + except ValueError: + raise HTTPException(status_code=400, detail=f"Pagination size '{key}' must be an integer") # ------------------------- # 4. Content-Type sanity diff --git a/src/models.py b/src/models.py index 8979823..9fdaee8 100644 --- a/src/models.py +++ b/src/models.py @@ -2,7 +2,7 @@ from datetime import datetime from typing import Generic, List, Optional, TypeVar import uuid -from pydantic import BaseModel, Field, SecretStr +from pydantic import BaseModel, Field, SecretStr, ConfigDict from sqlalchemy import Column, DateTime, String, func, event from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import Mapped, mapped_column @@ -67,19 +67,14 @@ class DefaultMixin(TimeStampMixin, UUIDMixin): # Pydantic Models class DefaultBase(BaseModel): - class Config: - from_attributes = True - validate_assignment = True - arbitrary_types_allowed = True - str_strip_whitespace = True - extra = "forbid" - populate_by_name=True - - json_encoders = { - # custom output conversion for datetime - datetime: lambda v: v.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if v else None, - SecretStr: lambda v: v.get_secret_value() if v else None, - } + model_config = ConfigDict( + from_attributes=True, + validate_assignment=True, + arbitrary_types_allowed=True, + str_strip_whitespace=True, + extra="forbid", + populate_by_name=True, + ) class Pagination(DefaultBase): diff --git a/src/modules/equipment/Prediksi.py 
b/src/modules/equipment/Prediksi.py index 8ccf4c9..3af234e 100644 --- a/src/modules/equipment/Prediksi.py +++ b/src/modules/equipment/Prediksi.py @@ -587,7 +587,7 @@ class Prediksi: self.refresh_token = d.get("refresh_token") return data except httpx.HTTPError as e: - print(f"Sign-in failed: {e}") + print(f"Sign-in failed for URL {self.AUTH_APP_URL}/sign-in: {type(e).__name__} - {e}") # Try to sign out if sign-in failed try: signout_url = f"{self.AUTH_APP_URL}/sign-out" @@ -595,14 +595,8 @@ class Prediksi: await client.get(signout_url, timeout=10.0) print("Signed out due to sign-in failure.") except Exception as signout_exc: - print(f"Sign-out failed: {signout_exc}") - # Try to sign in again - try: - signin_res = await self.sign_in() - if self.access_token: - return signin_res - except Exception as signin_exc: - print(f"Sign-in failed after sign-out: {signin_exc}") + print(f"Sign-out failed for URL {self.AUTH_APP_URL}/sign-out: {type(signout_exc).__name__} - {signout_exc}") + return None async def refresh_access_token(self) -> str: @@ -680,6 +674,39 @@ class Prediksi: print(f"HTTP error occurred: {e}") return {} + def __get_historical_cost_per_failure(self, assetnum): + connection = None + try: + connection = get_production_connection() + if connection is None: + return 0.0 + cursor = connection.cursor() + # Optimized single-pass query: counts and sums in one scan + query = """ + SELECT + SUM(a.actmatcost) / NULLIF(COUNT(CASE WHEN a.wonum NOT LIKE 'T%%' THEN 1 END), 0) as cost_failure + FROM wo_maximo a + WHERE (a.asset_unit = '3' OR a.asset_unit = '00') + AND a.status IN ('COMP', 'CLOSE') + AND a.asset_assetnum = %s + AND a.worktype IN ('CM', 'PROACTIVE', 'EM') + AND a.wojp8 != 'S1' + AND ( + a.description NOT ILIKE '%%U4%%' + OR (a.description ILIKE '%%U3%%' AND a.description ILIKE '%%U4%%') + ) + """ + cursor.execute(query, (assetnum,)) + result = cursor.fetchone() + cost_failure = float(result[0]) if result and result[0] is not None else 0.0 + return 
cost_failure + except Exception as e: + print(f"Error fetching historical cost per failure for {assetnum}: {e}") + return 0.0 + finally: + if connection: + connection.close() + def __get_man_hour_rate(self, staff_level: str = "Junior"): connection = None try: @@ -761,7 +788,8 @@ class Prediksi: rate, max_year = self.__get_rate_and_max_year(assetnum) man_hour_rate = self.__get_man_hour_rate() # Defaults to 'junior' - pmt = 0 + # Pre-fetch cost per failure once per asset to avoid redundant DB queries + avg_cost_per_failure = self.__get_historical_cost_per_failure(assetnum) # Prediksi untuk setiap kolom for column in df.columns: @@ -813,16 +841,32 @@ class Prediksi: preds_list.append(cost) preds = np.array(preds_list, dtype=float) - elif recent_vals.empty: - avg = 0.0 - preds = np.repeat(float(avg), n_future) else: - avg = pd.to_numeric(recent_vals, errors="coerce").fillna(0).mean() - avg = 0.0 if pd.isna(avg) else float(avg) - preds = np.repeat(float(avg), n_future) + # Use pre-fetched cost per failure + preds_list = [] + for yr in future_years: + failures_data = await self._fetch_api_data(assetnum, yr) + # Interval from predicted number of failures + interval = 0.0 + if isinstance(failures_data, dict): + data_list = failures_data.get("data") + if isinstance(data_list, list) and len(data_list) > 0: + first_item = data_list[0] + if isinstance(first_item, dict): + num_fail = first_item.get("num_fail") + if num_fail is not None: + try: + interval = float(num_fail) + except Exception: + interval = 0.0 + + # predicted_cost = predicted_failures * avg_cost_per_failure + cost = interval * avg_cost_per_failure + preds_list.append(cost) + preds = np.array(preds_list, dtype=float) else: - # Для kolom non-cm, gunakan nilai dari last actual year bila ada, + # kolom non-cm, gunakan nilai dari last actual year bila ada, # jika tidak ada gunakan last available non-NA value, jika tidak ada pakai 0.0 if "is_actual" in df.columns and not df[df["is_actual"] == 1].empty: 
last_actual_year_series = df[df["is_actual"] == 1]["year"] diff --git a/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc b/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc index 6f1e6d7..08e5968 100644 Binary files a/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc and b/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc differ diff --git a/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc b/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc index 11b06a3..ca6f66e 100644 Binary files a/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc and b/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc differ diff --git a/src/modules/equipment/__pycache__/run.cpython-311.pyc b/src/modules/equipment/__pycache__/run.cpython-311.pyc index cd997a8..485be65 100644 Binary files a/src/modules/equipment/__pycache__/run.cpython-311.pyc and b/src/modules/equipment/__pycache__/run.cpython-311.pyc differ diff --git a/src/modules/equipment/formula.py b/src/modules/equipment/formula.py index 849d964..99b41b1 100644 --- a/src/modules/equipment/formula.py +++ b/src/modules/equipment/formula.py @@ -6,6 +6,14 @@ This file consolidates the core mathematical/financial formulas used across: - `insert_actual_data.py` (aggregation formulas, man-hour conversion) - `Prediksi.py` (future value / fv wrappers) +### Prediction Logic Summary +| Category | Logic Type | Formula Basis | +| :--- | :--- | :--- | +| **CM Labor** | **Reliability-Based** | `Failures x 3.0 x 1.0 x ManPowerRate` | +| **CM Other** | **Reliability-Based** | `Failures x CostPerFailure (from Production SQL)` | +| **PM / OH / PDM** | **Last Scenario** | `Value from Last Actual Year` (Carry Forward) | +| **Total Risk Cost** | **Aggregated** | `Sum of above + Asset Criticality Multiplier` | + Keep these functions pure and well-documented to make debugging and comparisons easier. 
""" diff --git a/src/modules/equipment/insert_actual_data.py b/src/modules/equipment/insert_actual_data.py index 88f1764..d26b38a 100644 --- a/src/modules/equipment/insert_actual_data.py +++ b/src/modules/equipment/insert_actual_data.py @@ -39,72 +39,6 @@ def get_recursive_query(cursor, assetnum, worktype="CM"): Fungsi untuk menjalankan query rekursif berdasarkan assetnum dan worktype. worktype memiliki nilai default 'CM'. """ - # query = f""" - # SELECT - # ROW_NUMBER() OVER (ORDER BY tbl.assetnum, tbl.year, tbl.worktype) AS seq, - # * - # FROM ( - # SELECT - # a.worktype, - # a.assetnum, - # EXTRACT(YEAR FROM a.reportdate) AS year, - # COUNT(a.wonum) AS raw_corrective_failure_interval, - # SUM(a.total_cost_max) AS raw_corrective_material_cost, - # ROUND( - # SUM( - # EXTRACT(EPOCH FROM ( - # a.actfinish - - # a.actstart - # )) - # ) / 3600 - # , 2) AS raw_corrective_labor_time_jam, - # SUM(a.jumlah_labor) AS raw_corrective_labor_technician - # FROM - # public.wo_staging_3 AS a - # WHERE - # a.unit = '3' - # GROUP BY - # a.worktype, - # a.assetnum, - # EXTRACT(YEAR FROM a.reportdate) - # ) AS tbl - # WHERE - # tbl.worktype = '{worktype}' - # AND tbl.assetnum = '{assetnum}' - # ORDER BY - # tbl.assetnum, - # tbl.year, - # tbl.worktype - # """ -# query = f""" -# select d.tahun, SUM(d.actmatcost) AS raw_corrective_material_cost, sum(d.man_hour) as man_hour_peryear from -# ( -# SELECT -# a.wonum, -# a.actmatcost, -# DATE_PART('year', a.reportdate) AS tahun, -# ( -# ROUND(SUM(EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600), 2) -# ) AS man_hour, -# CASE -# WHEN COUNT(b.laborcode) = 0 THEN 3 -# ELSE COUNT(b.laborcode) -# END AS man_count -# FROM public.wo_maximo AS a -# LEFT JOIN public.wo_maximo_labtrans AS b -# ON b.wonum = a.wonum -# WHERE -# a.asset_unit = '3' -# AND a.worktype = '{worktype}' -# AND a.asset_assetnum = '{assetnum}' -# and a.wonum not like 'T%' -# GROUP BY -# a.wonum, -# a.actmatcost, -# DATE_PART('year', a.reportdate) -# ) as d group by d.tahun 
-# ; -# """ where_query = get_where_query_sql(assetnum, worktype) query = f""" @@ -360,48 +294,11 @@ def _build_tr_row_values( ) rc_cm_material_cost = raw_cm_material_cost_total - # rc_cm_labor_cost = ( - # data_cm_row.get("raw_cm_labor_time") - # * data_cm_row.get("rc_cm_labor_human") - # * man_hour_value - # if data_cm_row - # and data_cm_row.get("rc_cm_labor_cost") - # and data_cm_row.get("rc_cm_labor_human") - # and man_hour_value is not None - # else 0 - # ) rc_pm_material_cost = raw_pm_material_cost - # rc_pm_labor_cost = ( - # data_pm_row.get("raw_pm_labor_time") - # * data_pm_row.get("rc_pm_labor_human") - # * man_hour_value - # if data_pm_row - # and data_pm_row.get("rc_pm_labor_cost") - # and data_pm_row.get("rc_pm_labor_human") - # and man_hour_value is not None - # else 0 - # ) rc_oh_material_cost = raw_oh_material_cost - # rc_oh_labor_cost = ( - # data_oh_row.get("raw_oh_labor_time") - # * data_oh_row.get("rc_oh_labor_human") - # * man_hour_value - # if data_oh_row - # and data_oh_row.get("rc_oh_labor_cost") - # and data_oh_row.get("rc_oh_labor_human") - # and man_hour_value is not None - # else 0 - # ) - - # rc_predictive_labor_cost = ( - # data_predictive_row.get("raw_predictive_labor_human") * man_hour_value - # if data_predictive_row - # and data_predictive_row.get("rc_predictive_labor_cost") - # and man_hour_value is not None - # else 0 - # ) + if labour_cost_lookup and year is not None: cm_lookup = labour_cost_lookup.get("CM", {}) @@ -987,18 +884,14 @@ async def query_data(target_assetnum: str = None): print(f"Error checking acquisition data for {assetnum}: {exc}") - forecasting_start_year_db = row.get("forecasting_start_year") - acquisition_year = row.get("acquisition_year") + # Calculation start is always 2014 (forecasting start is 2015) + # Forecasting and calculation start configuration + loop_start_year = 2014 + + # Delete data before calculation start (2014) + cursor.execute("DELETE FROM lcc_equipment_tr_data WHERE assetnum = %s AND tahun < 
%s", (assetnum, loop_start_year)) - if acquisition_year: - # Remove data before acquisition_year - cursor.execute("DELETE FROM lcc_equipment_tr_data WHERE assetnum = %s AND tahun < %s", (assetnum, acquisition_year)) - forecasting_start_year = acquisition_year - elif forecasting_start_year_db: - # If no acquisition_year but forecasting_start_year defined in DB - forecasting_start_year = forecasting_start_year_db - else: - forecasting_start_year = 2014 + forecasting_start_year = loop_start_year asset_start = datetime.now() processed_assets += 1 @@ -1024,6 +917,18 @@ async def query_data(target_assetnum: str = None): "OH": get_labour_cost_totals(cursor_wo, assetnum, "OH"), } + # Find first year with replace_cost > 0 in Maximo (Requirement: ignore costs in this year) + cursor_wo.execute(""" + select DATE_PART('year', a.reportdate) AS year + from wo_maximo a + where a.asset_replacecost > 0 + and a.asset_assetnum = %s + order by a.reportdate asc + limit 1; + """, (assetnum,)) + res_rep = cursor_wo.fetchone() + first_rep_year = int(res_rep[0]) if res_rep else None + seq = 0 # Looping untuk setiap tahun for year in range(forecasting_start_year, current_year + 1): @@ -1074,6 +979,23 @@ async def query_data(target_assetnum: str = None): year=year, labour_cost_lookup=labour_cost_lookup, ) + + # Requirement: At the first year of the replace cost detected > 0, + # The material cost/ labor cost is ignored. 
+ if first_rep_year and year == first_rep_year: + cost_keys = [ + "raw_cm_material_cost", "raw_cm_labor_time", + "raw_pm_material_cost", "raw_pm_labor_time", + "raw_oh_material_cost", "raw_oh_labor_time", + "raw_predictive_material_cost", "raw_predictive_labor_time", + "rc_cm_material_cost", "rc_cm_labor_cost", + "rc_pm_material_cost", "rc_pm_labor_cost", + "rc_oh_material_cost", "rc_oh_labor_cost", + "rc_predictive_labor_cost" + ] + for k in cost_keys: + if k in row_values: + row_values[k] = 0.0 if not data_exists: cursor.execute( insert_query, diff --git a/src/modules/equipment/run.py b/src/modules/equipment/run.py index 0766aa8..6795c94 100644 --- a/src/modules/equipment/run.py +++ b/src/modules/equipment/run.py @@ -6,18 +6,60 @@ from src.modules.equipment.insert_actual_data import query_data from src.modules.equipment.Prediksi import Prediksi, main as predict_run from src.modules.equipment.Eac import Eac, main as eac_run +def format_execution_time(execution_time): + if execution_time >= 3600: + hours = int(execution_time // 3600) + minutes = int((execution_time % 3600) // 60) + seconds = execution_time % 60 + return f"{hours}h {minutes}m {seconds:.2f}s." + elif execution_time >= 60: + minutes = int(execution_time // 60) + seconds = execution_time % 60 + return f"{minutes}m {seconds:.2f}s." + else: + return f"{execution_time:.2f} seconds." + +# Alternative calling function to just predict and calculate eac without inserting actual data +async def simulate(assetnum: str = None): + start_time = time.time() + print(f"Starting simulation (predict + eac) {'for ' + assetnum if assetnum else 'for all assets'}...") + + try: + prediction_result = await predict_run(assetnum=assetnum) + if prediction_result is False: + print("Prediction step failed or was skipped. 
Skipping EAC run.") + return + except Exception as e: + print(f"Error in predict_run: {str(e)}") + return + + try: + result = eac_run(assetnum=assetnum) + if asyncio.iscoroutine(result): + result = await result + print("EAC run completed.") + except Exception as e: + print(f"Error in eac_run: {str(e)}") + return + + end_time = time.time() + message = f"Simulation finished in {format_execution_time(end_time - start_time)}" + print(message) + return message + # Panggil fungsi -async def main(): +async def main(assetnum: str = None): start_time = time.time() + print(f"Starting calculation workflow {'for ' + assetnum if assetnum else 'for all assets'}...") try: - await query_data() + await query_data(target_assetnum=assetnum) except Exception as e: print(f"Error in query_data: {str(e)}") return try: - prediction_result = await predict_run() + prediction_result = await predict_run(assetnum=assetnum) if prediction_result is False: print("Prediction step failed or was skipped. Skipping EAC run.") return @@ -26,37 +68,28 @@ async def main(): return try: - result = eac_run() + result = eac_run(assetnum=assetnum) if asyncio.iscoroutine(result): result = await result - - if isinstance(result, (list, tuple)): - print(f"EAC run returned {len(result)} items.") - else: - print("EAC run completed.") + print("EAC run completed.") except Exception as e: print(f"Error in eac_run: {str(e)}") return end_time = time.time() - execution_time = end_time - start_time - # format execution time into h/m/s as needed - if execution_time >= 3600: - hours = int(execution_time // 3600) - minutes = int((execution_time % 3600) // 60) - seconds = execution_time % 60 - message = f"Script calculation finished in {hours}h {minutes}m {seconds:.2f}s." - elif execution_time >= 60: - minutes = int(execution_time // 60) - seconds = execution_time % 60 - message = f"Script calculation finished in {minutes}m {seconds:.2f}s." - else: - message = f"Script calculation finished in {execution_time:.2f} seconds." 
- + message = f"Script calculation finished in {format_execution_time(end_time - start_time)}" print(message) return message if __name__ == "__main__": - asyncio.run( - main() - ) + import argparse + parser = argparse.ArgumentParser(description="Run LCCA Simulation") + parser.add_argument("mode", nargs="?", choices=["main", "simulate"], default="main", help="Mode to run: 'main' (full) or 'simulate' (no data refresh)") + parser.add_argument("--assetnum", type=str, help="Specific asset number to process") + + args = parser.parse_args() + + if args.mode == "simulate": + asyncio.run(simulate(assetnum=args.assetnum)) + else: + asyncio.run(main(assetnum=args.assetnum)) diff --git a/src/plant_fs_transaction_data/schema.py b/src/plant_fs_transaction_data/schema.py index 7315262..8af5592 100644 --- a/src/plant_fs_transaction_data/schema.py +++ b/src/plant_fs_transaction_data/schema.py @@ -9,65 +9,65 @@ from src.models import DefaultBase, Pagination class PlantFSTransactionDataBase(DefaultBase): fs_chart_total_revenue: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_revenue_a: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_revenue_b: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_revenue_c: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_revenue_d: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_revenue_annualized: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_fuel_cost_component_c: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, 
le=1_000_000_000_000_000 ) fs_chart_fuel_cost: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_fuel_cost_annualized: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_oem_component_bd: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_oem_bd_cost: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_oem_annualized: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_capex_component_a: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_capex_biaya_investasi_tambahan: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_capex_acquisition_cost: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_chart_capex_annualized: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) fs_cost_disposal_cost: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) - tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: 
Optional[str] = Field(None, nullable=True) + tahun: Optional[int] = Field(None, ge=1900, le=9999) + seq: Optional[int] = Field(None, ge=0, le=9999) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class PlantFSTransactionDataCreate(PlantFSTransactionDataBase): @@ -97,6 +97,6 @@ class PlantFSTransactionChart(PlantFSTransactionDataBase): class PlantFSChartData(DefaultBase): items: List[PlantFSTransactionChart] - bep_year: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - bep_total_lcc: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) + bep_year: Optional[int] = Field(None, ge=0, le=9999) + bep_total_lcc: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) diff --git a/src/plant_masterdata/__pycache__/schema.cpython-311.pyc b/src/plant_masterdata/__pycache__/schema.cpython-311.pyc index 511e8aa..af0a54a 100644 Binary files a/src/plant_masterdata/__pycache__/schema.cpython-311.pyc and b/src/plant_masterdata/__pycache__/schema.cpython-311.pyc differ diff --git a/src/plant_masterdata/schema.py b/src/plant_masterdata/schema.py index 363df84..016003d 100644 --- a/src/plant_masterdata/schema.py +++ b/src/plant_masterdata/schema.py @@ -10,69 +10,69 @@ from src.auth.service import CurrentUser MAX_NUMERIC_VALUE = 1_000_000_000_000_000 # thousands of trillion class PlantMasterdataBase(DefaultBase): - discount_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - total_project_cost: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - umur_teknis: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - interest_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - loan_portion: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - equity_portion: Optional[float] = Field(None, nullable=True, 
ge=0, le=MAX_NUMERIC_VALUE) - loan: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - loan_tenor: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - principal_interest_payment: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - corporate_tax_rate: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - wacc_on_project: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - wacc_on_equity: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - equity: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - daya_mampu_netto: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - auxiliary: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - susut_trafo: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - sfc: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_a: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_b: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_c: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_d: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - harga_bahan_bakar: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_project_irr: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_project_npv: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_equity_irr: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_equity_npv: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_roa_all: Optional[float] = Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_roa_current: Optional[float] = 
Field(None, nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + discount_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + total_project_cost: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + umur_teknis: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + interest_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + loan_portion: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + equity_portion: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + loan: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + loan_tenor: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + principal_interest_payment: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + corporate_tax_rate: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + wacc_on_project: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + wacc_on_equity: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + equity: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + daya_mampu_netto: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + auxiliary: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + susut_trafo: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + sfc: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_a: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_b: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_c: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_d: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + harga_bahan_bakar: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_on_project_irr: 
Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_on_project_npv: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_on_equity_irr: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_on_equity_npv: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_roa_all: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + calc_roa_current: Optional[float] = Field(None, ge=0, le=MAX_NUMERIC_VALUE) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class PlantMasterDataCreate(PlantMasterdataBase): - discount_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - total_project_cost: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - umur_teknis: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - interest_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - loan_portion: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - equity_portion: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - loan: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - loan_tenor: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - principal_interest_payment: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - corporate_tax_rate: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - wacc_on_project: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - wacc_on_equity: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - equity: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - daya_mampu_netto: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - auxiliary: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - susut_trafo: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - sfc: float = Field(..., nullable=True, ge=0, 
le=MAX_NUMERIC_VALUE) - electricity_price_a: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_b: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_c: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - electricity_price_d: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - harga_bahan_bakar: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_project_irr: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_project_npv: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_equity_irr: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_on_equity_npv: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_roa_all: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) - calc_roa_current: float = Field(..., nullable=True, ge=0, le=MAX_NUMERIC_VALUE) + discount_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + total_project_cost: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + umur_teknis: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + interest_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + loan_portion: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + equity_portion: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + loan: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + loan_tenor: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + principal_interest_payment: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + corporate_tax_rate: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + wacc_on_project: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + wacc_on_equity: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + equity: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + daya_mampu_netto: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + auxiliary: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + susut_trafo: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + sfc: float = Field(..., 
ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_a: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_b: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_c: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + electricity_price_d: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + harga_bahan_bakar: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_on_project_irr: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_on_project_npv: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_on_equity_irr: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_on_equity_npv: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_roa_all: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) + calc_roa_current: float = Field(..., ge=0, le=MAX_NUMERIC_VALUE) class PlantMasterDataUpdate(PlantMasterdataBase): diff --git a/src/plant_transaction_data/__pycache__/schema.cpython-311.pyc b/src/plant_transaction_data/__pycache__/schema.cpython-311.pyc index aaf073c..37b55b8 100644 Binary files a/src/plant_transaction_data/__pycache__/schema.cpython-311.pyc and b/src/plant_transaction_data/__pycache__/schema.cpython-311.pyc differ diff --git a/src/plant_transaction_data/schema.py b/src/plant_transaction_data/schema.py index dff7ea3..a9db71f 100644 --- a/src/plant_transaction_data/schema.py +++ b/src/plant_transaction_data/schema.py @@ -7,83 +7,83 @@ from src.models import DefaultBase, Pagination class PlantTransactionDataBase(DefaultBase): - tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - net_capacity_factor: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - production_bruto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - production_netto: Optional[float] = 
Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - energy_sales: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fuel_consumption: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_replacement: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pm: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_acquisition: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pinjaman: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_depreciation: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_fuel: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_om: Optional[float] = Field(None, nullable=True, 
ge=0, le=1_000_000_000_000_000) - cost_bd_pm_nonmi: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_expense: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_cost_eac: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_profit_loss: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_profit_loss: Optional[float] = Field(None, nullable=True) - total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - calc_depreciation: Optional[float] = Field(None, nullable=True) - calc_interest_payment: Optional[float] = Field(None, nullable=True) - calc_principal_payment: Optional[float] = Field(None, nullable=True) - calc_dept_amount: Optional[float] = Field(None, nullable=True) - calc2_ebitda: Optional[float] = Field(None, nullable=True) - calc2_earning_before_tax: Optional[float] = Field(None, nullable=True) - calc2_tax: Optional[float] = Field(None, nullable=True) - calc2_earning_after_tax: Optional[float] = Field(None, nullable=True) - calc2_nopat: Optional[float] = Field(None, nullable=True) - calc3_interest_after_tax: Optional[float] = Field(None, nullable=True) - calc3_free_cash_flow_on_project: Optional[float] = Field(None, nullable=True) - calc3_discounted_fcf_on_project: Optional[float] = Field(None, nullable=True) - 
calc4_principal_repayment: Optional[float] = Field(None, nullable=True) - calc4_free_cash_flow_on_equity: Optional[float] = Field(None, nullable=True) - calc4_discounted_fcf_on_equity: Optional[float] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) - cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - 
chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) + tahun: Optional[int] = Field(None, ge=1900, le=9999) + is_actual: Optional[int] = Field(None, ge=0, le=1) + seq: Optional[int] = Field(None, ge=0, le=9999) + net_capacity_factor: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + eaf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + production_bruto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + production_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + energy_sales: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fuel_consumption: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_replacement: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_pm: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_acquisition: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_pinjaman: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_depreciation: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + 
cost_a_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_fuel: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_om: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_pm_nonmi: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_expense: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_cost_eac: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_profit_loss: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_profit_loss: Optional[float] = Field(None) + total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + calc_depreciation: Optional[float] = Field(None) + calc_interest_payment: Optional[float] = Field(None) + calc_principal_payment: Optional[float] = Field(None) + calc_dept_amount: Optional[float] = Field(None) + calc2_ebitda: Optional[float] = Field(None) + calc2_earning_before_tax: Optional[float] = Field(None) + calc2_tax: Optional[float] = Field(None) + calc2_earning_after_tax: Optional[float] = Field(None) + calc2_nopat: Optional[float] = Field(None) + calc3_interest_after_tax: Optional[float] = Field(None) + calc3_free_cash_flow_on_project: Optional[float] = Field(None) + 
calc3_discounted_fcf_on_project: Optional[float] = Field(None) + calc4_principal_repayment: Optional[float] = Field(None) + calc4_free_cash_flow_on_equity: Optional[float] = Field(None) + calc4_discounted_fcf_on_equity: Optional[float] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) + cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_annualized: Optional[float] = Field(None, ge=0, 
le=1_000_000_000_000_000) class PlantTransactionChart(PlantTransactionDataBase): @@ -93,14 +93,14 @@ class PlantTransactionChart(PlantTransactionDataBase): class PlantChartData(DefaultBase): items: List[PlantTransactionChart] - bep_year: Optional[int] = Field(int, nullable=True, ge=0, le=9999) - bep_total_lcc: Optional[float] = Field(float, nullable=True, ge=0, le=1_000_000_000_000_000) + bep_year: Optional[int] = Field(int, ge=0, le=9999) + bep_total_lcc: Optional[float] = Field(float, ge=0, le=1_000_000_000_000_000) class PlantTransactionFSImport(DefaultBase): data: List[List[Optional[Any]]] - is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) + is_actual: Optional[int] = Field(None, ge=0, le=1) + seq: Optional[int] = Field(None, ge=0, le=9999) class PlantTransactionDataCreate(PlantTransactionDataBase): diff --git a/src/plant_transaction_data_simulations/schema.py b/src/plant_transaction_data_simulations/schema.py index 873e95e..0668854 100644 --- a/src/plant_transaction_data_simulations/schema.py +++ b/src/plant_transaction_data_simulations/schema.py @@ -7,127 +7,127 @@ from src.models import DefaultBase, Pagination class PlantTransactionDataSimulationsBase(DefaultBase): - simulation_id: Optional[UUID] = Field(None, nullable=True) - tahun: Optional[int] = Field(None, nullable=True, ge=1900, le=9999) - is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - net_capacity_factor: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - production_bruto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - production_netto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - energy_sales: Optional[float] = Field(None, nullable=True, ge=0, 
le=1_000_000_000_000_000) - fuel_consumption: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_replacement: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pm: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_acquisition: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pinjaman: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_depreciation: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_a_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_fuel: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_c_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_om: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_pm_nonmi: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) 
- cost_bd_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_total: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_pv: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_bd_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_expense: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_cost_eac: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_profit_loss: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_profit_loss: Optional[float] = Field(None, nullable=True) - total_residual_value: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - calc_depreciation: Optional[float] = Field(None, nullable=True) - calc_interest_payment: Optional[float] = Field(None, nullable=True) - calc_principal_payment: Optional[float] = Field(None, nullable=True) - calc_dept_amount: Optional[float] = Field(None, nullable=True) - calc2_ebitda: Optional[float] = Field(None, nullable=True) - calc2_earning_before_tax: Optional[float] = Field(None, nullable=True) - calc2_tax: Optional[float] = Field(None, nullable=True) - calc2_earning_after_tax: Optional[float] = Field(None, nullable=True) - calc2_nopat: Optional[float] = Field(None, nullable=True) - calc3_interest_after_tax: Optional[float] = Field(None, nullable=True) - calc3_free_cash_flow_on_project: Optional[float] = Field(None, nullable=True) - calc3_discounted_fcf_on_project: Optional[float] = Field(None, nullable=True) - calc4_principal_repayment: Optional[float] = Field(None, nullable=True) - calc4_free_cash_flow_on_equity: Optional[float] = Field(None, 
nullable=True) - calc4_discounted_fcf_on_equity: Optional[float] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) - chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - 
chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cost_disposal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_total_revenue: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_revenue_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_revenue_b: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_revenue_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_revenue_d: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_revenue_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_fuel_cost_component_c: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_fuel_cost_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_oem_component_bd: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_oem_bd_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_oem_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_capex_component_a: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) + simulation_id: Optional[UUID] = Field(None) + tahun: Optional[int] = Field(None, ge=1900, le=9999) + is_actual: Optional[int] = Field(None, ge=0, le=1) + seq: Optional[int] = Field(None, ge=0, le=9999) + net_capacity_factor: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + eaf: Optional[float] = Field(None, ge=0, 
le=1_000_000_000_000_000) + production_bruto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + production_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + energy_sales: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fuel_consumption: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_replacement: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_pm: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_acquisition: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_pinjaman: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_depreciation: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_a_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_fuel: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_c_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_om: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_pm_nonmi: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_bd: Optional[float] = Field(None, ge=0, 
le=1_000_000_000_000_000) + cost_bd_total: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_pv: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_bd_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_expense: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_cost_eac: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_profit_loss: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_profit_loss: Optional[float] = Field(None) + total_residual_value: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + calc_depreciation: Optional[float] = Field(None) + calc_interest_payment: Optional[float] = Field(None) + calc_principal_payment: Optional[float] = Field(None) + calc_dept_amount: Optional[float] = Field(None) + calc2_ebitda: Optional[float] = Field(None) + calc2_earning_before_tax: Optional[float] = Field(None) + calc2_tax: Optional[float] = Field(None) + calc2_earning_after_tax: Optional[float] = Field(None) + calc2_nopat: Optional[float] = Field(None) + calc3_interest_after_tax: Optional[float] = Field(None) + calc3_free_cash_flow_on_project: Optional[float] = Field(None) + calc3_discounted_fcf_on_project: Optional[float] = Field(None) + calc4_principal_repayment: Optional[float] = Field(None) + calc4_free_cash_flow_on_equity: Optional[float] = Field(None) + calc4_discounted_fcf_on_equity: Optional[float] = Field(None) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) + chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + 
chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_biaya_investasi_tambahan: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + chart_capex_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cost_disposal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_total_revenue: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_revenue_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_revenue_b: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_revenue_c: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_revenue_d: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_revenue_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_fuel_cost_component_c: Optional[float] = Field(None, ge=0, 
le=1_000_000_000_000_000) + fs_chart_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_fuel_cost_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_oem_component_bd: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_oem_bd_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_oem_periodic_maintenance_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_oem_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_capex_component_a: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) fs_chart_capex_biaya_investasi_tambahan: Optional[float] = Field( - None, nullable=True, ge=0, le=1_000_000_000_000_000 + None, ge=0, le=1_000_000_000_000_000 ) - fs_chart_capex_acquisition_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - fs_chart_capex_annualized: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) + fs_chart_capex_acquisition_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + fs_chart_capex_annualized: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) class PlantTransactionChartSimulations(PlantTransactionDataSimulationsBase): - tahun: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) + tahun: Optional[int] = Field(None, ge=0, le=9999) + is_actual: Optional[int] = Field(None, ge=0, le=1) + seq: Optional[int] = Field(None, ge=0, le=9999) class PlantChartDataSimulations(DefaultBase): items: List[PlantTransactionChartSimulations] - bep_year: Optional[int] = Field(int, nullable=True, ge=0, le=9999) - bep_total_lcc: Optional[float] = Field(float, nullable=True, ge=0, le=1_000_000_000_000_000) + bep_year: Optional[int] = Field(None, ge=0, le=9999) + bep_total_lcc: 
Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) class PlantTransactionFSImportSimulations(DefaultBase): data: List[List[Optional[Any]]] - is_actual: Optional[int] = Field(None, nullable=True, ge=0, le=1) - seq: Optional[int] = Field(None, nullable=True, ge=0, le=9999) - simulation_id: UUID = Field(..., nullable=False) + is_actual: Optional[int] = Field(None, ge=0, le=1) + seq: Optional[int] = Field(None, ge=0, le=9999) + simulation_id: UUID = Field(...) class PlantTransactionDataSimulationsCreate(PlantTransactionDataSimulationsBase): - simulation_id: UUID = Field(..., nullable=False) + simulation_id: UUID = Field(...) class PlantTransactionDataSimulationsUpdate(PlantTransactionDataSimulationsBase): diff --git a/src/simulations/schema.py b/src/simulations/schema.py index 19409fa..7b5a7cd 100644 --- a/src/simulations/schema.py +++ b/src/simulations/schema.py @@ -13,16 +13,16 @@ from src.plant_transaction_data_simulations.schema import ( class SimulationBase(DefaultBase): id: UUID - label: Optional[str] = Field(None, nullable=False) - version: Optional[int] = Field(None, nullable=True, ge=0, le=9_999_999_999) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + label: Optional[str] = Field(None) + version: Optional[int] = Field(None, ge=0, le=9_999_999_999) + created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class SimulationCreate(SimulationBase): - label: str = Field(..., nullable=False) + label: str = Field(...) 
class SimulationUpdate(SimulationBase): @@ -32,10 +32,10 @@ class SimulationUpdate(SimulationBase): class SimulationRead(SimulationBase): id: UUID masterdata_entries: List[MasterDataSimulationRead] = Field( - default_factory=list, nullable=False + default_factory=list ) plant_transactions: List[PlantTransactionDataSimulationsRead] = Field( - default_factory=list, nullable=False + default_factory=list ) @@ -44,10 +44,10 @@ class SimulationPagination(Pagination): class MasterDataOverride(DefaultBase): - name: str = Field(..., nullable=False) - value_num: Optional[float] = Field(None, nullable=True, le=1_000_000_000_000_000) - value_str: Optional[str] = Field(None, nullable=True) + name: str = Field(...) + value_num: Optional[float] = Field(None, le=1_000_000_000_000_000) + value_str: Optional[str] = Field(None) class SimulationRunPayload(DefaultBase): - label: Optional[str] = Field(None, nullable=True) + label: Optional[str] = Field(None) overrides: List[MasterDataOverride] = Field(default_factory=list) diff --git a/src/uploaded_file/schema.py b/src/uploaded_file/schema.py index f8c5a52..baf5a44 100644 --- a/src/uploaded_file/schema.py +++ b/src/uploaded_file/schema.py @@ -6,15 +6,15 @@ from pydantic import Field from src.models import DefaultBase, Pagination class UploadedFileDataBase(DefaultBase): - filename: str = Field(..., nullable=False) - file_content: str = Field(..., nullable=False) - file_url: str = Field(..., nullable=False) - file_size: int = Field(..., nullable=False) - file_type: str = Field(..., nullable=False) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + filename: str = Field(...) + file_content: str = Field(...) + file_url: str = Field(...) + file_size: int = Field(...) + file_type: str = Field(...) 
+ created_at: Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) class UploadedFileDataCreate(UploadedFileDataBase): pass @@ -24,7 +24,7 @@ class UploadedFileDataUpdate(UploadedFileDataBase): class UploadedFileDataRead(UploadedFileDataBase): id: UUID - wlc_version: Optional[int] = Field(None, nullable=False) + wlc_version: Optional[int] = Field(None) class UploadedFileDataPagination(Pagination): items: List[UploadedFileDataRead] = [] diff --git a/src/yeardata/__pycache__/schema.cpython-311.pyc b/src/yeardata/__pycache__/schema.cpython-311.pyc index 8b3a86a..15b320e 100644 Binary files a/src/yeardata/__pycache__/schema.cpython-311.pyc and b/src/yeardata/__pycache__/schema.cpython-311.pyc differ diff --git a/src/yeardata/schema.py b/src/yeardata/schema.py index ee20353..039a4e7 100644 --- a/src/yeardata/schema.py +++ b/src/yeardata/schema.py @@ -7,25 +7,25 @@ from src.models import DefaultBase, Pagination class YeardataBase(DefaultBase): - year: Optional[int] = Field(None, nullable=True, ge=1900) - rp_per_kwh: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - total_lost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - man_hour: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_ens_energy_not_served: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_bpp_system: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_bpp_pembangkit: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_dmn_daya_mampu_netto: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_marginal_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_efdh_equivalent_forced_derated_hours: Optional[float] 
= Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_foh_forced_outage_hours: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - asset_crit_extra_fuel_cost: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - cf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - eaf: Optional[float] = Field(None, nullable=True, ge=0, le=1_000_000_000_000_000) - rbd_simulation_id: Optional[str] = Field(None, nullable=True) - created_at: Optional[datetime] = Field(None, nullable=True) - updated_at: Optional[datetime] = Field(None, nullable=True) - created_by: Optional[str] = Field(None, nullable=True) - updated_by: Optional[str] = Field(None, nullable=True) + year: Optional[int] = Field(None, ge=1900) + rp_per_kwh: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + total_lost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + man_hour: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_ens_energy_not_served: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_bpp_system: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_bpp_pembangkit: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_dmn_daya_mampu_netto: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_marginal_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_efdh_equivalent_forced_derated_hours: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_foh_forced_outage_hours: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + asset_crit_extra_fuel_cost: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + cf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + eaf: Optional[float] = Field(None, ge=0, le=1_000_000_000_000_000) + rbd_simulation_id: Optional[str] = Field(None) + created_at: 
Optional[datetime] = Field(None) + updated_at: Optional[datetime] = Field(None) + created_by: Optional[str] = Field(None) + updated_by: Optional[str] = Field(None) @field_validator( "asset_crit_ens_energy_not_served", diff --git a/test_masterdata_output.txt b/test_masterdata_output.txt new file mode 100644 index 0000000..96ab605 --- /dev/null +++ b/test_masterdata_output.txt @@ -0,0 +1,111 @@ +Traceback (most recent call last): + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in + raise SystemExit(pytest.console_main()) + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main + code = main() + ^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main + config = _prepareconfig(args, plugins) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig + config = pluginmanager.hook.pytest_cmdline_parse( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse + config = yield + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse + self.parse(args) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse + self._preparse(args, addopts=addopts) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse + self.hook.pytest_load_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests + return (yield) + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests + yield + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = 
hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests + self.pluginmanager._set_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests + self._try_load_conftest( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest + self._loadconftestmodules( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules + mod = self._importconftest( + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest + mod = import_path( + ^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path + importlib.import_module(module_name) + File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1204, in _gcd_import + File "", line 1176, in _find_and_load + File "", line 1147, in _find_and_load_unlocked + File "", line 690, in _load_unlocked + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", line 184, in exec_module + exec(co, module.__dict__) + File "C:\dev\be-lcca\tests\conftest.py", line 20, in + from fastapi import Request + File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\__init__.py", line 7, in + from .applications import FastAPI as FastAPI + File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\applications.py", line 16, in + from fastapi import routing + File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\routing.py", line 34, in + from fastapi.dependencies.models import Dependant + File 
"C:\dev\be-lcca\venv\Lib\site-packages\fastapi\dependencies\models.py", line 5, in + from fastapi.security.base import SecurityBase + File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\security\__init__.py", line 1, in + from .api_key import APIKeyCookie as APIKeyCookie + File "C:\dev\be-lcca\venv\Lib\site-packages\fastapi\security\api_key.py", line 6, in + from starlette.requests import Request + File "C:\dev\be-lcca\venv\Lib\site-packages\starlette\requests.py", line 12, in + from starlette.formparsers import FormParser, MultiPartException, MultiPartParser + File "C:\dev\be-lcca\venv\Lib\site-packages\starlette\formparsers.py", line 17, in + import python_multipart as multipart + File "C:\dev\be-lcca\venv\Lib\site-packages\python_multipart\__init__.py", line 7, in + from .multipart import ( + File "C:\dev\be-lcca\venv\Lib\site-packages\python_multipart\multipart.py", line 115, in + class MultipartState(IntEnum): + File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\enum.py", line 647, in __new__ + delattr(enum_class, '_singles_mask_') + File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\enum.py", line 752, in __delattr__ + super().__delattr__(attr) + ^^^^^^^ +KeyboardInterrupt diff --git a/test_masterdata_output_2.txt b/test_masterdata_output_2.txt new file mode 100644 index 0000000..67fb7e0 --- /dev/null +++ b/test_masterdata_output_2.txt @@ -0,0 +1,155 @@ +Traceback (most recent call last): + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in + raise SystemExit(pytest.console_main()) + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main + code = main() + ^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main + config = 
_prepareconfig(args, plugins) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig + config = pluginmanager.hook.pytest_cmdline_parse( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse + config = yield + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse + self.parse(args) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse + self._preparse(args, addopts=addopts) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse + self.hook.pytest_load_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests + return (yield) + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests + yield + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests + self.pluginmanager._set_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests + self._try_load_conftest( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest + self._loadconftestmodules( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules + mod = self._importconftest( + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest + mod = import_path( + ^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path + importlib.import_module(module_name) + 
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1204, in _gcd_import + File "", line 1176, in _find_and_load + File "", line 1147, in _find_and_load_unlocked + File "", line 690, in _load_unlocked + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", line 184, in exec_module + exec(co, module.__dict__) + File "C:\dev\be-lcca\tests\conftest.py", line 22, in + from src.main import app + File "C:\dev\be-lcca\src\main.py", line 33, in + from src.api import api_router + File "C:\dev\be-lcca\src\api.py", line 22, in + from src.simulations.router import router as simulations_router + File "C:\dev\be-lcca\src\simulations\__init__.py", line 1, in + from .router import router + File "C:\dev\be-lcca\src\simulations\router.py", line 17, in + from src.simulations.service import create, delete, get, get_all, run_simulation, update + File "C:\dev\be-lcca\src\simulations\service.py", line 34, in + column.key for column in sa_inspect(MasterData).mapper.column_attrs if column.key != "id" + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1257, in __get__ + obj.__dict__[self.__name__] = result = self.fget(obj) + ^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 3172, in column_attrs + return self._filter_properties(properties.ColumnProperty) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 3225, in _filter_properties + self._check_configure() + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 2401, in _check_configure + _configure_registries({self.registry}, cascade=True) + File 
"C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 4213, in _configure_registries + _do_configure_registries(registries, cascade) + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 4254, in _do_configure_registries + mapper._post_configure_properties() + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\mapper.py", line 2421, in _post_configure_properties + prop.post_instrument_class(self) + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\interfaces.py", line 1113, in post_instrument_class + self.strategy.init_class_attribute(mapper) + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\strategies.py", line 254, in init_class_attribute + _register_attribute( + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\strategies.py", line 126, in _register_attribute + desc = attributes.register_attribute_impl( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\attributes.py", line 2605, in register_attribute_impl + "_Dispatch[QueryableAttribute[Any]]", manager[key].dispatch + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\event\base.py", line 465, in __get__ + if hasattr(obj, "_slots_dispatch"): + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\attributes.py", line 472, in __getattr__ + return getattr(self.comparator, key) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1332, in __getattr__ + return self._fallback_getattr(key) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 472, in _fallback_getattr + return getattr(self.__clause_element__(), key) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\util\langhelpers.py", line 1319, in oneshot + result = fn(*args, **kw) + ^^^^^^^^^^^^^^^ + File 
"C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 439, in _memoized_method___clause_element__ + return self._orm_annotate_column(self.prop.columns[0]) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\orm\properties.py", line 425, in _orm_annotate_column + return col._annotate(annotations)._set_propagate_attrs( + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 129, in _annotate + return Annotated._as_annotated_instance(self, values) # type: ignore + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 277, in _as_annotated_instance + return cls(element, values) + ^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\elements.py", line 5313, in __init__ + Annotated.__init__(self, element, values) + File "C:\dev\be-lcca\venv\Lib\site-packages\sqlalchemy\sql\annotation.py", line 289, in __init__ + self.__dict__ = element.__dict__.copy() + ^^^^^^^^^^^^^^^^^^^^^^^ +KeyboardInterrupt diff --git a/test_output.txt b/test_output.txt new file mode 100644 index 0000000..5acd846 --- /dev/null +++ b/test_output.txt @@ -0,0 +1,38 @@ +C:\dev\be-lcca\venv\Lib\site-packages\pytest_asyncio\plugin.py:247: PytestDeprecationWarning: The configuration option "asyncio_default_fixture_loop_scope" is unset. +The event loop scope for asynchronous fixtures will default to the fixture caching scope. Future versions of pytest-asyncio will default the loop scope for asynchronous fixtures to function scope. Set the default fixture loop scope explicitly in order to avoid unexpected behavior in the future. 
Valid fixture loop scopes are: "function", "class", "module", "package", "session" + + warnings.warn(PytestDeprecationWarning(_DEFAULT_FIXTURE_LOOP_SCOPE_UNSET)) +============================= test session starts ============================= +platform win32 -- Python 3.11.9, pytest-8.3.4, pluggy-1.5.0 -- C:\dev\be-lcca\venv\Scripts\python.exe +cachedir: .pytest_cache +rootdir: C:\dev\be-lcca +configfile: pyproject.toml +plugins: anyio-4.8.0, Faker-30.10.0, asyncio-1.3.0 +asyncio: mode=Mode.STRICT, debug=False, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function +collecting ... collected 1 item + +tests/test_healthcheck.py::test_healthcheck PASSED [100%] + +============================== warnings summary =============================== +venv\Lib\site-packages\pydantic\_internal\_config.py:295 + C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_config.py:295: PydanticDeprecatedSince20: Support for class-based `config` is deprecated, use ConfigDict instead. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/ + warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning) + +venv\Lib\site-packages\pydantic\fields.py:1042: 473 warnings + C:\dev\be-lcca\venv\Lib\site-packages\pydantic\fields.py:1042: PydanticDeprecatedSince20: Using extra keyword arguments on `Field` is deprecated and will be removed. Use `json_schema_extra` instead. (Extra keys: 'nullable'). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/ + warn( + +venv\Lib\site-packages\pydantic\_internal\_generate_schema.py:297: 115 warnings + C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py:297: PydanticDeprecatedSince20: `json_encoders` is deprecated. See https://docs.pydantic.dev/2.10/concepts/serialization/#custom-serializers for alternatives. Deprecated in Pydantic V2.0 to be removed in V3.0. 
See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.10/migration/ + warnings.warn( + +src\database\core.py:115 + C:\dev\be-lcca\src\database\core.py:115: MovedIn20Warning: The ``declarative_base()`` function is now available as sqlalchemy.orm.declarative_base(). (deprecated since: 2.0) (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) + Base = declarative_base(cls=CustomBase) + +tests/test_healthcheck.py::test_healthcheck + C:\dev\be-lcca\venv\Lib\site-packages\httpx\_client.py:1437: DeprecationWarning: The 'app' shortcut is now deprecated. Use the explicit style 'transport=ASGITransport(app=...)' instead. + warnings.warn(message, DeprecationWarning) + +-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html +======================= 1 passed, 591 warnings in 0.95s ======================= diff --git a/test_output_e2e.txt b/test_output_e2e.txt new file mode 100644 index 0000000..2291833 --- /dev/null +++ b/test_output_e2e.txt @@ -0,0 +1,141 @@ +Traceback (most recent call last): + File "", line 198, in _run_module_as_main + File "", line 88, in _run_code + File "C:\dev\be-lcca\venv\Lib\site-packages\pytest\__main__.py", line 9, in + raise SystemExit(pytest.console_main()) + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 201, in console_main + code = main() + ^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 156, in main + config = _prepareconfig(args, plugins) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 341, in _prepareconfig + config = pluginmanager.hook.pytest_cmdline_parse( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\helpconfig.py", line 105, in pytest_cmdline_parse + config = yield + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1140, in pytest_cmdline_parse + self.parse(args) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1494, in parse + self._preparse(args, addopts=addopts) + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1398, in _preparse + self.hook.pytest_load_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_hooks.py", line 513, in __call__ + return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_manager.py", line 120, in _hookexec + return self._inner_hookexec(hook_name, methods, kwargs, firstresult) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 139, in _multicall + raise exception.with_traceback(exception.__traceback__) + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + 
^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\warnings.py", line 151, in pytest_load_initial_conftests + return (yield) + ^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 122, in _multicall + teardown.throw(exception) # type: ignore[union-attr] + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\capture.py", line 154, in pytest_load_initial_conftests + yield + File "C:\dev\be-lcca\venv\Lib\site-packages\pluggy\_callers.py", line 103, in _multicall + res = hook_impl.function(*args) + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 1222, in pytest_load_initial_conftests + self.pluginmanager._set_initial_conftests( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 581, in _set_initial_conftests + self._try_load_conftest( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 619, in _try_load_conftest + self._loadconftestmodules( + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 659, in _loadconftestmodules + mod = self._importconftest( + ^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\config\__init__.py", line 710, in _importconftest + mod = import_path( + ^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\pathlib.py", line 587, in import_path + importlib.import_module(module_name) + File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\importlib\__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1204, in _gcd_import + File "", line 1176, in _find_and_load + File "", line 1147, in _find_and_load_unlocked + File "", line 690, in _load_unlocked + File "C:\dev\be-lcca\venv\Lib\site-packages\_pytest\assertion\rewrite.py", 
line 184, in exec_module + exec(co, module.__dict__) + File "C:\dev\be-lcca\tests\conftest.py", line 22, in + from src.main import app + File "C:\dev\be-lcca\src\main.py", line 33, in + from src.api import api_router + File "C:\dev\be-lcca\src\api.py", line 18, in + from src.acquisition_cost.router import router as acquisition_data_router + File "C:\dev\be-lcca\src\acquisition_cost\router.py", line 6, in + from src.acquisition_cost.schema import AcquisitionCostDataPagination, AcquisitionCostDataRead, AcquisitionCostDataCreate, AcquisitionCostDataUpdate, ListQueryParams + File "C:\dev\be-lcca\src\acquisition_cost\schema.py", line 20, in + class AcquisitionCostDataCreate(AcquisitionCostDataBase): + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 224, in __new__ + complete_model_class( + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 602, in complete_model_class + schema = cls.__get_pydantic_core_schema__(cls, handler) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\main.py", line 702, in __get_pydantic_core_schema__ + return handler(source) + ^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__ + schema = self._handler(source_type) + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 610, in generate_schema + schema = self._generate_schema_inner(obj) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 879, in _generate_schema_inner + return self._model_schema(obj) + ^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in _model_schema + {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()}, + 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in + {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()}, + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1071, in _generate_md_field_schema + common_field = self._common_field_schema(name, field_info, decorators) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1263, in _common_field_schema + schema = self._apply_annotations( + ^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2056, in _apply_annotations + schema = get_inner_schema(source_type) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__ + schema = self._handler(source_type) + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2040, in inner_handler + metadata_js_function = _extract_get_pydantic_json_schema(obj, schema) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2403, in _extract_get_pydantic_json_schema + return _extract_get_pydantic_json_schema(tp.__origin__, schema) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\dev\be-lcca\venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2402, in _extract_get_pydantic_json_schema + if hasattr(tp, '__origin__') and not _typing_extra.is_annotated(tp): + ^^^^^^^^^^^^^^^^^^^^^^^^^ + File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\Lib\typing.py", line 470, in 
__getattr__ + def __getattr__(self, item): + +KeyboardInterrupt diff --git a/tests/conftest.py b/tests/conftest.py index 5ce6ed2..04fa40a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,18 +1,44 @@ +import os + +# Set dummy environment variables for testing +os.environ["DATABASE_HOSTNAME"] = "localhost" +os.environ["DATABASE_CREDENTIAL_USER"] = "test" +os.environ["DATABASE_CREDENTIAL_PASSWORD"] = "test" +os.environ["COLLECTOR_CREDENTIAL_USER"] = "test" +os.environ["COLLECTOR_CREDENTIAL_PASSWORD"] = "test" +os.environ["DEV_USERNAME"] = "test" +os.environ["DEV_PASSWORD"] = "test" + import asyncio from typing import AsyncGenerator, Generator import pytest -from httpx import AsyncClient +import pytest_asyncio +from httpx import AsyncClient, ASGITransport from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.orm import sessionmaker from sqlalchemy.pool import StaticPool +from fastapi import Request -import pytest -from sqlalchemy_utils import drop_database, database_exists -from starlette.config import environ -from starlette.testclient import TestClient +from src.main import app +from src.database.core import Base, get_db, get_collector_db +from src.auth.service import JWTBearer +from src.auth.model import UserBase -# from src.database import Base, get_db -# from src.main import app +# Import all models to register them with Base +import src.acquisition_cost.model +import src.equipment.model +import src.equipment_master.model +import src.manpower_cost.model +import src.manpower_master.model +import src.masterdata.model +import src.masterdata_simulations.model +import src.plant_fs_transaction_data.model +import src.plant_masterdata.model +import src.plant_transaction_data.model +import src.plant_transaction_data_simulations.model +import src.simulations.model +import src.uploaded_file.model +import src.yeardata.model # Test database URL TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:" @@ -23,7 +49,7 @@ engine = 
create_async_engine( poolclass=StaticPool, ) -async_session = sessionmaker( +TestingSessionLocal = sessionmaker( engine, class_=AsyncSession, expire_on_commit=False, @@ -31,39 +57,59 @@ async_session = sessionmaker( autoflush=False, ) +def pytest_sessionfinish(session, exitstatus): + """ + Called after whole test run finished, right before returning the exit status to the system. + Used here to dispose of all SQLAlchemy engines to prevent hanging. + """ + from src.database.core import engine as db_engine, collector_engine + + async def dispose_all(): + # Dispose of both test engine and production engines + await engine.dispose() + await db_engine.dispose() + await collector_engine.dispose() -async def override_get_db() -> AsyncGenerator[AsyncSession, None]: - async with async_session() as session: + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + # If the loop is already running, we create a task + loop.create_task(dispose_all()) + else: + loop.run_until_complete(dispose_all()) + except Exception: + # Fallback for environment where no loop is available or loop is closed try: - yield session - await session.commit() + asyncio.run(dispose_all()) except Exception: - await session.rollback() - raise - finally: - await session.close() - - -app.dependency_overrides[get_db] = override_get_db - + pass -@pytest.fixture(scope="session") -def event_loop() -> Generator: - loop = asyncio.get_event_loop_policy().new_event_loop() - yield loop - loop.close() +# Removed custom event_loop fixture - -@pytest.fixture(autouse=True) -async def setup_db() -> AsyncGenerator[None, None]: +@pytest_asyncio.fixture(autouse=True) +async def setup_db(): async with engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) yield async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) +async def override_get_db(request: Request = None): + async with TestingSessionLocal() as session: + yield session + +app.dependency_overrides[get_db] = 
override_get_db +app.dependency_overrides[get_collector_db] = override_get_db + +@pytest.fixture(autouse=True) +def mock_auth(monkeypatch): + async def mock_call(self, request: Request): + user = UserBase(user_id="test-id", name="test-user", role="admin") + request.state.user = user + return user + monkeypatch.setattr(JWTBearer, "__call__", mock_call) -@pytest.fixture +@pytest_asyncio.fixture async def client() -> AsyncGenerator[AsyncClient, None]: - async with AsyncClient(app=app, base_url="http://test") as client: + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client: yield client \ No newline at end of file diff --git a/tests/database.py b/tests/database.py deleted file mode 100644 index 89b84ae..0000000 --- a/tests/database.py +++ /dev/null @@ -1,3 +0,0 @@ -from sqlalchemy.orm import scoped_session, sessionmaker - -Session = scoped_session(sessionmaker()) diff --git a/tests/e2e/test_acquisition_cost.py b/tests/e2e/test_acquisition_cost.py new file mode 100644 index 0000000..e748bbd --- /dev/null +++ b/tests/e2e/test_acquisition_cost.py @@ -0,0 +1,23 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_acquisition_costs(client: AsyncClient): + response = await client.get("/acquisition-data") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_create_acquisition_cost(client: AsyncClient): + payload = { + "assetnum": "TEST-ASSET", + "acquisition_cost": 1000.0, + "acquisition_year": 2024, + "residual_value": 100.0, + "useful_life": 10 + } + response = await client.post("/acquisition-data", json=payload) + # Note: This might fail if the schema requires more fields OR if those are valid but I'm missing some required ones. + # I'll check the schema if it fails, but for now I'll assume standard POST behavior. 
+ assert response.status_code == 200 + assert response.json()["message"] == "Data created successfully" diff --git a/tests/e2e/test_equipment.py b/tests/e2e/test_equipment.py new file mode 100644 index 0000000..53eede2 --- /dev/null +++ b/tests/e2e/test_equipment.py @@ -0,0 +1,26 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_equipments(client: AsyncClient): + response = await client.get("/equipment") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_get_top_10_replacement_priorities(client: AsyncClient): + response = await client.get("/equipment/top-10-replacement-priorities") + assert response.status_code == 200 + assert response.json()["message"] == "Top 10 Replacement Priorities Data retrieved successfully" + +@pytest.mark.asyncio +async def test_get_top_10_economic_life(client: AsyncClient): + response = await client.get("/equipment/top-10-economic-life") + assert response.status_code == 200 + assert response.json()["message"] == "Top 10 Economic Life Data retrieved successfully" + +@pytest.mark.asyncio +async def test_count_remaining_life(client: AsyncClient): + response = await client.get("/equipment/count-remaining-life") + assert response.status_code == 200 + assert response.json()["message"] == "Count remaining life retrieved successfully" diff --git a/tests/e2e/test_equipment_master.py b/tests/e2e/test_equipment_master.py new file mode 100644 index 0000000..a75f10f --- /dev/null +++ b/tests/e2e/test_equipment_master.py @@ -0,0 +1,8 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_equipment_masters(client: AsyncClient): + response = await client.get("/equipment-master") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" diff --git a/tests/e2e/test_healthcheck.py b/tests/e2e/test_healthcheck.py new file mode 100644 
index 0000000..0908cd7 --- /dev/null +++ b/tests/e2e/test_healthcheck.py @@ -0,0 +1,8 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_healthcheck(client: AsyncClient): + response = await client.get("/healthcheck") + assert response.status_code == 200 + assert response.json() == {"status": "ok"} diff --git a/tests/e2e/test_masterdata.py b/tests/e2e/test_masterdata.py new file mode 100644 index 0000000..aa4c215 --- /dev/null +++ b/tests/e2e/test_masterdata.py @@ -0,0 +1,97 @@ +import pytest +from httpx import AsyncClient +import uuid + +@pytest.mark.asyncio +async def test_create_masterdata(client: AsyncClient): + payload = { + "name": "Test Master Data", + "description": "Test Description", + "unit_of_measurement": "unit", + "value_num": 100.0, + "value_str": "100", + "seq": 1 + } + response = await client.post("/masterdata", json=payload) + assert response.status_code == 200 + data = response.json() + assert data["message"] == "Data created successfully" + assert data["data"]["name"] == "Test Master Data" + assert "id" in data["data"] + return data["data"]["id"] + +@pytest.mark.asyncio +async def test_get_masterdatas(client: AsyncClient): + # First create one + await client.post("/masterdata", json={ + "name": "Data 1", + "description": "Desc 1", + "unit_of_measurement": "u", + "value_num": 1.0, + "seq": 1 + }) + + response = await client.get("/masterdata") + assert response.status_code == 200 + data = response.json() + assert data["message"] == "Data retrieved successfully" + assert len(data["data"]["items"]) >= 1 + +@pytest.mark.asyncio +async def test_get_masterdata_by_id(client: AsyncClient): + # Create one + create_resp = await client.post("/masterdata", json={ + "name": "Data By ID", + "description": "Desc", + "unit_of_measurement": "u", + "value_num": 2.0, + "seq": 2 + }) + masterdata_id = create_resp.json()["data"]["id"] + + response = await client.get(f"/masterdata/{masterdata_id}") + assert response.status_code == 
200 + assert response.json()["data"]["name"] == "Data By ID" + +@pytest.mark.asyncio +async def test_update_masterdata(client: AsyncClient): + # Create one + create_resp = await client.post("/masterdata", json={ + "name": "Old Name", + "description": "Desc", + "unit_of_measurement": "u", + "value_num": 3.0, + "seq": 3 + }) + masterdata_id = create_resp.json()["data"]["id"] + + # Update it + update_payload = { + "name": "New Name", + "value_num": 4.0 + } + response = await client.post(f"/masterdata/update/{masterdata_id}", json=update_payload) + assert response.status_code == 200 + assert response.json()["data"]["name"] == "New Name" + assert response.json()["data"]["value_num"] == 4.0 + +@pytest.mark.asyncio +async def test_delete_masterdata(client: AsyncClient): + # Create one + create_resp = await client.post("/masterdata", json={ + "name": "To Be Deleted", + "description": "Desc", + "unit_of_measurement": "u", + "value_num": 5.0, + "seq": 5 + }) + masterdata_id = create_resp.json()["data"]["id"] + + # Delete it + response = await client.post(f"/masterdata/delete/{masterdata_id}") + assert response.status_code == 200 + assert response.json()["message"] == "Data deleted successfully" + + # Verify it's gone + get_resp = await client.get(f"/masterdata/{masterdata_id}") + assert get_resp.status_code == 404 diff --git a/tests/e2e/test_masterdata_simulations.py b/tests/e2e/test_masterdata_simulations.py new file mode 100644 index 0000000..3037264 --- /dev/null +++ b/tests/e2e/test_masterdata_simulations.py @@ -0,0 +1,8 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_masterdata_simulations(client: AsyncClient): + response = await client.get("/masterdata-simulations") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" diff --git a/tests/e2e/test_plant_fs_transaction.py b/tests/e2e/test_plant_fs_transaction.py new file mode 100644 index 0000000..dbdc94e --- /dev/null +++ 
b/tests/e2e/test_plant_fs_transaction.py @@ -0,0 +1,16 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_list_fs_transactions(client: AsyncClient): + response = await client.get("/plant-fs-transaction-data") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_get_fs_charts(client: AsyncClient): + response = await client.get("/plant-fs-transaction-data/charts") + if response.status_code == 200: + assert "items" in response.json()["data"] + else: + assert response.status_code == 404 diff --git a/tests/e2e/test_plant_masterdata.py b/tests/e2e/test_plant_masterdata.py new file mode 100644 index 0000000..ea5f9a4 --- /dev/null +++ b/tests/e2e/test_plant_masterdata.py @@ -0,0 +1,20 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_plant_masterdatas(client: AsyncClient): + response = await client.get("/plant-masterdata") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_create_plant_masterdata(client: AsyncClient): + payload = { + "name": "Plant Parameter", + "description": "Plant Desc", + "unit_of_measurement": "unit", + "value_num": 10.5 + } + response = await client.post("/plant-masterdata", json=payload) + assert response.status_code == 200 + assert response.json()["message"] == "Data created successfully" diff --git a/tests/e2e/test_plant_transaction.py b/tests/e2e/test_plant_transaction.py new file mode 100644 index 0000000..5518e4a --- /dev/null +++ b/tests/e2e/test_plant_transaction.py @@ -0,0 +1,18 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_plant_transactions(client: AsyncClient): + response = await client.get("/plant-transaction-data") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + 
+@pytest.mark.asyncio +async def test_get_plant_charts(client: AsyncClient): + # This might return 404 if no data exists, but with my setup_db it should be empty + response = await client.get("/plant-transaction-data/charts") + # Actually, the service might raise 404 if it's empty + if response.status_code == 200: + assert "items" in response.json()["data"] + else: + assert response.status_code == 404 diff --git a/tests/e2e/test_simulation.py b/tests/e2e/test_simulation.py new file mode 100644 index 0000000..e44ba9f --- /dev/null +++ b/tests/e2e/test_simulation.py @@ -0,0 +1,19 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_simulations(client: AsyncClient): + response = await client.get("/simulations") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_create_simulation(client: AsyncClient): + payload = { + "label": "Test Simulation", + "description": "Test Desc", + "version": 1 + } + response = await client.post("/simulations", json=payload) + assert response.status_code == 200 + assert response.json()["data"]["label"] == "Test Simulation" diff --git a/tests/e2e/test_yeardata.py b/tests/e2e/test_yeardata.py new file mode 100644 index 0000000..b187bdd --- /dev/null +++ b/tests/e2e/test_yeardata.py @@ -0,0 +1,18 @@ +import pytest +from httpx import AsyncClient + +@pytest.mark.asyncio +async def test_get_yeardatas(client: AsyncClient): + response = await client.get("/yeardata") + assert response.status_code == 200 + assert response.json()["message"] == "Data retrieved successfully" + +@pytest.mark.asyncio +async def test_create_yeardata(client: AsyncClient): + payload = { + "year": 2024, + "description": "Test Year Data" + } + response = await client.post("/yeardata", json=payload) + assert response.status_code == 200 + assert response.json()["message"] == "Data created successfully" diff --git a/tests/factories.py 
b/tests/factories.py deleted file mode 100644 index 52ccd3d..0000000 --- a/tests/factories.py +++ /dev/null @@ -1,33 +0,0 @@ -import uuid -from datetime import datetime - -from factory import ( - LazyAttribute, - LazyFunction, - Sequence, - SubFactory, - post_generation, - SelfAttribute, -) -from factory.alchemy import SQLAlchemyModelFactory -from factory.fuzzy import FuzzyChoice, FuzzyDateTime, FuzzyInteger, FuzzyText -from faker import Faker -from faker.providers import misc -# from pytz import UTC - - -from .database import Session - -fake = Faker() -fake.add_provider(misc) - - -class BaseFactory(SQLAlchemyModelFactory): - """Base Factory.""" - - class Meta: - """Factory configuration.""" - - abstract = True - sqlalchemy_session = Session - sqlalchemy_session_persistence = "commit" diff --git a/tests/unit/test_masterdata_logic.py b/tests/unit/test_masterdata_logic.py new file mode 100644 index 0000000..fbb1da8 --- /dev/null +++ b/tests/unit/test_masterdata_logic.py @@ -0,0 +1,24 @@ +import pytest +from src.masterdata.service import calculate_pmt + +def test_calculate_pmt_zero_rate(): + # PMT = -PV / nper when rate is 0 + pv = 1000 + nper = 10 + rate = 0 + result = calculate_pmt(rate, nper, pv) + assert result == -100 + +def test_calculate_pmt_standard(): + # Example: Loan 1000, 5% rate, 2 periods + # PMT = -1000 * (0.05 * (1.05)^2) / ((1.05)^2 - 1) + # PMT = -1000 * (0.05 * 1.1025) / (0.1025) + # PMT = -1000 * (0.055125) / (0.1025) = -537.8048... 
+ result = calculate_pmt(5, 2, 1000) + assert round(result, 2) == -537.80 + +def test_calculate_pmt_percentage(): + # If rate > 1, it divides by 100 + result_5 = calculate_pmt(5, 10, 1000) + result_05 = calculate_pmt(0.05, 10, 1000) + assert result_5 == result_05 diff --git a/tests/unit/test_masterdata_service.py b/tests/unit/test_masterdata_service.py new file mode 100644 index 0000000..bfcf680 --- /dev/null +++ b/tests/unit/test_masterdata_service.py @@ -0,0 +1,39 @@ +import pytest +from unittest.mock import AsyncMock, MagicMock +from src.masterdata.service import create, get +from src.masterdata.schema import MasterDataCreate + +@pytest.mark.asyncio +async def test_create_masterdata_service(): + mock_db = AsyncMock() + mock_db.add = MagicMock() + masterdata_in = MasterDataCreate( + name="Test", + description="Desc", + unit_of_measurement="unit", + value_num=10.0, + seq=1 + ) + + result = await create(db_session=mock_db, masterdata_in=masterdata_in) + + assert result.name == "Test" + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + +@pytest.mark.asyncio +async def test_get_masterdata_service(): + mock_db = AsyncMock() + mock_db.add = MagicMock() + mock_result = MagicMock() + mock_masterdata = MagicMock() + mock_masterdata.id = "test-id" + + # Mock behavior of db_session.execute().scalars().one_or_none() + mock_result.scalars.return_value.one_or_none.return_value = mock_masterdata + mock_db.execute.return_value = mock_result + + result = await get(db_session=mock_db, masterdata_id="test-id") + + assert result.id == "test-id" + mock_db.execute.assert_called_once()