diff --git a/poetry.lock b/poetry.lock index 2035045..537b185 100644 --- a/poetry.lock +++ b/poetry.lock @@ -97,6 +97,18 @@ docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + [[package]] name = "certifi" version = "2024.8.30" @@ -388,6 +400,133 @@ uvicorn = {version = ">=0.15.0", extras = ["standard"]} [package.extras] standard = ["uvicorn[standard] (>=0.15.0)"] +[[package]] +name = "google-api-core" +version = "2.24.2" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9"}, + {file = "google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +proto-plus = [ + {version = ">=1.22.3,<2.0.0"}, + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-api-python-client" +version = "2.169.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_api_python_client-2.169.0-py3-none-any.whl", hash = "sha256:dae3e882dc0e6f28e60cf09c1f13fedfd881db84f824dd418aa9e44def2fe00d"}, + {file = "google_api_python_client-2.169.0.tar.gz", hash = "sha256:0585bb97bd5f5bf3ed8d4bf624593e4c5a14d06c811d1952b07a1f94b4d12c51"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.0.0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.40.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.40.2-py2.py3-none-any.whl", hash = 
"sha256:f7e568d42eedfded58734f6a60c58321896a621f7c116c411550a4b4a13da90b"}, + {file = "google_auth-2.40.2.tar.gz", hash = "sha256:a33cde547a2134273226fa4b853883559947ebe9207521f7afc707efbf690f58"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2"}, + {file = "google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684"}, +] + +[package.dependencies] +google-auth = ">=2.15.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + [[package]] name = "greenlet" version = "3.1.1" @@ -509,6 +648,21 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] +[[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} + [[package]] name = "httptools" version = "0.6.4" @@ -859,6 +1013,23 @@ files = [ {file = "numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761"}, ] +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + [[package]] name = "packaging" version = "24.2" @@ -973,6 +1144,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "proto-plus" +version = "1.26.1" +description = "Beautiful, Pythonic protocol buffers" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, + {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<7.0.0" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + [[package]] name = "protobuf" version = "5.29.0" @@ -1072,6 +1261,33 @@ files = [ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + [[package]] name = "pydantic" version = "2.10.2" @@ -1221,6 +1437,21 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pytest" version = "8.3.3" @@ -1381,6 +1612,25 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + [[package]] name = "rich" version = "13.9.4" @@ -1400,6 +1650,21 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "shellingham" version = "1.5.4" @@ -1697,6 +1962,18 @@ files = [ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] + [[package]] name = "urllib3" version = "2.2.3" @@ -2035,4 +2312,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "34cff1e8de752f5b94a7012e1810f7f86fc5b7f6978da8ff5adc8e1e4516ef59" +content-hash = "3653b5daa2723d735ad026b0e3c111971358950a359a7469d410cfcc468a556b" diff --git a/pyproject.toml b/pyproject.toml index 1b49de8..dcaae4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ temporalio = "^1.8.0" pandas = "^2.2.3" psycopg2-binary = "^2.9.10" greenlet = "^3.1.1" +google-api-python-client = "^2.169.0" +google-auth-httplib2 = "^0.2.0" +google-auth-oauthlib = "^1.2.2" [build-system] diff --git a/src/api.py b/src/api.py index bd43f79..f91bf75 100644 --- a/src/api.py +++ b/src/api.py @@ -19,6 +19,9 @@ from src.overhaul_scope.router import router as scope_router from src.scope_equipment.router import router as scope_equipment_router from src.scope_equipment_job.router import router as scope_equipment_job_router from src.overhaul_schedule.router import router as overhaul_schedule_router +from src.overhaul_gantt.router import router as gantt_router + + # 
from src.overhaul_scope.router import router as scope_router
 # from src.scope_equipment.router import router as scope_equipment_router
 # from src.overhaul.router import router as overhaul_router
@@ -95,6 +98,10 @@
 authenticated_api_router.include_router(
     job_overhaul_router, prefix="/overhaul-jobs", tags=["job", "overhaul"]
 )
+authenticated_api_router.include_router(
+    gantt_router, prefix="/overhaul-gantt", tags=["gantt"]
+)
+
 # authenticated_api_router.include_router(
 #     overhaul_history_router, prefix="/overhaul-history", tags=["overhaul_history"]
 # )
diff --git a/src/overhaul_gantt/model.py b/src/overhaul_gantt/model.py
index 707b009..e69de29 100644
--- a/src/overhaul_gantt/model.py
+++ b/src/overhaul_gantt/model.py
@@ -1,18 +0,0 @@
-from sqlalchemy import (UUID, Column, DateTime, Float, ForeignKey, Integer,
-                        String)
-from sqlalchemy.orm import relationship
-
-from src.database.core import Base
-from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
-
-
-class OverhaulSchedule(Base, DefaultMixin):
-    __tablename__ = "rp_oh_schedule"
-
-    year = Column(Integer, nullable=False)
-    plan_duration = Column(Integer, nullable=True)
-    planned_outage = Column(Integer, nullable=True)
-    actual_shutdown = Column(Integer, nullable=True)
-    start = Column(DateTime(timezone=True))  # This will be TIMESTAMP WITH TIME ZONE
-    finish = Column(DateTime(timezone=True))
-    remark = Column(String, nullable=True)
diff --git a/src/overhaul_gantt/router.py b/src/overhaul_gantt/router.py
index 0425baa..21972a2 100644
--- a/src/overhaul_gantt/router.py
+++ b/src/overhaul_gantt/router.py
@@ -7,19 +7,19 @@
 from src.database.core import DbSession
 from src.database.service import CommonParameters
 from src.models import StandardResponse
 
-from .schema import (OverhaulScheduleCreate, OverhaulSchedulePagination, OverhaulScheduleUpdate)
-from .service import create, get_all, delete, update
+# from .schema import (OverhaulScheduleCreate, OverhaulSchedulePagination, OverhaulScheduleUpdate)
+from .service import get_gantt_performance_chart
 
 router = APIRouter()
 
 @router.get(
-    "", response_model=StandardResponse[OverhaulSchedulePagination]
+    "", response_model=StandardResponse[list]
 )
-async def get_schedules(common: CommonParameters):
+async def get_gantt_performance():
     """Get all scope pagination."""
     # return
-    results = await get_all(common=common)
+    results = await get_gantt_performance_chart()
 
 
     return StandardResponse(
@@ -28,36 +28,36 @@
     )
 
 
-@router.post("", response_model=StandardResponse[None])
-async def create_overhaul_equipment_jobs(
-    db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
-):
-    await create(
-        db_session=db_session,
-        overhaul_job_in=overhaul_job_in,
-    )
-
-    return StandardResponse(
-        data=None,
-        message="Data created successfully",
-    )
-
-@router.put("/{overhaul_job_id}", response_model=StandardResponse[None])
-async def update_overhaul_schedule(
-    db_session: DbSession, overhaul_job_id: str, overhaul_job_in: OverhaulScheduleUpdate
-):
-    await update(db_session=db_session, overhaul_schedule_id=overhaul_job_id, overhaul_job_in=overhaul_job_in)
-
-    return StandardResponse(
-        data=None,
-        message="Data updated successfully",
-    )
-
-@router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
-async def delete_overhaul_equipment_job(db_session: DbSession, overhaul_job_id):
-    await delete(db_session=db_session, overhaul_schedule_id=overhaul_job_id)
-
-    return StandardResponse(
-        data=None,
-        message="Data deleted successfully",
-    )
+# @router.post("", response_model=StandardResponse[None])
+# async def create_overhaul_equipment_jobs(
+#     db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
+# ):
+#     await create(
+#         db_session=db_session,
+#         overhaul_job_in=overhaul_job_in,
+#     )
+
+#     return StandardResponse(
+#         data=None,
+#         message="Data created successfully",
+#     )
+
+# @router.put("/{overhaul_job_id}", response_model=StandardResponse[None])
+# async def update_overhaul_schedule(
+#     db_session: DbSession, overhaul_job_id: str, overhaul_job_in: OverhaulScheduleUpdate
+# ):
+#     await update(db_session=db_session, overhaul_schedule_id=overhaul_job_id, overhaul_job_in=overhaul_job_in)
+
+#     return StandardResponse(
+#         data=None,
+#         message="Data updated successfully",
+#     )
+
+# @router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
+# async def delete_overhaul_equipment_job(db_session: DbSession, overhaul_job_id):
+#     await delete(db_session=db_session, overhaul_schedule_id=overhaul_job_id)
+
+#     return StandardResponse(
+#         data=None,
+#         message="Data deleted successfully",
+#     )
diff --git a/src/overhaul_gantt/schema.py b/src/overhaul_gantt/schema.py
index e14e866..d6867b7 100644
--- a/src/overhaul_gantt/schema.py
+++ b/src/overhaul_gantt/schema.py
@@ -1,44 +1,44 @@
-from datetime import datetime
-from typing import List, Optional
-from uuid import UUID
+# from datetime import datetime
+# from typing import List, Optional
+# from uuid import UUID
 
-from pydantic import Field
+# from pydantic import Field
 
-from src.models import DefultBase, Pagination
-from src.overhaul_scope.schema import ScopeRead
-from src.scope_equipment_job.schema import ScopeEquipmentJobRead
-from src.job.schema import ActivityMasterRead
+# from src.models import DefultBase, Pagination
+# from src.overhaul_scope.schema import ScopeRead
+# from src.scope_equipment_job.schema import ScopeEquipmentJobRead
+# from src.job.schema import ActivityMasterRead
 
 
-class OverhaulScheduleBase(DefultBase):
-    pass
+# class OverhaulScheduleBase(DefultBase):
+#     pass
 
 
-class OverhaulScheduleCreate(OverhaulScheduleBase):
-    year: int
-    plan_duration: Optional[int] = Field(None)
-    planned_outage: Optional[int] = Field(None)
-    actual_shutdown: Optional[int] = Field(None)
-    start: datetime
-    finish: datetime
-    remark: Optional[str] = Field(None)
+# class OverhaulScheduleCreate(OverhaulScheduleBase):
+#     year: int
+#     plan_duration: Optional[int] = Field(None)
+#     planned_outage: Optional[int] = Field(None)
+#     actual_shutdown: Optional[int] = Field(None)
+#     start: datetime
+#     finish: datetime
+#     remark: Optional[str] = Field(None)
 
 
-class OverhaulScheduleUpdate(OverhaulScheduleBase):
-    start: datetime
-    finish: datetime
+# class OverhaulScheduleUpdate(OverhaulScheduleBase):
+#     start: datetime
+#     finish: datetime
 
 
-class OverhaulScheduleRead(OverhaulScheduleBase):
-    id: UUID
-    year: int
-    plan_duration: Optional[int]
-    planned_outage: Optional[int]
-    actual_shutdown: Optional[int]
-    start: datetime
-    finish: datetime
-    remark: Optional[str]
+# class OverhaulScheduleRead(OverhaulScheduleBase):
+#     id: UUID
+#     year: int
+#     plan_duration: Optional[int]
+#     planned_outage: Optional[int]
+#     actual_shutdown: Optional[int]
+#     start: datetime
+#     finish: datetime
+#     remark: Optional[str]
 
 
-class OverhaulSchedulePagination(Pagination):
-    items: List[OverhaulScheduleRead] = []
+# class OverhaulSchedulePagination(Pagination):
+#     items: List[OverhaulScheduleRead] = []
diff --git a/src/overhaul_gantt/service.py b/src/overhaul_gantt/service.py
index 4ea9d37..561c5ee 100644
--- a/src/overhaul_gantt/service.py
+++ b/src/overhaul_gantt/service.py
@@ -10,48 +10,69 @@
 from src.database.service import search_filter_sort_paginate
 from src.scope_equipment_job.model import ScopeEquipmentJob
 from src.overhaul_activity.model import OverhaulActivity
 
-from .model import OverhaulSchedule
-from .schema import OverhaulScheduleCreate, OverhaulScheduleUpdate
+# from .model import OverhaulSchedule
+# from .schema import OverhaulScheduleCreate, OverhaulScheduleUpdate
+from .utils import get_google_creds, get_spreatsheed_service, process_spreadsheet_data
 
 
+# async def get_all(*, common):
+#     """Returns all documents."""
+#     query = Select(OverhaulSchedule).order_by(OverhaulSchedule.start.desc())
 
-async def get_all(*, common):
-    """Returns all documents."""
-    query = Select(OverhaulSchedule).order_by(OverhaulSchedule.start.desc())
+#     results = await search_filter_sort_paginate(model=query, **common)
+#     return results
 
-    results = await search_filter_sort_paginate(model=query, **common)
-    return results
 
+# async def create(
+#     *, db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
+# ):
 
-async def create(
-    *, db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
-):
+#     schedule = OverhaulSchedule(**overhaul_job_in.model_dump())
+#     db_session.add(schedule)
+#     await db_session.commit()
+#     return schedule
 
-    schedule = OverhaulSchedule(**overhaul_job_in.model_dump())
-    db_session.add(schedule)
-    await db_session.commit()
-    return schedule
 
+# async def update(*, db_session: DbSession, overhaul_schedule_id: str, overhaul_job_in: OverhaulScheduleUpdate):
+#     """Updates a document."""
+#     data = overhaul_job_in.model_dump()
+#     overhaul_schedule = await db_session.get(OverhaulSchedule, overhaul_schedule_id)
 
-async def update(*, db_session: DbSession, overhaul_schedule_id: str, overhaul_job_in: OverhaulScheduleUpdate):
-    """Updates a document."""
-    data = overhaul_job_in.model_dump()
-    overhaul_schedule = await db_session.get(OverhaulSchedule, overhaul_schedule_id)
+#     update_data = overhaul_job_in.model_dump(exclude_defaults=True)
 
-    update_data = overhaul_job_in.model_dump(exclude_defaults=True)
+#     for field in data:
+#         if field in update_data:
+#             setattr(overhaul_schedule, field, update_data[field])
 
-    for field in data:
-        if field in update_data:
-            setattr(overhaul_schedule, field, update_data[field])
+#     await db_session.commit()
 
-    await db_session.commit()
+#     return overhaul_schedule
 
-    return overhaul_schedule
 
+# async def delete(*, db_session: DbSession, overhaul_schedule_id: str):
+#     """Deletes a document."""
+#     query = Delete(OverhaulSchedule).where(OverhaulSchedule.id == overhaul_schedule_id)
+#     await db_session.execute(query)
+#     await db_session.commit()
 
-async def delete(*, db_session: DbSession, overhaul_schedule_id: str):
-    """Deletes a document."""
-    query = Delete(OverhaulSchedule).where(OverhaulSchedule.id == overhaul_schedule_id)
-    await db_session.execute(query)
-    await db_session.commit()
+
+async def get_gantt_performance_chart(*, spreadsheet_id = "1gZXuwA97zU1v4QBv56wKeiqadc6skHUucGKYG8qVFRk"):
+    creds = get_google_creds()
+    RANGE_NAME = "'2024 kurva s'!N79:BJ83"  # Or just "2024 schedule"
+
+    try:
+        service = get_spreatsheed_service(creds)
+        sheet = service.spreadsheets()
+        response = sheet.values().get(spreadsheetId=spreadsheet_id, range=RANGE_NAME).execute()
+        values = response.get("values", [])
+        keys = ['day', 'time', 'plan', 'actual', 'gap']
+        transposed = list(zip(*values))
+        results = [dict(zip(keys, result)) for result in transposed]
+    except Exception as e:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
+
+    processed_data = process_spreadsheet_data(results)
+
+
+    return processed_data
diff --git a/src/overhaul_gantt/utils.py b/src/overhaul_gantt/utils.py
index 0177426..89fdf6e 100644
--- a/src/overhaul_gantt/utils.py
+++ b/src/overhaul_gantt/utils.py
@@ -1,19 +1,14 @@
 import urllib
 
 from google.oauth2.service_account import Credentials
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
 
-SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
-# The ID and range of a sample spreadsheet.
-SAMPLE_SPREADSHEET_ID = "1gZXuwA97zU1v4QBv56wKeiqadc6skHUucGKYG8qVFRk"
-# Try with URL encoding
-sheet_name = "2024 schedule"
-encoded_sheet_name = urllib.parse.quote(sheet_name)
-RANGE_NAME = "'2024 kurva s'!N79:BJ83"  # Or just "2024 schedule"
+SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
 
 
 def get_spreatsheed_service(credentials):
-    from googleapiclient.discovery import build
-
     return build("sheets", "v4", credentials=credentials, cache_discovery=False)
 
 
@@ -21,3 +16,50 @@ def get_google_creds():
     creds = None
     creds = Credentials.from_service_account_file("credentials.json", scopes=SCOPES)
     return creds
+
+
+def process_spreadsheet_data(rows):
+    processed_data = []
+    for row in rows:
+        processed_row = convert_spreadsheet_data(row)
+        processed_data.append(processed_row)
+    return processed_data
+
+
+def convert_spreadsheet_data(data):
+    result = {}
+
+    # Convert day to integer
+    result['day'] = int(data['day'])
+
+    # Convert time to a datetime object
+    from datetime import datetime
+    # Assuming Indonesian format with month names
+    # Replace Indonesian month names with English if needed
+    month_mapping = {
+        'Januari': 'January', 'Februari': 'February', 'Maret': 'March',
+        'April': 'April', 'Mei': 'May', 'Juni': 'June',
+        'Juli': 'July', 'Agustus': 'August', 'September': 'September',
+        'Oktober': 'October', 'November': 'November', 'Desember': 'December'
+    }
+
+    time_str = data['time']
+    for indo, eng in month_mapping.items():
+        time_str = time_str.replace(indo, eng)
+
+    # Format: "Sabtu, Juli 13, 2024" -> "Saturday, July 13, 2024"
+    # Removing the day of week to simplify parsing
+    time_str = time_str.split(', ', 1)[1]  # Remove "Sabtu, "
+    result['time'] = datetime.strptime(time_str, '%B %d, %Y')
+
+    # Convert percentage strings to floats
+    # Handles values like "0,12%" (comma used as the decimal separator)
+    for key in ['plan', 'actual', 'gap']:
+        # Replace comma with dot (European to US decimal notation)
+        value = data[key].replace(',', '.')
+        # Remove percentage sign
+        value = value.rstrip('%')
+        # Convert to float
+        result[key] = float(value) / 100  # Divide by 100 to get the actual decimal value
+
+    return result