fix tri state down value
parent
dcf2428c01
commit
3bc8dc4c41
@ -0,0 +1,13 @@
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "digital-twin-testing",
|
||||
"private_key_id": "941e08a5180cc2e98a547a6f3d73063bf6d670a4",
|
||||
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDNFP8nzj+INP/W\nUs6WqQRyw8XhpOY7nvqnRdJxjg3bTK0L+MF92ARG5prJQ8ZIgj3AINjnKu6/+BMk\nGojGV8Lu3BsOvVA0PxGpjC0Fp1218sviJEUEYFbVUtUrmbsvhUXb/UFNAsG5qIA0\n1Hv0c+Fo8GVXou/m2IMn19rYm2n4V5hYLrs3ItUWQsGmRNnV76Xo5AbC2OJyJSKM\n2HAG59/z7GSpQLdqJozWn4qqvc/HNGZihetlt6c+1KwHvc+h4N4fMjI8yW6+FHac\nBKdH2QShE3Fa+N97fYnWjwnFHkd8DoV1o0LU2tajNFw2uI14N18Y1ffZOppvBUmJ\nFpjPtP2FAgMBAAECggEALIJLlEiLelTCV7IWGxWonP3HQx8mRrdMwCQDtdh2Qmuk\nPO5qlBTfbMHcwySd8putqjaWzKkBMxG0mcfWenGOoYeMqcjFkOlFPI5rMQhvaq88\nt6JWhkogXmEBWdqeCQS8MjAHTZ71C4W5+X1KP7YsrDlMTVZUCb5gzLNo0Ez+lUKN\nq9FmBmwZndxHbJrWm55IrsICVJYDHg8nT2ir+kgAO3erjjqro362bi3ewT8n+9nd\n0mJddqJxDpC8Mk7AACXMLnbdC0iRE2jcE6aI7yh3QH2cBAzCsjPfR5Xi0ZhoUxyq\nVZWCebxYRjnmlPzpWgEW4cWPtkk82xC5osCqyqW4UQKBgQD7x1q8B2LgPYqfG6ED\nkmsgnG47TsirEs266RUwmRo17gTqMqW1X16M/s9EaAlNGRUK6JmT/zxlAaRCSeki\n/IWqODlhl/T5tUGV8/9FzNFNyd+uPVczWdZG0OIiq6VmQDckYChimwp/PSO+2DvC\ngbCFQi4MJUbSsrCpKMFUWAjnKQKBgQDQhTfIXd5Rqjjqz8j5CZLlX4/dIpQYhZww\nIa4AskdS8D6yuBRdTMiz4f7IkRb1HR8HvkcXjRJcWHXdNZoaJBPXnF4p7BVhPuRc\n/S2o3iGLJC8L5fm+OHJ0ck05nQlKkaYXdQANyMmHYY/pZ79s/Gd/8rUa1Z6AVvG4\nw7b2Pad6/QKBgFcNBycJmrsTLBF02QfGIPnmfApPY/waBT2Bhw0FclWBFPvwRPDf\np24yT3FEUyrv6CGcaDUOQ1Bc7/DnYw7wpjpDNDu08UNA0wtIkFMOzLMLgCXbGYIs\npnRXlkgMdqRSwX33LR9ioY96kRhT5N2MP37jcc5LOSByHbhu2eESiwl5AoGALPBI\n8VmpjBgPZCubuHAjxGMv8gx9tjs1Sfo4MdY6INm96W5alTDzIDFYY4lBVVoNUfHk\nCGNSzawS2rWunGuiCxmcAZTrlqWjOZcmu/oDaKVHrlzF0gUARbBMjFoXooZ5MRz6\nAf5/0Dlqdnbxfl93Ps/j0ACxxB74wUUMrUzkR1ECgYBkmu5yoUPKK/U9qM4crmgc\nBO8vRCfoXsHyoQFlYfdgQybSZbbVBkec6bxF5v8T0EHE1sDGFLwtfQJzsSr2x56l\nw9/LZEaMtwzNbJIyTYs6/UpowH7A6nfcDLQPuOA33sQJjw8cfAp0gHNXLnhFUh9z\n8doQWeQFTNctEmUnD0oY8w==\n-----END PRIVATE KEY-----\n",
|
||||
"client_email": "digital-twin@digital-twin-testing.iam.gserviceaccount.com",
|
||||
"client_id": "111678025337341471506",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/digital-twin%40digital-twin-testing.iam.gserviceaccount.com",
|
||||
"universe_domain": "googleapis.com"
|
||||
}
|
||||
@ -0,0 +1,18 @@
|
||||
from sqlalchemy import (UUID, Column, DateTime, Float, ForeignKey, Integer,
|
||||
String)
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from src.database.core import Base
|
||||
from src.models import DefaultMixin, IdentityMixin, TimeStampMixin
|
||||
|
||||
|
||||
class OverhaulSchedule(Base, DefaultMixin):
    """ORM model for one yearly overhaul schedule row (table ``rp_oh_schedule``)."""

    __tablename__ = "rp_oh_schedule"

    # Calendar year of the overhaul; the only NOT NULL business field.
    year = Column(Integer, nullable=False)
    # Integer durations/counts — units (days?) not shown in SOURCE; TODO confirm.
    plan_duration = Column(Integer, nullable=True)
    planned_outage = Column(Integer, nullable=True)
    actual_shutdown = Column(Integer, nullable=True)
    # Nullable by SQLAlchemy default (no explicit nullable=... given).
    start = Column(DateTime(timezone=True))  # This will be TIMESTAMP WITH TIME ZONE
    finish = Column(DateTime(timezone=True))
    remark = Column(String, nullable=True)
|
||||
@ -0,0 +1,63 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status
|
||||
|
||||
from src.auth.service import CurrentUser
|
||||
from src.database.core import DbSession
|
||||
from src.database.service import CommonParameters
|
||||
from src.models import StandardResponse
|
||||
|
||||
from .schema import (OverhaulScheduleCreate, OverhaulSchedulePagination, OverhaulScheduleUpdate)
|
||||
from .service import create, get_all, delete, update
|
||||
|
||||
# Router for the overhaul-schedule endpoints; mounted by the application elsewhere.
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("", response_model=StandardResponse[OverhaulSchedulePagination])
async def get_schedules(common: CommonParameters):
    """Return the paginated list of overhaul schedules."""
    schedules = await get_all(common=common)
    return StandardResponse(data=schedules, message="Data retrieved successfully")
|
||||
|
||||
|
||||
@router.post("", response_model=StandardResponse[None])
async def create_overhaul_equipment_jobs(
    db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
):
    """Create a new overhaul schedule from the request payload."""
    await create(db_session=db_session, overhaul_job_in=overhaul_job_in)
    return StandardResponse(data=None, message="Data created successfully")
|
||||
|
||||
@router.put("/{overhaul_job_id}", response_model=StandardResponse[None])
async def update_overhaul_schedule(
    db_session: DbSession, overhaul_job_id: str, overhaul_job_in: OverhaulScheduleUpdate
):
    """Apply the update payload to the schedule identified by the path id."""
    await update(
        db_session=db_session,
        overhaul_schedule_id=overhaul_job_id,
        overhaul_job_in=overhaul_job_in,
    )
    return StandardResponse(data=None, message="Data updated successfully")
|
||||
|
||||
@router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
async def delete_overhaul_equipment_job(db_session: DbSession, overhaul_job_id: str):
    """Delete the overhaul schedule identified by the path id.

    Fix: ``overhaul_job_id`` was unannotated; ``: str`` matches the PUT
    handler and makes FastAPI's path-parameter handling explicit.
    """
    await delete(db_session=db_session, overhaul_schedule_id=overhaul_job_id)
    return StandardResponse(data=None, message="Data deleted successfully")
|
||||
@ -0,0 +1,44 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from src.models import DefultBase, Pagination
|
||||
from src.overhaul_scope.schema import ScopeRead
|
||||
from src.scope_equipment_job.schema import ScopeEquipmentJobRead
|
||||
from src.job.schema import ActivityMasterRead
|
||||
|
||||
class OverhaulScheduleBase(DefultBase):
    # Shared parent for the overhaul-schedule schemas; declares no fields itself.
    # NOTE(review): the "DefultBase" spelling comes from src.models — keep as-is.
    pass
|
||||
|
||||
|
||||
class OverhaulScheduleCreate(OverhaulScheduleBase):
    """Payload for creating a schedule; year/start/finish are required."""

    year: int
    plan_duration: Optional[int] = None
    planned_outage: Optional[int] = None
    actual_shutdown: Optional[int] = None
    start: datetime
    finish: datetime
    remark: Optional[str] = None
|
||||
|
||||
|
||||
class OverhaulScheduleUpdate(OverhaulScheduleBase):
    """PUT payload: only ``start`` and ``finish`` are updatable through this schema."""

    start: datetime
    finish: datetime
|
||||
|
||||
|
||||
class OverhaulScheduleRead(OverhaulScheduleBase):
    """Response schema for a single overhaul-schedule row.

    Fix: in Pydantic v2 an ``Optional[...]`` annotation without a default is
    still *required*; the nullable columns now default to ``None`` so rows
    with missing values validate instead of raising.
    """

    id: UUID
    year: int
    plan_duration: Optional[int] = None
    planned_outage: Optional[int] = None
    actual_shutdown: Optional[int] = None
    start: datetime
    finish: datetime
    remark: Optional[str] = None
|
||||
|
||||
|
||||
|
||||
class OverhaulSchedulePagination(Pagination):
    """Paginated envelope of OverhaulScheduleRead items."""

    # Pydantic deep-copies field defaults per instance, so the mutable [] is safe here.
    items: List[OverhaulScheduleRead] = []
|
||||
@ -0,0 +1,57 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import Delete, Select, func
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from src.auth.service import CurrentUser
|
||||
from src.database.core import DbSession
|
||||
from src.database.service import search_filter_sort_paginate
|
||||
from src.scope_equipment_job.model import ScopeEquipmentJob
|
||||
from src.overhaul_activity.model import OverhaulActivity
|
||||
|
||||
from .model import OverhaulSchedule
|
||||
from .schema import OverhaulScheduleCreate, OverhaulScheduleUpdate
|
||||
|
||||
|
||||
async def get_all(*, common):
    """Return a page of overhaul schedules, most recent ``start`` first."""
    stmt = Select(OverhaulSchedule).order_by(OverhaulSchedule.start.desc())
    return await search_filter_sort_paginate(model=stmt, **common)
|
||||
|
||||
|
||||
async def create(
    *, db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
):
    """Persist a new overhaul schedule and return the ORM instance."""
    new_schedule = OverhaulSchedule(**overhaul_job_in.model_dump())
    db_session.add(new_schedule)
    await db_session.commit()
    return new_schedule
|
||||
|
||||
|
||||
|
||||
async def update(*, db_session: DbSession, overhaul_schedule_id: str, overhaul_job_in: OverhaulScheduleUpdate):
    """Apply the non-default fields of the payload to an existing schedule.

    Fixes:
    - ``db_session.get`` returns ``None`` for an unknown id; the original then
      called ``setattr`` on ``None`` (AttributeError / HTTP 500). Raise a 404
      instead.
    - The redundant ``for field in data: if field in update_data`` double loop
      is collapsed: ``update_data``'s keys are always a subset of the full
      dump, so iterating ``update_data`` directly is equivalent.
    """
    overhaul_schedule = await db_session.get(OverhaulSchedule, overhaul_schedule_id)
    if overhaul_schedule is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Overhaul schedule not found",
        )

    # Only fields the client actually supplied (exclude_defaults) are applied.
    update_data = overhaul_job_in.model_dump(exclude_defaults=True)
    for field, value in update_data.items():
        setattr(overhaul_schedule, field, value)

    await db_session.commit()
    return overhaul_schedule
|
||||
|
||||
|
||||
async def delete(*, db_session: DbSession, overhaul_schedule_id: str):
    """Remove the schedule row matching the given id (no-op if absent)."""
    stmt = Delete(OverhaulSchedule).where(OverhaulSchedule.id == overhaul_schedule_id)
    await db_session.execute(stmt)
    await db_session.commit()
|
||||
@ -0,0 +1,23 @@
|
||||
import urllib
|
||||
from google.oauth2.service_account import Credentials
|
||||
|
||||
SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]

# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = "1gZXuwA97zU1v4QBv56wKeiqadc6skHUucGKYG8qVFRk"

# Fix: a bare ``import urllib`` does not bind the ``urllib.parse`` submodule,
# so ``urllib.parse.quote`` could raise AttributeError. Import it explicitly.
import urllib.parse

# Try with URL encoding
sheet_name = "2024 schedule"
encoded_sheet_name = urllib.parse.quote(sheet_name)
# NOTE(review): RANGE_NAME targets the "2024 kurva s" sheet, not ``sheet_name``
# above — confirm which sheet is actually intended.
RANGE_NAME = "'2024 kurva s'!N79:BJ83"  # Or just "2024 schedule"
|
||||
|
||||
|
||||
def get_spreatsheed_service(credentials):
    """Build a Sheets v4 API client for the given credentials.

    (Function name typo is preserved: it is the public interface callers use.)
    """
    # Local import keeps googleapiclient optional until the service is needed.
    from googleapiclient.discovery import build

    service = build("sheets", "v4", credentials=credentials, cache_discovery=False)
    return service
|
||||
|
||||
|
||||
def get_google_creds(credentials_path: str = "credentials.json"):
    """Load service-account credentials scoped to SCOPES.

    Fixes/improvements:
    - Removed the dead ``creds = None`` assignment (immediately overwritten).
    - Generalized the hard-coded file name into ``credentials_path`` with the
      original value as default, so callers are unaffected.
    """
    return Credentials.from_service_account_file(credentials_path, scopes=SCOPES)
|
||||
Loading…
Reference in New Issue