fix tri-state down value

feature/reliability_stat
Cizz22 8 months ago
parent dcf2428c01
commit 3bc8dc4c41

@@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "digital-twin-testing",
"private_key_id": "941e08a5180cc2e98a547a6f3d73063bf6d670a4",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDNFP8nzj+INP/W\nUs6WqQRyw8XhpOY7nvqnRdJxjg3bTK0L+MF92ARG5prJQ8ZIgj3AINjnKu6/+BMk\nGojGV8Lu3BsOvVA0PxGpjC0Fp1218sviJEUEYFbVUtUrmbsvhUXb/UFNAsG5qIA0\n1Hv0c+Fo8GVXou/m2IMn19rYm2n4V5hYLrs3ItUWQsGmRNnV76Xo5AbC2OJyJSKM\n2HAG59/z7GSpQLdqJozWn4qqvc/HNGZihetlt6c+1KwHvc+h4N4fMjI8yW6+FHac\nBKdH2QShE3Fa+N97fYnWjwnFHkd8DoV1o0LU2tajNFw2uI14N18Y1ffZOppvBUmJ\nFpjPtP2FAgMBAAECggEALIJLlEiLelTCV7IWGxWonP3HQx8mRrdMwCQDtdh2Qmuk\nPO5qlBTfbMHcwySd8putqjaWzKkBMxG0mcfWenGOoYeMqcjFkOlFPI5rMQhvaq88\nt6JWhkogXmEBWdqeCQS8MjAHTZ71C4W5+X1KP7YsrDlMTVZUCb5gzLNo0Ez+lUKN\nq9FmBmwZndxHbJrWm55IrsICVJYDHg8nT2ir+kgAO3erjjqro362bi3ewT8n+9nd\n0mJddqJxDpC8Mk7AACXMLnbdC0iRE2jcE6aI7yh3QH2cBAzCsjPfR5Xi0ZhoUxyq\nVZWCebxYRjnmlPzpWgEW4cWPtkk82xC5osCqyqW4UQKBgQD7x1q8B2LgPYqfG6ED\nkmsgnG47TsirEs266RUwmRo17gTqMqW1X16M/s9EaAlNGRUK6JmT/zxlAaRCSeki\n/IWqODlhl/T5tUGV8/9FzNFNyd+uPVczWdZG0OIiq6VmQDckYChimwp/PSO+2DvC\ngbCFQi4MJUbSsrCpKMFUWAjnKQKBgQDQhTfIXd5Rqjjqz8j5CZLlX4/dIpQYhZww\nIa4AskdS8D6yuBRdTMiz4f7IkRb1HR8HvkcXjRJcWHXdNZoaJBPXnF4p7BVhPuRc\n/S2o3iGLJC8L5fm+OHJ0ck05nQlKkaYXdQANyMmHYY/pZ79s/Gd/8rUa1Z6AVvG4\nw7b2Pad6/QKBgFcNBycJmrsTLBF02QfGIPnmfApPY/waBT2Bhw0FclWBFPvwRPDf\np24yT3FEUyrv6CGcaDUOQ1Bc7/DnYw7wpjpDNDu08UNA0wtIkFMOzLMLgCXbGYIs\npnRXlkgMdqRSwX33LR9ioY96kRhT5N2MP37jcc5LOSByHbhu2eESiwl5AoGALPBI\n8VmpjBgPZCubuHAjxGMv8gx9tjs1Sfo4MdY6INm96W5alTDzIDFYY4lBVVoNUfHk\nCGNSzawS2rWunGuiCxmcAZTrlqWjOZcmu/oDaKVHrlzF0gUARbBMjFoXooZ5MRz6\nAf5/0Dlqdnbxfl93Ps/j0ACxxB74wUUMrUzkR1ECgYBkmu5yoUPKK/U9qM4crmgc\nBO8vRCfoXsHyoQFlYfdgQybSZbbVBkec6bxF5v8T0EHE1sDGFLwtfQJzsSr2x56l\nw9/LZEaMtwzNbJIyTYs6/UpowH7A6nfcDLQPuOA33sQJjw8cfAp0gHNXLnhFUh9z\n8doQWeQFTNctEmUnD0oY8w==\n-----END PRIVATE KEY-----\n",
"client_email": "digital-twin@digital-twin-testing.iam.gserviceaccount.com",
"client_id": "111678025337341471506",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/digital-twin%40digital-twin-testing.iam.gserviceaccount.com",
"universe_domain": "googleapis.com"
}

@@ -36,9 +36,10 @@ class JWTBearer(HTTPBearer):
raise HTTPException(status_code=403, detail="Invalid authorization code.")
def verify_jwt(self, jwtoken: str) -> Optional[UserBase]:
try:
response = requests.get(
f"{config.AUTH_SERVICE_API}/verify-token",
f"{config.AUTH_SERVICE_API}/verify-token?url=http://localhost:8000",
headers={"Authorization": f"Bearer {jwtoken}"},
)
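The hunk is cut off before the response is consumed. As a hedged sketch only (the status check, the response shape, and the except clause below are assumptions, not part of this diff), the continuation plausibly looks like:

            # Hypothetical continuation: accept the token only on a 2xx
            # reply and build the user from the JSON body.
            if response.ok:
                return UserBase(**response.json())
            return None
        except requests.RequestException:
            return None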

@@ -205,7 +205,6 @@ async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str,
oh_session = await get_overhaul(db_session=db_session, overhaul_session_id=oh_session_id)
oh_session_start = datetime.fromisoformat(oh_session.start_date.isoformat())
# Determine date range
if MIN_EAF <= eaf_input <= MAX_EAF:
end_date = oh_session_start + timedelta(hours=oh_duration)
@@ -214,38 +213,59 @@ async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str,
else: # eaf_input > MAX_EAF
end_date = oh_session_start + timedelta(hours=oh_duration) - timedelta(days=180)
# Default EAF values when system is up
default_values = {
'eaf1_value': 1.0,
'eaf2_value': 0.75,
'eaf3_value': 0.6,
'eaf4_value': 0.3
}
-    # EAF values during downtime
+    # EAF values during downtime - now using three states
    downtime_values = {
-        'eaf1': 0.8,
-        'eaf2': 0.65,
-        'eaf3': 0.35,
-        'eaf4': 0
+        'eaf1_warning': 0.6,
+        'eaf1_down': 0.0,
    }
# Generate down periods for all EAF scenarios at once
all_down_periods = {}
-    for eaf_key in ['eaf1', 'eaf2', 'eaf3', 'eaf4']:
-        periods = generate_down_periods(oh_session_start, end_date, 5, min_duration=30, max_duration=90)
-        # Sort periods by start time for binary search
-        all_down_periods[eaf_key] = sorted(periods, key=lambda x: x[0])
+    for eaf_key in ['eaf1']:
+        # Generate warning periods (0.6)
+        warning_periods = generate_down_periods(
+            oh_session_start,
+            end_date,
+            3,  # Less frequent warnings
+            min_duration=24,
+            max_duration=48
+        )
+        # Generate full downtime periods (0.0)
+        down_periods = generate_down_periods(
+            oh_session_start,
+            end_date,
+            2,  # Less frequent full downtimes
+            min_duration=36,
+            max_duration=72
+        )
+        # Store both types of periods with their state
+        all_down_periods[f'{eaf_key}_warning'] = [(start, end, 'warning') for start, end in warning_periods]
+        all_down_periods[f'{eaf_key}_down'] = [(start, end, 'down') for start, end in down_periods]
# Create a list of all state change times
state_changes = defaultdict(dict)
-    for eaf_key, periods in all_down_periods.items():
-        for start, end in periods:
+    # Process both warning and down periods
+    for eaf_type, periods in all_down_periods.items():
+        eaf_key = eaf_type.split('_')[0]  # Extract base key (eaf1)
+        state_type = eaf_type.split('_')[1]  # Extract state type (warning/down)
+        for start, end, state in periods:
            # Record state changes at period boundaries
-            state_changes[start][eaf_key] = downtime_values[eaf_key]
-            state_changes[end + timedelta(hours=1)][eaf_key] = default_values[f'{eaf_key}_value']
+            if state == 'warning':
+                state_changes[start][f'{eaf_key}_value'] = downtime_values[f'{eaf_key}_warning']
+            else:  # state == 'down'
+                state_changes[start][f'{eaf_key}_value'] = downtime_values[f'{eaf_key}_down']
+            # Reset to normal at the end of period
+            state_changes[end + timedelta(hours=1)][f'{eaf_key}_value'] = default_values[f'{eaf_key}_value']
# Convert state_changes to sorted list of times
change_times = sorted(state_changes.keys())
@@ -259,10 +279,10 @@ async def get_eaf_timeline(*, db_session, eaf_input: float, oh_session_id: str,
while current_time <= end_date:
# Update values if we've hit a state change point
-        if idx < len(change_times) and current_time >= change_times[idx]:
+        while idx < len(change_times) and current_time >= change_times[idx]:
            changes = state_changes[change_times[idx]]
-            for eaf_key, value in changes.items():
-                current_values[f'{eaf_key}_value'] = value
+            for key, value in changes.items():
+                current_values[key] = value
idx += 1
results.append({
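Both hunks lean on a generate_down_periods helper that never appears in this diff. A minimal sketch of its likely shape, assuming it returns a list of (start, end) datetime pairs for randomly placed outages and that durations are in hours (matching the hourly resolution and the min_duration/max_duration call sites above):

import random
from datetime import timedelta

def generate_down_periods(range_start, range_end, count, min_duration, max_duration):
    """Return `count` random (start, end) outage windows inside the range.

    Durations are in hours; this sketch does not prevent overlapping windows.
    """
    span_hours = int((range_end - range_start).total_seconds() // 3600)
    periods = []
    for _ in range(count):
        duration = random.randint(min_duration, max_duration)
        offset = random.randint(0, max(span_hours - duration, 0))
        start = range_start + timedelta(hours=offset)
        periods.append((start, start + timedelta(hours=duration)))
    return periods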

@@ -0,0 +1,18 @@
from sqlalchemy import Column, DateTime, Integer, String

from src.database.core import Base
from src.models import DefaultMixin
class OverhaulSchedule(Base, DefaultMixin):
__tablename__ = "rp_oh_schedule"
year = Column(Integer, nullable=False)
plan_duration = Column(Integer, nullable=True)
planned_outage = Column(Integer, nullable=True)
actual_shutdown = Column(Integer, nullable=True)
start = Column(DateTime(timezone=True)) # This will be TIMESTAMP WITH TIME ZONE
finish = Column(DateTime(timezone=True))
remark = Column(String, nullable=True)
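For reference, a row built against this model looks like the sketch below; the values are invented, and the id and audit columns are assumed to come from DefaultMixin:

from datetime import datetime, timezone

schedule = OverhaulSchedule(
    year=2024,
    plan_duration=45,
    planned_outage=40,
    actual_shutdown=42,
    start=datetime(2024, 3, 1, tzinfo=timezone.utc),   # tz-aware, matching timezone=True
    finish=datetime(2024, 4, 15, tzinfo=timezone.utc),
    remark="Major overhaul",
)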

@@ -0,0 +1,63 @@
from typing import List, Optional
from fastapi import APIRouter, HTTPException, status
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import CommonParameters
from src.models import StandardResponse
from .schema import (OverhaulScheduleCreate, OverhaulSchedulePagination, OverhaulScheduleUpdate)
from .service import create, get_all, delete, update
router = APIRouter()
@router.get(
"", response_model=StandardResponse[OverhaulSchedulePagination]
)
async def get_schedules(common: CommonParameters):
"""Get all scope pagination."""
# return
results = await get_all(common=common)
return StandardResponse(
data=results,
message="Data retrieved successfully",
)
@router.post("", response_model=StandardResponse[None])
async def create_overhaul_schedule(
db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
):
await create(
db_session=db_session,
overhaul_job_in=overhaul_job_in,
)
return StandardResponse(
data=None,
message="Data created successfully",
)
@router.put("/{overhaul_job_id}", response_model=StandardResponse[None])
async def update_overhaul_schedule(
db_session: DbSession, overhaul_job_id: str, overhaul_job_in: OverhaulScheduleUpdate
):
await update(db_session=db_session, overhaul_schedule_id=overhaul_job_id, overhaul_job_in=overhaul_job_in)
return StandardResponse(
data=None,
message="Data updated successfully",
)
@router.delete("/{overhaul_job_id}", response_model=StandardResponse[None])
async def delete_overhaul_schedule(db_session: DbSession, overhaul_job_id: str):
await delete(db_session=db_session, overhaul_schedule_id=overhaul_job_id)
return StandardResponse(
data=None,
message="Data deleted successfully",
)
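A hedged usage sketch against these routes; the /overhaul-schedule prefix and the bearer token are assumptions, since the router's mount point and auth wiring sit outside this diff:

import requests

BASE = "http://localhost:8000/overhaul-schedule"  # assumed mount prefix
HEADERS = {"Authorization": "Bearer <jwt>"}

# Create a schedule, then list schedules with the default pagination.
requests.post(BASE, json={
    "year": 2024,
    "start": "2024-03-01T00:00:00+00:00",
    "finish": "2024-04-15T00:00:00+00:00",
    "remark": "Major overhaul",
}, headers=HEADERS)
print(requests.get(BASE, headers=HEADERS).json())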

@@ -0,0 +1,44 @@
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from pydantic import Field
from src.models import DefultBase, Pagination
from src.overhaul_scope.schema import ScopeRead
from src.scope_equipment_job.schema import ScopeEquipmentJobRead
from src.job.schema import ActivityMasterRead
class OverhaulScheduleBase(DefultBase):
pass
class OverhaulScheduleCreate(OverhaulScheduleBase):
year: int
plan_duration: Optional[int] = Field(None)
planned_outage: Optional[int] = Field(None)
actual_shutdown: Optional[int] = Field(None)
start: datetime
finish: datetime
remark: Optional[str] = Field(None)
class OverhaulScheduleUpdate(OverhaulScheduleBase):
start: datetime
finish: datetime
class OverhaulScheduleRead(OverhaulScheduleBase):
id: UUID
year: int
plan_duration: Optional[int]
planned_outage: Optional[int]
actual_shutdown: Optional[int]
start: datetime
finish: datetime
remark: Optional[str]
class OverhaulSchedulePagination(Pagination):
items: List[OverhaulScheduleRead] = []
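Concretely, the create schema accepts a payload like the one below (only year, start, and finish are required), while OverhaulScheduleUpdate deliberately limits edits to the two dates:

payload = {
    "year": 2024,
    "start": "2024-03-01T00:00:00+00:00",
    "finish": "2024-04-15T00:00:00+00:00",
}
obj = OverhaulScheduleCreate(**payload)  # optional fields default to None
print(obj.model_dump())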

@ -0,0 +1,57 @@
from typing import Optional
from fastapi import HTTPException, status
from sqlalchemy import Delete, Select, func
from sqlalchemy.orm import selectinload
from src.auth.service import CurrentUser
from src.database.core import DbSession
from src.database.service import search_filter_sort_paginate
from src.scope_equipment_job.model import ScopeEquipmentJob
from src.overhaul_activity.model import OverhaulActivity
from .model import OverhaulSchedule
from .schema import OverhaulScheduleCreate, OverhaulScheduleUpdate
async def get_all(*, common):
"""Returns all documents."""
query = Select(OverhaulSchedule).order_by(OverhaulSchedule.start.desc())
results = await search_filter_sort_paginate(model=query, **common)
return results
async def create(
*, db_session: DbSession, overhaul_job_in: OverhaulScheduleCreate
):
schedule = OverhaulSchedule(**overhaul_job_in.model_dump())
db_session.add(schedule)
await db_session.commit()
return schedule
async def update(*, db_session: DbSession, overhaul_schedule_id: str, overhaul_job_in: OverhaulScheduleUpdate):
"""Updates a document."""
data = overhaul_job_in.model_dump()
    overhaul_schedule = await db_session.get(OverhaulSchedule, overhaul_schedule_id)
    if not overhaul_schedule:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="An overhaul schedule with this id does not exist.",
        )
    update_data = overhaul_job_in.model_dump(exclude_defaults=True)
for field in data:
if field in update_data:
setattr(overhaul_schedule, field, update_data[field])
await db_session.commit()
return overhaul_schedule
async def delete(*, db_session: DbSession, overhaul_schedule_id: str):
"""Deletes a document."""
query = Delete(OverhaulSchedule).where(OverhaulSchedule.id == overhaul_schedule_id)
await db_session.execute(query)
await db_session.commit()
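One subtlety in update above: a field is copied onto the row only if it survives model_dump(exclude_defaults=True). A small illustration, assuming the OverhaulScheduleUpdate schema from this commit:

from datetime import datetime, timezone

patch = OverhaulScheduleUpdate(
    start=datetime(2025, 1, 6, tzinfo=timezone.utc),
    finish=datetime(2025, 2, 20, tzinfo=timezone.utc),
)
# start and finish have no defaults, so both survive the filter and both
# columns are overwritten; a field left at its declared default would be
# skipped by the `if field in update_data` check.
print(patch.model_dump(exclude_defaults=True))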

@ -0,0 +1,23 @@
import urllib.parse
from google.oauth2.service_account import Credentials
SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = "1gZXuwA97zU1v4QBv56wKeiqadc6skHUucGKYG8qVFRk"
# Sheet/tab names containing spaces may need URL encoding when embedded in a range
sheet_name = "2024 schedule"
encoded_sheet_name = urllib.parse.quote(sheet_name)
RANGE_NAME = "'2024 kurva s'!N79:BJ83"  # A1 notation: '<sheet name>'!<cell range>
def get_spreadsheet_service(credentials):
from googleapiclient.discovery import build
return build("sheets", "v4", credentials=credentials, cache_discovery=False)
def get_google_creds():
    return Credentials.from_service_account_file("credentials.json", scopes=SCOPES)
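Putting the helpers together, reading the configured range uses the standard googleapiclient call chain:

creds = get_google_creds()
service = get_spreadsheet_service(creds)
result = (
    service.spreadsheets()
    .values()
    .get(spreadsheetId=SAMPLE_SPREADSHEET_ID, range=RANGE_NAME)
    .execute()
)
rows = result.get("values", [])  # list of row lists; trailing empty cells are omitted
print(rows)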