Add streaming file read for large JSON simulation results

main
Cizz22 4 months ago
parent e7cb0a0e76
commit 7b5d9f55e6

@ -1,4 +1,4 @@
from sqlalchemy import JSON, UUID, Column, DateTime, Float, ForeignKey, Integer, String from sqlalchemy import JSON, UUID, Column, DateTime, Float, ForeignKey, Integer, Numeric, String
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from src.database.core import Base from src.database.core import Base
@ -142,8 +142,8 @@ class AerosSimulationCalcResult(Base, DefaultMixin):
class AerosSimulationPlotResult(Base, DefaultMixin): class AerosSimulationPlotResult(Base, DefaultMixin):
__tablename__ = "rbd_tr_aeros_simulation_plot_result" __tablename__ = "rbd_tr_aeros_simulation_plot_result"
max_flow_rate = Column(Float, nullable=False) max_flow_rate = Column(Numeric, nullable=False)
storage_capacity = Column(Float, nullable=False) storage_capacity = Column(Numeric, nullable=False)
point_availabilities = Column(JSON, nullable=False) point_availabilities = Column(JSON, nullable=False)
point_flowrates = Column(JSON, nullable=False) point_flowrates = Column(JSON, nullable=False)
timestamp_outs = Column(JSON, nullable=False) timestamp_outs = Column(JSON, nullable=False)

@ -22,7 +22,7 @@ from .schema import (
) )
from .service import ( from .service import (
create_simulation, create_simulation,
execute_simulation, # execute_simulation,
get_all, get_all,
get_custom_parameters, get_custom_parameters,
get_simulation_by_id, get_simulation_by_id,
@ -33,6 +33,8 @@ from .service import (
get_plant_calc_result get_plant_calc_result
) )
from .simulation_save_service import execute_simulation
from src.aeros_equipment.schema import EquipmentWithCustomParameters from src.aeros_equipment.schema import EquipmentWithCustomParameters
router = APIRouter() router = APIRouter()

@ -1,11 +1,14 @@
from datetime import datetime from datetime import datetime
import json import json
import os
import tempfile
from typing import Optional from typing import Optional
from uuid import uuid4, uuid4, UUID from uuid import uuid4, uuid4, UUID
import logging import logging
import httpx import httpx
from fastapi import HTTPException, status from fastapi import HTTPException, status
import ijson import ijson
import requests
from sqlalchemy import delete, select, update, and_ from sqlalchemy import delete, select, update, and_
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
@ -27,7 +30,7 @@ from .model import (
from src.aeros_equipment.model import AerosEquipment, AerosEquipmentCustomParameterData from src.aeros_equipment.model import AerosEquipment, AerosEquipmentCustomParameterData
from src.aeros_equipment.schema import EquipmentWithCustomParameters from src.aeros_equipment.schema import EquipmentWithCustomParameters
from .schema import SimulationInput, SimulationPlotResult, SimulationRankingParameters from .schema import SimulationInput, SimulationPlotResult, SimulationRankingParameters
from .utils import calculate_eaf from .utils import calculate_eaf, stream_large_array
client = httpx.AsyncClient(timeout=300.0) client = httpx.AsyncClient(timeout=300.0)
active_simulations = {} active_simulations = {}
@ -367,6 +370,10 @@ async def execute_simulation(*, db_session: DbSession, simulation_id: Optional[U
if eq_update is None: if eq_update is None:
eq_update = {} eq_update = {}
print("Executing simulation with id: %s", simulation_id, sim_data["SchematicName"]) print("Executing simulation with id: %s", simulation_id, sim_data["SchematicName"])
tmpfile = os.path.join(tempfile.gettempdir(), "simulation.json")
if os.path.exists(tmpfile):
os.remove(tmpfile)
try: try:
if not is_saved: if not is_saved:
@ -389,17 +396,22 @@ async def execute_simulation(*, db_session: DbSession, simulation_id: Optional[U
await db_session.commit() await db_session.commit()
print("Simulation started with id: %s", simulation.id) print("Simulation started with id: %s", simulation.id)
response = await client.post( # if not os.path.exists(tmpfile):
f"{AEROS_BASE_URL}/api/Simulation/RunSimulation", response = requests.post(f"{AEROS_BASE_URL}/api/Simulation/RunSimulation", stream=True, json=sim_data)
json=sim_data, file_obj = response.raw
headers={"Content-Type": "application/json"},
)
response.raise_for_status() # response = await client.post(
result = response.json() # f"{AEROS_BASE_URL}/api/Simulation/RunSimulation",
# json=sim_data,
# headers={"Content-Type": "application/json"},
# )
# response.raise_for_status()
# result = response.json()
await save_simulation_result( await save_simulation_result(
db_session=db_session, simulation_id=simulation.id, result=result, schematic_name=sim_data["SchematicName"], eq_update=eq_update db_session=db_session, simulation_id=simulation.id, schematic_name=sim_data["SchematicName"], eq_update=eq_update, file_path=file_obj
) )
print("Simulation completed with id: %s", simulation.id) print("Simulation completed with id: %s", simulation.id)
simulation.status = "completed" simulation.status = "completed"
@ -564,125 +576,217 @@ async def create_calc_result_object(
async def save_simulation_result( async def save_simulation_result(
*, db_session: DbSession, simulation_id: UUID, result: dict, schematic_name: str, eq_update: dict *, db_session: DbSession, simulation_id: UUID, schematic_name: str, eq_update: dict, file_path
): ):
print("Saving simulation result") print("Saving simulation result")
"""Save the simulation result.""" """Save the simulation result."""
calc_result = result["nodeResultOuts"] # calc_result = result["nodeResultOuts"]
plot_result = result["plotNodeOuts"] # plot_result = result["plotNodeOuts"]
"""Save the simulation result""" """Save the simulation result"""
avaiable_nodes = { available_nodes = {
f"{node.node_type}:{node.node_name}": node f"{node.node_type}:{node.node_name}": node
for node in await get_all_aeros_node(db_session=db_session, schematic_name=schematic_name) for node in await get_all_aeros_node(db_session=db_session, schematic_name=schematic_name)
} }
calc_objects = [] # calc_objects = []
plot_objects = [] plot_objects = []
try: try:
for result in plot_result: # for result in plot_result:
node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode" # node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
node = avaiable_nodes.get(f"{node_type}:{result['nodeName']}", None) # node = avaiable_nodes.get(f"{node_type}:{result['nodeName']}", None)
if not node: # if not node:
if result["nodeType"] != "RegularNode" and result["nodeType"] != "Schematic": # if result["nodeType"] != "RegularNode" and result["nodeType"] != "Schematic":
continue # continue
node = await get_or_save_node( # node = await get_or_save_node(
db_session=db_session, node_data=result, type="plot" # db_session=db_session, node_data=result, type="plot"
) # )
plot_result = AerosSimulationPlotResult( # plot_result = AerosSimulationPlotResult(
aeros_simulation_id=simulation_id, # aeros_simulation_id=simulation_id,
aeros_node_id=node.id, # aeros_node_id=node.id,
max_flow_rate=result["maxFlowrate"], # max_flow_rate=result["maxFlowrate"],
storage_capacity=result["storageCapacity"], # storage_capacity=result["storageCapacity"],
point_availabilities=result["pointAvailabilities"], # point_availabilities=result["pointAvailabilities"],
point_flowrates=result["pointFlowrates"], # point_flowrates=result["pointFlowrates"],
timestamp_outs=result["timeStampOuts"], # timestamp_outs=result["timeStampOuts"],
) # )
plot_objects.append(plot_result) # plot_objects.append(plot_result)
for result in calc_result: # for result in calc_result:
node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode" # node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
node = avaiable_nodes.get(f"{node_type}:{result['nodeName']}", None) # node = avaiable_nodes.get(f"{node_type}:{result['nodeName']}", None)
eq_reliability = eq_update.get(result["nodeName"], { # eq_reliability = eq_update.get(result["nodeName"], {
"eta": 0, # "eta": 0,
"beta": 0, # "beta": 0,
"mttr": 0, # "mttr": 0,
"parameters": {} # "parameters": {}
}) # })
plot_data = next(plot for plot in plot_objects if plot.aeros_node_id == node.id) if node else {} # # plot_data = next(plot for plot in plot_objects if plot.aeros_node_id == node.id) if node else {}
# if not node:
# if result["nodeType"] != "RegularNode" and result["nodeType"] != "Schematic":
# continue
# node = await get_or_save_node(
# db_session=db_session, node_data=result, type="calc"
# )
# eaf, derating_hours = calculate_eaf(
# available_hours=result["totalUpTime"],
# period_hours=result["totalUpTime"] + result["totalDowntime"],
# actual_production=result["production"],
# ideal_production=result["idealProduction"],
# downtime_hours = result["totalDowntime"],
# )
# efor = (result["totalDowntime"] / (result["totalDowntime"] + result["totalUpTime"]))*100 if (result["totalDowntime"] + result["totalUpTime"]) > 0 else 0
# calc_result = AerosSimulationCalcResult(
# aeros_simulation_id=simulation_id,
# aeros_node_id=node.id,
# total_downtime=result["totalDowntime"],
# total_uptime=result["totalUpTime"],
# num_events=result["numEvents"],
# production=result["production"],
# production_std=result["productionStd"],
# ideal_production=result["idealProduction"],
# availability=result["availability"],
# efficiency=result["efficiency"],
# effective_loss=result["effectiveLoss"],
# num_cm=result["numCM"],
# cm_waiting_time=result["cmWaitingTime"],
# total_cm_downtime=result["totalCMDowntime"],
# num_pm=result["numPM"],
# total_pm_downtime=result["totalPMDowntime"],
# num_ip=result["numIP"],
# total_ip_downtime=result["totalIPDowntime"],
# num_oh=result["numOH"],
# total_oh_downtime=result["totalOHDowntime"],
# t_wait_for_crew=result["tWaitForCrew"],
# t_wait_for_spare=result["tWaitForSpare"],
# duration_at_full=result["durationAtFull"],
# duration_above_hh=result["durationAboveHH"],
# duration_above_h=result["durationAboveH"],
# duration_below_l=result["durationBelowL"],
# duration_below_ll=result["durationBelowLL"],
# duration_at_empty=result["durationAtEmpty"],
# stg_input=result["stgInput"],
# stg_output=result["stgOutput"],
# average_level=result["averageLevel"],
# potential_production=result["potentialProduction"],
# eaf=eaf,
# efor=efor,
# derating_hours=derating_hours,
# beta=eq_reliability["beta"] if node_type == "RegularNode" else None,
# eta=eq_reliability["eta"] if node_type == "RegularNode" else None,
# mttr=eq_reliability["mttr"] if node_type == "RegularNode" else None,
# parameters=eq_reliability["parameters"] if node_type == "RegularNode" else None
# )
# calc_objects.append(calc_result)
if not node:
if result["nodeType"] != "RegularNode" and result["nodeType"] != "Schematic":
continue
node = await get_or_save_node(
db_session=db_session, node_data=result, type="calc"
)
eaf, derating_hours = calculate_eaf(
available_hours=result["totalUpTime"],
period_hours=result["totalUpTime"] + result["totalDowntime"],
actual_production=result["production"],
ideal_production=result["idealProduction"],
downtime_hours = result["totalDowntime"],
plot_data=plot_data
)
efor = (result["totalDowntime"] / (result["totalDowntime"] + result["totalUpTime"]))*100 if (result["totalDowntime"] + result["totalUpTime"]) > 0 else 0
for results in ijson.items(file_path, "plotNodeOuts"):
calc_result = AerosSimulationCalcResult( for result in results:
aeros_simulation_id=simulation_id, print("processing data", result["nodeName"])
aeros_node_id=node.id, node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
total_downtime=result["totalDowntime"], node = available_nodes.get(f"{node_type}:{result['nodeName']}")
total_uptime=result["totalUpTime"], if not node:
num_events=result["numEvents"], if result["nodeType"] not in ["RegularNode", "Schematic"]:
production=result["production"], continue
production_std=result["productionStd"], node = await get_or_save_node(
ideal_production=result["idealProduction"], db_session=db_session, node_data=result, type="plot"
availability=result["availability"], )
efficiency=result["efficiency"],
effective_loss=result["effectiveLoss"],
num_cm=result["numCM"], plot_result = AerosSimulationPlotResult(
cm_waiting_time=result["cmWaitingTime"], aeros_simulation_id=simulation_id,
total_cm_downtime=result["totalCMDowntime"], aeros_node_id=node.id,
num_pm=result["numPM"], max_flow_rate=result["maxFlowrate"],
total_pm_downtime=result["totalPMDowntime"], storage_capacity=result["storageCapacity"],
num_ip=result["numIP"], point_availabilities=result["pointAvailabilities"],
total_ip_downtime=result["totalIPDowntime"], point_flowrates=result["pointFlowrates"],
num_oh=result["numOH"], timestamp_outs=result["timeStampOuts"],
total_oh_downtime=result["totalOHDowntime"], )
t_wait_for_crew=result["tWaitForCrew"], plot_objects.append(plot_result)
t_wait_for_spare=result["tWaitForSpare"],
duration_at_full=result["durationAtFull"], # for result in stream_large_array(file_path, "nodeResultOuts"):
duration_above_hh=result["durationAboveHH"], # print("Processing node result steam for node:", result["nodeName"])
duration_above_h=result["durationAboveH"], # node_type = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
duration_below_l=result["durationBelowL"], # node = available_nodes.get(f"{node_type}:{result['nodeName']}", None)
duration_below_ll=result["durationBelowLL"],
duration_at_empty=result["durationAtEmpty"], # eq_reliability = eq_update.get(result["nodeName"], {
stg_input=result["stgInput"], # "eta": 0, "beta": 0, "mttr": 0, "parameters": {}
stg_output=result["stgOutput"], # })
average_level=result["averageLevel"],
potential_production=result["potentialProduction"], # if not node:
eaf=eaf, # if result["nodeType"] != "RegularNode" and result["nodeType"] != "Schematic":
efor=efor, # continue
derating_hours=derating_hours, # node = await get_or_save_node(
beta=eq_reliability["beta"] if node_type == "RegularNode" else None, # db_session=db_session, node_data=result, type="calc"
eta=eq_reliability["eta"] if node_type == "RegularNode" else None, # )
mttr=eq_reliability["mttr"] if node_type == "RegularNode" else None,
parameters=eq_reliability["parameters"] if node_type == "RegularNode" else None # eaf, derating_hours = calculate_eaf(
) # available_hours=result["totalUpTime"],
# period_hours=result["totalUpTime"] + result["totalDowntime"],
calc_objects.append(calc_result) # actual_production=result["production"],
# ideal_production=result["idealProduction"],
# downtime_hours = result["totalDowntime"],
# )
# efor = (result["totalDowntime"] / (result["totalDowntime"] + result["totalUpTime"]))*100 if (result["totalDowntime"] + result["totalUpTime"]) > 0 else 0
# calc_result = AerosSimulationCalcResult(
# aeros_simulation_id=simulation_id,
# aeros_node_id=node.id,
# total_downtime=result["totalDowntime"],
# total_uptime=result["totalUpTime"],
# num_events=result["numEvents"],
# production=result["production"],
# production_std=result["productionStd"],
# ideal_production=result["idealProduction"],
# availability=result["availability"],
# efficiency=result["efficiency"],
# effective_loss=result["effectiveLoss"],
# num_cm=result["numCM"],
# cm_waiting_time=result["cmWaitingTime"],
# total_cm_downtime=result["totalCMDowntime"],
# num_pm=result["numPM"],
# total_pm_downtime=result["totalPMDowntime"],
# num_ip=result["numIP"],
# total_ip_downtime=result["totalIPDowntime"],
# num_oh=result["numOH"],
# total_oh_downtime=result["totalOHDowntime"],
# t_wait_for_crew=result["tWaitForCrew"],
# t_wait_for_spare=result["tWaitForSpare"],
# duration_at_full=result["durationAtFull"],
# duration_above_hh=result["durationAboveHH"],
# duration_above_h=result["durationAboveH"],
# duration_below_l=result["durationBelowL"],
# duration_below_ll=result["durationBelowLL"],
# duration_at_empty=result["durationAtEmpty"],
# stg_input=result["stgInput"],
# stg_output=result["stgOutput"],
# average_level=result["averageLevel"],
# potential_production=result["potentialProduction"],
# eaf=eaf,
# efor=efor,
# derating_hours=derating_hours,
# beta=eq_reliability["beta"] if node_type == "RegularNode" else None,
# eta=eq_reliability["eta"] if node_type == "RegularNode" else None,
# mttr=eq_reliability["mttr"] if node_type == "RegularNode" else None,
# parameters=eq_reliability["parameters"] if node_type == "RegularNode" else None
# )
# db_session.add(calc_result)
except Exception as e: except Exception as e:
simulation = await get_simulation_by_id( simulation = await get_simulation_by_id(
@ -696,8 +800,10 @@ async def save_simulation_result(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
) )
db_session.add_all(calc_objects) # db_session.add_all(calc_objects)
db_session.add_all(plot_objects) # db_session.add_all(plot_objects)
raise Exception(plot_result)
simulation = await get_simulation_by_id( simulation = await get_simulation_by_id(
db_session=db_session, simulation_id=simulation_id db_session=db_session, simulation_id=simulation_id

@ -0,0 +1,242 @@
import json
import logging
import os
import tempfile
from datetime import datetime
from decimal import Decimal
from typing import Optional
from uuid import UUID
import httpx
import ijson
from fastapi import HTTPException, status
from src.aeros_simulation.model import AerosSimulationCalcResult, AerosSimulationPlotResult
from src.aeros_simulation.service import get_all_aeros_node, get_or_save_node, get_simulation_by_id
from src.aeros_simulation.utils import calculate_eaf
from src.config import AEROS_BASE_URL
from src.database.core import DbSession
from src.logging import setup_logging
# Module-level logger; setup_logging (src.logging) attaches a stdout stream
# handler and sets the level — see that module for the exact configuration.
log = logging.getLogger(__name__)
setup_logging(logger=log)
async def execute_simulation(
    *, db_session: DbSession, simulation_id: Optional[UUID] = None,
    sim_data: dict, is_saved: bool = False, eq_update: dict = None
):
    """Run a simulation on the AerOS service and persist (or return) its result.

    The AerOS response can be very large, so it is streamed to a temporary
    file on disk and then parsed incrementally (see
    ``process_large_json_streaming``) instead of being held in memory.

    Args:
        db_session: Async SQLAlchemy session.
        simulation_id: Id of an existing simulation row to update; may be
            ``None`` for ad-hoc (unsaved) runs.
        sim_data: Payload forwarded to the AerOS ``RunSimulation`` endpoint.
        is_saved: When ``False``, the parsed JSON result is returned directly
            and nothing is written to the database.
        eq_update: Optional per-node reliability overrides keyed by node name.

    Returns:
        The parsed result dict when ``is_saved`` is ``False``; ``True`` after
        a successful persisted run.

    Raises:
        HTTPException: 500 wrapping any failure; the simulation row (if any)
            is marked ``failed`` first.
    """
    eq_update = eq_update or {}
    log.info("Executing simulation with id=%s, schematic=%s",
             simulation_id, sim_data.get("SchematicName"))
    # mkstemp yields a unique path, so concurrent runs — including unsaved
    # runs where simulation_id is None — can never clobber each other's
    # result file (a fixed "simulation_{id}.json" name would collide).
    fd, tmpfile = tempfile.mkstemp(
        prefix=f"simulation_{simulation_id}_", suffix=".json"
    )
    os.close(fd)  # we reopen the path ourselves below
    try:
        async with httpx.AsyncClient(timeout=300.0) as client:
            async with client.stream(
                "POST",
                f"{AEROS_BASE_URL}/api/Simulation/RunSimulation",
                json=sim_data,
                headers={"Content-Type": "application/json"},
            ) as response:
                response.raise_for_status()
                with open(tmpfile, "wb") as f:
                    async for chunk in response.aiter_bytes():
                        f.write(chunk)
        if not is_saved:
            # Ad-hoc run: no DB writes, just hand back the parsed payload.
            with open(tmpfile, "r") as f:
                return json.load(f)
        # Mark the simulation as processing while results are ingested.
        simulation = await get_simulation_by_id(db_session=db_session, simulation_id=simulation_id)
        simulation.status = "processing"
        await db_session.commit()
        await process_large_json_streaming(
            db_session=db_session,
            file_path=tmpfile,
            simulation_id=simulation.id,
            eq_update=eq_update,
            schematic_name=sim_data["SchematicName"],
        )
        simulation.status = "completed"
        simulation.completed_at = datetime.now()
        await db_session.commit()
        log.info("Simulation result saved for simulation id: %s", simulation.id)
        return True
    except Exception as e:
        log.error("Simulation failed: %s", str(e))
        if simulation_id:
            # Best-effort status update: never let a secondary DB failure
            # mask the original exception being re-raised below.
            try:
                simulation = await get_simulation_by_id(db_session=db_session, simulation_id=simulation_id)
                simulation.status = "failed"
                simulation.error = str(e)
                await db_session.commit()
            except Exception:
                log.exception("Could not record failure for simulation %s", simulation_id)
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) from e
    finally:
        # Always reclaim the temp file, success or failure.
        if os.path.exists(tmpfile):
            os.remove(tmpfile)
async def process_large_json_streaming(
    *, db_session: DbSession, file_path: str, simulation_id, eq_update, schematic_name
):
    """Incrementally parse the on-disk result JSON and persist rows in batches.

    Reads the two top-level arrays (``plotNodeOuts`` then ``nodeResultOuts``)
    with ijson so only one item is in memory at a time, flushing ORM objects
    to the database every ``BATCH`` rows.
    """
    BATCH = 200  # rows per commit; larger batches amortize commit overhead

    def _to_native(value):
        # ijson yields Decimal for JSON numbers; normalize recursively.
        if isinstance(value, Decimal):
            return float(value)
        if isinstance(value, dict):
            return {k: _to_native(v) for k, v in value.items()}
        if isinstance(value, list):
            return [_to_native(item) for item in value]
        return value

    async def _flush(pending):
        # Persist and clear a batch of ORM objects (no-op when empty).
        if pending:
            db_session.add_all(pending)
            await db_session.commit()
            pending.clear()

    known_nodes = await get_all_aeros_node(db_session=db_session, schematic_name=schematic_name)
    available_nodes = {f"{n.node_type}:{n.node_name}": n for n in known_nodes}

    pending_plots, pending_calcs = [], []
    try:
        with open(file_path, "r") as fh:
            log.info("Processing plot array...")
            for raw in ijson.items(fh, "plotNodeOuts.item"):
                obj = await create_plot_result_object(
                    simulation_id, _to_native(raw), available_nodes, eq_update, db_session
                )
                if obj:
                    pending_plots.append(obj)
                    if len(pending_plots) >= BATCH:
                        await _flush(pending_plots)

            fh.seek(0)  # rewind to scan the second top-level array

            log.info("Processing calculation array...")
            for raw in ijson.items(fh, "nodeResultOuts.item"):
                obj = await create_calc_result_object(
                    simulation_id, _to_native(raw), available_nodes, eq_update, db_session
                )
                if obj:
                    pending_calcs.append(obj)
                    if len(pending_calcs) >= BATCH:
                        await _flush(pending_calcs)

            # Flush whatever remains below a full batch.
            await _flush(pending_plots)
            await _flush(pending_calcs)
    except Exception as e:
        log.error("Error processing JSON stream: %s", str(e))
        raise
async def create_plot_result_object(
    simulation_id, result, available_nodes, eq_update, db_session
):
    """Build one AerosSimulationPlotResult row from a streamed plot entry.

    Returns ``None`` when the entry references an unknown node whose type is
    neither ``RegularNode`` nor ``Schematic``.
    """
    kind = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
    cache_key = f"{kind}:{result['nodeName']}"
    node = available_nodes.get(cache_key)
    if not node:
        if result["nodeType"] not in ["RegularNode", "Schematic"]:
            return None
        node = await get_or_save_node(
            db_session=db_session, node_data=result, type="plot"
        )
        # Cache the freshly created node so later entries reuse it.
        available_nodes[cache_key] = node
    return AerosSimulationPlotResult(
        aeros_simulation_id=simulation_id,
        aeros_node_id=node.id,
        max_flow_rate=float(result["maxFlowrate"]),
        storage_capacity=float(result["storageCapacity"]),
        point_availabilities=result["pointAvailabilities"],
        point_flowrates=result["pointFlowrates"],
        timestamp_outs=result["timeStampOuts"],
    )
async def create_calc_result_object(
    simulation_id, result, available_nodes, eq_update, db_session
):
    """Create a single calc result object from a streamed node-result entry.

    Returns ``None`` when the entry references an unknown node whose type is
    neither ``RegularNode`` nor ``Schematic``.
    """
    kind = "RegularNode" if result["nodeType"] == "RegularNode" else "SchematicNode"
    cache_key = f"{kind}:{result['nodeName']}"
    node = available_nodes.get(cache_key)
    if not node:
        if result["nodeType"] not in ["RegularNode", "Schematic"]:
            return None
        node = await get_or_save_node(db_session=db_session, node_data=result, type="calc")
        # Cache the freshly created node for subsequent entries.
        available_nodes[cache_key] = node

    reliability = eq_update.get(result["nodeName"], {
        "eta": 0, "beta": 0, "mttr": 0, "parameters": {}
    })

    uptime = result["totalUpTime"]
    downtime = result["totalDowntime"]
    eaf, derating_hours = calculate_eaf(
        available_hours=uptime,
        period_hours=uptime + downtime,
        actual_production=result["production"],
        ideal_production=result["idealProduction"],
        downtime_hours=downtime,
    )
    # Forced-outage rate as a percentage; 0 when the node never ran at all.
    total = uptime + downtime
    efor = (downtime / total) * 100 if total > 0 else 0

    # snake_case model column -> camelCase key in the streamed JSON payload
    straight_copies = {
        "total_downtime": "totalDowntime",
        "total_uptime": "totalUpTime",
        "num_events": "numEvents",
        "production": "production",
        "production_std": "productionStd",
        "ideal_production": "idealProduction",
        "availability": "availability",
        "efficiency": "efficiency",
        "effective_loss": "effectiveLoss",
        "num_cm": "numCM",
        "cm_waiting_time": "cmWaitingTime",
        "total_cm_downtime": "totalCMDowntime",
        "num_pm": "numPM",
        "total_pm_downtime": "totalPMDowntime",
        "num_ip": "numIP",
        "total_ip_downtime": "totalIPDowntime",
        "num_oh": "numOH",
        "total_oh_downtime": "totalOHDowntime",
        "t_wait_for_crew": "tWaitForCrew",
        "t_wait_for_spare": "tWaitForSpare",
        "duration_at_full": "durationAtFull",
        "duration_above_hh": "durationAboveHH",
        "duration_above_h": "durationAboveH",
        "duration_below_l": "durationBelowL",
        "duration_below_ll": "durationBelowLL",
        "duration_at_empty": "durationAtEmpty",
        "stg_input": "stgInput",
        "stg_output": "stgOutput",
        "average_level": "averageLevel",
        "potential_production": "potentialProduction",
    }
    copied = {column: result[json_key] for column, json_key in straight_copies.items()}

    # Reliability parameters only apply to regular (equipment) nodes.
    is_regular = kind == "RegularNode"
    return AerosSimulationCalcResult(
        aeros_simulation_id=simulation_id,
        aeros_node_id=node.id,
        eaf=eaf,
        efor=efor,
        derating_hours=derating_hours,
        beta=reliability["beta"] if is_regular else None,
        eta=reliability["eta"] if is_regular else None,
        mttr=reliability["mttr"] if is_regular else None,
        parameters=reliability["parameters"] if is_regular else None,
        **copied,
    )

@ -45,7 +45,7 @@ def get_config():
config = get_config() config = get_config()
LOG_LEVEL = config("LOG_LEVEL", default=logging.WARNING) LOG_LEVEL = config("LOG_LEVEL", default=logging.INFO)
ENV = config("ENV", default="local") ENV = config("ENV", default="local")
PORT = config("PORT", cast=int, default=8000) PORT = config("PORT", cast=int, default=8000)
HOST = config("HOST", default="localhost") HOST = config("HOST", default="localhost")

@ -1,32 +1,16 @@
import logging import logging
import sys
from fastapi import FastAPI
from src.config import LOG_LEVEL from src.config import LOG_LEVEL
from src.enums import RBDEnum
LOG_FORMAT_DEBUG = "%(levelname)s:%(message)s:%(pathname)s:%(funcName)s:%(lineno)d"
class LogLevels(RBDEnum):
info = "INFO"
warn = "WARN"
error = "ERROR"
debug = "DEBUG"
def configure_logging():
log_level = str(LOG_LEVEL).upper() # cast to string
log_levels = list(LogLevels)
if log_level not in log_levels:
# we use error as the default log level
logging.basicConfig(level=LogLevels.error)
return
if log_level == LogLevels.debug:
logging.basicConfig(level=log_level, format=LOG_FORMAT_DEBUG)
return
logging.basicConfig(level=log_level)
# sometimes the slack client can be too verbose def setup_logging(logger):
logging.getLogger("slack_sdk.web.base_client").setLevel(logging.CRITICAL) # Your logging configuration here
logger.setLevel(logging.DEBUG)
# Create formatter
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# Create console handler
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)

@ -1,4 +1,5 @@
import logging import logging
import sys
import time import time
from contextvars import ContextVar from contextvars import ContextVar
from os import path from os import path
@ -25,13 +26,13 @@ from src.api import api_router
from src.database.core import async_session, engine from src.database.core import async_session, engine
from src.enums import ResponseStatus from src.enums import ResponseStatus
from src.exceptions import handle_exception from src.exceptions import handle_exception
from src.logging import configure_logging from src.logging import setup_logging
from src.rate_limiter import limiter from src.rate_limiter import limiter
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
# we configure the logging level and format # we configure the logging level and format
configure_logging()
# we define the exception handlers # we define the exception handlers
exception_handlers = {Exception: handle_exception} exception_handlers = {Exception: handle_exception}
@ -48,6 +49,8 @@ app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(GZipMiddleware, minimum_size=2000) app.add_middleware(GZipMiddleware, minimum_size=2000)
# credentials: "include", # credentials: "include",
setup_logging(logger=log)
log.info('API is starting up')
REQUEST_ID_CTX_KEY: Final[str] = "request_id" REQUEST_ID_CTX_KEY: Final[str] = "request_id"

Loading…
Cancel
Save