update efdh foh

main
MrWaradana 1 month ago
parent effe624bd4
commit aae078bdfc

@@ -94,3 +94,5 @@ class EquipmentTransactionRecords(Base, DefaultMixin, IdentityMixin):
eac_annual_acq_cost = Column(Float, nullable=False)
eac_disposal_cost = Column(Float, nullable=False)
eac_eac = Column(Float, nullable=False)
efdh_equivalent_forced_derating_hours = Column(Float, nullable=False)
foh_forced_outage_hours = Column(Float, nullable=False)

@@ -72,6 +72,8 @@ class MasterBase(DefaultBase):
eac_annual_acq_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_disposal_cost: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
eac_eac: Optional[float] = Field(None, nullable=True, le=MAX_PRICE)
efdh_equivalent_forced_derating_hours: Optional[float] = Field(None, nullable=True)
foh_forced_outage_hours: Optional[float] = Field(None, nullable=True)
class EquipmentCreate(EquipmentBase):

@@ -150,11 +150,11 @@ class Eac:
# Projected values are discounted using an exponent offset from the last actual sequence,
# so the year offset continues from the actuals already processed.
# NPV formula: NPV = Σ [Ct / (1 + r)^t]
# where Ct = cash flow in period t, r = inflation_rate, t = the period
# where Ct = cash flow in period t, r = disc_rate, t = the period
# value is rc_total_cost in period t
# (1 + inflation_rate) ** (last_seq + i + 1) is the discount factor with the time offset
# (1 + disc_rate) ** (last_seq + i + 1) is the discount factor with the time offset
discounted_proj = sum(
(float(value) / ((1 + inflation_rate) ** (last_seq + i + 1)))
(float(value) / ((1 + disc_rate) ** (last_seq + i + 1)))
for i, value in enumerate(cumulative_values)
)
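A hedged worked example of the discounting above, with hypothetical inputs (disc_rate, last_seq, and cumulative_values are made up, not values from the codebase):

disc_rate = 0.05                            # assumed discount rate
last_seq = 3                                # last actual sequence already processed
cumulative_values = [100.0, 110.0, 121.0]   # hypothetical projected rc_total_cost values

discounted_proj = sum(
    value / ((1 + disc_rate) ** (last_seq + i + 1))
    for i, value in enumerate(cumulative_values)
)
# exponents run from t = 4, 5, 6, so discounted_proj ≈ 82.27 + 86.19 + 90.29 ≈ 258.75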

@@ -334,9 +334,11 @@ class Prediksi:
finally:
if connection:
connection.close()
def __get_asset_criticality_params(self, equipment_id):
try:
connections = get_connection()
efdh_foh_sum = None
connection = (
connections[0] if isinstance(connections, tuple) else connections
)
@@ -356,8 +358,8 @@ class Prediksi:
asset_crit_bpp_pembangkit,
asset_crit_dmn_daya_mampu_netto,
asset_crit_marginal_cost,
asset_crit_efdh_equivalent_force_derated_hours,
asset_crit_foh_force_outage_hours,
asset_crit_efdh_equivalent_forced_derated_hours,
asset_crit_foh_forced_outage_hours,
asset_crit_extra_fuel_cost
FROM lcc_ms_year_data
) t
@@ -392,12 +394,33 @@ class Prediksi:
part2 = max(0.0, (dmn - ens)) * extra_fuel
asset_criticality = part1 + part2
efdh = _f("asset_crit_efdh_equivalent_force_derated_hours") # EFDH
foh = _f("asset_crit_foh_force_outage_hours")
efdh = _f("asset_crit_efdh_equivalent_forced_derated_hours") # EFDH per Year
foh = _f("asset_crit_foh_forced_outage_hours") # FOH per Year
query_each_equipment = """
SELECT
efdh_equivalent_forced_derated_hours,
foh_forced_outage_hours
FROM lcc_ms_equipment_data
WHERE assetnum = %s
"""
cursor.execute(query_each_equipment, (equipment_id,))
result_eq = cursor.fetchone()
if result_eq:
eq_efdh = float(result_eq.get("efdh_equivalent_forced_derated_hours") or 0.0) # EFDH per Equipment
eq_foh = float(result_eq.get("foh_forced_outage_hours") or 0.0) # FOH per Equipment
efdh_foh_equipment = eq_efdh + eq_foh
# if efdh_foh_equipment == 0:
# efdh_foh_sum = efdh + foh
# else:
efdh_foh_sum = efdh_foh_equipment
return {
"asset_criticality": asset_criticality,
"efdh_oh_sum": efdh + foh,
"efdh_foh_sum": efdh_foh_sum,
}
except Exception as e:
@@ -430,7 +453,8 @@ class Prediksi:
raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human,
raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human,
"raw_loss_output_MW" as raw_loss_output_mw, raw_loss_output_price,
raw_operational_cost, raw_maintenance_cost
raw_operational_cost, raw_maintenance_cost,
efdh_equivalent_forced_derated_hours, foh_forced_outage_hours
FROM lcc_equipment_tr_data
WHERE assetnum = %s;
'''
@@ -500,6 +524,9 @@ class Prediksi:
raw_operational_cost = float(r.get("raw_operational_cost") or 0.0)
raw_maintenance_cost = float(r.get("raw_maintenance_cost") or 0.0)
efdh_equivalent_forced_derated_hours = float(r.get("efdh_equivalent_forced_derated_hours") or 0.0)
foh_forced_outage_hours = float(r.get("foh_forced_outage_hours") or 0.0)
rc_cm_material_cost = float(r.get("rc_cm_material_cost") or 0.0)
# compute per-column costs using helpers
@@ -541,17 +568,19 @@ class Prediksi:
asset_criticality_value = 0.0
# Simplify extraction and avoid repeating the multiplication
ac = asset_criticality_data if isinstance(asset_criticality_data, dict) else {}
try:
efdh_oh_sum = float(ac.get("efdh_oh_sum", 0.0))
except Exception:
efdh_oh_sum = 0.0
efdh_foh_sum = efdh_equivalent_forced_derated_hours + foh_forced_outage_hours if efdh_equivalent_forced_derated_hours and foh_forced_outage_hours else 0.0
# try:
# efdh_foh_sum = float(ac.get("efdh_foh_sum", 0.0))
# except Exception:
# efdh_foh_sum = 0.0
try:
asset_criticality_value = float(ac.get("asset_criticality", 0.0))
except Exception:
asset_criticality_value = 0.0
# single multiplier used for all RC groups
ac_multiplier = efdh_oh_sum * asset_criticality_value
ac_multiplier = efdh_foh_sum * asset_criticality_value
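A worked illustration of the multiplier, using purely hypothetical numbers:

# hypothetical per-equipment hours and criticality value
efdh_foh_sum_example = 12.5 + 3.0                     # EFDH + FOH hours -> 15.5
asset_criticality_example = 2_000_000.0
ac_multiplier_example = efdh_foh_sum_example * asset_criticality_example  # 31_000_000.0, added to each RC group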
total = rc_total_cost(
rc_cm=rc_cm_material + rc_cm_labor + ac_multiplier,

@@ -1,4 +1,6 @@
import asyncio
import pandas as pd
from decimal import Decimal, InvalidOperation
import psycopg2
from psycopg2.extras import DictCursor
from uuid import uuid4
@@ -141,6 +143,237 @@ def get_data_tahun(cursor):
cursor.execute(query)
return cursor.fetchall()
def _parse_decimal(value: str, decimal_separator: str = ".") -> Decimal:
"""Parse numeric strings that may use comma decimal separators."""
if value is None:
return Decimal("0")
sanitized = value.strip()
if not sanitized:
return Decimal("0")
sanitized = sanitized.replace(" ", "")
if decimal_separator == ",":
sanitized = sanitized.replace(".", "").replace(",", ".")
else:
sanitized = sanitized.replace(",", "")
try:
return Decimal(sanitized)
except (InvalidOperation, ValueError):
print(f"Unable to parse numeric value '{value}', defaulting to 0.")
return Decimal("0")
def _normalize_key(key: str) -> str:
if not key:
return ""
cleaned = key.strip().lstrip("\ufeff").lower()
for char in (" ", ".", "-", "\t"):
cleaned = cleaned.replace(char, "_")
while "__" in cleaned:
cleaned = cleaned.replace("__", "_")
return cleaned
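Illustrative normalizations (the exact CSV headers here are assumptions):

assert _normalize_key(" Location Tag ") == "location_tag"
assert _normalize_key("Acquisition-Cost") == "acquisition_cost"
assert _normalize_key("\ufeffproportion") == "proportion"  # leading BOM is stripped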
def _load_acquisition_cost_lookup(csv_path: str) -> dict:
if not os.path.exists(csv_path):
print(f"CSV file not found at {csv_path}")
return {}
try:
df = pd.read_csv(csv_path, sep=";", dtype=str, keep_default_na=False, encoding="utf-8")
except Exception as exc:
print(f"Failed to read CSV file {csv_path}: {exc}")
return {}
df.columns = [_normalize_key(col) for col in df.columns]
required_cols = {"location_tag", "proportion", "category_no", "acquisition_cost"}
missing_cols = required_cols - set(df.columns)
if missing_cols:
print(f"CSV file is missing required columns: {', '.join(sorted(missing_cols))}")
return {}
lookup = {}
for _, row in df.iterrows():
raw_tag = (row.get("location_tag") or "").strip()
location_tag = raw_tag.upper()
if not location_tag:
continue
lookup[location_tag] = {
"proportion": _parse_decimal(row.get("proportion"), decimal_separator=","),
"category_no": _parse_decimal(row.get("category_no"), decimal_separator="."),
"acquisition_cost": _parse_decimal(row.get("acquisition_cost"), decimal_separator="."),
"raw_location_tag": raw_tag,
}
return lookup
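The lookup assumes a semicolon-separated CSV roughly shaped like the sketch below; headers and rows are hypothetical, and only proportion uses a decimal comma:

# acquisition_cost.csv (assumed layout)
#   location_tag;proportion;category_no;acquisition_cost
#   BLR-01;0,35;2;1500000000
#   TRB-02;0,15;3;750000000
# _load_acquisition_cost_lookup(...) would then map "BLR-01" to
# {"proportion": Decimal("0.35"), "category_no": Decimal("2"),
#  "acquisition_cost": Decimal("1500000000"), "raw_location_tag": "BLR-01"}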
def _build_tr_row_values(
data_cm_row,
data_pm_row,
data_oh_row,
data_predictive_row,
data_tahunan_row,
):
"""Return sanitized numeric values for equipment transaction rows."""
def _safe_value(row, key):
if not row:
return 0
value = row.get(key)
return value if value is not None else 0
has_recursive_data = any(
row for row in (data_cm_row, data_pm_row, data_oh_row, data_predictive_row)
)
if not has_recursive_data:
return {
"raw_cm_interval": 0,
"raw_cm_material_cost": 0,
"raw_cm_labor_time": 0,
"raw_cm_labor_human": 0,
"raw_pm_interval": 0,
"raw_pm_material_cost": 0,
"raw_pm_labor_time": 0,
"raw_pm_labor_human": 0,
"raw_oh_interval": 0,
"raw_oh_material_cost": 0,
"raw_oh_labor_time": 0,
"raw_oh_labor_human": 0,
"raw_predictive_interval": 0,
"raw_predictive_material_cost": 0,
"raw_predictive_labor_time": 0,
"raw_predictive_labor_human": 0,
"raw_loss_output_MW": 0,
"raw_loss_output_price": 0,
"rc_cm_material_cost": 0,
"rc_cm_labor_cost": 0,
"rc_pm_material_cost": 0,
"rc_pm_labor_cost": 0,
"rc_oh_material_cost": 0,
"rc_oh_labor_cost": 0,
"rc_predictive_labor_cost": 0,
}
raw_cm_interval = _safe_value(data_cm_row, "raw_cm_interval")
raw_cm_material_cost_total = _safe_value(data_cm_row, "raw_cm_material_cost")
raw_cm_material_cost = (
raw_cm_material_cost_total / raw_cm_interval if raw_cm_interval else 0
)
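For example (hypothetical numbers), a CM material total of 9,000,000 over raw_cm_interval = 3 averages to 3,000,000 per CM event; a zero interval falls back to 0 rather than dividing by zero.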
raw_cm_labor_time = _safe_value(data_cm_row, "raw_cm_labor_time")
raw_cm_labor_human = _safe_value(data_cm_row, "raw_cm_labor_human")
raw_pm_interval = _safe_value(data_pm_row, "raw_pm_interval")
raw_pm_material_cost = _safe_value(data_pm_row, "raw_pm_material_cost")
raw_pm_labor_time = _safe_value(data_pm_row, "raw_pm_labor_time")
raw_pm_labor_human = _safe_value(data_pm_row, "raw_pm_labor_human")
raw_oh_interval = _safe_value(data_oh_row, "raw_oh_interval")
raw_oh_material_cost = _safe_value(data_oh_row, "raw_oh_material_cost")
raw_oh_labor_time = _safe_value(data_oh_row, "raw_oh_labor_time")
raw_oh_labor_human = _safe_value(data_oh_row, "raw_oh_labor_human")
raw_pdm_interval = _safe_value(data_predictive_row, "raw_predictive_interval")
raw_pdm_material_cost = _safe_value(
data_predictive_row, "raw_predictive_material_cost"
)
raw_pdm_labor_time = _safe_value(
data_predictive_row, "raw_predictive_labor_time"
)
raw_pdm_labor_human = _safe_value(
data_predictive_row, "raw_predictive_labor_human"
)
raw_loss_output_MW = (
data_tahunan_row.get("total_lost")
if data_tahunan_row and data_tahunan_row.get("total_lost") is not None
else 0
)
raw_loss_output_price = (
data_tahunan_row.get("rp_per_kwh")
if data_tahunan_row and data_tahunan_row.get("rp_per_kwh") is not None
else 0
)
man_hour_value = (
data_tahunan_row.get("man_hour")
if data_tahunan_row and data_tahunan_row.get("man_hour") is not None
else None
)
rc_cm_material_cost = raw_cm_material_cost_total
rc_cm_labor_cost = (
data_cm_row.get("raw_cm_labor_time")
* data_cm_row.get("rc_cm_labor_human")
* man_hour_value
if data_cm_row
and data_cm_row.get("rc_cm_labor_cost")
and data_cm_row.get("rc_cm_labor_human")
and man_hour_value is not None
else 0
)
rc_pm_material_cost = raw_pm_material_cost
rc_pm_labor_cost = (
data_pm_row.get("raw_pm_labor_time")
* data_pm_row.get("rc_pm_labor_human")
* man_hour_value
if data_pm_row
and data_pm_row.get("rc_pm_labor_cost")
and data_pm_row.get("rc_pm_labor_human")
and man_hour_value is not None
else 0
)
rc_oh_material_cost = raw_oh_material_cost
rc_oh_labor_cost = (
data_oh_row.get("raw_oh_labor_time")
* data_oh_row.get("rc_oh_labor_human")
* man_hour_value
if data_oh_row
and data_oh_row.get("rc_oh_labor_cost")
and data_oh_row.get("rc_oh_labor_human")
and man_hour_value is not None
else 0
)
rc_predictive_labor_cost = (
data_predictive_row.get("raw_predictive_labor_human") * man_hour_value
if data_predictive_row
and data_predictive_row.get("rc_predictive_labor_cost")
and man_hour_value is not None
else 0
)
return {
"raw_cm_interval": raw_cm_interval,
"raw_cm_material_cost": raw_cm_material_cost,
"raw_cm_labor_time": raw_cm_labor_time,
"raw_cm_labor_human": raw_cm_labor_human,
"raw_pm_interval": raw_pm_interval,
"raw_pm_material_cost": raw_pm_material_cost,
"raw_pm_labor_time": raw_pm_labor_time,
"raw_pm_labor_human": raw_pm_labor_human,
"raw_oh_interval": raw_oh_interval,
"raw_oh_material_cost": raw_oh_material_cost,
"raw_oh_labor_time": raw_oh_labor_time,
"raw_oh_labor_human": raw_oh_labor_human,
"raw_predictive_interval": raw_pdm_interval,
"raw_predictive_material_cost": raw_pdm_material_cost,
"raw_predictive_labor_time": raw_pdm_labor_time,
"raw_predictive_labor_human": raw_pdm_labor_human,
"raw_loss_output_MW": raw_loss_output_MW,
"raw_loss_output_price": raw_loss_output_price,
"rc_cm_material_cost": rc_cm_material_cost,
"rc_cm_labor_cost": rc_cm_labor_cost,
"rc_pm_material_cost": rc_pm_material_cost,
"rc_pm_labor_cost": rc_pm_labor_cost,
"rc_oh_material_cost": rc_oh_material_cost,
"rc_oh_labor_cost": rc_oh_labor_cost,
"rc_predictive_labor_cost": rc_predictive_labor_cost,
}
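A minimal illustrative call: when no CM/PM/OH/predictive rows exist, the helper short-circuits to the all-zero dict so downstream inserts never receive NULLs.

empty = _build_tr_row_values(None, None, None, None, None)
assert empty["raw_cm_material_cost"] == 0
assert empty["rc_predictive_labor_cost"] == 0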
async def insert_ms_equipment_data():
connection = None
try:
@@ -234,9 +467,9 @@ async def insert_lcca_maximo_corrective_data():
cursor_db_app.execute(check_data_query)
data_count = cursor_db_app.fetchone()[0]
if data_count > 0:
truncate_query = "TRUNCATE TABLE lcc_equipment_tr_data"
cursor_db_app.execute(truncate_query)
# if data_count > 0:
# truncate_query = "TRUNCATE TABLE lcc_equipment_tr_data"
# cursor_db_app.execute(truncate_query)
query_main = "SELECT DISTINCT(assetnum) FROM ms_equipment_master"
cursor_db_app.execute(query_main)
@@ -359,6 +592,203 @@ async def insert_lcca_maximo_corrective_data():
connection_wo_db.close()
production_connection.close()
async def insert_acquisition_cost_data():
connection = None
connection_wo_db = None
cursor = None
try:
connection, connection_wo_db = get_connection()
if connection is None or connection_wo_db is None:
print("Database connection failed.")
return
start_time = datetime.now()
print(f"Start insert_acquisition_cost_data at {start_time.isoformat()}")
# Fetch data from lcc_ms_equipment_data joined with ms_equipment_master on the assetnum column,
# selecting assetnum, location_tag, and name.
location_tag_query = """
SELECT em.assetnum, em.location_tag, em.name
FROM lcc_ms_equipment_data AS ed
JOIN ms_equipment_master AS em ON ed.assetnum = em.assetnum;
"""
cursor = connection.cursor(cursor_factory=DictCursor)
cursor.execute(location_tag_query)
location_tag_results = cursor.fetchall()
if not location_tag_results:
print("No equipment data found to update.")
return
csv_path = os.path.join(os.path.dirname(__file__), "acquisition_cost.csv")
csv_lookup = _load_acquisition_cost_lookup(csv_path)
if not csv_lookup:
print("CSV file does not contain any usable rows.")
return
update_query = """
UPDATE lcc_ms_equipment_data
SET proportion = %s,
category_no = %s,
acquisition_cost = %s,
updated_at = NOW()
WHERE assetnum = %s
"""
updated_assets = 0
skipped_missing_csv = 0
skipped_missing_tag = 0
progress_rows = []
processed_csv_tags = set()
for idx, row in enumerate(location_tag_results, start=1):
assetnum = row["assetnum"]
location_tag_value = row["location_tag"]
equipment_name = row.get("name")
normalized_tag = (location_tag_value or "").strip().upper()
if not normalized_tag:
skipped_missing_tag += 1
print(f"[{idx}] Skipping asset {assetnum}: missing location_tag")
progress_rows.append(
{
"assetnum": assetnum,
"location_tag": location_tag_value or "",
"name": equipment_name or "",
"status": "missing_tag",
}
)
continue
csv_row = csv_lookup.get(normalized_tag)
if not csv_row:
skipped_missing_csv += 1
print(f"[{idx}] No CSV match for asset {assetnum} (location_tag={location_tag_value})")
progress_rows.append(
{
"assetnum": assetnum,
"location_tag": location_tag_value or "",
"name": equipment_name or "",
"status": "no_csv_match",
}
)
continue
processed_csv_tags.add(normalized_tag)
try:
cursor.execute(
update_query,
(
csv_row["proportion"],
csv_row["category_no"],
csv_row["acquisition_cost"],
assetnum,
),
)
if cursor.rowcount:
updated_assets += 1
progress_rows.append(
{
"assetnum": assetnum,
"location_tag": location_tag_value or "",
"name": equipment_name or "",
"status": "updated",
}
)
else:
progress_rows.append(
{
"assetnum": assetnum,
"location_tag": location_tag_value or "",
"name": equipment_name or "",
"status": "to_do",
}
)
except Exception as exc:
try:
connection.rollback()
except Exception:
pass
print(f"[{idx}] Error updating asset {assetnum}: {exc}")
progress_rows.append(
{
"assetnum": assetnum,
"location_tag": location_tag_value or "",
"name": equipment_name or "",
"status": "to_do",
}
)
continue
if idx % 100 == 0:
try:
connection.commit()
except Exception:
connection.rollback()
print(
f"Processed {idx} assets so far. Updated {updated_assets}, "
f"no CSV match {skipped_missing_csv}, missing tag {skipped_missing_tag}."
)
# Capture CSV rows that never matched any asset so the checklist highlights remaining work.
unused_csv_tags = [
(tag, data)
for tag, data in csv_lookup.items()
if tag not in processed_csv_tags
]
if unused_csv_tags:
for unused_tag, csv_row in unused_csv_tags:
progress_rows.append(
{
"assetnum": "",
"location_tag": csv_row.get("raw_location_tag") or unused_tag,
"name": "",
"status": "csv_unprocessed",
}
)
if progress_rows:
progress_df = pd.DataFrame(progress_rows)
progress_csv_path = os.path.join(
os.path.dirname(__file__), "acquisition_cost_progress.csv"
)
try:
progress_df.to_csv(progress_csv_path, index=False)
print(f"Progress checklist saved to {progress_csv_path}")
except Exception as exc:
print(f"Failed to write progress checklist CSV: {exc}")
try:
connection.commit()
except Exception as exc:
print(f"Commit failed: {exc}")
connection.rollback()
duration = datetime.now() - start_time
print(
f"Finished insert_acquisition_cost_data in {duration.total_seconds():.2f}s. "
f"Updated {updated_assets} assets, missing CSV {skipped_missing_csv}, missing tag {skipped_missing_tag}."
)
except Exception as e:
print("Error saat menjalankan insert_acquisition_cost_data:", e)
finally:
if cursor:
try:
cursor.close()
except Exception:
pass
if connection or connection_wo_db:
try:
connection.close()
except Exception:
pass
try:
connection_wo_db.close()
except Exception:
pass
async def query_data():
connection = None
@@ -375,9 +805,71 @@ async def query_data():
# Create cursors using DictCursor
cursor = connection.cursor(cursor_factory=DictCursor)
cursor_wo = connection_production_wo.cursor(cursor_factory=DictCursor)
insert_query = """
INSERT INTO lcc_equipment_tr_data (
id, assetnum, tahun, seq, is_actual,
raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human,
raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human,
raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human,
raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human,
"raw_loss_output_MW", raw_loss_output_price,
"rc_cm_material_cost", "rc_cm_labor_cost",
"rc_pm_material_cost", "rc_pm_labor_cost",
"rc_oh_material_cost", "rc_oh_labor_cost",
"rc_predictive_labor_cost",
created_by, created_at
) VALUES (
%s, %s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s,
%s, %s,
%s, %s,
%s, %s,
%s,
'Sys', NOW()
)
"""
update_query = """
UPDATE lcc_equipment_tr_data
SET seq = %s,
is_actual = %s,
raw_cm_interval = %s,
raw_cm_material_cost = %s,
raw_cm_labor_time = %s,
raw_cm_labor_human = %s,
raw_pm_interval = %s,
raw_pm_material_cost = %s,
raw_pm_labor_time = %s,
raw_pm_labor_human = %s,
raw_oh_interval = %s,
raw_oh_material_cost = %s,
raw_oh_labor_time = %s,
raw_oh_labor_human = %s,
raw_predictive_interval = %s,
raw_predictive_material_cost = %s,
raw_predictive_labor_time = %s,
raw_predictive_labor_human = %s,
"raw_loss_output_MW" = %s,
raw_loss_output_price = %s,
"rc_cm_material_cost" = %s,
"rc_cm_labor_cost" = %s,
"rc_pm_material_cost" = %s,
"rc_pm_labor_cost" = %s,
"rc_oh_material_cost" = %s,
"rc_oh_labor_cost" = %s,
"rc_predictive_labor_cost" = %s,
updated_by = 'Sys',
updated_at = NOW()
WHERE assetnum = %s AND tahun = %s
"""
# TRUNCATE DATA
truncate_query = "TRUNCATE TABLE lcc_equipment_tr_data RESTART IDENTITY"
cursor.execute(truncate_query)
# truncate_query = "TRUNCATE TABLE lcc_equipment_tr_data RESTART IDENTITY"
# cursor.execute(truncate_query)
# Query to fetch all data from the `lcc_ms_equipment_data` table
# query_main = "SELECT * FROM lcc_ms_equipment_data"
@@ -462,189 +954,87 @@ async def query_data():
print(f"Error checking data for assetnum {assetnum}: {e}")
continue
if not data_exists:
# Insert the data if it does not exist yet
if not data_cm_row and not data_pm_row and not data_oh_row and not data_predictive_row:
# If none of the recursive_row data exists
insert_query = """
INSERT INTO lcc_equipment_tr_data (
id, assetnum, tahun, seq, is_actual,
raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human
, raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human
, raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human
, raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human
, "raw_loss_output_MW", raw_loss_output_price
, rc_cm_material_cost, rc_cm_labor_cost
, rc_pm_material_cost, rc_pm_labor_cost
, rc_oh_material_cost, rc_oh_labor_cost
, rc_predictive_labor_cost
, created_by, created_at
) VALUES (
%s, %s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s
, %s, %s
, %s, %s
, %s, %s
, %s
, 'Sys', NOW()
row_values = _build_tr_row_values(
data_cm_row,
data_pm_row,
data_oh_row,
data_predictive_row,
data_tahunan_row,
)
"""
if not data_exists:
cursor.execute(
insert_query,
(
str(uuid4()), # id
assetnum, # assetnum
year, # tahun
seq, # seq
1, # is_actual
0, # raw_cm_interval (at least 1 because there is at least one OH)
0, # raw_cm_material_cost
0, # raw_cm_labor_time
0, # raw_cm_labor_human
0, # raw_pm_interval set default 1
0, # raw_pm_material_cost
0, # raw_pm_labor_time
0, # raw_pm_labor_human
0, # raw_oh_interval set default 1
0, # raw_oh_material_cost
0, # raw_oh_labor_time
0, # raw_oh_labor_human
0, # raw_predictive_interval set default 1
0, # raw_predictive_material_cost
0, # raw_predictive_labor_time
0, # raw_predictive_labor_human
( # "raw_loss_output_MW"
# data_tahunan_row["total_lost"]
0
if data_tahunan_row
else 0
),
( # raw_loss_output_price
# data_tahunan_row["rp_per_kwh"]
0
if data_tahunan_row
else 0
),
0, # rc_cm_material_cost
0, # rc_cm_labor_cost
0, # rc_pm_material_cost
0, # rc_pm_labor_cost
0, # rc_oh_material_cost
0, # rc_oh_labor_cost
0, # rc_predictive_labor_cost
str(uuid4()),
assetnum,
year,
seq,
1,
row_values["raw_cm_interval"],
row_values["raw_cm_material_cost"],
row_values["raw_cm_labor_time"],
row_values["raw_cm_labor_human"],
row_values["raw_pm_interval"],
row_values["raw_pm_material_cost"],
row_values["raw_pm_labor_time"],
row_values["raw_pm_labor_human"],
row_values["raw_oh_interval"],
row_values["raw_oh_material_cost"],
row_values["raw_oh_labor_time"],
row_values["raw_oh_labor_human"],
row_values["raw_predictive_interval"],
row_values["raw_predictive_material_cost"],
row_values["raw_predictive_labor_time"],
row_values["raw_predictive_labor_human"],
row_values["raw_loss_output_MW"],
row_values["raw_loss_output_price"],
row_values["rc_cm_material_cost"],
row_values["rc_cm_labor_cost"],
row_values["rc_pm_material_cost"],
row_values["rc_pm_labor_cost"],
row_values["rc_oh_material_cost"],
row_values["rc_oh_labor_cost"],
row_values["rc_predictive_labor_cost"],
),
)
inserted_this_asset += 1
total_inserted += 1
# print minimal per-year insert log
# print(f"Inserted default data for {assetnum} year {year}")
else:
# If recursive_row data exists
insert_query = """
INSERT INTO lcc_equipment_tr_data (
id, assetnum, tahun, seq, is_actual,
raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human
, raw_pm_interval, raw_pm_material_cost, raw_pm_labor_time, raw_pm_labor_human
, raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human
, raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human
, "raw_loss_output_MW", raw_loss_output_price
, "rc_cm_material_cost", "rc_cm_labor_cost"
, "rc_pm_material_cost", "rc_pm_labor_cost"
, "rc_oh_material_cost", "rc_oh_labor_cost"
, "rc_predictive_labor_cost"
, created_by, created_at
) VALUES (
%s, %s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s, %s, %s,
%s, %s,
%s, %s,
%s, %s,
%s, %s,
%s,
'Sys', NOW()
)
"""
# Normalize row values to avoid inserting NULL and avoid division by zero
raw_cm_interval = data_cm_row.get("raw_cm_interval") if data_cm_row and data_cm_row.get("raw_cm_interval") is not None else 0
raw_cm_material_cost = data_cm_row.get("raw_cm_material_cost") if data_cm_row and data_cm_row.get("raw_cm_material_cost") is not None else 0
avg_cm_material_cost = (raw_cm_material_cost / raw_cm_interval) if raw_cm_interval else 0
raw_cm_labor_time = data_cm_row.get("raw_cm_labor_time") if data_cm_row and data_cm_row.get("raw_cm_labor_time") is not None else 0
raw_cm_labor_human = data_cm_row.get("raw_cm_labor_human") if data_cm_row and data_cm_row.get("raw_cm_labor_human") is not None else 0
raw_pm_interval = data_pm_row.get("raw_pm_interval") if data_pm_row and data_pm_row.get("raw_pm_interval") is not None else 0
raw_pm_material_cost = data_pm_row.get("raw_pm_material_cost") if data_pm_row and data_pm_row.get("raw_pm_material_cost") is not None else 0
raw_pm_labor_time = data_pm_row.get("raw_pm_labor_time") if data_pm_row and data_pm_row.get("raw_pm_labor_time") is not None else 0
raw_pm_labor_human = data_pm_row.get("raw_pm_labor_human") if data_pm_row and data_pm_row.get("raw_pm_labor_human") is not None else 0
raw_oh_interval = data_oh_row.get("raw_oh_interval") if data_oh_row and data_oh_row.get("raw_oh_interval") is not None else 0
raw_oh_material_cost = data_oh_row.get("raw_oh_material_cost") if data_oh_row and data_oh_row.get("raw_oh_material_cost") is not None else 0
raw_oh_labor_time = data_oh_row.get("raw_oh_labor_time") if data_oh_row and data_oh_row.get("raw_oh_labor_time") is not None else 0
raw_oh_labor_human = data_oh_row.get("raw_oh_labor_human") if data_oh_row and data_oh_row.get("raw_oh_labor_human") is not None else 0
raw_pdm_interval = data_predictive_row.get("raw_predictive_interval") if data_predictive_row and data_predictive_row.get("raw_predictive_interval") is not None else 0
raw_pdm_material_cost = data_predictive_row.get("raw_predictive_material_cost") if data_predictive_row and data_predictive_row.get("raw_predictive_material_cost") is not None else 0
raw_pdm_labor_time = data_predictive_row.get("raw_predictive_labor_time") if data_predictive_row and data_predictive_row.get("raw_predictive_labor_time") is not None else 0
raw_pdm_labor_human = data_predictive_row.get("raw_predictive_labor_human") if data_predictive_row and data_predictive_row.get("raw_predictive_labor_human") is not None else 0
raw_loss_output_MW = data_tahunan_row.get("total_lost") if data_tahunan_row and data_tahunan_row.get("total_lost") is not None else 0
raw_loss_output_price = data_tahunan_row.get("rp_per_kwh") if data_tahunan_row and data_tahunan_row.get("rp_per_kwh") is not None else 0
rc_cm_material_cost = data_cm_row.get("raw_cm_material_cost") if data_cm_row and data_cm_row.get("raw_cm_material_cost") is not None else 0
rc_cm_labor_cost = data_cm_row.get("raw_cm_labor_time")*data_cm_row.get("rc_cm_labor_human")*data_tahunan_row.get("man_hour") if data_cm_row and data_cm_row.get("rc_cm_labor_cost") and data_cm_row.get("rc_cm_labor_human") and data_tahunan_row.get("man_hour") is not None else 0
rc_pm_material_cost = data_pm_row.get("raw_pm_material_cost") if data_pm_row and data_pm_row.get("raw_pm_material_cost") is not None else 0
rc_pm_labor_cost = data_pm_row.get("raw_pm_labor_time")*data_pm_row.get("rc_pm_labor_human")*data_tahunan_row.get("man_hour") if data_pm_row and data_pm_row.get("rc_pm_labor_cost") and data_pm_row.get("rc_pm_labor_human") and data_tahunan_row.get("man_hour") is not None else 0
rc_oh_material_cost = data_oh_row.get("raw_oh_material_cost") if data_oh_row and data_oh_row.get("raw_oh_material_cost") is not None else 0
rc_oh_labor_cost = data_oh_row.get("raw_oh_labor_time")*data_oh_row.get("rc_oh_labor_human")*data_tahunan_row.get("man_hour") if data_oh_row and data_oh_row.get("rc_oh_labor_cost") and data_oh_row.get("rc_oh_labor_human") and data_tahunan_row.get("man_hour") is not None else 0
rc_predictive_labor_cost = data_predictive_row.get("raw_predictive_labor_human")*data_tahunan_row.get("man_hour") if data_predictive_row and data_predictive_row.get("rc_predictive_labor_cost") and data_tahunan_row.get("man_hour") is not None else 0
cursor.execute(
insert_query,
update_query,
(
str(uuid4()), # id
assetnum, # assetnum
year, # tahun
seq, # seq
1, # is_actual
raw_cm_interval, # raw_cm_interval
avg_cm_material_cost, # avg raw_cm_material_cost per interval
raw_cm_labor_time, # raw_cm_labor_time
raw_cm_labor_human, # raw_cm_labor_human
raw_pm_interval, # raw_pm_interval
raw_pm_material_cost, # raw_pm_material_cost
raw_pm_labor_time, # raw_pm_labor_time
raw_pm_labor_human,
raw_oh_interval,
raw_oh_material_cost,
raw_oh_labor_time,
raw_oh_labor_human,
raw_pdm_interval,
raw_pdm_material_cost,
raw_pdm_labor_time,
raw_pdm_labor_human,
raw_loss_output_MW,
raw_loss_output_price,
rc_cm_material_cost,
rc_cm_labor_cost,
rc_pm_material_cost,
rc_pm_labor_cost,
rc_oh_material_cost,
rc_oh_labor_cost,
rc_predictive_labor_cost,
seq,
1,
row_values["raw_cm_interval"],
row_values["raw_cm_material_cost"],
row_values["raw_cm_labor_time"],
row_values["raw_cm_labor_human"],
row_values["raw_pm_interval"],
row_values["raw_pm_material_cost"],
row_values["raw_pm_labor_time"],
row_values["raw_pm_labor_human"],
row_values["raw_oh_interval"],
row_values["raw_oh_material_cost"],
row_values["raw_oh_labor_time"],
row_values["raw_oh_labor_human"],
row_values["raw_predictive_interval"],
row_values["raw_predictive_material_cost"],
row_values["raw_predictive_labor_time"],
row_values["raw_predictive_labor_human"],
row_values["raw_loss_output_MW"],
row_values["raw_loss_output_price"],
row_values["rc_cm_material_cost"],
row_values["rc_cm_labor_cost"],
row_values["rc_pm_material_cost"],
row_values["rc_pm_labor_cost"],
row_values["rc_oh_material_cost"],
row_values["rc_oh_labor_cost"],
row_values["rc_predictive_labor_cost"],
assetnum,
year,
),
)
inserted_this_asset += 1
total_inserted += 1
seq = seq + 1
# commit per asset to persist progress and free transaction

@@ -3,12 +3,12 @@ import time
# prefer package-relative imports, but allow running this file directly as a script
try:
from src.modules.equipment.insert_actual_data import query_data, insert_lcca_maximo_corrective_data, insert_ms_equipment_data
from src.modules.equipment.insert_actual_data import query_data, insert_lcca_maximo_corrective_data, insert_ms_equipment_data, insert_acquisition_cost_data
from src.modules.equipment.Prediksi import Prediksi, main as predict_run
from src.modules.equipment.Eac import Eac, main as eac_run
except ImportError:
# fallback when there's no parent package (e.g., python run.py)
from insert_actual_data import query_data, insert_lcca_maximo_corrective_data, insert_ms_equipment_data
from insert_actual_data import query_data, insert_lcca_maximo_corrective_data, insert_ms_equipment_data, insert_acquisition_cost_data
from Prediksi import Prediksi, main as predict_run
from Eac import Eac, main as eac_run
@@ -23,6 +23,12 @@ async def main():
# print(f"Error in query_data: {str(e)}")
# return
# try:
# await insert_acquisition_cost_data()
# except Exception as e:
# print(f"Error in insert_acquisition_cost_data: {str(e)}")
# return
# try:
# await predict_run()
# except Exception as e:
