|
|
|
|
@ -28,6 +28,11 @@ class Prediksi:
|
|
|
|
|
self.RELIABILITY_APP_URL = RELIABILITY_APP_URL or os.getenv(
|
|
|
|
|
"RELIABILITY_APP_URL", "http://192.168.1.82:8000/reliability"
|
|
|
|
|
)
|
|
|
|
|
# Base URL for auth endpoints (sign-in, refresh-token)
|
|
|
|
|
self.AUTH_APP_URL = os.getenv("AUTH_APP_URL", "http://192.168.1.82:8000")
|
|
|
|
|
# tokens will be stored here after sign-in/refresh
|
|
|
|
|
self.access_token = None
|
|
|
|
|
self.refresh_token = None
|
|
|
|
|
|
|
|
|
|
# Fungsi untuk mengambil data dari database
|
|
|
|
|
def __get_param(self, equipment_id):
|
|
|
|
|
@ -199,22 +204,44 @@ class Prediksi:
|
|
|
|
|
)
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# Fetch data from external API
|
|
|
|
|
# If a token was provided, store locally so fetch_api_data can use/refresh it
|
|
|
|
|
if token:
|
|
|
|
|
self.access_token = token
|
|
|
|
|
|
|
|
|
|
# Fetch data from external API (uses instance access_token and will try refresh on 403)
|
|
|
|
|
# Fetch data from the external reliability API.
# Closure over `self`: uses the instance access_token and retries once
# after refreshing the token when the server answers 403.
async def fetch_api_data(assetnum: str, year: int) -> dict:
    """Fetch number-of-failures data for *assetnum* in *year*.

    Returns the parsed JSON payload as a dict on success, or an empty
    dict on any HTTP error (including a failed retry after a token
    refresh), so callers can always treat the result as a mapping.
    """
    base_url = self.RELIABILITY_APP_URL
    # Build the endpoint once so the initial call and the 403 retry
    # are guaranteed to hit the same URL.
    endpoint = f"{base_url}/main/number-of-failures/{assetnum}/{int(year)}/{int(year)}"
    async with httpx.AsyncClient() as client:
        try:
            current_token = getattr(self, "access_token", None)
            response = await client.get(
                endpoint,
                timeout=30.0,
                # Only send an Authorization header when a token is available.
                headers={"Authorization": f"Bearer {current_token}"} if current_token else {},
            )
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            status = getattr(e.response, "status_code", None)
            # A 403 usually means the access token expired: refresh it
            # and retry the same request exactly once.
            if status == 403:
                print("Received 403 from reliability API, attempting to refresh access token...")
                new_access = await self.refresh_access_token()
                if new_access:
                    try:
                        response = await client.get(
                            endpoint,
                            timeout=30.0,
                            headers={"Authorization": f"Bearer {new_access}"},
                        )
                        response.raise_for_status()
                        return response.json()
                    except httpx.HTTPError as e2:
                        print(f"HTTP error occurred after refresh: {e2}")
                        return {}
            print(f"HTTP error occurred: {e}")
            return {}
        except httpx.HTTPError as e:
            # Network-level failures (timeouts, connection errors, ...).
            print(f"HTTP error occurred: {e}")
            return {}
|
|
|
|
|
@ -224,22 +251,46 @@ class Prediksi:
|
|
|
|
|
records_to_insert = []
|
|
|
|
|
for _, row in data.iterrows():
|
|
|
|
|
max_seq = max_seq + 1
|
|
|
|
|
# Update values from API
|
|
|
|
|
# (token already stored before defining fetch_api_data)
|
|
|
|
|
# maintain previous cm_interval between iterations using attribute on fetch_api_data
|
|
|
|
|
if not hasattr(fetch_api_data, "prev_cm"):
|
|
|
|
|
fetch_api_data.prev_cm = None
|
|
|
|
|
|
|
|
|
|
# Update values from API (current year)
|
|
|
|
|
api_data = await fetch_api_data(equipment_id, row["year"])
|
|
|
|
|
if api_data and "data" in api_data and isinstance(api_data["data"], list) and len(api_data["data"]) > 0:
|
|
|
|
|
# Get current num_fail (ensure numeric)
|
|
|
|
|
try:
|
|
|
|
|
cm_interval_prediction = float(api_data["data"][0].get("num_fail", row.get("cm_interval", 1)))
|
|
|
|
|
cur_cm = float(api_data["data"][0].get("num_fail", row.get("cm_interval", 1)))
|
|
|
|
|
except Exception:
|
|
|
|
|
cm_interval_prediction = float(row.get("cm_interval", 1)) if not pd.isna(row.get("cm_interval", None)) else 1
|
|
|
|
|
cur_cm = float(row.get("cm_interval", 1)) if not pd.isna(row.get("cm_interval", None)) else 1.0
|
|
|
|
|
else:
|
|
|
|
|
# Fallback: ensure numeric scalar, not a tuple
|
|
|
|
|
try:
|
|
|
|
|
val = float(row.get("cm_interval", 1))
|
|
|
|
|
cm_interval_prediction = val if val >= 1 else 1.0
|
|
|
|
|
cur_cm = val if val >= 1 else 1.0
|
|
|
|
|
except Exception:
|
|
|
|
|
cur_cm = 1.0
|
|
|
|
|
|
|
|
|
|
# Determine previous cm_interval: prefer stored prev_cm, otherwise try API for previous year, else fallback to cur_cm
|
|
|
|
|
if fetch_api_data.prev_cm is not None:
|
|
|
|
|
prev_cm = float(fetch_api_data.prev_cm)
|
|
|
|
|
else:
|
|
|
|
|
try:
|
|
|
|
|
api_prev = await fetch_api_data(equipment_id, int(row["year"]) - 1)
|
|
|
|
|
if api_prev and "data" in api_prev and isinstance(api_prev["data"], list) and len(api_prev["data"]) > 0:
|
|
|
|
|
prev_cm = float(api_prev["data"][0].get("num_fail", cur_cm))
|
|
|
|
|
else:
|
|
|
|
|
# attempt to use any available previous value from the row if present, otherwise fallback to current
|
|
|
|
|
prev_cm = float(row.get("cm_interval", cur_cm)) if not pd.isna(row.get("cm_interval", None)) else cur_cm
|
|
|
|
|
except Exception:
|
|
|
|
|
cm_interval_prediction = 1.0
|
|
|
|
|
prev_cm = cur_cm
|
|
|
|
|
|
|
|
|
|
# compute difference: current year interval minus previous year interval
|
|
|
|
|
try:
|
|
|
|
|
cm_interval_diff = float(cur_cm) - float(prev_cm)
|
|
|
|
|
except Exception:
|
|
|
|
|
cm_interval_diff = 0.0
|
|
|
|
|
|
|
|
|
|
# append record using the difference for raw_cm_interval
|
|
|
|
|
records_to_insert.append(
|
|
|
|
|
(
|
|
|
|
|
str(uuid4()),
|
|
|
|
|
@ -247,7 +298,7 @@ class Prediksi:
|
|
|
|
|
float(row["pm_interval"]) if not pd.isna(row.get("pm_interval", None)) else 0.0,
|
|
|
|
|
float(row["year"]) if not pd.isna(row.get("year", None)) else 0.0,
|
|
|
|
|
equipment_id,
|
|
|
|
|
cm_interval_prediction,
|
|
|
|
|
cm_interval_diff,
|
|
|
|
|
float(row["cm_cost"]) if not pd.isna(row.get("cm_cost", None)) else 0.0,
|
|
|
|
|
float(row["cm_labor_time"]) if not pd.isna(row.get("cm_labor_time", None)) else 0.0,
|
|
|
|
|
float(row["cm_labor_human"]) if not pd.isna(row.get("cm_labor_human", None)) else 0.0,
|
|
|
|
|
@ -267,6 +318,9 @@ class Prediksi:
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# store current cm for next iteration
|
|
|
|
|
fetch_api_data.prev_cm = cur_cm
|
|
|
|
|
|
|
|
|
|
# Eksekusi batch insert
|
|
|
|
|
cursor.executemany(insert_query, records_to_insert)
|
|
|
|
|
connection.commit()
|
|
|
|
|
@ -386,6 +440,60 @@ class Prediksi:
|
|
|
|
|
if connection:
|
|
|
|
|
connection.close()
|
|
|
|
|
|
|
|
|
|
# Authentication: sign-in and refresh helpers
|
|
|
|
|
# Authentication: sign-in helper
async def sign_in(self, username: str = "user14", password: str = "password") -> "dict | None":
    """Sign in against ``AUTH_APP_URL/sign-in`` with the given credentials.

    On success stores ``access_token`` and ``refresh_token`` on the
    instance (when present in the response) and returns the parsed
    response dict. Returns ``None`` on any HTTP error.

    NOTE(review): the default credentials are hard-coded test values —
    consider sourcing them from configuration/environment instead.
    """
    try:
        async with httpx.AsyncClient() as client:
            resp = await client.post(
                f"{self.AUTH_APP_URL}/sign-in",
                json={"username": username, "password": password},
                timeout=30.0,
            )
            resp.raise_for_status()
            data = resp.json()
            if isinstance(data, dict) and "data" in data:
                d = data.get("data") or {}
                # Store tokens so later API calls can authenticate/refresh.
                self.access_token = d.get("access_token")
                self.refresh_token = d.get("refresh_token")
            # Return the full parsed payload even when it lacks a "data"
            # key, so callers can inspect the raw server response.
            return data
    except httpx.HTTPError as e:
        print(f"Sign-in failed: {e}")
        return None
|
|
|
|
|
|
|
|
|
|
async def refresh_access_token(self) -> "str | None":
    """Refresh the access token via ``AUTH_APP_URL/refresh-token``.

    Sends the stored ``refresh_token`` as a Bearer credential. On
    success updates ``self.access_token`` and returns the new token.
    Returns ``None`` when no refresh token is stored, when the response
    carries no new access token, or on any HTTP failure.
    """
    # Without a refresh token there is nothing to exchange.
    if not getattr(self, "refresh_token", None):
        print("No refresh token available to refresh access token.")
        return None
    try:
        async with httpx.AsyncClient() as client:
            resp = await client.get(
                f"{self.AUTH_APP_URL}/refresh-token",
                headers={"Authorization": f"Bearer {self.refresh_token}"},
                timeout=30.0,
            )
            resp.raise_for_status()
            data = resp.json()
            if isinstance(data, dict) and "data" in data:
                new_access = data.get("data", {}).get("access_token")
                if new_access:
                    self.access_token = new_access
                    print("Access token refreshed.")
                    return new_access
            print("Refresh response did not contain a new access token.")
            return None
    except httpx.HTTPError as e:
        print(f"Error refreshing token: {e}")
        return None
|
|
|
|
|
|
|
|
|
|
# ======================================================================================================================================================
|
|
|
|
|
|
|
|
|
|
async def predict_equipment_data(self, assetnum, token):
|
|
|
|
|
@ -445,26 +553,180 @@ class Prediksi:
|
|
|
|
|
|
|
|
|
|
# Prediksi untuk setiap kolom
|
|
|
|
|
for column in df.columns:
|
|
|
|
|
if column != "year":
|
|
|
|
|
if "cost" in column.lower():
|
|
|
|
|
# Prediksi Future Value
|
|
|
|
|
nper = max_year - df["year"].max()
|
|
|
|
|
pv = -df[column].iloc[-1]
|
|
|
|
|
predictions[column] = self.__future_value_predict(
|
|
|
|
|
rate, nper, pmt, pv, future_years
|
|
|
|
|
)
|
|
|
|
|
elif df[column].nunique() < 5:
|
|
|
|
|
predictions[column] = exponential_smoothing_predict(
|
|
|
|
|
column, future_years
|
|
|
|
|
)
|
|
|
|
|
elif df[column].isnull().sum() > 0:
|
|
|
|
|
predictions[column] = decision_tree_predict(
|
|
|
|
|
column, future_years
|
|
|
|
|
if column == "year":
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
n_future = len(future_years)
|
|
|
|
|
col_lower = column.lower()
|
|
|
|
|
try:
|
|
|
|
|
# Case untuk kolom yang terkait dengan corrective maintenance (cm)
|
|
|
|
|
if "cm" in col_lower:
|
|
|
|
|
# Tentukan jumlah baris recent yang dianggap actual jika kolom is_actual ada
|
|
|
|
|
if "is_actual" in df.columns:
|
|
|
|
|
recent_df = df[df["is_actual"] == 1]
|
|
|
|
|
recent_n = recent_df.shape[0]
|
|
|
|
|
else:
|
|
|
|
|
recent_df = df
|
|
|
|
|
recent_n = df.shape[0]
|
|
|
|
|
|
|
|
|
|
recent_n = max(1, recent_n)
|
|
|
|
|
recent_vals = (
|
|
|
|
|
recent_df.sort_values("year", ascending=False)
|
|
|
|
|
.head(recent_n)[column]
|
|
|
|
|
.dropna()
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Fallback ke semua nilai non-na jika tidak ada recent_vals
|
|
|
|
|
if recent_vals.empty:
|
|
|
|
|
recent_vals = df[column].dropna()
|
|
|
|
|
|
|
|
|
|
# Jika masih kosong, pakai default (interval minimal 1, lainnya 0)
|
|
|
|
|
if recent_vals.empty:
|
|
|
|
|
avg = 0.0
|
|
|
|
|
else:
|
|
|
|
|
# Pastikan numeric; jika gagal, pakai mean dari yang bisa dikonversi
|
|
|
|
|
try:
|
|
|
|
|
avg = float(np.nanmean(recent_vals.astype(float)))
|
|
|
|
|
except Exception:
|
|
|
|
|
# jika conversion gagal gunakan mean pandas (objek mungkin numeric-like)
|
|
|
|
|
avg = float(recent_vals.mean())
|
|
|
|
|
|
|
|
|
|
if "interval" in col_lower:
|
|
|
|
|
avg = max(0.0, avg)
|
|
|
|
|
|
|
|
|
|
preds = np.repeat(float(avg), n_future)
|
|
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
predictions[column] = linear_regression_predict(
|
|
|
|
|
column, future_years
|
|
|
|
|
)
|
|
|
|
|
# Untuk kolom non-cm, gunakan nilai dari last actual year bila ada,
|
|
|
|
|
# jika tidak ada gunakan last available non-NA value, jika tidak ada pakai 0.0
|
|
|
|
|
if "is_actual" in df.columns and not df[df["is_actual"] == 1].empty:
|
|
|
|
|
last_actual_year_series = df[df["is_actual"] == 1]["year"]
|
|
|
|
|
last_actual_year = (
|
|
|
|
|
int(last_actual_year_series.max())
|
|
|
|
|
if not last_actual_year_series.isna().all()
|
|
|
|
|
else int(df["year"].max())
|
|
|
|
|
)
|
|
|
|
|
else:
|
|
|
|
|
last_actual_year = int(df["year"].max())
|
|
|
|
|
|
|
|
|
|
row_vals = df[df["year"] == last_actual_year]
|
|
|
|
|
value = None
|
|
|
|
|
|
|
|
|
|
if not row_vals.empty:
|
|
|
|
|
val = row_vals[column].iloc[-1]
|
|
|
|
|
if not pd.isna(val):
|
|
|
|
|
try:
|
|
|
|
|
value = float(val)
|
|
|
|
|
except Exception:
|
|
|
|
|
# jika bukan numeric, set 0.0
|
|
|
|
|
value = 0.0
|
|
|
|
|
|
|
|
|
|
if value is None:
|
|
|
|
|
non_na = df[column].dropna()
|
|
|
|
|
if not non_na.empty:
|
|
|
|
|
try:
|
|
|
|
|
value = float(non_na.iloc[-1])
|
|
|
|
|
except Exception:
|
|
|
|
|
value = 0.0
|
|
|
|
|
else:
|
|
|
|
|
value = 0.0
|
|
|
|
|
|
|
|
|
|
preds = np.repeat(float(value), n_future)
|
|
|
|
|
|
|
|
|
|
except Exception:
|
|
|
|
|
# Jika terjadi error unexpected, fallback ke nol
|
|
|
|
|
preds = np.repeat(0.0, n_future)
|
|
|
|
|
|
|
|
|
|
# Pastikan semua prediksi bernilai non-negatif float dan berbentuk list sesuai panjang future_years
|
|
|
|
|
preds = np.abs(np.array(preds, dtype=float))
|
|
|
|
|
predictions[column] = preds.tolist()
|
|
|
|
|
# if "cost" in column.lower():
|
|
|
|
|
# # Prediksi Future Value
|
|
|
|
|
# nper = max_year - df["year"].max()
|
|
|
|
|
# pv = -df[column].iloc[-1]
|
|
|
|
|
# predictions[column] = self.__future_value_predict(
|
|
|
|
|
# rate, nper, pmt, pv, future_years
|
|
|
|
|
# )
|
|
|
|
|
# elif df[column].nunique() < 5:
|
|
|
|
|
# predictions[column] = exponential_smoothing_predict(
|
|
|
|
|
# column, future_years
|
|
|
|
|
# )
|
|
|
|
|
# elif df[column].isnull().sum() > 0:
|
|
|
|
|
# predictions[column] = decision_tree_predict(
|
|
|
|
|
# column, future_years
|
|
|
|
|
# )
|
|
|
|
|
# else:
|
|
|
|
|
# predictions[column] = linear_regression_predict(
|
|
|
|
|
# column, future_years
|
|
|
|
|
# )
|
|
|
|
|
|
|
|
|
|
# for column in df.columns:
|
|
|
|
|
# if column != "year":
|
|
|
|
|
# if "cost" in column.lower():
|
|
|
|
|
# # Prediksi Future Value
|
|
|
|
|
# # ensure nper is an integer and non-negative
|
|
|
|
|
# try:
|
|
|
|
|
# nper = int(max_year - df["year"].max())
|
|
|
|
|
# except Exception:
|
|
|
|
|
# nper = 0
|
|
|
|
|
# if nper < 0:
|
|
|
|
|
# nper = 0
|
|
|
|
|
|
|
|
|
|
# # safe conversion of last observed value to numeric present value (pv)
|
|
|
|
|
# try:
|
|
|
|
|
# last_val = df[column].iloc[-1]
|
|
|
|
|
# pv = -float(last_val) if not pd.isna(last_val) else 0.0
|
|
|
|
|
# except Exception:
|
|
|
|
|
# pv = 0.0
|
|
|
|
|
|
|
|
|
|
# # compute future values and ensure preds is a numpy float array
|
|
|
|
|
# fv_list = self.__future_value_predict(
|
|
|
|
|
# rate, nper, pmt, pv, future_years
|
|
|
|
|
# )
|
|
|
|
|
# preds = np.array(fv_list, dtype=float)
|
|
|
|
|
# predictions[column] = preds
|
|
|
|
|
# elif df[column].nunique() < 5:
|
|
|
|
|
# preds = exponential_smoothing_predict(column, future_years)
|
|
|
|
|
# elif df[column].isnull().sum() > 0:
|
|
|
|
|
# preds = decision_tree_predict(column, future_years)
|
|
|
|
|
# else:
|
|
|
|
|
# # Produce sideways / fluctuating predictions around recent level (deterministic)
|
|
|
|
|
# series = df[column].dropna().values
|
|
|
|
|
# if len(series) == 0:
|
|
|
|
|
# base = 0.0
|
|
|
|
|
# else:
|
|
|
|
|
# base = float(np.mean(series[-3:])) if len(series) >= 3 else float(series[-1])
|
|
|
|
|
# # amplitude based on historical std, fallback to a small fraction of base
|
|
|
|
|
# hist_std = float(np.std(series)) if len(series) > 1 else max(abs(base) * 0.01, 0.0)
|
|
|
|
|
# amp = max(hist_std, abs(base) * 0.01)
|
|
|
|
|
# t = np.arange(len(future_years))
|
|
|
|
|
# preds = base + amp * np.sin(2 * np.pi * t / max(len(future_years), 1))
|
|
|
|
|
# # avoid negative predictions for inherently non-negative series
|
|
|
|
|
# preds = np.where(preds < 0, 0, preds)
|
|
|
|
|
|
|
|
|
|
# # normalize preds to numpy float array
|
|
|
|
|
# preds = np.array(preds, dtype=float)
|
|
|
|
|
|
|
|
|
|
# # Columns containing "human" should be rounded to one decimal and clamped 0.0-3.0
|
|
|
|
|
# if "human" in column.lower():
|
|
|
|
|
# # humans must be whole numbers (no decimals) and capped between 0 and 3
|
|
|
|
|
# preds = np.nan_to_num(preds, nan=0.0)
|
|
|
|
|
# preds = np.rint(preds) # round to nearest integer
|
|
|
|
|
# preds = np.clip(preds, 0, 3).astype(int)
|
|
|
|
|
|
|
|
|
|
# # Columns containing "labor_time" should be reasonable yearly hours.
|
|
|
|
|
# # If predictions are unrealistically large, scale them down proportionally to a sane max (e.g., 2000 hours/year),
|
|
|
|
|
# # then round to one decimal and ensure non-negative.
|
|
|
|
|
# if "labor_time" in column.lower():
|
|
|
|
|
# max_yearly_hours = 2000.0
|
|
|
|
|
# current_max = np.nanmax(preds) if preds.size > 0 else 0.0
|
|
|
|
|
# if current_max > max_yearly_hours and current_max > 0:
|
|
|
|
|
# scale = max_yearly_hours / current_max
|
|
|
|
|
# preds = preds * scale
|
|
|
|
|
# preds = np.clip(preds, 0.0, max_yearly_hours)
|
|
|
|
|
# preds = np.round(preds, 1)
|
|
|
|
|
|
|
|
|
|
# predictions[column] = preds
|
|
|
|
|
|
|
|
|
|
# Konversi hasil ke DataFrame
|
|
|
|
|
predictions_df = pd.DataFrame(predictions)
|
|
|
|
|
@ -489,8 +751,28 @@ class Prediksi:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
RELIABILITY_APP_URL = os.getenv("RELIABILITY_APP_URL", "http://192.168.1.82:8000/reliability")
|
|
|
|
|
async def main(RELIABILITY_APP_URL=RELIABILITY_APP_URL):
|
|
|
|
|
async def main(RELIABILITY_APP_URL=RELIABILITY_APP_URL, assetnum=None):
|
|
|
|
|
connection = None
|
|
|
|
|
try:
|
|
|
|
|
prediksi = Prediksi(RELIABILITY_APP_URL)
|
|
|
|
|
|
|
|
|
|
# Sign in to obtain access_token/refresh_token before processing
|
|
|
|
|
signin_res = await prediksi.sign_in()
|
|
|
|
|
if not getattr(prediksi, "access_token", None):
|
|
|
|
|
print("Failed to obtain access token; aborting.")
|
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
# If an assetnum was provided, run only for that assetnum
|
|
|
|
|
if assetnum:
|
|
|
|
|
print(f"Processing single assetnum: {assetnum}")
|
|
|
|
|
try:
|
|
|
|
|
await prediksi.predict_equipment_data(assetnum, prediksi.access_token)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
print(f"Error processing {assetnum}: {e}")
|
|
|
|
|
print("Selesai.")
|
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
# Otherwise fetch all assetnums from DB and loop
|
|
|
|
|
connections = get_connection()
|
|
|
|
|
connection = connections[0] if isinstance(connections, tuple) else connections
|
|
|
|
|
if connection is None:
|
|
|
|
|
@ -502,28 +784,24 @@ async def main(RELIABILITY_APP_URL=RELIABILITY_APP_URL):
|
|
|
|
|
cursor.execute(query_main)
|
|
|
|
|
results = cursor.fetchall()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
prediksi = Prediksi(RELIABILITY_APP_URL)
|
|
|
|
|
token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmcmVzaCI6ZmFsc2UsImlhdCI6MTc2MjQxODk5My4xNzI4NTYsImp0aSI6ImJ1OU0xQVlLSTZENTd2cC1OaDgtUlEiLCJ0eXBlIjoiYWNjZXNzIiwic3ViIjoiMzg1NzJhOTItZjE2Yy00MWIyLThjNmYtYWZhNTcyMzhhNWU3IiwibmJmIjoxNzYyNDE4OTkzLCJjc3JmIjoiNjY5NzVjNDEtNTg0ZS00OGFkLWJjMmItMDNlZDEyZDM2ZDczIiwiZXhwIjoxNzYyNDI2MTkzLCJub25jZSI6ImYzMThkNDVkNmYzZWRjMzNiN2Q0MmE0MGRkNDJkNDRhIn0.elDnyaoeJ48oOIUdMRZjt7gGICmK-2Awg6Rbl_BZ1PQ"
|
|
|
|
|
|
|
|
|
|
for idx, row in enumerate(results, start=1):
|
|
|
|
|
assetnum = row.get("assetnum") if hasattr(row, "get") else row[0]
|
|
|
|
|
if not assetnum or str(assetnum).strip() == "":
|
|
|
|
|
current_asset = row.get("assetnum") if hasattr(row, "get") else row[0]
|
|
|
|
|
if not current_asset or str(current_asset).strip() == "":
|
|
|
|
|
print(f"[{idx}/{len(results)}] Skipping empty assetnum")
|
|
|
|
|
continue
|
|
|
|
|
print(f"[{idx}/{len(results)}] Processing assetnum: {assetnum}")
|
|
|
|
|
print(f"[{idx}/{len(results)}] Processing assetnum: {current_asset}")
|
|
|
|
|
try:
|
|
|
|
|
await prediksi.predict_equipment_data(assetnum, token)
|
|
|
|
|
await prediksi.predict_equipment_data(current_asset, prediksi.access_token)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
print(f"Error processing {assetnum}: {e}")
|
|
|
|
|
print(f"Error processing {current_asset}: {e}")
|
|
|
|
|
|
|
|
|
|
print("Selesai.")
|
|
|
|
|
except Exception as e:
|
|
|
|
|
print(f"Error getting database connection: {e}")
|
|
|
|
|
return
|
|
|
|
|
except Exception as e:
|
|
|
|
|
print(f"Error getting database connection: {e}")
|
|
|
|
|
print(f"Error in main: {e}")
|
|
|
|
|
return
|
|
|
|
|
finally:
|
|
|
|
|
if connection:
|
|
|
|
|
connection.close()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
|
|