diff --git a/src/equipment/__pycache__/service.cpython-311.pyc b/src/equipment/__pycache__/service.cpython-311.pyc index 921a422..0ce524a 100644 Binary files a/src/equipment/__pycache__/service.cpython-311.pyc and b/src/equipment/__pycache__/service.cpython-311.pyc differ diff --git a/src/equipment/service.py b/src/equipment/service.py index f7bd855..c18b2f1 100644 --- a/src/equipment/service.py +++ b/src/equipment/service.py @@ -23,6 +23,7 @@ import httpx from src.modules.equipment.run import main from src.modules.equipment.Prediksi import main as predict_main from src.modules.equipment.Eac import main as eac_main +from src.modules.equipment.where_query_sql import get_where_query_sql_all_worktype import datetime import math @@ -120,99 +121,26 @@ CATEGORY_ROLLUP_CHILDREN = _build_category_rollup_children() logger = logging.getLogger(__name__) -MAXIMO_SQL = text( - """ - SELECT - * - FROM public.wo_maximo AS a - WHERE a.asset_unit = '3' - AND a.asset_assetnum = :assetnum - AND a.wonum NOT LIKE 'T%' - AND ( - (a.worktype = 'CM' AND a.wojp8 != 'S1') - OR (a.worktype <> 'CM') - ); - """ -) - -JOINED_MAXIMO_SQL = text( - """ - SELECT * - FROM public.wo_maximo a - LEFT JOIN public.wo_maximo_labtrans b - ON b.wonum = a.wonum - LEFT JOIN lcc_ms_manpower emp - ON UPPER(TRIM(emp."ID Number")) = UPPER(TRIM(b.laborcode)) - WHERE - a.asset_unit = '3' - AND a.wonum NOT LIKE 'T%' - AND a.asset_assetnum = :assetnum - AND ( - a.actfinish IS NULL - OR a.actstart IS NULL - OR (EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0) <= 730 - ) - AND ( - (a.worktype = 'CM' AND a.wojp8 != 'S1') - OR (a.worktype <> 'CM') - ) - AND ( - a.description NOT ILIKE '%U4%' - OR ( - a.description ILIKE '%U3%' - AND a.description ILIKE '%U4%' - ) - ); - """ -) - -async def _fetch_maximo_records( - *, session: AsyncSession, assetnum: str -) -> list[dict[str, Any]]: - """Fetch Maximo rows with a retry to mask transient collector failures.""" - - query = MAXIMO_SQL.bindparams(assetnum=assetnum) - - try: - result = await session.execute(query) - return result.mappings().all() - except AsyncpgInterfaceError as exc: - logger.warning( - "Collector session closed while fetching Maximo data for %s. 
Retrying once.", - assetnum, - ) - try: - async with collector_async_session() as retry_session: - retry_result = await retry_session.execute(query) - return retry_result.mappings().all() - except Exception as retry_exc: - logger.error( - "Retrying Maximo query failed for %s: %s", - assetnum, - retry_exc, - exc_info=True, - ) - except SQLAlchemyError as exc: - logger.error( - "Failed to fetch Maximo data for %s: %s", assetnum, exc, exc_info=True - ) - except Exception as exc: - logger.exception( - "Unexpected error while fetching Maximo data for %s", assetnum - ) - - return [] - async def _fetch_joined_maximo_records( *, session: AsyncSession, assetnum: str ) -> list[dict[str, Any]]: """Fetch Joined Maximo rows with a retry to mask transient collector failures.""" - - query = JOINED_MAXIMO_SQL.bindparams(assetnum=assetnum) - + where_query = get_where_query_sql_all_worktype(assetnum) + + JOINED_MAXIMO_SQL = text( + f""" + SELECT * + FROM public.wo_maximo a + LEFT JOIN public.wo_maximo_labtrans b + ON b.wonum = a.wonum + LEFT JOIN lcc_ms_manpower emp + ON UPPER(TRIM(emp."ID Number")) = UPPER(TRIM(b.laborcode)) + {where_query} + """ + ) try: - result = await session.execute(query) + result = await session.execute(JOINED_MAXIMO_SQL) return result.mappings().all() except AsyncpgInterfaceError as exc: logger.warning( @@ -221,7 +149,7 @@ async def _fetch_joined_maximo_records( ) try: async with collector_async_session() as retry_session: - retry_result = await retry_session.execute(query) + retry_result = await retry_session.execute(JOINED_MAXIMO_SQL) return retry_result.mappings().all() except Exception as retry_exc: logger.error( @@ -358,7 +286,7 @@ async def get_master_by_assetnum( min_seq = equipment_record.minimum_eac_seq if equipment_record else None min_eac_year = equipment_record.minimum_eac_year if equipment_record else None - maximo_record = await _fetch_maximo_records( + maximo_record = await _fetch_joined_maximo_records( session=collector_db_session, assetnum=assetnum ) joined_maximo_record = await _fetch_joined_maximo_records( diff --git a/src/modules/equipment/Prediksi.py b/src/modules/equipment/Prediksi.py index a07f067..38ae297 100644 --- a/src/modules/equipment/Prediksi.py +++ b/src/modules/equipment/Prediksi.py @@ -86,22 +86,14 @@ class Prediksi: query = """ SELECT tahun AS year, - raw_cm_interval AS cm_interval, - raw_cm_material_cost AS cm_cost, - raw_cm_labor_time AS cm_labor_time, - raw_cm_labor_human AS cm_labor_human, - raw_pm_interval AS pm_interval, - raw_pm_material_cost AS pm_cost, - raw_pm_labor_time AS pm_labor_time, - raw_pm_labor_human AS pm_labor_human, - raw_oh_interval AS oh_interval, - raw_oh_material_cost AS oh_cost, - raw_oh_labor_time AS oh_labor_time, - raw_oh_labor_human AS oh_labor_human, - raw_predictive_material_cost AS predictive_material_cost, - raw_predictive_labor_time AS predictive_labor_time, - raw_predictive_labor_human AS predictive_labor_human, - raw_predictive_interval AS predictive_interval + rc_cm_material_cost, + rc_cm_labor_cost, + rc_pm_material_cost, + rc_pm_labor_cost, + rc_oh_material_cost, + rc_oh_labor_cost, + rc_predictive_material_cost, + rc_predictive_labor_cost FROM lcc_equipment_tr_data WHERE assetnum = %s and is_actual=1 @@ -188,62 +180,65 @@ class Prediksi: # Query untuk insert data insert_query = """ INSERT INTO lcc_equipment_tr_data ( - id, + id, seq, is_actual, - raw_pm_interval, - tahun, assetnum, - raw_cm_interval, raw_cm_material_cost, raw_cm_labor_time, raw_cm_labor_human, - raw_pm_material_cost, raw_pm_labor_time, 
raw_pm_labor_human, - raw_oh_interval, raw_oh_material_cost, raw_oh_labor_time, raw_oh_labor_human, - raw_predictive_interval, raw_predictive_material_cost, raw_predictive_labor_time, raw_predictive_labor_human, - created_by, created_at + tahun, assetnum, + rc_cm_material_cost, + rc_cm_labor_cost, + rc_pm_material_cost, + rc_pm_labor_cost, + rc_oh_material_cost, + rc_oh_labor_cost, + rc_predictive_material_cost, + rc_predictive_labor_cost, + created_by, created_at ) VALUES ( - %s, %s, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'Sys', NOW() + %s, %s, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'Sys', NOW() ) """ # If a token was provided, store locally so fetch_api_data can use/refresh it - if token: - self.access_token = token - - # Fetch data from external API (uses instance access_token and will try refresh on 403) - async def fetch_api_data(assetnum: str, year: int) -> dict: - url = self.RELIABILITY_APP_URL - endpoint = f"{url}/main/number-of-failures/{assetnum}/{int(year)}/{int(year)}" - async with httpx.AsyncClient() as client: - try: - current_token = getattr(self, "access_token", None) - response = await client.get( - endpoint, - timeout=30.0, - headers={"Authorization": f"Bearer {current_token}"} if current_token else {}, - ) - response.raise_for_status() - return response.json() - except httpx.HTTPStatusError as e: - status = getattr(e.response, "status_code", None) - # If we get a 403, try to refresh the access token and retry once - if status == 403: - print("Received 403 from reliability API, attempting to refresh access token...") - new_access = await self.refresh_access_token() - if new_access: - try: - response = await client.get( - endpoint, - timeout=30.0, - headers={"Authorization": f"Bearer {new_access}"}, - ) - response.raise_for_status() - return response.json() - except httpx.HTTPError as e2: - print(f"HTTP error occurred after refresh: {e2}") - return {} - print(f"HTTP error occurred: {e}") - return {} - except httpx.HTTPError as e: - print(f"HTTP error occurred: {e}") - return {} + # if token: + # self.access_token = token + + # # Fetch data from external API (uses instance access_token and will try refresh on 403) + # async def fetch_api_data(assetnum: str, year: int) -> dict: + # url = self.RELIABILITY_APP_URL + # endpoint = f"{url}/main/number-of-failures/{assetnum}/{int(year)}/{int(year)}" + # async with httpx.AsyncClient() as client: + # try: + # current_token = getattr(self, "access_token", None) + # response = await client.get( + # endpoint, + # timeout=30.0, + # headers={"Authorization": f"Bearer {current_token}"} if current_token else {}, + # ) + # response.raise_for_status() + # return response.json() + # except httpx.HTTPStatusError as e: + # status = getattr(e.response, "status_code", None) + # # If we get a 403, try to refresh the access token and retry once + # if status == 403: + # print("Received 403 from reliability API, attempting to refresh access token...") + # new_access = await self.refresh_access_token() + # if new_access: + # try: + # response = await client.get( + # endpoint, + # timeout=30.0, + # headers={"Authorization": f"Bearer {new_access}"}, + # ) + # response.raise_for_status() + # return response.json() + # except httpx.HTTPError as e2: + # print(f"HTTP error occurred after refresh: {e2}") + # return {} + # print(f"HTTP error occurred: {e}") + # return {} + # except httpx.HTTPError as e: + # print(f"HTTP error occurred: {e}") + # return {} # Menyiapkan data untuk batch insert # print(f"Data to be 
inserted: {data}") @@ -252,71 +247,63 @@ class Prediksi: max_seq = max_seq + 1 # (token already stored before defining fetch_api_data) # maintain previous cm_interval between iterations using attribute on fetch_api_data - if not hasattr(fetch_api_data, "prev_cm"): - fetch_api_data.prev_cm = None + # if not hasattr(fetch_api_data, "prev_cm"): + # fetch_api_data.prev_cm = None # Update values from API (current year) - api_data = await fetch_api_data(equipment_id, row["year"]) - if api_data and "data" in api_data and isinstance(api_data["data"], list) and len(api_data["data"]) > 0: - try: - cur_cm = float(api_data["data"][0].get("num_fail", row.get("cm_interval", 1))) - except Exception: - cur_cm = float(row.get("cm_interval", 1)) if not pd.isna(row.get("cm_interval", None)) else 1.0 - else: - try: - val = float(row.get("cm_interval", 1)) - cur_cm = val if val >= 1 else 1.0 - except Exception: - cur_cm = 1.0 + # api_data = await fetch_api_data(equipment_id, row["year"]) + # if api_data and "data" in api_data and isinstance(api_data["data"], list) and len(api_data["data"]) > 0: + # try: + # cur_cm = float(api_data["data"][0].get("num_fail", row.get("cm_interval", 1))) + # except Exception: + # cur_cm = float(row.get("cm_interval", 1)) if not pd.isna(row.get("cm_interval", None)) else 1.0 + # else: + # try: + # val = float(row.get("cm_interval", 1)) + # cur_cm = val if val >= 1 else 1.0 + # except Exception: + # cur_cm = 1.0 # Determine previous cm_interval: prefer stored prev_cm, otherwise try API for previous year, else fallback to cur_cm - if fetch_api_data.prev_cm is not None: - prev_cm = float(fetch_api_data.prev_cm) - else: - try: - api_prev = await fetch_api_data(equipment_id, int(row["year"]) - 1) - if api_prev and "data" in api_prev and isinstance(api_prev["data"], list) and len(api_prev["data"]) > 0: - prev_cm = float(api_prev["data"][0].get("num_fail", cur_cm)) - else: - # attempt to use any available previous value from the row if present, otherwise fallback to current - prev_cm = float(row.get("cm_interval", cur_cm)) if not pd.isna(row.get("cm_interval", None)) else cur_cm - except Exception: - prev_cm = cur_cm + # if fetch_api_data.prev_cm is not None: + # prev_cm = float(fetch_api_data.prev_cm) + # else: + # try: + # api_prev = await fetch_api_data(equipment_id, int(row["year"]) - 1) + # if api_prev and "data" in api_prev and isinstance(api_prev["data"], list) and len(api_prev["data"]) > 0: + # prev_cm = float(api_prev["data"][0].get("num_fail", cur_cm)) + # else: + # # attempt to use any available previous value from the row if present, otherwise fallback to current + # prev_cm = float(row.get("cm_interval", cur_cm)) if not pd.isna(row.get("cm_interval", None)) else cur_cm + # except Exception: + # prev_cm = cur_cm # compute difference: current year interval minus previous year interval - try: - cm_interval_diff = float(cur_cm) - float(prev_cm) - except Exception: - cm_interval_diff = 0.0 + # try: + # cm_interval_diff = float(cur_cm) - float(prev_cm) + # except Exception: + # cm_interval_diff = 0.0 # append record using the difference for raw_cm_interval records_to_insert.append( ( str(uuid4()), # id int(max_seq), # seq - float(row["pm_interval"]) if not pd.isna(row.get("pm_interval", None)) else 0.0, - float(row["year"]) if not pd.isna(row.get("year", None)) else 0.0, + int(row["year"]), equipment_id, - cm_interval_diff, - float(row["cm_cost"]) if not pd.isna(row.get("cm_cost", None)) else 0.0, - float(row["cm_labor_time"]) if not pd.isna(row.get("cm_labor_time", None)) else 
0.0, - float(row["cm_labor_human"]) if not pd.isna(row.get("cm_labor_human", None)) else 0.0, - float(row["pm_cost"]) if not pd.isna(row.get("pm_cost", None)) else 0.0, - float(row["pm_labor_time"]) if not pd.isna(row.get("pm_labor_time", None)) else 0.0, - float(row["pm_labor_human"]) if not pd.isna(row.get("pm_labor_human", None)) else 0.0, - float(row["oh_interval"]) if not pd.isna(row.get("oh_interval", None)) else 0.0, - float(row["oh_cost"]) if not pd.isna(row.get("oh_cost", None)) else 0.0, - float(row["oh_labor_time"]) if not pd.isna(row.get("oh_labor_time", None)) else 0.0, - float(row["oh_labor_human"]) if not pd.isna(row.get("oh_labor_human", None)) else 0.0, - float(row["predictive_interval"]) if not pd.isna(row.get("predictive_interval", None)) else 0.0, - float(row["predictive_material_cost"]) if not pd.isna(row.get("predictive_material_cost", None)) else 0.0, - float(row["predictive_labor_time"]) if not pd.isna(row.get("predictive_labor_time", None)) else 0.0, - float(row["predictive_labor_human"]) if not pd.isna(row.get("predictive_labor_human", None)) else 0.0, + float(row.get("rc_cm_material_cost", 0)) if not pd.isna(row.get("rc_cm_material_cost", 0)) else 0.0, + float(row.get("rc_cm_labor_cost", 0)) if not pd.isna(row.get("rc_cm_labor_cost", 0)) else 0.0, + float(row.get("rc_pm_material_cost", 0)) if not pd.isna(row.get("rc_pm_material_cost", 0)) else 0.0, + float(row.get("rc_pm_labor_cost", 0)) if not pd.isna(row.get("rc_pm_labor_cost", 0)) else 0.0, + float(row.get("rc_oh_material_cost", 0)) if not pd.isna(row.get("rc_oh_material_cost", 0)) else 0.0, + float(row.get("rc_oh_labor_cost", 0)) if not pd.isna(row.get("rc_oh_labor_cost", 0)) else 0.0, + float(row.get("rc_predictive_material_cost", 0)) if not pd.isna(row.get("rc_predictive_material_cost", 0)) else 0.0, + float(row.get("rc_predictive_labor_cost", 0)) if not pd.isna(row.get("rc_predictive_labor_cost", 0)) else 0.0, ) ) # store current cm for next iteration - fetch_api_data.prev_cm = cur_cm + # fetch_api_data.prev_cm = cur_cm # Eksekusi batch insert cursor.executemany(insert_query, records_to_insert) @@ -788,24 +775,26 @@ class Prediksi: if "is_actual" in df.columns: recent_df = df[df["is_actual"] == 1] recent_n = recent_df.shape[0] + avg_recent = recent_df[column].mean() + print(f"avg_recent: {avg_recent}") else: recent_df = df recent_n = df.shape[0] recent_n = max(1, recent_n) recent_vals = ( - recent_df.sort_values("year", ascending=False) + recent_df.sort_values("year", ascending=True) .head(recent_n)[column] .dropna() ) - + # print(f"Recent Vals: {recent_vals}") # Fallback ke semua nilai non-na jika tidak ada recent_vals if recent_vals.empty: recent_vals = df[column].dropna() # Jika masih kosong, pakai default (interval minimal 1, lainnya 0) if recent_vals.empty: - avg = 0.0 + avg = 0.0 else: # Pastikan numeric; jika gagal, pakai mean dari yang bisa dikonversi try: @@ -818,7 +807,7 @@ class Prediksi: avg = max(0.0, avg) preds = np.repeat(float(avg), n_future) - + print(preds) else: # Untuk kolom non-cm, gunakan nilai dari last actual year bila ada, # jika tidak ada gunakan last available non-NA value, jika tidak ada pakai 0.0 @@ -980,14 +969,14 @@ async def main(RELIABILITY_APP_URL=RELIABILITY_APP_URL, assetnum=None, token=Non prediksi = Prediksi(RELIABILITY_APP_URL) # If token not provided, sign in to obtain access_token/refresh_token - if token is None: - signin_res = await prediksi.sign_in() - if not getattr(prediksi, "access_token", None): - print("Failed to obtain access token; aborting.") - return - 
else: - # Use provided token as access token - prediksi.access_token = token + # if token is None: + # signin_res = await prediksi.sign_in() + # if not getattr(prediksi, "access_token", None): + # print("Failed to obtain access token; aborting.") + # return + # else: + # # Use provided token as access token + # prediksi.access_token = token # If an assetnum was provided, run only for that assetnum if assetnum: diff --git a/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc b/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc index 9177911..c445d9e 100644 Binary files a/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc and b/src/modules/equipment/__pycache__/Prediksi.cpython-311.pyc differ diff --git a/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc b/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc index 7023e41..f8add15 100644 Binary files a/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc and b/src/modules/equipment/__pycache__/insert_actual_data.cpython-311.pyc differ diff --git a/src/modules/equipment/__pycache__/run.cpython-311.pyc b/src/modules/equipment/__pycache__/run.cpython-311.pyc index 0d30746..c376c1f 100644 Binary files a/src/modules/equipment/__pycache__/run.cpython-311.pyc and b/src/modules/equipment/__pycache__/run.cpython-311.pyc differ diff --git a/src/modules/equipment/insert_actual_data.py b/src/modules/equipment/insert_actual_data.py index a017cc0..2cfe860 100644 --- a/src/modules/equipment/insert_actual_data.py +++ b/src/modules/equipment/insert_actual_data.py @@ -8,7 +8,7 @@ from datetime import datetime import sys import os import httpx - +from src.modules.equipment.where_query_sql import get_where_query_sql sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) from config import get_connection, get_production_connection @@ -106,6 +106,7 @@ def get_recursive_query(cursor, assetnum, worktype="CM"): # ) as d group by d.tahun # ; # """ + where_query = get_where_query_sql(assetnum, worktype) query = f""" select @@ -122,14 +123,8 @@ def get_recursive_query(cursor, assetnum, worktype="CM"): from public.wo_maximo as a LEFT JOIN public.wo_maximo_labtrans AS b ON b.wonum = a.wonum -where - a.asset_unit = '3' - {f"AND a.worktype = '{worktype}'" if worktype != 'CM' else "AND a.worktype in ('CM', 'PROACTIVE', 'WA')"} - AND a.asset_assetnum = '{assetnum}' - and a.wonum not like 'T%' - {f"AND a.wojp8 != 'S1'" if worktype == 'CM' else ""} -group by DATE_PART('year', a.reportdate) -having ROUND(SUM(EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600), 2) <= 730; + {where_query} +group by DATE_PART('year', a.reportdate); """ # Eksekusi query dan fetch hasil cursor.execute(query) @@ -149,7 +144,7 @@ def get_labour_cost_totals(cursor, assetnum: str, worktype: str) -> dict: """Return yearly labor cost totals for a worktype using the standardized query.""" if not assetnum or not worktype: return {} - + where_query = get_where_query_sql(assetnum, worktype) query = f""" SELECT EXTRACT(YEAR FROM x.reportdate)::int AS tahun, @@ -173,25 +168,12 @@ FROM ( a.wonum, a.reportdate, CASE - WHEN (EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0) = 0 - THEN 1 - ELSE (EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0) + WHEN EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0 = 0 THEN 1 + WHEN EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0 > 730 THEN 1 + ELSE EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0 END AS jumlah_jam_kerja - FROM public.wo_maximo 
a - WHERE - a.asset_unit = '3' - AND a.wonum NOT LIKE 'T%' - AND a.asset_assetnum = '{assetnum}' - AND (EXTRACT(EPOCH FROM (a.actfinish - a.actstart)) / 3600.0) <= 730 - AND a.worktype = '{worktype}' - AND ( - a.description NOT ILIKE '%U4%' - OR ( - a.description ILIKE '%U3%' - AND a.description ILIKE '%U4%' - ) - ) + {where_query} ) bw LEFT JOIN public.wo_maximo_labtrans b ON b.wonum = bw.wonum diff --git a/src/modules/equipment/run.py b/src/modules/equipment/run.py index 941f4d0..5773e38 100644 --- a/src/modules/equipment/run.py +++ b/src/modules/equipment/run.py @@ -17,11 +17,11 @@ except ImportError: async def main(): start_time = time.time() - # try: - # await query_data() - # except Exception as e: - # print(f"Error in query_data: {str(e)}") - # return + try: + await query_data() + except Exception as e: + print(f"Error in query_data: {str(e)}") + return try: prediction_result = await predict_run() diff --git a/src/modules/equipment/where_query_sql.py b/src/modules/equipment/where_query_sql.py new file mode 100644 index 0000000..f8bb101 --- /dev/null +++ b/src/modules/equipment/where_query_sql.py @@ -0,0 +1,38 @@ +def get_where_query_sql(assetnum, worktype): + where_query = f""" + where + a.asset_unit = '3' + and a.wonum not like 'T%' + AND a.asset_assetnum = '{assetnum}' + {f"AND a.worktype = '{worktype}'" if worktype != 'CM' else "AND a.worktype in ('CM', 'PROACTIVE', 'EM')"} + {f"AND a.wojp8 != 'S1'" if worktype == 'CM' else ""} + AND ( + a.description NOT ILIKE '%U4%' + OR ( + a.description ILIKE '%U3%' + AND a.description ILIKE '%U4%' + ) + ) + """ + + return where_query + +def get_where_query_sql_all_worktype(assetnum): + where_query = f""" + where + a.asset_unit = '3' + and a.wonum not like 'T%' + AND a.asset_assetnum = '{assetnum}' + AND ( + (a.worktype = 'CM' AND a.wojp8 != 'S1') + OR (a.worktype <> 'CM') + ) + AND ( + a.description NOT ILIKE '%U4%' + OR ( + a.description ILIKE '%U3%' + AND a.description ILIKE '%U4%' + ) + ) + """ + return where_query \ No newline at end of file
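
Review note on src/modules/equipment/where_query_sql.py: both helpers interpolate assetnum (and, in get_where_query_sql, worktype) straight into the WHERE clause with f-strings, and the reworked _fetch_joined_maximo_records drops the :assetnum bind parameter that the deleted JOINED_MAXIMO_SQL constant used. A minimal sketch of a bind-parameter variant for the psycopg2 call sites in insert_actual_data.py is below; the *_params function name and the (where_sql, params) return shape are assumptions for illustration, not code in this patch.

# Sketch only: same filters as get_where_query_sql, but with psycopg2-style
# named placeholders instead of f-string interpolation of caller input.
# Literal '%' in LIKE/ILIKE patterns is doubled because a params mapping is passed.
def get_where_query_sql_params(assetnum: str, worktype: str) -> tuple[str, dict]:
    clauses = [
        "a.asset_unit = '3'",
        "a.wonum NOT LIKE 'T%%'",
        "a.asset_assetnum = %(assetnum)s",
    ]
    params = {"assetnum": assetnum}
    if worktype == "CM":
        # Mirrors the CM branch added in this patch (CM/PROACTIVE/EM plus the wojp8 filter).
        clauses.append("a.worktype IN ('CM', 'PROACTIVE', 'EM')")
        clauses.append("a.wojp8 != 'S1'")
    else:
        clauses.append("a.worktype = %(worktype)s")
        params["worktype"] = worktype
    clauses.append(
        "(a.description NOT ILIKE '%%U4%%' "
        "OR (a.description ILIKE '%%U3%%' AND a.description ILIKE '%%U4%%'))"
    )
    return "WHERE " + "\n  AND ".join(clauses), params

get_recursive_query and get_labour_cost_totals could then pass the returned params as the second argument to cursor.execute, so only the fixed SQL fragments are composed with f-strings.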
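
On the service side, a similar shape keeps the :assetnum bind that the old JOINED_MAXIMO_SQL constant used while still sharing the WHERE text. A sketch under those assumptions follows; get_where_query_sql_all_worktype_bound and fetch_joined_maximo_records_sketch are placeholder names, and the retry/error handling of the real _fetch_joined_maximo_records is omitted for brevity.

from sqlalchemy import text

def get_where_query_sql_all_worktype_bound() -> str:
    # Hypothetical variant of get_where_query_sql_all_worktype: same filters,
    # but the asset number stays a bind parameter instead of being inlined.
    return """
        WHERE a.asset_unit = '3'
          AND a.wonum NOT LIKE 'T%'
          AND a.asset_assetnum = :assetnum
          AND ((a.worktype = 'CM' AND a.wojp8 != 'S1') OR (a.worktype <> 'CM'))
          AND (a.description NOT ILIKE '%U4%'
               OR (a.description ILIKE '%U3%' AND a.description ILIKE '%U4%'))
    """

async def fetch_joined_maximo_records_sketch(session, assetnum: str):
    # Compose only trusted SQL text; the asset number travels as a bound parameter.
    joined_maximo_sql = text(
        f"""
        SELECT *
        FROM public.wo_maximo a
        LEFT JOIN public.wo_maximo_labtrans b ON b.wonum = a.wonum
        LEFT JOIN lcc_ms_manpower emp
            ON UPPER(TRIM(emp."ID Number")) = UPPER(TRIM(b.laborcode))
        {get_where_query_sql_all_worktype_bound()}
        """
    ).bindparams(assetnum=assetnum)
    result = await session.execute(joined_maximo_sql)
    return result.mappings().all()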