Update app.py
app.py
CHANGED
@@ -2,7 +2,7 @@ import os
 import sys
 from datetime import datetime
 from pathlib import Path
-from typing import Tuple, Any,
+from typing import Tuple, Any, List

 import duckdb
 import pandas as pd
@@ -18,8 +18,8 @@ from reportlab.pdfgen import canvas
 # Basic configuration
 # -------------------------------------------------------------------
 APP_TITLE = "ALCO Liquidity & Interest-Rate Risk Dashboard"
-TABLE_FQN = "my_db.main.masterdataset_v"
-VIEW_FQN = "my_db.main.positions_v"
+TABLE_FQN = "my_db.main.masterdataset_v"
+VIEW_FQN = "my_db.main.positions_v"
 EXPORT_DIR = Path("exports")
 EXPORT_DIR.mkdir(exist_ok=True)

@@ -32,35 +32,21 @@ def connect_md() -> duckdb.DuckDBPyConnection:
     if not token:
         raise RuntimeError("MOTHERDUCK_TOKEN is not set. Add it as a Space secret.")
     try:
-
-        return conn
+        return duckdb.connect(f"md:?motherduck_token={token}")
     except Exception as e:
-        print("
+        print("❌ Connection failed:", e, file=sys.stderr)
         raise


 # -------------------------------------------------------------------
-# Column discovery & dynamic SQL
+# Column discovery & dynamic SQL
 # -------------------------------------------------------------------
-
-    "
-
-
-    "
-    "currency",
-    "Portfolio_value",
-    "Interest_rate",
-    "days_to_maturity",
+PRODUCT_ASSETS = [
+    "assets"
+]
+PRODUCT_SOF = [
+    "fd"
 ]
-
-PRODUCT_ASSETS = (
-    "loan", "overdraft", "advances", "bills", "bill", "tbond", "t-bond", "tbill",
-    "t-bill", "repo_asset", "assets"
-)
-PRODUCT_SOF = (
-    "fd", "term_deposit", "td", "savings", "current", "call", "repo_liab"
-)
-

 def discover_columns(conn: duckdb.DuckDBPyConnection, table_fqn: str) -> List[str]:
     q = f"""
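Note: PRODUCT_ASSETS and PRODUCT_SOF are cut down here from the full product-code lists to just "assets" and "fd", so any other product code (for example "loan" or "savings" from the old tuples) will now land in the 'Unknown' bucket of positions_v and drop out of the Assets/SoF figures. A minimal sanity check after a refresh, assuming a working MOTHERDUCK_TOKEN (illustrative only, not part of the app):

    # Count positions and value per derived bucket; a large 'Unknown' row
    # means the narrowed product lists are not matching the data.
    conn = connect_md()
    print(conn.execute(
        "SELECT bucket, COUNT(*) AS n, SUM(Portfolio_value) AS pv "
        "FROM my_db.main.positions_v GROUP BY bucket ORDER BY bucket"
    ).fetchdf())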
@@ -70,265 +56,46 @@ def discover_columns(conn: duckdb.DuckDBPyConnection, table_fqn: str) -> List[str]:
       AND table_name = split_part('{table_fqn}', '.', 3)
     """
     df = conn.execute(q).fetchdf()
-    return
+    return df["col"].tolist()


 def build_view_sql(existing_cols: List[str]) -> str:
-
-
-
+    wanted = [
+        "as_of_date", "product", "months", "segments",
+        "currency", "Portfolio_value", "Interest_rate",
+        "days_to_maturity"
+    ]
+    select_list = []
+    for c in wanted:
         if c.lower() in existing_cols:
-
+            select_list.append(c)
         else:
-            # use sensible defaults for types
             if c in ("Portfolio_value", "Interest_rate", "days_to_maturity", "months"):
-
+                select_list.append(f"CAST(NULL AS DOUBLE) AS {c}")
             else:
-
-
+                select_list.append(f"CAST(NULL AS VARCHAR) AS {c}")
+
+    sof_list = ", ".join([f"'{p}'" for p in PRODUCT_SOF])
+    asset_list = ", ".join([f"'{p}'" for p in PRODUCT_ASSETS])

-    # Add bucket derived from product (which must exist; we hard-require product & Portfolio_value & days_to_maturity)
-    # If 'product' doesn't exist, the app can't work; guard above (we will assert later).
     bucket_case = (
-        "CASE "
-        f"WHEN lower(product) IN ({
-        f"WHEN lower(product) IN ({
-        "ELSE 'Unknown' END AS bucket"
+        f"CASE "
+        f"WHEN lower(product) IN ({sof_list}) THEN 'SoF' "
+        f"WHEN lower(product) IN ({asset_list}) THEN 'Assets' "
+        f"ELSE 'Unknown' END AS bucket"
     )

-
+    select_sql = ",\n ".join(select_list + [bucket_case])
     return f"""
     CREATE OR REPLACE VIEW {VIEW_FQN} AS
     SELECT
-        {
+        {select_sql}
     FROM {TABLE_FQN};
     """


-def make_max_date_sql(has_asof: bool) -> str:
-    if not has_asof:
-        # No as_of_date column -> return N/A row
-        return "SELECT 'N/A'::VARCHAR AS d;"
-    return f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}) SELECT d FROM maxd;"
-
-
-def wrap_latest_date(sql_body: str, has_asof: bool) -> str:
-    """
-    If as_of_date exists, pin to latest date using a CTE and JOIN.
-    Otherwise, return the body directly from VIEW_FQN (no date pinning).
-    The sql_body must reference the view as 'p'.
-    """
-    if not has_asof:
-        # Remove any JOIN to maxd; just select from the view
-        return f"SELECT * FROM ({sql_body})"
-    else:
-        return f"SELECT * FROM ({sql_body})"
-
-
-def build_kpi_sql(has_asof: bool) -> str:
-    if has_asof:
-        return f"""
-        WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),
-        t1 AS (
-            SELECT p.bucket, SUM(p.Portfolio_value) AS amt
-            FROM {VIEW_FQN} p
-            JOIN maxd m ON p.as_of_date = m.d
-            WHERE p.days_to_maturity <= 1
-            GROUP BY p.bucket
-        )
-        SELECT
-            COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0) AS assets_t1,
-            COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS sof_t1,
-            COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0)
-            - COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS net_gap_t1
-        FROM t1;
-        """
-    else:
-        return f"""
-        WITH t1 AS (
-            SELECT p.bucket, SUM(p.Portfolio_value) AS amt
-            FROM {VIEW_FQN} p
-            WHERE p.days_to_maturity <= 1
-            GROUP BY p.bucket
-        )
-        SELECT
-            COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0) AS assets_t1,
-            COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS sof_t1,
-            COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0)
-            - COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS net_gap_t1
-        FROM t1;
-        """
-
-
-def build_ladder_sql(has_asof: bool) -> str:
-    if has_asof:
-        return f"""
-        WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
-        SELECT
-            CASE
-                WHEN p.days_to_maturity <= 1 THEN 'T+1'
-                WHEN p.days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
-                WHEN p.days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
-                ELSE 'T+31+'
-            END AS time_bucket,
-            p.bucket,
-            SUM(p.Portfolio_value) AS amount
-        FROM {VIEW_FQN} p
-        JOIN maxd m ON p.as_of_date = m.d
-        GROUP BY 1,2
-        ORDER BY CASE time_bucket WHEN 'T+1' THEN 1 WHEN 'T+2..7' THEN 2 WHEN 'T+8..30' THEN 3 ELSE 4 END, p.bucket;
-        """
-    else:
-        return f"""
-        SELECT
-            CASE
-                WHEN p.days_to_maturity <= 1 THEN 'T+1'
-                WHEN p.days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
-                WHEN p.days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
-                ELSE 'T+31+'
-            END AS time_bucket,
-            p.bucket,
-            SUM(p.Portfolio_value) AS amount
-        FROM {VIEW_FQN} p
-        GROUP BY 1,2
-        ORDER BY CASE time_bucket WHEN 'T+1' THEN 1 WHEN 'T+2..7' THEN 2 WHEN 'T+8..30' THEN 3 ELSE 4 END, p.bucket;
-        """
-
-
-def build_t1_group_sql(group_col: str, has_asof: bool) -> str:
-    if has_asof:
-        return f"""
-        WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
-        SELECT p.bucket, p.{group_col} AS grp, SUM(p.Portfolio_value) AS amount
-        FROM {VIEW_FQN} p
-        JOIN maxd m ON p.as_of_date = m.d
-        WHERE p.days_to_maturity <= 1
-        GROUP BY 1,2
-        ORDER BY p.bucket, amount DESC
-        LIMIT 50;
-        """
-    else:
-        return f"""
-        SELECT p.bucket, p.{group_col} AS grp, SUM(p.Portfolio_value) AS amount
-        FROM {VIEW_FQN} p
-        WHERE p.days_to_maturity <= 1
-        GROUP BY 1,2
-        ORDER BY p.bucket, amount DESC
-        LIMIT 50;
-        """
-
-
-def build_irr_sql(has_asof: bool, has_months: bool, has_ir: bool) -> str:
-    # T_years uses days_to_maturity OR months (if present). y uses Interest_rate (if present).
-    t_years_expr = "CASE WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0"
-    if has_months:
-        t_years_expr += " WHEN p.months IS NOT NULL THEN p.months/12.0"
-    t_years_expr += " ELSE NULL END"
-
-    y_expr = "(p.Interest_rate / 100.0)" if has_ir else "NULL"
-
-    if has_asof:
-        base_from = f"FROM {VIEW_FQN} p JOIN maxd m ON p.as_of_date = m.d"
-        max_cte = f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),"
-    else:
-        base_from = f"FROM {VIEW_FQN} p"
-        max_cte = "WITH"
-
-    return f"""
-    {max_cte}
-    base AS (
-        SELECT
-            p.bucket,
-            p.Portfolio_value AS pv,
-            {y_expr} AS y,
-            {t_years_expr} AS T_years
-        {base_from}
-        WHERE p.Portfolio_value IS NOT NULL
-    ),
-    metrics AS (
-        SELECT
-            bucket,
-            pv,
-            CASE WHEN T_years IS NULL THEN NULL
-                 WHEN y IS NULL THEN T_years
-                 ELSE T_years/(1.0+y) END AS dur_mod,
-            CASE WHEN T_years IS NULL THEN NULL
-                 WHEN y IS NULL THEN T_years*(T_years+1.0)
-                 ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx,
-            CASE WHEN T_years IS NULL THEN NULL
-                 ELSE pv * (CASE WHEN y IS NULL THEN T_years ELSE T_years/(1.0+y) END) * 0.0001 END AS dv01
-        FROM base
-    ),
-    agg AS (
-        SELECT
-            bucket,
-            SUM(pv) AS pv_sum,
-            SUM(pv * dur_mod) / NULLIF(SUM(pv),0) AS dur_mod_port,
-            SUM(dv01) AS dv01_sum
-        FROM metrics
-        GROUP BY bucket
-    )
-    SELECT
-        COALESCE(MAX(CASE WHEN bucket='Assets' THEN pv_sum END),0) AS assets_pv,
-        COALESCE(MAX(CASE WHEN bucket='SoF' THEN pv_sum END),0) AS sof_pv,
-        COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0) AS assets_dur_mod,
-        COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS sof_dur_mod,
-        COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0)
-        - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS duration_gap,
-        COALESCE(MAX(CASE WHEN bucket='Assets' THEN dv01_sum END),0)
-        - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dv01_sum END),0) AS net_dv01;
-    """
-
-
-def build_shock_sql(has_asof: bool, has_months: bool, has_ir: bool) -> str:
-    t_years_expr = "CASE WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0"
-    if has_months:
-        t_years_expr += " WHEN p.months IS NOT NULL THEN p.months/12.0"
-    t_years_expr += " ELSE NULL END"
-
-    y_expr = "(p.Interest_rate / 100.0)" if has_ir else "NULL"
-
-    if has_asof:
-        base_from = f"FROM {VIEW_FQN} p JOIN maxd m ON p.as_of_date = m.d"
-        max_cte = f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),"
-    else:
-        base_from = f"FROM {VIEW_FQN} p"
-        max_cte = "WITH"
-
-    return f"""
-    {max_cte}
-    base AS (
-        SELECT
-            p.bucket,
-            p.Portfolio_value AS pv,
-            {y_expr} AS y,
-            {t_years_expr} AS T_years
-        {base_from}
-    ),
-    k AS (
-        SELECT
-            bucket, pv,
-            CASE WHEN T_years IS NULL THEN NULL
-                 WHEN y IS NULL THEN T_years
-                 ELSE T_years/(1.0+y) END AS dur_mod,
-            CASE WHEN T_years IS NULL THEN NULL
-                 WHEN y IS NULL THEN T_years*(T_years+1.0)
-                 ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx
-        FROM base
-    ),
-    shock AS (
-        SELECT
-            bucket,
-            SUM((- pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(0.01,2))) AS dPV_up_100bp,
-            SUM((+ pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(-0.01,2))) AS dPV_dn_100bp
-        FROM k
-        GROUP BY bucket
-    )
-    SELECT * FROM shock ORDER BY bucket;
-    """
-
-
 # -------------------------------------------------------------------
-# Data
+# Data model
 # -------------------------------------------------------------------
 class DashboardResult(BaseModel):
     as_of_date: str
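Note: for orientation, this is roughly the statement the new build_view_sql emits when the source table has every wanted column except months (illustrative only; the exact layout depends on select_sql and missing text columns become VARCHAR NULLs instead):

    CREATE OR REPLACE VIEW my_db.main.positions_v AS
    SELECT
        as_of_date,
        product,
        CAST(NULL AS DOUBLE) AS months,
        segments,
        currency,
        Portfolio_value,
        Interest_rate,
        days_to_maturity,
        CASE WHEN lower(product) IN ('fd') THEN 'SoF'
             WHEN lower(product) IN ('assets') THEN 'Assets'
             ELSE 'Unknown' END AS bucket
    FROM my_db.main.masterdataset_v;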
@@ -336,100 +103,100 @@ class DashboardResult(BaseModel):
     sof_t1: float
     net_gap_t1: float
     ladder: pd.DataFrame
-    t1_by_month: pd.DataFrame
-    t1_by_segment: pd.DataFrame
-    t1_by_ccy: pd.DataFrame
     irr: pd.DataFrame
-    shocks: pd.DataFrame


 # -------------------------------------------------------------------
-#
+# Core logic
 # -------------------------------------------------------------------
-def ensure_view(conn: duckdb.DuckDBPyConnection, existing_cols: List[str])
-
-
-
-
-            f"Source table {TABLE_FQN} must contain {mandatory}, "
-            f"found only: {existing_cols}"
-        )
-    conn.execute(build_view_sql(existing_cols))
+def ensure_view(conn: duckdb.DuckDBPyConnection, existing_cols: List[str]):
+    if not {"product", "portfolio_value", "days_to_maturity"}.issubset(set(existing_cols)):
+        raise RuntimeError("Missing required columns in source table.")
+    sql = build_view_sql(existing_cols)
+    conn.execute(sql)


 def fetch_all(conn: duckdb.DuckDBPyConnection) -> DashboardResult:
-
-    ensure_view(conn,
-
-
-    has_months = "months" in
-
-
-
-
-
-
-
-    as_of_str = (
-        pd.to_datetime(as_of).strftime("%Y-%m-%d")
-        if has_asof and not pd.isna(as_of)
-        else "N/A"
-    )
+    cols = discover_columns(conn, TABLE_FQN)
+    ensure_view(conn, cols)
+    has_asof = "as_of_date" in cols
+    has_ir = "interest_rate" in cols
+    has_months = "months" in cols
+
+    # As-of date
+    as_of_date = "N/A"
+    if has_asof:
+        asof_df = conn.execute(f"SELECT max(as_of_date) AS d FROM {VIEW_FQN}").fetchdf()
+        if not asof_df.empty and not pd.isna(asof_df["d"].iloc[0]):
+            as_of_date = str(asof_df["d"].iloc[0])[:10]

-    # KPIs
-
-
+    # KPIs
+    kpi_sql = f"""
+        SELECT
+          COALESCE(SUM(CASE WHEN bucket='Assets' AND days_to_maturity<=1 THEN Portfolio_value END),0) AS assets_t1,
+          COALESCE(SUM(CASE WHEN bucket='SoF' AND days_to_maturity<=1 THEN Portfolio_value END),0) AS sof_t1,
+          COALESCE(SUM(CASE WHEN bucket='Assets' AND days_to_maturity<=1 THEN Portfolio_value END),0)
+          - COALESCE(SUM(CASE WHEN bucket='SoF' AND days_to_maturity<=1 THEN Portfolio_value END),0) AS net_gap_t1
+        FROM {VIEW_FQN};
+    """
+    kpi = conn.execute(kpi_sql).fetchdf()
+
+    # Ladder
+    ladder_sql = f"""
+        SELECT
+          CASE
+            WHEN days_to_maturity <= 1 THEN 'T+1'
+            WHEN days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
+            WHEN days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
+            ELSE 'T+31+'
+          END AS time_bucket,
+          bucket,
+          SUM(Portfolio_value) AS amount
+        FROM {VIEW_FQN}
+        GROUP BY 1,2
+        ORDER BY 1,2;
+    """
+    ladder = conn.execute(ladder_sql).fetchdf()

-    #
+    # IRR simplified
+    t_expr = "CASE WHEN days_to_maturity IS NOT NULL THEN days_to_maturity/365.0"
     if has_months:
-
-
-    else
-
-
-
-
-
-
-
-
-
-
-
-    else:
-        t1_c = pd.DataFrame(columns=["bucket", "currency", "amount"])
-
-    # IRR & shocks (works even if Interest_rate/months are missing)
-    irr = conn.execute(build_irr_sql(has_asof, has_months, has_ir)).fetchdf()
-    shocks = conn.execute(build_shock_sql(has_asof, has_months, has_ir)).fetchdf()
+        t_expr += " WHEN months IS NOT NULL THEN months/12.0"
+    t_expr += " ELSE NULL END"
+    y_expr = "(Interest_rate/100.0)" if has_ir else "NULL"
+
+    irr_sql = f"""
+        SELECT
+          bucket,
+          SUM(Portfolio_value) AS pv_sum,
+          SUM(Portfolio_value * {t_expr}) / NULLIF(SUM(Portfolio_value),0) AS dur_mac,
+          SUM(Portfolio_value * ({t_expr})/(1+COALESCE({y_expr},0))) / NULLIF(SUM(Portfolio_value),0) AS dur_mod
+        FROM {VIEW_FQN}
+        GROUP BY bucket;
+    """
+    irr = conn.execute(irr_sql).fetchdf()

     return DashboardResult(
-        as_of_date=
-        assets_t1=float(
-        sof_t1=float(
-        net_gap_t1=float(
+        as_of_date=as_of_date,
+        assets_t1=float(kpi["assets_t1"].iloc[0]),
+        sof_t1=float(kpi["sof_t1"].iloc[0]),
+        net_gap_t1=float(kpi["net_gap_t1"].iloc[0]),
         ladder=ladder,
-        t1_by_month=t1_m,
-        t1_by_segment=t1_s,
-        t1_by_ccy=t1_c,
         irr=irr,
-        shocks=shocks,
     )


 # -------------------------------------------------------------------
-#
+# Visualization
 # -------------------------------------------------------------------
 def plot_ladder(df: pd.DataFrame):
     pivot = df.pivot(index="time_bucket", columns="bucket", values="amount").fillna(0)
     order = ["T+1", "T+2..7", "T+8..30", "T+31+"]
     pivot = pivot.reindex(order)
     fig, ax = plt.subplots(figsize=(7, 4))
-
-
-    ax.
-    ax.bar(pivot.index, -sof, bottom=0, label="SoF")
-    ax.axhline(0, linewidth=1)
+    ax.bar(pivot.index, pivot.get("Assets", 0), label="Assets")
+    ax.bar(pivot.index, -pivot.get("SoF", 0), label="SoF")
+    ax.axhline(0, color="gray", lw=1)
     ax.set_ylabel("LKR")
     ax.set_title("Maturity Ladder (Assets vs SoF)")
     ax.legend()
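Note: the rewritten fetch_all aggregates over the whole view rather than pinning to the latest as_of_date the way the removed build_kpi_sql/build_ladder_sql did with their maxd CTE, so if masterdataset_v holds more than one snapshot the KPI and ladder figures will sum across dates. The simplified irr_sql keeps only PV-weighted Macaulay and modified duration per bucket; the DV01, convexity and ±100bp shock outputs of build_irr_sql/build_shock_sql are dropped. A quick per-position check of the retained math (illustrative only, not part of the app):

    pv, T, y = 1_000_000.0, 2.0, 0.10   # LKR 1m position, 2 years to maturity, 10% rate
    dur_mac = T                          # single cash flow: Macaulay duration equals T
    dur_mod = T / (1 + y)                # ~1.82 years, matching T/(1+y) in irr_sql
    dv01 = pv * dur_mod * 0.0001         # ~LKR 182 per bp, as the removed build_irr_sql computed it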
@@ -450,123 +217,51 @@ def export_excel(res: DashboardResult) -> Path:
         "net_gap_t1": [res.net_gap_t1],
     }).to_excel(xw, index=False, sheet_name="kpis")
     res.ladder.to_excel(xw, index=False, sheet_name="ladder")
-    res.t1_by_month.to_excel(xw, index=False, sheet_name="t1_by_month")
-    res.t1_by_segment.to_excel(xw, index=False, sheet_name="t1_by_segment")
-    res.t1_by_ccy.to_excel(xw, index=False, sheet_name="t1_by_ccy")
     res.irr.to_excel(xw, index=False, sheet_name="irr")
-    res.shocks.to_excel(xw, index=False, sheet_name="shocks")
-    return out
-
-
-def export_pdf(res: DashboardResult) -> Path:
-    out = EXPORT_DIR / f"alco_report_{res.as_of_date}.pdf"
-    c = canvas.Canvas(str(out), pagesize=A4)
-    W, H = A4
-    y = H - 20 * mm
-
-    def line(txt, size=11, dy=6 * mm):
-        nonlocal y
-        c.setFont("Helvetica", size)
-        c.drawString(20 * mm, y, txt)
-        y -= dy
-
-    line(APP_TITLE, 14, dy=8 * mm)
-    line(f"As of: {res.as_of_date}")
-    line(f"Assets T+1: {res.assets_t1:,.0f} LKR")
-    line(f"SoF T+1: {res.sof_t1:,.0f} LKR")
-    line(f"Net Gap T+1: {res.net_gap_t1:,.0f} LKR (negative = shortfall)")
-    y -= 4 * mm
-
-    if not res.irr.empty:
-        irr = res.irr.iloc[0]
-        line("Interest-Rate Risk (approx)", 12, dy=7 * mm)
-        line(f"Assets ModDur: {irr['assets_dur_mod']:.2f} | SoF ModDur: {irr['sof_dur_mod']:.2f}")
-        line(f"Duration Gap: {irr['duration_gap']:.2f}")
-        line(f"Net DV01: {irr['net_dv01']:,.0f} LKR/bp")
-
-    if not res.shocks.empty:
-        net_up = res.shocks["dPV_up_100bp"].sum()
-        net_dn = res.shocks["dPV_dn_100bp"].sum()
-        y -= 2 * mm
-        line(f"+100bp net ΔPV: {net_up:,.0f} LKR | -100bp net ΔPV: {net_dn:,.0f} LKR")
-
-    c.showPage()
-    c.save()
     return out


 # -------------------------------------------------------------------
 # Gradio UI
 # -------------------------------------------------------------------
-def run_dashboard()
+def run_dashboard():
     conn = connect_md()
     res = fetch_all(conn)
     fig = plot_ladder(res.ladder)
     excel_path = export_excel(res)
-    pdf_path = export_pdf(res)
     return (
         res.as_of_date,
         res.assets_t1,
         res.sof_t1,
         res.net_gap_t1,
         fig,
-        res.
-        res.t1_by_segment,
-        res.t1_by_ccy,
+        res.ladder,
         res.irr,
-        res.shocks,
         str(excel_path),
-        str(pdf_path),
     )


 with gr.Blocks(title=APP_TITLE) as demo:
-    gr.Markdown(
-        f"# {APP_TITLE}\n"
-        "*Source:* `my_db.main.masterdataset_v` → `positions_v` | *Sign:* Assets=+ SoF=–"
-    )
+    gr.Markdown(f"# {APP_TITLE}\n_Source:_ `{TABLE_FQN}` → `{VIEW_FQN}`")

     with gr.Row():
-
+        refresh_btn = gr.Button("🔄 Refresh", variant="primary")

     with gr.Row():
         as_of = gr.Textbox(label="As of date", interactive=False)
-
     with gr.Row():
-
-
-
+        a1 = gr.Number(label="Assets T+1 (LKR)", precision=0)
+        a2 = gr.Number(label="SoF T+1 (LKR)", precision=0)
+        a3 = gr.Number(label="Net Gap T+1 (LKR)", precision=0)

     chart = gr.Plot(label="Maturity Ladder")
+    ladder_df = gr.Dataframe(label="Ladder Detail")
+    irr_df = gr.Dataframe(label="Interest-Rate Risk (approx)")
+    excel_file = gr.File(label="Excel export", interactive=False)

-
-    t1m = gr.Dataframe(label="T+1 by Tenor (months)")
-    t1s = gr.Dataframe(label="T+1 by Segment")
-
-    t1c = gr.Dataframe(label="T+1 by Currency")
-    irr = gr.Dataframe(label="Interest-Rate Risk (bucketed)")
-    shocks = gr.Dataframe(label="Parallel Shock ±100bp (bucketed)")
-
-    with gr.Row():
-        excel_file = gr.File(label="Excel export", interactive=False)
-        pdf_file = gr.File(label="PDF export", interactive=False)
-
-    btn.click(
+    refresh_btn.click(
         fn=run_dashboard,
-        outputs=[
-            as_of,
-            k1,
-            k2,
-            k3,
-            chart,
-            t1m,
-            t1s,
-            t1c,
-            irr,
-            shocks,
-            excel_file,
-            pdf_file,
-        ],
+        outputs=[as_of, a1, a2, a3, chart, ladder_df, irr_df, excel_file],
     )

 if __name__ == "__main__":