From 0298a56bd61ddbaec2836044d1f3972e1c47bcfc Mon Sep 17 00:00:00 2001
From: sina <20732540+SinaKhalili@users.noreply.github.com>
Date: Thu, 31 Oct 2024 04:01:22 +0900
Subject: [PATCH] Add cached versions
---
backend/app.py | 13 ++
backend/middleware/cache_middleware.py | 20 ++-
src/lib/api.py | 40 ++++++
src/main.py | 21 +++
src/page/asset_liability_cached.py | 79 +++++++++++
src/page/health_cached.py | 45 +++++++
src/page/price_shock_cached.py | 173 +++++++++++++++++++++++++
7 files changed, 390 insertions(+), 1 deletion(-)
create mode 100644 src/page/asset_liability_cached.py
create mode 100644 src/page/health_cached.py
create mode 100644 src/page/price_shock_cached.py
diff --git a/backend/app.py b/backend/app.py
index 0840e84..3df3796 100644
--- a/backend/app.py
+++ b/backend/app.py
@@ -60,6 +60,7 @@ def clean_cache(state: BackendState) -> None:
except Exception as e:
print(f"Error deleting {pickle}: {e}")
+ # Clean regular cache
cache_files = glob.glob("cache/*")
if len(cache_files) > 35:
print("cache folder has more than 35 files, deleting old ones")
@@ -71,6 +72,18 @@ def clean_cache(state: BackendState) -> None:
except Exception as e:
print(f"Error deleting {cache_file}: {e}")
+ # Clean ucache
+ ucache_files = glob.glob("ucache/*")
+ if len(ucache_files) > 35:
+ print("ucache folder has more than 35 files, deleting old ones")
+ ucache_files.sort(key=os.path.getmtime)
+ for ucache_file in ucache_files[:-35]:
+ print(f"deleting {ucache_file}")
+ try:
+ os.remove(ucache_file)
+ except Exception as e:
+ print(f"Error deleting {ucache_file}: {e}")
+
@repeat_every(seconds=60 * 8, wait_first=True)
async def repeatedly_clean_cache(state: BackendState) -> None:
diff --git a/backend/middleware/cache_middleware.py b/backend/middleware/cache_middleware.py
index 914a57b..cdad5d7 100644
--- a/backend/middleware/cache_middleware.py
+++ b/backend/middleware/cache_middleware.py
@@ -18,9 +18,12 @@ def __init__(self, app: ASGIApp, state: BackendState, cache_dir: str = "cache"):
super().__init__(app)
self.state = state
self.cache_dir = cache_dir
+ self.ucache_dir = "ucache"
self.revalidation_locks: Dict[str, asyncio.Lock] = {}
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
+ if not os.path.exists(self.ucache_dir):
+ os.makedirs(self.ucache_dir)
async def dispatch(self, request: BackendRequest, call_next: Callable):
if not request.url.path.startswith("/api"):
@@ -155,7 +158,22 @@ async def _fetch_and_cache(
os.makedirs(os.path.dirname(cache_file), exist_ok=True)
with open(cache_file, "w") as f:
json.dump(response_data, f)
- print(f"Cached fresh data for {request.url.path}")
+
+ ucache_key = f"{request.method}{request.url.path}"
+ if request.url.query:
+ safe_query = request.url.query.replace("&", "_").replace(
+ "=", "-"
+ )
+ ucache_key = f"{ucache_key}__{safe_query}"
+ ucache_key = ucache_key.replace("/", "_")
+
+ ucache_file = os.path.join(self.ucache_dir, f"{ucache_key}.json")
+ with open(ucache_file, "w") as f:
+ json.dump(response_data, f)
+
+ print(
+ f"Cached fresh data for {request.url.path} with query {request.url.query}"
+ )
else:
print(
f"Failed to cache data for {request.url.path}. Status code: {response.status_code}"
diff --git a/src/lib/api.py b/src/lib/api.py
index 32637a2..b5b1af1 100644
--- a/src/lib/api.py
+++ b/src/lib/api.py
@@ -1,3 +1,4 @@
+import json
import os
from typing import Optional
@@ -9,6 +10,7 @@
load_dotenv()
BASE_URL = os.environ["BACKEND_URL"]
+R2_PREFIX = "https://pub" + "-7dc8852b9fd5407a92614093e1f73280.r" + "2.dev"
def api(
@@ -52,3 +54,41 @@ def api(
return pd.DataFrame(response.json())
except ValueError:
return response.json()
+
+
+def api2(url: str, params: Optional[dict] = None) -> dict:
+ """
+ Fetch data from R2 storage using the simplified naming scheme.
+ Example: /api/health/health_distribution -> GET_api_health_health_distribution.json
+ Example with params: /api/price-shock/usermap?asset_group=ignore+stables&oracle_distortion=0.05
+ -> GET_api_price-shock_usermap__asset_group-ignore+stables_oracle_distortion-0.05.json
+ """
+ print("SERVING FROM R2")
+
+ try:
+ # Convert URL path to R2 filename format
+ cache_key = f"GET/api/{url}".replace("/", "_")
+
+ # Handle query parameters exactly as they appear in the URL
+ if params:
+ # Convert params to URL query string format
+ query_parts = []
+ for k, v in params.items():
+ # Replace space with + to match URL encoding
+ if isinstance(v, str):
+ v = v.replace(" ", "%2B")
+ query_parts.append(f"{k}-{v}")
+ query_str = "_".join(query_parts)
+ cache_key = f"{cache_key}__{query_str}"
+
+ r2_url = f"{R2_PREFIX}/{cache_key}.json"
+ print(f"Fetching from R2: {r2_url}")
+ response = requests.get(r2_url)
+ if response.status_code != 200:
+ raise Exception(f"Failed to fetch from R2: {response.status_code}")
+
+ response_data = response.json()
+ return response_data["content"]
+ except Exception as e:
+ print(f"Error fetching from R2: {str(e)}")
+ raise
diff --git a/src/main.py b/src/main.py
index 4f9ae20..1c04653 100644
--- a/src/main.py
+++ b/src/main.py
@@ -5,11 +5,14 @@
from lib.page import needs_backend
from lib.page import sidebar
from page.asset_liability import asset_liab_matrix_page
+from page.asset_liability_cached import asset_liab_matrix_cached_page
from page.backend import backend_page
from page.health import health_page
+from page.health_cached import health_cached_page
from page.liquidation_curves import liquidation_curves_page
from page.orderbook import orderbook_page
from page.price_shock import price_shock_page
+from page.price_shock_cached import price_shock_cached_page
from sections.welcome import welcome_page
import streamlit as st
@@ -58,6 +61,24 @@
title="Liquidation Curves",
icon="🌊",
),
+ st.Page(
+ health_cached_page,
+ url_path="health-cached",
+ title="Health (Cached)",
+ icon="🏥",
+ ),
+ st.Page(
+ price_shock_cached_page,
+ url_path="price-shock-cached",
+ title="Price Shock (Cached)",
+ icon="💸",
+ ),
+ st.Page(
+ asset_liab_matrix_cached_page,
+ url_path="asset-liab-matrix-cached",
+ title="Asset-Liab Matrix (Cached)",
+ icon="📊",
+ ),
]
if os.getenv("DEV"):
pages.append(
diff --git a/src/page/asset_liability_cached.py b/src/page/asset_liability_cached.py
new file mode 100644
index 0000000..d8701d7
--- /dev/null
+++ b/src/page/asset_liability_cached.py
@@ -0,0 +1,79 @@
+import json
+
+from driftpy.constants.perp_markets import mainnet_perp_market_configs
+from driftpy.constants.spot_markets import mainnet_spot_market_configs
+from lib.api import api2
+import pandas as pd
+from requests.exceptions import JSONDecodeError
+import streamlit as st
+
+
+options = [0, 1, 2, 3]
+labels = [
+ "none",
+ "liq within 50% of oracle",
+ "maint. health < 10%",
+ "init. health < 10%",
+]
+
+
+def asset_liab_matrix_cached_page():
+ params = st.query_params
+ mode = int(params.get("mode", 0))
+ perp_market_index = int(params.get("perp_market_index", 0))
+
+ mode = st.selectbox(
+ "Options", options, format_func=lambda x: labels[x], index=options.index(mode)
+ )
+ st.query_params.update({"mode": mode})
+
+ perp_market_index = st.selectbox(
+ "Market index",
+ [x.market_index for x in mainnet_perp_market_configs],
+ index=[x.market_index for x in mainnet_perp_market_configs].index(
+ perp_market_index
+ ),
+ )
+ st.query_params.update({"perp_market_index": perp_market_index})
+
+ try:
+ url = f"asset-liability/matrix/{0 if mode is None else mode}/{0 if perp_market_index is None else perp_market_index}"
+ result = api2(url)
+ if "result" in result and result["result"] == "miss":
+ st.write("Fetching data for the first time...")
+ st.image(
+ "https://i.gifer.com/origin/8a/8a47f769c400b0b7d81a8f6f8e09a44a_w200.gif"
+ )
+ st.write("Check again in one minute!")
+ st.stop()
+
+ except Exception as e:
+ if type(e) == JSONDecodeError:
+ print("HIT A JSONDecodeError...", e)
+ st.write("Fetching data for the first time...")
+ st.image(
+ "https://i.gifer.com/origin/8a/8a47f769c400b0b7d81a8f6f8e09a44a_w200.gif"
+ )
+ st.write("Check again in one minute!")
+ st.stop()
+ else:
+ st.write(e)
+ st.stop()
+
+ res = pd.DataFrame(result["res"])
+ df = pd.DataFrame(result["df"])
+
+ st.write(f"{df.shape[0]} users for scenario")
+ st.write(res)
+
+ tabs = st.tabs(["FULL"] + [x.symbol for x in mainnet_spot_market_configs])
+ tabs[0].dataframe(df, hide_index=True)
+
+ for idx, tab in enumerate(tabs[1:]):
+ important_cols = [x for x in df.columns if "spot_" + str(idx) in x]
+ toshow = df[["spot_asset", "net_usd_value"] + important_cols]
+ toshow = toshow[toshow[important_cols].abs().sum(axis=1) != 0].sort_values(
+ by="spot_" + str(idx) + "_all", ascending=False
+ )
+ tab.write(f"{ len(toshow)} users with this asset to cover liabilities")
+ tab.dataframe(toshow, hide_index=True)
diff --git a/src/page/health_cached.py b/src/page/health_cached.py
new file mode 100644
index 0000000..f102f64
--- /dev/null
+++ b/src/page/health_cached.py
@@ -0,0 +1,45 @@
+from lib.api import api2
+import plotly.express as px
+import streamlit as st
+
+from utils import fetch_result_with_retry
+
+
+def health_cached_page():
+ health_distribution = api2("health/health_distribution")
+ largest_perp_positions = api2("health/largest_perp_positions")
+ most_levered_positions = api2("health/most_levered_perp_positions_above_1m")
+ largest_spot_borrows = api2("health/largest_spot_borrows")
+ most_levered_borrows = api2("health/most_levered_spot_borrows_above_1m")
+
+ print(health_distribution)
+
+ fig = px.bar(
+ health_distribution,
+ x="Health Range",
+ y="Counts",
+ title="Health Distribution",
+ hover_data={"Notional Values": ":,"}, # Custom format for notional values
+ labels={"Counts": "Num Users", "Notional Values": "Notional Value ($)"},
+ )
+
+ fig.update_traces(
+        hovertemplate="Health Range: %{x}<br>Count: %{y}<br>Notional Value: $%{customdata[0]:,.0f}"
+ )
+
+ with st.container():
+ st.plotly_chart(fig, use_container_width=True)
+
+ perp_col, spot_col = st.columns([1, 1])
+
+ with perp_col:
+ st.markdown("### **Largest perp positions:**")
+ st.dataframe(largest_perp_positions, hide_index=True)
+ st.markdown("### **Most levered perp positions > $1m:**")
+ st.dataframe(most_levered_positions, hide_index=True)
+
+ with spot_col:
+ st.markdown("### **Largest spot borrows:**")
+ st.dataframe(largest_spot_borrows, hide_index=True)
+ st.markdown("### **Most levered spot borrows > $750k:**")
+ st.dataframe(most_levered_borrows, hide_index=True)
diff --git a/src/page/price_shock_cached.py b/src/page/price_shock_cached.py
new file mode 100644
index 0000000..e0b7e73
--- /dev/null
+++ b/src/page/price_shock_cached.py
@@ -0,0 +1,173 @@
+import asyncio
+from asyncio import AbstractEventLoop
+import os
+import time
+from typing import Any, TypedDict
+
+from anchorpy import Wallet
+from driftpy.account_subscription_config import AccountSubscriptionConfig
+from driftpy.drift_client import DriftClient
+from driftpy.pickle.vat import Vat
+from lib.api import api2
+import pandas as pd
+import plotly.graph_objects as go
+from solana.rpc.async_api import AsyncClient
+import streamlit as st
+
+
+class UserLeveragesResponse(TypedDict):
+ leverages_none: list[Any]
+ leverages_up: list[Any]
+ leverages_down: list[Any]
+ user_keys: list[str]
+ distorted_oracles: list[str]
+
+
+def create_dataframes(leverages):
+ return [pd.DataFrame(lev) for lev in leverages]
+
+
+def calculate_spot_bankruptcies(df):
+ spot_bankrupt = df[
+ (df["spot_asset"] < df["spot_liability"]) & (df["net_usd_value"] < 0)
+ ]
+ return (spot_bankrupt["spot_liability"] - spot_bankrupt["spot_asset"]).sum()
+
+
+def calculate_total_bankruptcies(df):
+ return -df[df["net_usd_value"] < 0]["net_usd_value"].sum()
+
+
+def generate_oracle_moves(num_scenarios, oracle_distort):
+ return (
+ [-oracle_distort * (i + 1) * 100 for i in range(num_scenarios)]
+ + [0]
+ + [oracle_distort * (i + 1) * 100 for i in range(num_scenarios)]
+ )
+
+
+def price_shock_plot(user_leverages, oracle_distort: float):
+ levs = user_leverages
+ dfs = (
+ create_dataframes(levs["leverages_down"])
+ + [pd.DataFrame(levs["leverages_none"])]
+ + create_dataframes(levs["leverages_up"])
+ )
+
+ spot_bankruptcies = [calculate_spot_bankruptcies(df) for df in dfs]
+ total_bankruptcies = [calculate_total_bankruptcies(df) for df in dfs]
+
+ num_scenarios = len(levs["leverages_down"])
+ oracle_moves = generate_oracle_moves(num_scenarios, oracle_distort)
+
+ # Create and sort the DataFrame BEFORE plotting
+ df_plot = pd.DataFrame(
+ {
+ "Oracle Move (%)": oracle_moves,
+ "Total Bankruptcy ($)": total_bankruptcies,
+ "Spot Bankruptcy ($)": spot_bankruptcies,
+ }
+ )
+
+ # Sort by Oracle Move to ensure correct line connection order
+ df_plot = df_plot.sort_values("Oracle Move (%)")
+
+ # Calculate Perp Bankruptcy AFTER sorting
+ df_plot["Perp Bankruptcy ($)"] = (
+ df_plot["Total Bankruptcy ($)"] - df_plot["Spot Bankruptcy ($)"]
+ )
+
+ # Create the figure with sorted data
+ fig = go.Figure()
+ for column in [
+ "Total Bankruptcy ($)",
+ "Spot Bankruptcy ($)",
+ "Perp Bankruptcy ($)",
+ ]:
+ fig.add_trace(
+ go.Scatter(
+ x=df_plot["Oracle Move (%)"],
+ y=df_plot[column],
+ mode="lines+markers",
+ name=column,
+ )
+ )
+
+ fig.update_layout(
+ title="Bankruptcies in Crypto Price Scenarios",
+ xaxis_title="Oracle Move (%)",
+ yaxis_title="Bankruptcy ($)",
+ legend_title="Bankruptcy Type",
+ template="plotly_dark",
+ )
+
+ return fig
+
+
+def price_shock_cached_page():
+ # Get query parameters
+ params = st.query_params
+ print(params, "params")
+
+ cov = params.get("cov", "ignore stables")
+ oracle_distort = float(params.get("oracle_distort", 0.05))
+
+ cov_col, distort_col = st.columns(2)
+ cov = cov_col.selectbox(
+ "covariance:",
+ [
+ "ignore stables",
+ "sol + lst only",
+ "meme",
+ ],
+ index=["ignore stables", "sol + lst only", "meme"].index(cov),
+ )
+
+ oracle_distort = distort_col.selectbox(
+ "oracle distortion:",
+ [0.05, 0.1, 0.2, 0.5, 1],
+ index=[0.05, 0.1, 0.2, 0.5, 1].index(oracle_distort),
+ help="step size of oracle distortions",
+ )
+
+ # Update query parameters
+ st.query_params.update({"cov": cov, "oracle_distort": oracle_distort})
+
+ try:
+ result = api2(
+ "price-shock/usermap",
+ params={
+ "asset_group": cov,
+ "oracle_distortion": oracle_distort,
+ "n_scenarios": 5,
+ },
+ )
+ except Exception as e:
+ print("HIT AN EXCEPTION...", e)
+
+ if "result" in result and result["result"] == "miss":
+ st.write("Fetching data for the first time...")
+ st.image(
+ "https://i.gifer.com/origin/8a/8a47f769c400b0b7d81a8f6f8e09a44a_w200.gif"
+ )
+ st.write("Check again in one minute!")
+ st.stop()
+
+ fig = price_shock_plot(result, oracle_distort)
+ st.plotly_chart(fig)
+ oracle_down_max = pd.DataFrame(result["leverages_down"][-1])
+ with st.expander(
+ str("oracle down max bankrupt count=")
+ + str(len(oracle_down_max[oracle_down_max.net_usd_value < 0]))
+ ):
+ st.dataframe(oracle_down_max)
+
+ oracle_up_max = pd.DataFrame(result["leverages_up"][-1], index=result["user_keys"])
+ with st.expander(
+ str("oracle up max bankrupt count=")
+ + str(len(oracle_up_max[oracle_up_max.net_usd_value < 0]))
+ ):
+ st.dataframe(oracle_up_max)
+
+ with st.expander("distorted oracle keys"):
+ st.write(result["distorted_oracles"])