From 877aa7b193657dfef5184d7807626fb02bf25028 Mon Sep 17 00:00:00 2001
From: sina <20732540+SinaKhalili@users.noreply.github.com>
Date: Wed, 2 Oct 2024 09:19:54 -0700
Subject: [PATCH] Update backend cache

---
 backend/app.py                         | 132 +++-------
 backend/middleware/cache_middleware.py |  69 +++++++++
 backend/middleware/readiness.py        |   9 +-
 backend/state.py                       | 109 ++++++++++++-
 backend/utils/matrix.py                | 160 ++++++++++++++++++++
 backend/utils/user_metrics.py          | 202 +++++++++++++++++++++++++
 6 files changed, 568 insertions(+), 113 deletions(-)
 create mode 100644 backend/middleware/cache_middleware.py
 create mode 100644 backend/utils/matrix.py
 create mode 100644 backend/utils/user_metrics.py

diff --git a/backend/app.py b/backend/app.py
index 9c3fe28..00a5429 100644
--- a/backend/app.py
+++ b/backend/app.py
@@ -1,35 +1,19 @@
-from asyncio import create_task
-from asyncio import gather
 from contextlib import asynccontextmanager
-from datetime import datetime
 import glob
 import os
 
-from anchorpy import Wallet
+from backend.api import asset_liability
+from backend.api import health
+from backend.api import liquidation
+from backend.api import metadata
+from backend.api import price_shock
+from backend.middleware.cache_middleware import CacheMiddleware
 from backend.middleware.readiness import ReadinessMiddleware
 from backend.state import BackendState
-from backend.utils.vat import load_newest_files
-from backend.utils.waiting_for import waiting_for
 from dotenv import load_dotenv
-from driftpy.account_subscription_config import AccountSubscriptionConfig
-from driftpy.drift_client import DriftClient
-from driftpy.market_map.market_map import MarketMap
-from driftpy.market_map.market_map_config import (
-    WebsocketConfig as MarketMapWebsocketConfig,
-)
-from driftpy.market_map.market_map_config import MarketMapConfig
-from driftpy.pickle.vat import Vat
-from driftpy.types import MarketType
-from driftpy.user_map.user_map import UserMap
-from driftpy.user_map.user_map_config import (
-    WebsocketConfig as UserMapWebsocketConfig,
-)
-from driftpy.user_map.user_map_config import UserMapConfig
-from driftpy.user_map.user_map_config import UserStatsMapConfig
-from driftpy.user_map.userstats_map import UserStatsMap
+from fastapi import BackgroundTasks
 from fastapi import FastAPI
 import pandas as pd
-from solana.rpc.async_api import AsyncClient
 
 load_dotenv()
 
@@ -38,73 +22,21 @@
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    global state
     url = os.getenv("RPC_URL")
     if not url:
         raise ValueError("RPC_URL environment variable is not set.")
-
-    state.connection = AsyncClient(url)
-    state.dc = DriftClient(
-        state.connection,
-        Wallet.dummy(),
-        "mainnet",
-        account_subscription=AccountSubscriptionConfig("cached"),
-    )
-    state.perp_map = MarketMap(
-        MarketMapConfig(
-            state.dc.program,
-            MarketType.Perp(),
-            MarketMapWebsocketConfig(),
-            state.dc.connection,
-        )
-    )
-    state.spot_map = MarketMap(
-        MarketMapConfig(
-            state.dc.program,
-            MarketType.Spot(),
-            MarketMapWebsocketConfig(),
-            state.dc.connection,
-        )
-    )
-    state.user_map = UserMap(UserMapConfig(state.dc, UserMapWebsocketConfig()))
-    state.stats_map = UserStatsMap(UserStatsMapConfig(state.dc))
-    state.vat = Vat(
-        state.dc,
-        state.user_map,
-        state.stats_map,
-        state.spot_map,
-        state.perp_map,
-    )
+    global state
+    state.initialize(url)
 
     print("Checking if cached vat exists")
     cached_vat_path = sorted(glob.glob("pickles/*"))
     if len(cached_vat_path) > 0:
         print("Loading cached vat")
-        directory = cached_vat_path[-1]
-        pickle_map = load_newest_files(directory)
-        with waiting_for("unpickling"):
-            await state.vat.unpickle(
-                users_filename=pickle_map["usermap"],
-                user_stats_filename=pickle_map["userstats"],
-                spot_markets_filename=pickle_map["spot"],
-                perp_markets_filename=pickle_map["perp"],
-                spot_oracles_filename=pickle_map["spotoracles"],
-                perp_oracles_filename=pickle_map["perporacles"],
-            )
+        await state.load_pickle_snapshot(cached_vat_path[-1])
     else:
-        print("No cached vat found")
-
-    # with waiting_for("drift client"):
-    #     await state.dc.subscribe()
-    with waiting_for("subscriptions"):
-        await gather(
-            create_task(state.spot_map.subscribe()),
-            create_task(state.perp_map.subscribe()),
-            create_task(state.user_map.subscribe()),
-            create_task(state.stats_map.subscribe()),
-        )
+        print("No cached vat found, bootstrapping")
+        await state.bootstrap()
 
-    state.some_df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
 
     state.ready = True
     print("Starting app")
     yield
@@ -118,6 +50,15 @@ async def lifespan(app: FastAPI):
 
 app = FastAPI(lifespan=lifespan)
 app.add_middleware(ReadinessMiddleware, state=state)
+app.add_middleware(CacheMiddleware, state=state, cache_dir="cache")
+
+app.include_router(health.router, prefix="/api/health", tags=["health"])
+app.include_router(metadata.router, prefix="/api/metadata", tags=["metadata"])
+app.include_router(liquidation.router, prefix="/api/liquidation", tags=["liquidation"])
+app.include_router(price_shock.router, prefix="/api/price-shock", tags=["price-shock"])
+app.include_router(
+    asset_liability.router, prefix="/api/asset-liability", tags=["asset-liability"]
+)
 
 
 @app.get("/")
@@ -125,32 +66,7 @@ async def root():
     return {"message": "Hello World"}
 
 
-@app.get("/df")
-async def get_df():
-    return state.some_df.to_dict(orient="records")
-
-
-@app.get("/users")
-async def get_users():
-    users = [user.user_public_key for user in state.user_map.values()]
-    return users
-
-
 @app.get("/pickle")
-async def pickle():
-    now = datetime.now()
-    folder_name = now.strftime("vat-%Y-%m-%d-%H-%M-%S")
-    if not os.path.exists("pickles"):
-        os.makedirs("pickles")
-    path = os.path.join("pickles", folder_name, "")
-
-    os.makedirs(path, exist_ok=True)
-    with waiting_for("pickling"):
-        result = await state.vat.pickle(path)
-
-    return {"result": result}
-
-
-@app.get("/health")
-async def health_check():
-    return {"status": "healthy" if state.ready else "initializing"}
+async def pickle(background_tasks: BackgroundTasks):
+    background_tasks.add_task(state.take_pickle_snapshot)
+    return {"result": "background task added"}
diff --git a/backend/middleware/cache_middleware.py b/backend/middleware/cache_middleware.py
new file mode 100644
index 0000000..a650491
--- /dev/null
+++ b/backend/middleware/cache_middleware.py
@@ -0,0 +1,69 @@
+import hashlib
+import os
+import pickle
+
+from backend.state import BackendRequest
+from backend.state import BackendState
+from fastapi import HTTPException
+from fastapi import Response
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.types import ASGIApp
+
+
+class CacheMiddleware(BaseHTTPMiddleware):
+    def __init__(self, app: ASGIApp, state: BackendState, cache_dir: str = "cache"):
+        super().__init__(app)
+        self.state = state
+        self.cache_dir = cache_dir
+        if not os.path.exists(self.cache_dir):
+            os.makedirs(self.cache_dir)
+
+    async def dispatch(self, request: BackendRequest, call_next):
+        if not request.url.path.startswith("/api"):
+            return await call_next(request)
+        if self.state.current_pickle_path == "bootstrap":
+            return await call_next(request)
+
+        cache_key = self._generate_cache_key(request)
+        cache_file = os.path.join(self.cache_dir, f"{cache_key}.pkl")
+
+        if os.path.exists(cache_file):
+            print(f"Cache hit for {request.url.path}")
+            with open(cache_file, "rb") as f:
+                response_data = pickle.load(f)
+            return Response(
+                content=response_data["content"],
+                status_code=response_data["status_code"],
+                headers=response_data["headers"],
+            )
+
+        print(f"Cache miss for {request.url.path}")
+        response = await call_next(request)
+
+        if response.status_code == 200:
+            response_body = b""
+            async for chunk in response.body_iterator:
+                response_body += chunk
+            response_data = {
+                "content": response_body,
+                "status_code": response.status_code,
+                "headers": dict(response.headers),
+            }
+
+            os.makedirs(os.path.dirname(cache_file), exist_ok=True)
+            with open(cache_file, "wb") as f:
+                pickle.dump(response_data, f)
+
+            return Response(
+                content=response_body,
+                status_code=response.status_code,
+                headers=dict(response.headers),
+            )
+
+        return response
+
+    def _generate_cache_key(self, request: BackendRequest) -> str:
+        current_pickle_path = self.state.current_pickle_path
+        hash_input = f"{current_pickle_path}:{request.method}:{request.url.path}:{request.url.query}"
+        print("Hash input: ", hash_input)
+        return hashlib.md5(hash_input.encode()).hexdigest()
diff --git a/backend/middleware/readiness.py b/backend/middleware/readiness.py
index 47a12b1..f30a5ba 100644
--- a/backend/middleware/readiness.py
+++ b/backend/middleware/readiness.py
@@ -1,16 +1,19 @@
+from backend.state import BackendRequest
 from backend.state import BackendState
 from fastapi import HTTPException
-from fastapi import Request
 from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.types import ASGIApp
 
 
 class ReadinessMiddleware(BaseHTTPMiddleware):
-    def __init__(self, app, state: BackendState):
+    def __init__(self, app: ASGIApp, state: BackendState):
         super().__init__(app)
         self.state = state
 
-    async def dispatch(self, request: Request, call_next):
+    async def dispatch(self, request: BackendRequest, call_next):
         if not self.state.ready and request.url.path != "/health":
             raise HTTPException(status_code=503, detail="Service is not ready")
+
+        request.state.backend_state = self.state
         response = await call_next(request)
         return response
diff --git a/backend/state.py b/backend/state.py
index cfd438f..8959c8b 100644
--- a/backend/state.py
+++ b/backend/state.py
@@ -1,22 +1,127 @@
-from typing import Optional
+from asyncio import create_task
+from asyncio import gather
+from datetime import datetime
+import os
+from typing import TypedDict
 
 from anchorpy import Wallet
+from backend.utils.vat import load_newest_files
+from backend.utils.waiting_for import waiting_for
+from driftpy.account_subscription_config import AccountSubscriptionConfig
 from driftpy.drift_client import DriftClient
 from driftpy.market_map.market_map import MarketMap
+from driftpy.market_map.market_map_config import (
+    WebsocketConfig as MarketMapWebsocketConfig,
+)
+from driftpy.market_map.market_map_config import MarketMapConfig
 from driftpy.pickle.vat import Vat
+from driftpy.types import MarketType
 from driftpy.user_map.user_map import UserMap
+from driftpy.user_map.user_map_config import (
+    WebsocketConfig as UserMapWebsocketConfig,
+)
+from driftpy.user_map.user_map_config import UserMapConfig
+from driftpy.user_map.user_map_config import UserStatsMapConfig
 from driftpy.user_map.userstats_map import UserStatsMap
+from fastapi import Request
 import pandas as pd
 from solana.rpc.async_api import AsyncClient
 
 
 class BackendState:
-    some_df: pd.DataFrame
     connection: AsyncClient
     dc: DriftClient
     spot_map: MarketMap
     perp_map: MarketMap
     user_map: UserMap
     stats_map: UserStatsMap
+
+    current_pickle_path: str
     vat: Vat
     ready: bool
+
+    def initialize(
+        self, url: str
+    ):  # Not using __init__ because we need the RPC URL to be passed in
+        self.connection = AsyncClient(url)
+        self.dc = DriftClient(
+            self.connection,
+            Wallet.dummy(),
+            "mainnet",
+            account_subscription=AccountSubscriptionConfig("cached"),
+        )
+        self.perp_map = MarketMap(
+            MarketMapConfig(
+                self.dc.program,
+                MarketType.Perp(),
+                MarketMapWebsocketConfig(),
+                self.dc.connection,
+            )
+        )
+        self.spot_map = MarketMap(
+            MarketMapConfig(
+                self.dc.program,
+                MarketType.Spot(),
+                MarketMapWebsocketConfig(),
+                self.dc.connection,
+            )
+        )
+        self.user_map = UserMap(UserMapConfig(self.dc, UserMapWebsocketConfig()))
+        self.stats_map = UserStatsMap(UserStatsMapConfig(self.dc))
+        self.vat = Vat(
+            self.dc,
+            self.user_map,
+            self.stats_map,
+            self.spot_map,
+            self.perp_map,
+        )
+
+    async def bootstrap(self):
+        with waiting_for("drift client"):
+            await self.dc.subscribe()
+        with waiting_for("subscriptions"):
+            await gather(
+                create_task(self.spot_map.subscribe()),
+                create_task(self.perp_map.subscribe()),
+                create_task(self.user_map.subscribe()),
+                create_task(self.stats_map.subscribe()),
+            )
+        self.current_pickle_path = "bootstrap"
+
+    async def take_pickle_snapshot(self):
+        now = datetime.now()
+        folder_name = now.strftime("vat-%Y-%m-%d-%H-%M-%S")
+        if not os.path.exists("pickles"):
+            os.makedirs("pickles")
+        path = os.path.join("pickles", folder_name, "")
+
+        os.makedirs(path, exist_ok=True)
+        with waiting_for("pickling"):
+            result = await self.vat.pickle(path)
+        with waiting_for("unpickling"):
+            await self.load_pickle_snapshot(path)
+        return result
+
+    async def load_pickle_snapshot(self, directory: str):
+        pickle_map = load_newest_files(directory)
+        self.current_pickle_path = directory
+        with waiting_for("unpickling"):
+            await self.vat.unpickle(
+                users_filename=pickle_map["usermap"],
+                user_stats_filename=pickle_map["userstats"],
+                spot_markets_filename=pickle_map["spot"],
+                perp_markets_filename=pickle_map["perp"],
+                spot_oracles_filename=pickle_map["spotoracles"],
+                perp_oracles_filename=pickle_map["perporacles"],
+            )
+        return pickle_map
+
+
+class BackendRequest(Request):
+    @property
+    def backend_state(self) -> BackendState:
+        return self.state.backend_state
+
+    @backend_state.setter
+    def backend_state(self, value: BackendState):
+        self.state.backend_state = value
diff --git a/backend/utils/matrix.py b/backend/utils/matrix.py
new file mode 100644
index 0000000..a45d54e
--- /dev/null
+++ b/backend/utils/matrix.py
@@ -0,0 +1,160 @@
+from backend.utils.user_metrics import get_usermap_df
+from driftpy.constants.spot_markets import mainnet_spot_market_configs
+from driftpy.drift_client import DriftClient
+from driftpy.pickle.vat import Vat
+import pandas as pd
+
+
+async def get_matrix(
+    drift_client: DriftClient,
+    vat: Vat,
+    mode: int = 0,
+    perp_market_index: int = 0,
+):
+    NUMBER_OF_SPOT = len(mainnet_spot_market_configs)
+
+    res = await get_usermap_df(
+        drift_client,
+        vat.users,
+        "margins",
+        oracle_distortion=0,
+        cov_matrix="ignore stables",
+        n_scenarios=0,
+    )
+    levs_none = res["leverages_none"]
+    levs_init = res["leverages_initial"]
+    levs_maint = res["leverages_maintenance"]
+    user_keys = res["user_keys"]
+
+    levs_maint = [x for x in levs_maint if int(x["health"]) <= 10]
+    levs_init = [x for x in levs_init if int(x["health"]) <= 10]
+
+    df: pd.DataFrame
+    match mode:
+        case 0:  # no filter
+            df = pd.DataFrame(levs_none, index=user_keys)
+        case 1:  # liq within 50% of oracle
+            df = pd.DataFrame(levs_none, index=user_keys)
+        case 2:  # init. health <= 10%
+            user_keys = [x["user_key"] for x in levs_init]
+            df = pd.DataFrame(levs_init, index=user_keys)
+        case 3:  # maint. health <= 10%
+            user_keys = [x["user_key"] for x in levs_maint]
+            df = pd.DataFrame(levs_maint, index=user_keys)
+
+    def get_rattt(row):
+        calculations = [
+            (
+                "all_assets",
+                lambda v: v if v > 0 else 0,
+            ),  # Simplified from v / row['spot_asset'] * row['spot_asset']
+            (
+                "all",
+                lambda v: (
+                    v
+                    / row["spot_asset"]
+                    * (row["perp_liability"] + row["spot_liability"])
+                    if v > 0
+                    else 0
+                ),
+            ),
+            (
+                "all_perp",
+                lambda v: v / row["spot_asset"] * row["perp_liability"] if v > 0 else 0,
+            ),
+            (
+                "all_spot",
+                lambda v: v / row["spot_asset"] * row["spot_liability"] if v > 0 else 0,
+            ),
+            (
+                f"perp_{perp_market_index}_long",
+                lambda v: (
+                    v / row["spot_asset"] * row["net_p"][perp_market_index]
+                    if v > 0 and row["net_p"][perp_market_index] > 0
+                    else 0
+                ),
+            ),
+            (
+                f"perp_{perp_market_index}_short",
+                lambda v: (
+                    v / row["spot_asset"] * row["net_p"][perp_market_index]
+                    if v > 0 and row["net_p"][perp_market_index] < 0
+                    else 0
+                ),
+            ),
+        ]
+
+        series_list = []
+        for suffix, calc_func in calculations:
+            series = pd.Series([calc_func(val) for key, val in row["net_v"].items()])
+            series.index = [f"spot_{x}_{suffix}" for x in series.index]
+            series_list.append(series)
+
+        return pd.concat(series_list)
+
+    df = pd.concat([df, df.apply(get_rattt, axis=1)], axis=1)
+
+    def calculate_effective_leverage(group):
+        assets = group["all_assets"]
+        liabilities = group["all_liabilities"]
+        return liabilities / assets if assets != 0 else 0
+
+    def format_with_checkmark(value, condition, mode, financial=False):
+        if financial:
+            formatted_value = f"{value:,.2f}"
+        else:
+            formatted_value = f"{value:.2f}"
+
+        if condition and mode > 0:
+            return f"{formatted_value} ✅"
+        return formatted_value
+
+    res = pd.DataFrame(
+        {
+            ("spot" + str(i)): (
+                df[f"spot_{i}_all_assets"].sum(),
+                format_with_checkmark(
+                    df[f"spot_{i}_all"].sum(),
+                    0 < df[f"spot_{i}_all"].sum() < 1_000_000,
+                    mode,
+                    financial=True,
+                ),
+                format_with_checkmark(
+                    calculate_effective_leverage(
+                        {
+                            "all_assets": df[f"spot_{i}_all_assets"].sum(),
+                            "all_liabilities": df[f"spot_{i}_all"].sum(),
+                        }
+                    ),
+                    0
+                    < calculate_effective_leverage(
+                        {
+                            "all_assets": df[f"spot_{i}_all_assets"].sum(),
+                            "all_liabilities": df[f"spot_{i}_all"].sum(),
+                        }
+                    )
+                    < 2,
+                    mode,
+                ),
+                df[f"spot_{i}_all_spot"].sum(),
+                df[f"spot_{i}_all_perp"].sum(),
+                df[f"spot_{i}_perp_{perp_market_index}_long"].sum(),
+                df[f"spot_{i}_perp_{perp_market_index}_short"].sum(),
+            )
+            for i in range(NUMBER_OF_SPOT)
+        },
+        index=[
+            "all_assets",
+            "all_liabilities",
+            "effective_leverage",
+            "all_spot",
+            "all_perp",
+            f"perp_{perp_market_index}_long",
+            f"perp_{perp_market_index}_short",
+        ],
+    ).T
+
+    res["all_liabilities"] = res["all_liabilities"].astype(str)
+    res["effective_leverage"] = res["effective_leverage"].astype(str)
+
+    return res, df
diff --git a/backend/utils/user_metrics.py b/backend/utils/user_metrics.py
new file mode 100644
index 0000000..8e4b77d
--- /dev/null
+++ b/backend/utils/user_metrics.py
@@ -0,0 +1,202 @@
+import copy
+from typing import List, Optional
+
+from driftpy.constants.numeric_constants import MARGIN_PRECISION
+from driftpy.constants.numeric_constants import QUOTE_PRECISION
+from driftpy.constants.perp_markets import mainnet_perp_market_configs
+from driftpy.constants.spot_markets import mainnet_spot_market_configs
+from driftpy.drift_client import DriftClient
+from driftpy.drift_user import DriftUser
+from driftpy.math.margin import MarginCategory
+from driftpy.types import OraclePriceData
+from driftpy.user_map.user_map import UserMap
+
+
+def get_init_health(user: DriftUser):
+    """
+    Returns the initial health of the user.
+    """
+    if user.is_being_liquidated():
+        return 0
+
+    total_collateral = user.get_total_collateral(MarginCategory.INITIAL)
+    initial_margin_req = user.get_margin_requirement(MarginCategory.INITIAL)
+
+    if initial_margin_req == 0 and total_collateral >= 0:
+        return 100
+    elif total_collateral <= 0:
+        return 0
+    else:
+        return round(
+            min(100, max(0, (1 - initial_margin_req / total_collateral) * 100))
+        )
+
+
+def comb_asset_liab(a_l_tup):
+    """Collapse an (asset, liability) tuple into a net value."""
+    return a_l_tup[0] - a_l_tup[1]
+
+
+def get_collateral_composition(x: DriftUser, margin_category, n):
+    net_v = {
+        i: comb_asset_liab(
+            x.get_spot_market_asset_and_liability_value(i, margin_category)
+        )
+        / QUOTE_PRECISION
+        for i in range(n)
+    }
+    return net_v
+
+
+def get_perp_liab_composition(x: DriftUser, margin_category, n):
+    net_p = {
+        i: x.get_perp_market_liability(i, margin_category, signed=True)
+        / QUOTE_PRECISION
+        for i in range(n)
+    }
+    return net_p
+
+
+def get_user_metrics(x: DriftUser, margin_category: Optional[MarginCategory]):
+    """
+    Returns a dictionary of the user's health, leverage, and other metrics.
+    """
+    NUMBER_OF_SPOT = len(mainnet_spot_market_configs)
+    NUMBER_OF_PERP = len(mainnet_perp_market_configs)
+
+    metrics = {
+        "user_key": x.user_public_key,
+        "leverage": x.get_leverage() / MARGIN_PRECISION,
+        "perp_liability": x.get_total_perp_position_liability(margin_category)
+        / QUOTE_PRECISION,
+        "spot_asset": x.get_spot_market_asset_value(None, margin_category)
+        / QUOTE_PRECISION,
+        "spot_liability": x.get_spot_market_liability_value(None, margin_category)
+        / QUOTE_PRECISION,
+        "upnl": x.get_unrealized_pnl(True) / QUOTE_PRECISION,
+        "net_usd_value": (
+            x.get_net_spot_market_value(None) + x.get_unrealized_pnl(True)
+        )
+        / QUOTE_PRECISION,
+    }
+    metrics["health"] = (
+        get_init_health(x)
+        if margin_category == MarginCategory.INITIAL
+        else x.get_health()
+    )
+    metrics["net_v"] = get_collateral_composition(x, margin_category, NUMBER_OF_SPOT)
+    metrics["net_p"] = get_perp_liab_composition(x, margin_category, NUMBER_OF_PERP)
+
+    return metrics
+
+
+def get_skipped_oracles(cov_matrix: Optional[str]) -> List[str]:
+    """
+    Determine which oracles to skip based on the cov_matrix parameter.
+    """
+    groups = {
+        "sol only": ["SOL"],
+        "sol lst only": ["mSOL", "jitoSOL", "bSOL"],
+        "sol ecosystem only": ["PYTH", "JTO", "WIF", "JUP", "TNSR", "DRIFT"],
+        "meme": ["WIF"],
+        "wrapped only": ["wBTC", "wETH"],
+        "stables only": ["USD"],
+    }
+    if cov_matrix in groups:
+        return [
+            str(x.oracle)
+            for x in mainnet_spot_market_configs
+            if x.symbol not in groups[cov_matrix]
+        ]
+    elif cov_matrix == "ignore stables":
+        return [str(x.oracle) for x in mainnet_spot_market_configs if "USD" in x.symbol]
+    else:
+        return []
+
+
+def calculate_leverages(
+    user_vals: list[DriftUser], margin_category: Optional[MarginCategory]
+):
+    """
+    Calculate the leverage metrics for all users at a given margin category.
+    """
+    return [get_user_metrics(x, margin_category) for x in user_vals]
+
+
+async def get_usermap_df(
+    _drift_client: DriftClient,
+    user_map: UserMap,
+    mode: str,
+    oracle_distortion: float = 0.1,
+    cov_matrix: Optional[str] = None,
+    n_scenarios: int = 5,
+):
+    user_keys = list(user_map.user_map.keys())
+    user_vals = list(user_map.values())
+
+    skipped_oracles = get_skipped_oracles(cov_matrix)
+
+    if mode == "margins":
+        leverages_none = calculate_leverages(user_vals, None)
+        leverages_initial = calculate_leverages(user_vals, MarginCategory.INITIAL)
+        leverages_maintenance = calculate_leverages(
+            user_vals, MarginCategory.MAINTENANCE
+        )
+        return {
+            "leverages_none": leverages_none,
+            "leverages_initial": leverages_initial,
+            "leverages_maintenance": leverages_maintenance,
+            "user_keys": user_keys,
+        }
+    else:
+        num_entries = n_scenarios
+        new_oracles_dat_up = [{} for _ in range(num_entries)]
+        new_oracles_dat_down = [{} for _ in range(num_entries)]
+
+        print("skipped oracles:", skipped_oracles)
+        distorted_oracles = []
+        cache_up = copy.deepcopy(_drift_client.account_subscriber.cache)
+        cache_down = copy.deepcopy(_drift_client.account_subscriber.cache)
+        # Distort each non-skipped oracle up and down by oracle_distortion * (i + 1).
+        oracle_price_data = _drift_client.account_subscriber.cache["oracle_price_data"]
+        for key, val in oracle_price_data.items():
+            for i in range(num_entries):
+                new_oracles_dat_up[i][key] = copy.deepcopy(val)
+                new_oracles_dat_down[i][key] = copy.deepcopy(val)
+            if cov_matrix is not None and key in skipped_oracles:
+                continue
+            distorted_oracles.append(key)
+            for i in range(num_entries):
+                oracle_distort_up = max(1 + oracle_distortion * (i + 1), 1)
+                oracle_distort_down = max(1 - oracle_distortion * (i + 1), 0)
+
+                if isinstance(new_oracles_dat_up[i][key], OraclePriceData):
+                    new_oracles_dat_up[i][key].price *= oracle_distort_up
+                    new_oracles_dat_down[i][key].price *= oracle_distort_down
+                else:
+                    new_oracles_dat_up[i][key].data.price *= oracle_distort_up
+                    new_oracles_dat_down[i][key].data.price *= oracle_distort_down
+
+        levs_none = calculate_leverages(user_vals, None)
+        levs_up = []
+        levs_down = []
+
+        # get_user_metrics reads oracle prices through the client's cached
+        # account subscriber, so point it at each distorted cache in turn.
+        original_cache = _drift_client.account_subscriber.cache
+        for i in range(num_entries):
+            cache_up["oracle_price_data"] = new_oracles_dat_up[i]
+            cache_down["oracle_price_data"] = new_oracles_dat_down[i]
+            _drift_client.account_subscriber.cache = cache_up
+            levs_up.append([get_user_metrics(x, None) for x in user_vals])
+            _drift_client.account_subscriber.cache = cache_down
+            levs_down.append([get_user_metrics(x, None) for x in user_vals])
+        _drift_client.account_subscriber.cache = original_cache
+
+        return {
+            "leverages_none": levs_none,
+            "leverages_up": tuple(levs_up),
+            "leverages_down": tuple(levs_down),
+            "user_keys": user_keys,
+            "distorted_oracles": distorted_oracles,
+        }
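
A quick illustration of the caching scheme introduced by this patch (a minimal
sketch mirroring CacheMiddleware._generate_cache_key above; the snapshot path
used below is hypothetical): cache keys are derived from
state.current_pickle_path, so each call to load_pickle_snapshot implicitly
invalidates every previously cached /api response, and no explicit cache
eviction is needed.

    import hashlib

    def cache_key(pickle_path: str, method: str, path: str, query: str) -> str:
        # Same hash input as CacheMiddleware._generate_cache_key: the snapshot
        # path is part of the key, so a new snapshot changes every key.
        hash_input = f"{pickle_path}:{method}:{path}:{query}"
        return hashlib.md5(hash_input.encode()).hexdigest()

    # A fresh snapshot yields a different key for the same request, so stale
    # files under cache/ are simply never read again:
    print(cache_key("pickles/vat-2024-10-02-09-19-54", "GET", "/api/health", ""))
    print(cache_key("bootstrap", "GET", "/api/health", ""))  # bypassed before lookup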