diff --git a/.prettierrc.json5 b/.prettierrc.json5 index 3d1ea86f4e3..a328c95304b 100644 --- a/.prettierrc.json5 +++ b/.prettierrc.json5 @@ -28,6 +28,16 @@ plugins: ['prettier-plugin-sh'], }, }, + { + // Keep network definitions readable on wide lines + files: [ + 'packages/portfolio-contract/src/network/network.prod.ts', + 'packages/portfolio-contract/test/network/test-network.ts', + ], + options: { + printWidth: 180, + }, + }, ], singleQuote: true, } diff --git a/multichain-testing/scripts/ymax-tool.ts b/multichain-testing/scripts/ymax-tool.ts index b6c534705ff..d5be414335a 100755 --- a/multichain-testing/scripts/ymax-tool.ts +++ b/multichain-testing/scripts/ymax-tool.ts @@ -13,10 +13,8 @@ import { type ProposalType, type TargetAllocation, } from '@aglocal/portfolio-contract/src/type-guards.ts'; -import { - makePortfolioQuery, - makePortfolioSteps, -} from '@aglocal/portfolio-contract/tools/portfolio-actors.ts'; +import { makePortfolioSteps } from '@aglocal/portfolio-contract/src/plan-transfers.ts'; +import { makePortfolioQuery } from '@aglocal/portfolio-contract/tools/portfolio-actors.ts'; import { axelarConfigTestnet, gmpAddresses as gmpConfigs, @@ -157,7 +155,10 @@ const openPositions = async ( const goal = objectMap(goalData, toAmt); console.debug('TODO: address Ethereum-only limitation'); const evm = 'Ethereum'; - const { give: giveWFees } = makePortfolioSteps(goal, { evm, feeBrand: BLD }); + const { give: giveWFees } = await makePortfolioSteps(goal, { + evm, + feeBrand: BLD, + }); // XXX WIP: contract is to pay BLD fee const { GmpFee: _gf, ...give } = giveWFees; const proposal: ProposalType['openPortfolio'] = { diff --git a/packages/portfolio-contract/package.json b/packages/portfolio-contract/package.json index c6e221f67f6..511772a1d0d 100644 --- a/packages/portfolio-contract/package.json +++ b/packages/portfolio-contract/package.json @@ -51,6 +51,7 @@ "@endo/marshal": "^1.8.0", "@endo/pass-style": "^1.6.3", "@endo/patterns": "^1.7.0", + 
"javascript-lp-solver": "^0.4.24", "viem": "^2.31.0" }, "ava": { diff --git a/packages/portfolio-contract/solver-approach-design.md b/packages/portfolio-contract/solver-approach-design.md new file mode 100644 index 00000000000..d96ef40c836 --- /dev/null +++ b/packages/portfolio-contract/solver-approach-design.md @@ -0,0 +1,344 @@ +# Rebalance Solver Overview + +This document describes the current balance rebalancing solver used in `plan-solve.ts` and the surrounding graph/diagnostics utilities. + +## 1. Domain & Graph Structure +We model a multi-chain, multi-place asset distribution problem as a directed flow network. + +Node (vertex) types (all implement `AssetPlaceRef`): +- Chain hubs: `@ChainName` (e.g. `@Arbitrum`, `@Avalanche`, `@Ethereum`, `@noble`, `@agoric`). A single hub per chain collects and redistributes flow for that chain. +- Protocol / pool leaves: `Protocol_Chain` identifiers (e.g. `Aave_Arbitrum`, `Beefy_re7_Avalanche`, `Compound_Ethereum`). Each is attached to exactly one hub (its chain). +- Local Agoric seats: `''`, `''`, and `'+agoric'` – all leaves on the `@agoric` hub. + +Notes: +- Pool-to-chain affiliation is sourced from PoolPlaces (typed map) at build time. Hubs are not auto-added from PoolPlaces; only pools whose hub is already present in the NetworkSpec are auto-included. When a pool id isn't found, `chainOf(x)` falls back to parsing the suffix of `Protocol_Chain`. +- `+agoric` is a staging account on `@agoric`, used to accumulate new deposits before distribution; for deposit planning it must end at 0 in the final targets. + +Supply (net position) per node: +``` +netSupply(node) = current[node] - target[node] +> 0 : surplus (must send out) +< 0 : deficit (must receive) += 0 : balanced +``` +Sum of all supplies must be zero for feasibility. + +## 2. Edges +Two classes of directed edges: +1. Intra-chain (leaf <-> hub) + - Always present for every non-hub leaf. 
+ - Attributes: `variableFee=1`, `fixedFee=0`, `timeFixed=1`, very large capacity. +2. Inter-chain (hub -> hub) links provided by `NetworkSpec.links`: + - CCTP slow (EVM -> Noble): high latency (≈1080s), low/zero variable fee. + - CCTP return (Noble -> EVM): low latency (≈20s). + - FastUSDC (unidirectional EVM -> Noble): `variableFeeBps≈15` (≈0.15%), `timeSec≈45`. + - Noble <-> Agoric IBC: `variableFeeBps≈200` (≈2.00%), `timeSec≈10`. +Each link is directional; reverse direction is added explicitly where needed. + +Overrides: +- Explicit inter-hub links from the `NetworkSpec` supersede any auto-added base edge with the same `src -> dest`. This lets the definition supply real pricing/latency. + +Edge attributes used by optimizer: +- `capacity` (numeric upper bound on flow) – large default for intra-chain. +- `variableFee` (linear cost coefficient per unit flow) – used in Cheapest mode. For inter-hub links this comes from `LinkSpec.variableFeeBps`. +- `fixedFee` (flat activation cost) – triggers binary var only if >0 in Cheapest mode (from `LinkSpec.flatFee` when provided). +- `timeFixed` (activation latency metric) – triggers binary var only if >0 in Fastest mode (from `LinkSpec.timeSec`). + +## 3. Optimization Modes +Two primary objectives, with optional secondary tie-breaks: +- Cheapest (primary): Minimize Σ (fixedFee_e * y_e + variableFee_e * f_e) +- Fastest (primary): Minimize Σ (timeFixed_e * y_e) + +Secondary (tie-break) options: +1) Two-pass lexicographic (not currently enabled): + - Solve primary, fix the optimum within ±ε as a constraint, then re-solve minimizing the secondary. +2) Composite objective (implemented): + - Minimize Primary + ε · Secondary, where ε is chosen dynamically small enough not to perturb the primary optimum. + +Current behavior: +- In Cheapest mode, secondary prefers lower Σ(timeFixed_e · y_e). +- In Fastest mode, secondary prefers lower Σ(fixedFee_e · y_e + variableFee_e · f_e). 
+ +In both modes: +- Continuous flow variables: `f_e ≥ 0` for every edge. +- Linking constraint for edges with a binary: `f_e ≤ capacity_e * y_e`. + +## 4. Flow Conservation +For every node v: +``` +Σ_out f_e - Σ_in f_e = netSupply(v) +``` +A surplus node exports its excess; a deficit node imports exactly its shortfall. + +## 5. Model Representation (javascript-lp-solver) +We build an LP/MIP object with: +- `variables`: one per flow variable `f_edgeId`, each holding coefficients into every node constraint and capacity constraint; plus binary usage vars `y_edgeId` when required. +- `constraints`: + - Node equality constraints (one per node with any incident edges). + - Capacity constraints `f_e ≤ capacity_e`. + - Link constraints when binary present: `f_e - capacity_e * y_e ≤ 0`. +- `optimize`: synthetic key (e.g. `obj` internally then projected to `cost`). +- `opType`: `min`. +- `binaries` / `ints`: maps of binary / integer variables (only binaries used for now). +No scaling: amounts, fees, and times are used directly (inputs are within safe numeric ranges: amounts up to millions, fees up to ~0.2 variable or a few dollars fixed, latencies minutes/hours). + +## 6. Solution Decoding +After solving we extract active edges where `flow > ε` (ε=1e-6). These positive-flow edges are then scheduled using the deterministic algorithm in Section 9 to produce an ordered list of executable steps. Each step is emitted as `MovementDesc { src, dest, amount }` with amount reconstructed as bigint (rounded from numeric flow). + +## 7. Example (Conceptual) +If Aave_Arbitrum has surplus 30 and Beefy_re7_Avalanche has deficit 30, optimal Cheapest path may produce steps: +``` +Aave_Arbitrum -> @Arbitrum -> @noble -> @Avalanche -> Beefy_re7_Avalanche +``` +Reflected as four MovementDescs (one per edge used) with amount 30. + +## 8. Extensibility Notes +- Additional cost dimensions (e.g. risk scores) can be integrated by augmenting objective coefficients. 
+- Scaling can be reintroduced if future magnitudes exceed safe integer precision. +- Multi-objective (lexicographic) could wrap two solves (first fastest then cheapest among fastest solutions) if required. + +## 9. Execution Ordering (Deterministic Scheduling) +The emitted MovementDescs follow a dependency-based schedule ensuring every step is feasible with currently available funds: +1. Initialization: Any node with positive netSupply provides initial available liquidity. +2. Candidate selection loop: + - At each iteration, consider unscheduled positive-flow edges whose source node currently has sufficient available units (>= flow). + - If multiple candidates exist, prefer edges whose originating chain (derived from the source node) matches the chain of the previously scheduled edge (chain grouping heuristic). This groups sequential operations per chain, especially helpful for EVM-origin flows. + - If still multiple, choose the edge with smallest numeric edge id (stable deterministic tiebreaker). +3. Availability update: After scheduling an edge (src->dest, flow f), decrease availability at src by f and increase availability at dest by f. +4. Deadlock fallback: If no edge is currently fundable (e.g. all remaining edges originate at intermediate hubs with zero temporary balance), schedule remaining edges in ascending edge id order, simulating availability updates to break the cycle. + +Resulting guarantees: +- No step requires funds that have not yet been made available by a prior step (except in the explicit deadlock fallback case, which should only occur for purely cyclic zero-supply intermediate structures). +- Order is fully deterministic given the solved flows. +- Movements are naturally grouped by chain where possible, improving readability for execution planning. + +--- + +## 10. 
NetworkSpec Schema & Validation + +Schema Summary (TypeScript interfaces): +``` +// Chains (hubs) +interface ChainSpec { + name: SupportedChain; // e.g., 'agoric' | 'noble' | 'Arbitrum' + chainId?: string; // cosmos chain-id or network id + evmChainId?: number; // EVM numeric chain id if applicable + bech32Prefix?: string; // for Cosmos chains + axelarKey?: AxelarChain; // Axelar registry key if differs from name + feeDenom?: string; // e.g., 'ubld', 'uusdc' + gasDenom?: string; // if distinct from feeDenom + control: 'ibc' | 'axelar' | 'local'; // how Agoric reaches this chain +} + +// Pools (leaves) +interface PoolSpec { + pool: PoolKey; // 'Aave_Arbitrum', 'USDNVault', ... + chain: SupportedChain; // host chain of the pool + protocol: YieldProtocol; // protocol identifier +} + +// Local places: seats (, ) and local accounts (+agoric) +interface LocalPlaceSpec { + id: AssetPlaceRef; // '' | '' | '+agoric' | PoolKey + chain: SupportedChain; // typically 'agoric' + variableFeeBps?: number; // optional local edge variable fee (bps) + flatFee?: NatValue; // optional flat fee in local units + timeSec?: number; // optional local latency + capacity?: NatValue; // optional local capacity + enabled?: boolean; +} + +// Directed inter-hub link +interface LinkSpec { + src: SupportedChain; // source chain + dest: SupportedChain; // destination chain + transfer: 'ibc' | 'fastusdc' | 'cctpReturn' | 'cctpSlow'; + variableFeeBps: number; // variable fee in basis points of amount + timeSec: number; // latency in seconds + flatFee?: NatValue; // optional fixed fee (minor units) + capacity?: NatValue; // optional throughput cap + min?: NatValue; // optional minimum transfer size + priority?: number; // optional tie-break hint + enabled?: boolean; // admin toggle +} + +interface NetworkSpec { + debug?: boolean; // enable extra diagnostics/debug + environment?: 'dev' | 'test' | 'prod'; + chains: ChainSpec[]; + pools: PoolSpec[]; + localPlaces?: LocalPlaceSpec[]; + links: LinkSpec[]; 
// inter-hub links only +} +``` + +Builder & translation to solver: +- Hubs come from `spec.chains`. Hubs are not auto-added from PoolPlaces. +- Leaves include `spec.pools`, `spec.localPlaces.id`, known PoolPlaces whose hub is present, and any nodes mentioned in `current`/`target` (validated to avoid implicitly adding hubs). +- Intra-chain leaf<->hub edges are auto-added with large capacity and base costs (`variableFee=1`, `timeFixed=1`). +- Agoric-local overrides: `+agoric`, ``, and `` have zero-fee/zero-time edges to/from `@agoric` and between each other. +- Inter-hub links from `spec.links` are added hub->hub. If an auto-added edge exists with the same `src -> dest`, the explicit link replaces it (override precedence). Mapping to solver fields: `variableFee = variableFeeBps`, `timeFixed = timeSec`, `fixedFee = flatFee`, `via = transfer`. + +Determinism: +- After applying all edges, edge IDs are normalized to `e0..eN` in insertion order to stabilize solver behavior and tests. + +Validation: +- Minimal validation ensures link `src`/`dest` chains are declared in `spec.chains`. +- Dynamic nodes (from `current`/`target`) must not introduce undeclared hubs; known pools require their host hub to be present. +- Additional post-failure checks are performed by `preflightValidateNetworkPlan` (see below). + +### 10.1 PoolPlaces integration and chain inference +- PoolPlaces provides the canonical mapping from pool ids to chain hubs used by the builder. Hubs are not implicitly added from this mapping. +- `chainOf(x)` resolves a node's chain via PoolPlaces; if not found and `x` matches `Protocol_Chain`, it falls back to using the `_Chain` suffix. + +### 10.2 Diagnostics and failure analysis +Error handling on infeasible solves is designed for clarity with minimal overhead when things work: +- Normal operation: on success, no extra diagnostics are computed. 
+- On solver infeasibility: if `graph.debug` is true (from `NetworkSpec.debug`), the solver emits a concise error plus diagnostic details to aid triage. Otherwise, it throws a terse error message. +- After any infeasible solve, a post-failure preflight validator runs: it checks for unsupported position keys and missing inter-hub reachability required by the requested flows. If it finds a clearer root cause, it throws a targeted error explaining the issue. + +Implementation notes: +- Diagnostics live in `graph-diagnose.ts` (`diagnoseInfeasible`, `preflightValidateNetworkPlan`). +- Enable by setting `debug: true` in your `NetworkSpec`. +- Typical diagnostic output includes: supply balance summary, stranded sources/sinks, hub connectivity and inter-hub links, and a suggested set of missing edges. + +--- + +### 10.3 Path explanations and near-miss analysis + +When a particular route is suspected to be viable but the solver reports infeasible, it helps to check a candidate path hop-by-hop and to summarize “almost works” pairs. Two helpers are available in `graph-diagnose.ts`: + +- `explainPath(graph, path: string[])` + - Validates each hop in the given path array (e.g., `['+agoric', '@agoric', '@noble', 'USDNVault']`). + - Returns `{ ok: true }` if every hop exists and has positive capacity; otherwise returns the first failing hop with a reason and suggestion: + - `missing-node`: node isn’t in `graph.nodes`. + - `missing-edge`: no `src -> dest` edge exists. + - `wrong-direction`: reverse edge exists but forward is missing (suggest adding forward edge). + - `capacity-zero`: edge exists but has no positive capacity. + +- `diagnoseNearMisses(graph)` + - Looks at all positive-supply sources to negative-supply sinks and classifies why each unreachable pair fails. + - Categories include `no-directed-path` and `capacity-blocked`, with an optional hint (e.g., “consider adding inter-hub @agoric->@Avalanche”). 
+ - This runs automatically (and is appended to the thrown message) when `NetworkDefinition.debug` is true and the solver returns infeasible. + +Example usage (TypeScript): + +```ts +import '@endo/init/debug.js'; +import { Far } from '@endo/far'; +import { AmountMath } from '@agoric/ertp'; +import type { Brand, NatAmount } from '@agoric/ertp'; +import { makeGraphFromDefinition } from '@aglocal/portfolio-contract/src/network/buildGraph.js'; +import { PROD_NETWORK } from '@aglocal/portfolio-contract/src/network/network.prod.js'; +import type { LinkSpec } from '@aglocal/portfolio-contract/src/network/network-spec.js'; +import type { AssetPlaceRef } from '@aglocal/portfolio-contract/src/type-guards-steps.js'; +import { + explainPath, + diagnoseNearMisses, +} from '@aglocal/portfolio-contract/src/graph-diagnose.js'; + +// Define a small scenario. +const USDC = Far('USDC Brand') as Brand<'nat'>; +const deposit = AmountMath.make(USDC, 50_000_000n); +const zero = AmountMath.make(USDC, 0n); +const current: Partial> = { + '+agoric': deposit, +}; +const target: Partial> = { + '+agoric': zero, + USDNVault: deposit, +}; + +// Build an incomplete graph. +const network = JSON.parse(JSON.stringify(PROD_NETWORK)) as typeof PROD_NETWORK; +const agoricToNoble = network.links.find( + link => link.src === 'agoric' && link.dest === 'noble', +) as LinkSpec; +network.links = network.links.filter(link => link !== agoricToNoble); +const incomplete = makeGraphFromDefinition(network, current, target, USDC); + +// 1) Summarize near-misses between sources and destinations. 
+const near = diagnoseNearMisses(incomplete); +console.log(near); +// => { missingPairs: [ +// { src: '+agoric', dest: 'USDNVault', category: 'no-directed-path', hint: undefined } ] + +// 2) Explain a candidate path +const path = ['+agoric', '@agoric', '@noble', 'USDNVault']; +const pathReport = explainPath(incomplete, path); +console.log(pathReport); +// => { ok: false, failAtIndex: 1, src: '@agoric', dest: '@noble', reason: 'wrong-direction', +// suggestion: 'add edge @agoric->@noble (reverse exists)' } + +// 3) Apply the suggestion and retry. +network.links.push(agoricToNoble); +const complete = makeGraphFromDefinition(network, current, target, USDC); +console.log(diagnoseNearMisses(complete)); +// => { missingPairs: [] } +console.log(explainPath(complete, path)); +// => { ok: true } +``` + +Notes: +- These checks are purely topological/capacity-driven and independent of the optimize mode (cheapest/fastest). +- They’re inexpensive (BFS over a small graph) and run only when requested or when `debug` is enabled and the solve is infeasible. + + +## 11. Todo +Critical +- Typed Node/Edge aliases to enforce pool↔hub pairing at compile time. +- add USDN and USDNVault to PROD_NETWORK; override the fee +- use the fee to compute + + +Further items for solver +- support additions of dynamic constraints (e.g., when route price changes) +- add fee information and time to moves +- add minimums to links +- add capacity limits to links + - test them +- rename "timeSec" to "time"? +- add details.evmGas + +Further non-planner items +- add withdraw offer handling + - is it just "adjust"? 
+- support operations without a supplied plan (where planner provides) +- maintain a shared graph in memory that gets updated on new graph +- support multiple assets +- sanity check the step list + +Renames +- cctpSlow → cctpToNoble and cctpReturn → cctpFromNoble +- NetworkSpec → NetworkDesc, and probably likewise for ChainSpec/PoolSpec/LocalPlaceSpec/LinkSpec (since we generally use "spec" to describe a string specifier) +- `src`/`dest` if acceptable alphabetical synonyms can be found (e.g., `source`/`target` or `fromPlace`/`toPlace`) + +Later +- add the method to the move operations (CCTP vs. Fast USDC) +- re-plan after some steps have already happened? + - provide the expected balances after each step, then plan from there +- split transactions under limits + +Future things to try: +- enable disabling/enabling some links (e.g., if they go down) and replanning +- add liquidity pool +- optimize withdraw for "fastest money to seat" + - accelerate withdraw with a liquidity pool +- add operation (like `supply`) so that we can have actual gwei estimates associated with them in the graph +- support multiple currencies explicitly + +## 12. Current Plan +This last section is the living plan. As details are settled (schemas, invariants, design choices), they should be promoted into the relevant sections above, keeping this section focused on the remaining work and sequencing. + +Status as of 2025-09-14: +- Phase 1: Complete — types, builder, and prod/test configs added; `planRebalanceFlow` accepts a network. +- Phase 2: Complete — unit tests migrated to use the test network; legacy LINKS removed in this package. +- Phase 3: Complete — deposit routing is being refactored to derive paths via the generic graph; downstream services updated incrementally. Post-failure preflight validation and solver diagnostics are integrated and controlled by `graph.debug`. Composite objective for secondary tie-breaks implemented. 
+- Phase 4: Pending — finalize docs and remove remaining legacy references elsewhere. + +Phases: +- Phase 3 next steps: + - Deprecate / remove `planTransfer` & `planTransferPath` after callers migrate. +- Phase 4: + - Documentation updates: ensure this document reflects finalized schema and behavior (this doc now includes PoolPlaces integration, edge override precedence, and diagnostics flow). + - Add/extend validation and tooling as needed; remove remaining legacy references in downstream packages. diff --git a/packages/portfolio-contract/src/graph-diagnose.ts b/packages/portfolio-contract/src/graph-diagnose.ts new file mode 100644 index 00000000000..b7f0615be08 --- /dev/null +++ b/packages/portfolio-contract/src/graph-diagnose.ts @@ -0,0 +1,445 @@ +// Lightweight diagnostics for infeasible solver models. +// Extracted to a separate module to keep solver core lean and allow reuse. + +import { Fail, q } from '@endo/errors'; + +import type { NatAmount } from '@agoric/ertp/src/types.js'; +import { provideLazyMap } from '@agoric/internal/src/js-utils.js'; + +import type { NetworkSpec } from './network/network-spec.js'; +import type { RebalanceGraph, LpModel } from './plan-solve.js'; +import { PoolPlaces, type PoolKey, type PoolPlaceInfo } from './type-guards.js'; + +/** + * Build human-readable diagnostics for infeasible models. + * Heuristics only: checks supply balance and reachability of sinks from sources. + * + * Example output (when graph.debug is true): + * No feasible solution: nodes=7 edges=12 | supply: sum=0 pos=1500 neg=1500 (pos should equal neg; sum should be 0) | sources=2 sinks=2 | sources with no path to any sink (1): Aave_Arbitrum(800) | hubs present: @agoric, @noble, @Arbitrum | inter-hub edges: @agoric->@noble, @noble->@Arbitrum + * + * How to enable: + * - Set `debug: true` on the NetworkSpec used to build the graph. 
+ */ +export const diagnoseInfeasible = ( + graph: RebalanceGraph, + _model: LpModel, +): string => { + const nodes = [...graph.nodes]; + const supplies = graph.supplies; + const edges = graph.edges; + + let sumSupply = 0; + let posTotal = 0; + let negTotal = 0; + const sources: string[] = []; + const sinksSet = new Set(); + for (const n of nodes) { + const s = supplies[n] || 0; + sumSupply += s; + if (s > 0) { + sources.push(n); + posTotal += s; + } else if (s < 0) { + sinksSet.add(n); + negTotal += -s; + } + } + + // Adjacency for forward reachability + const adj = new Map(); + for (const e of edges) { + const srcAdj = provideLazyMap(adj, e.src, () => []); + srcAdj.push(e.dest); + } + + const bfs = (start: string) => { + const queue = [start]; + const seen = new Set(queue); + while (queue.length) { + const cur = queue.shift()!; + const outs = adj.get(cur); + for (const v of outs || []) { + if (seen.has(v)) continue; + seen.add(v); + queue.push(v); + } + } + return seen; + }; + + const stranded = [] as { node: string; supply: number }[]; + for (const s of sources) { + const reach = bfs(s); + const canReachSink = [...reach].some(n => sinksSet.has(n)); + if (!canReachSink) stranded.push({ node: s, supply: supplies[s] }); + } + + const hubSet = new Set(nodes.filter(n => n.startsWith('@'))); + const hubEdges = edges + .filter(e => e.src.startsWith('@') && e.dest.startsWith('@')) + .map(e => `${e.src}->${e.dest}`); + + const lines: string[] = []; + lines.push(`nodes=${nodes.length} edges=${edges.length}`); + lines.push( + `supply: pos ${posTotal} must equal neg ${negTotal} (diff ${sumSupply})`, + ); + if (sumSupply !== 0) lines.push('WARN: total supply does not balance to 0'); + lines.push(`sources=${sources.length} sinks=${sinksSet.size}`); + if (stranded.length) { + const sample = stranded + .slice(0, 6) + .map(s => `${s.node}(${s.supply})`) + .join(', '); + lines.push( + `sources with no path to any sink (${stranded.length}): ${sample}`, + ); + } + lines.push(`hubs 
present: ${[...hubSet].sort().join(', ')}`); + lines.push( + `inter-hub edges: ${hubEdges.length ? hubEdges.join(', ') : '(none)'}`, + ); + return lines.join(' | '); +}; + +/** + * Preflight validation: ensure that all referenced positions exist and that + * the network provides inter-hub connectivity needed by the planned moves. + * Throws Fail with a clear message on error. + */ +export const preflightValidateNetworkPlan = ( + network: NetworkSpec, + current: Partial>, + target: Partial>, +) => { + const keys = new Set([ + ...Object.keys(current ?? {}), + ...Object.keys(target ?? {}), + ]); + const vOf = (a?: NatAmount) => (a ? (a.value as bigint) : 0n); + + // Build hub-only adjacency from NetworkSpec.links + const adj = new Map(); + for (const l of network.links) { + const src = `@${l.src}`; + const dest = `@${l.dest}`; + (adj.get(src) ?? adj.set(src, []).get(src)!).push(dest); + } + const bfs = (start: string) => { + const seen = new Set([start]); + const queue = [start]; + while (queue.length) { + const cur = queue.shift()!; + for (const n of adj.get(cur) || []) + if (!seen.has(n)) { + seen.add(n); + queue.push(n); + } + } + return seen; + }; + const reachFromAgoric = bfs('@agoric'); + + const needsToAgoric = new Map(); + const needsFromAgoric = new Map(); + const declared = new Set([ + ...network.chains.map(c => `@${c.name}`), + ...network.pools.map(p => p.pool), + ...(network.localPlaces ?? []).map(lp => lp.id), + '', + '', + '+agoric', + ]); + + for (const k of keys) { + if (k === '+agoric') continue; + const cur = vOf(current[k]); + const tgt = vOf(target[k]); + if (cur === tgt) continue; + + const pp: PoolPlaceInfo | undefined = ( + PoolPlaces as Record + )[k as PoolKey]; + if (!pp && !declared.has(k)) { + throw Fail`Unsupported position key: ${q(k)}`; + } + // Determine the chain for this position key + const chain = + k === '' || k === '' || k === '+agoric' + ? 'agoric' + : (pp?.chainName ?? /^[^_]+_([A-Za-z0-9-]+)$/.exec(k)?.[1] ?? 
''); + if (!chain) { + // If still unknown, skip further inter-hub checks; builder will surface issues + continue; + } + const hub = `@${chain}`; + + if (cur > tgt) { + ( + needsToAgoric.get(chain) ?? needsToAgoric.set(chain, []).get(chain)! + ).push(k); + } else if (tgt > cur) { + ( + needsFromAgoric.get(chain) ?? needsFromAgoric.set(chain, []).get(chain)! + ).push(k); + if (!reachFromAgoric.has(hub)) { + const list = (needsFromAgoric.get(chain) || []).join(', '); + throw Fail`No inter-hub path @agoric->${hub}; positions: ${q(list)}`; + } + } + } + + for (const [chain, posKeys] of needsToAgoric.entries()) { + const reach = bfs(`@${chain}`); + if (!reach.has('@agoric')) { + throw Fail`No inter-hub path @${chain}->@agoric; positions: ${q(posKeys.join(', '))}`; + } + } +}; + +/** + * Explain why a given path would fail on this graph. + * + * Input format: + * - `path` is an ordered list of node ids: `[n0, n1, ..., nk]`. + * - Each hop is interpreted as the directed edge `n[i] -> n[i+1]`. + * - Node id conventions used by the rebalance graph: + * - Hubs: `@{chain}` (e.g., `@agoric`, `@noble`, `@Arbitrum`). + * - Agoric-local seats: ``, ``, `+agoric` (these live on `@agoric`). + * - Pools/positions: `{Pool}_{Chain}` (e.g., `Aave_Arbitrum`, `Compound_Arbitrum`). + * - Local places and dynamics use their declared `id` verbatim. + * + * Return format: + * - `{ ok: true }` if every hop exists in the graph and has positive capacity. + * - `{ ok: false, failAtIndex, src, dest, reason, suggestion? }` otherwise, where: + * - `failAtIndex` is the index `i` of the failing hop `path[i] -> path[i+1]`. + * - `reason` is one of: `missing-node`, `missing-edge`, `wrong-direction`, `capacity-zero`. + * - `suggestion` is a human hint such as `add node X`, `add edge A->B`, or `increase capacity on A->B`. + * + * Tips: + * - Use `canonicalPathBetween(graph, src, dest)` to build a typical hub/leaf skeleton path + * before validating it with `explainPath`. 
+ */ +export const explainPath = ( + graph: RebalanceGraph, + path: string[], +): + | { ok: true } + | { + ok: false; + failAtIndex: number; // index of the src in path where hop src->dest fails + src: string; + dest: string; + reason: + | 'missing-node' + | 'missing-edge' + | 'wrong-direction' + | 'capacity-zero'; + suggestion?: string; + } => { + if (path.length < 2) return { ok: true }; + const nodes = graph.nodes; + const edgeMap = new Map(); + for (const e of graph.edges) + edgeMap.set(`${e.src}->${e.dest}`, { capacity: e.capacity }); + for (let i = 0; i < path.length - 1; i += 1) { + const src = path[i]; + const dest = path[i + 1]; + if (!nodes.has(src as any)) + return { + ok: false, + failAtIndex: i, + src, + dest, + reason: 'missing-node', + suggestion: `add node ${src}`, + }; + if (!nodes.has(dest as any)) + return { + ok: false, + failAtIndex: i, + src, + dest, + reason: 'missing-node', + suggestion: `add node ${dest}`, + }; + const key = `${src}->${dest}`; + const revKey = `${dest}->${src}`; + const fwd = edgeMap.get(key); + if (!fwd) { + const rev = edgeMap.get(revKey); + if (rev) { + return { + ok: false, + failAtIndex: i, + src, + dest, + reason: 'wrong-direction', + suggestion: `add edge ${src}->${dest} (reverse exists)`, + }; + } + return { + ok: false, + failAtIndex: i, + src, + dest, + reason: 'missing-edge', + suggestion: `add edge ${src}->${dest}`, + }; + } + if (!(fwd.capacity > 0)) { + return { + ok: false, + failAtIndex: i, + src, + dest, + reason: 'capacity-zero', + suggestion: `increase capacity on ${src}->${dest}`, + }; + } + } + return { ok: true }; +}; + +/** + * Diagnose near-miss connectivity categories for each source (positive supply) + * and sink (negative supply). Purely topological; ignores objective. 
+ */ +export const diagnoseNearMisses = (graph: RebalanceGraph) => { + const nodes = [...graph.nodes]; + const supplies = graph.supplies; + const sources = nodes.filter(n => (supplies[n] || 0) > 0); + const sinks = nodes.filter(n => (supplies[n] || 0) < 0); + + const adj = new Map(); + const capAdj = new Map(); + for (const e of graph.edges) { + (adj.get(e.src) ?? adj.set(e.src, []).get(e.src)!).push(e.dest); + if (e.capacity > 0) + (capAdj.get(e.src) ?? capAdj.set(e.src, []).get(e.src)!).push(e.dest); + } + const bfs = (start: string, A: Map) => { + const seen = new Set([start]); + const queue = [start]; + while (queue.length) { + const cur = queue.shift()!; + for (const v of A.get(cur) || []) + if (!seen.has(v)) { + seen.add(v); + queue.push(v); + } + } + return seen; + }; + + const interHubEdges = graph.edges + .filter(e => e.src.startsWith('@') && e.dest.startsWith('@')) + .map(e => `${e.src}->${e.dest}`); + + const missingPairs: Array<{ + src: string; + dest: string; + category: string; + hint?: string; + }> = []; + + for (const s of sources) { + const reachDir = bfs(s, adj); + const reachCap = bfs(s, capAdj); + for (const t of sinks) { + if (reachDir.has(t) && reachCap.has(t)) continue; // reachable + // Try to see if a single inter-hub edge would unlock (only for hubs) + let hint: string | undefined; + if (s.startsWith('@') && t.startsWith('@')) { + // if t not in reach from s, propose s->t if not present + const cand = `${s}->${t}`; + if (!interHubEdges.includes(cand)) + hint = `consider adding inter-hub ${cand}`; + } + // eslint-disable-next-line no-nested-ternary + const category = !reachDir.has(t) + ? 'no-directed-path' + : !reachCap.has(t) + ? 'capacity-blocked' + : 'unknown'; + missingPairs.push({ src: s, dest: t, category, hint }); + } + } + + return { missingPairs }; +}; + +/** Build a canonical leaf/hub -> hub/leaf path skeleton between two nodes. 
*/ +export const canonicalPathBetween = ( + graph: RebalanceGraph, + src: string, + dest: string, +): string[] => { + if (src === dest) return [src]; + const hubOf = (n: string) => { + if (n.startsWith('@')) return n; + // Seats live on agoric + if (n === '' || n === '' || n === '+agoric') + return '@agoric'; + const pp = (PoolPlaces as Record)[ + n as PoolKey + ]; + if (pp) return `@${pp.chainName}`; + const m = /^([A-Za-z0-9]+)_([A-Za-z0-9-]+)$/.exec(n); + if (m) return `@${m[2]}`; + // Fallback: if node exists and is not a hub, assume @agoric + return '@agoric'; + }; + const srcHub = hubOf(src); + const destHub = hubOf(dest); + const path: string[] = []; + if (src !== srcHub) path.push(src, srcHub); + else path.push(srcHub); + if (srcHub !== destHub) path.push(destHub); + if (dest !== destHub) path.push(dest); + return path; +}; + +/** Build an example path explanation for a pair of nodes. */ +export const examplePathExplain = ( + graph: RebalanceGraph, + src: string, + dest: string, +) => { + const path = canonicalPathBetween(graph, src, dest); + const report = explainPath(graph, path); + return { path, report }; +}; + +/** + * Build a compact, human-readable message describing why a model is infeasible. + * Includes supply/reachability summary, near-miss pairs, and an example path explanation. + */ +export const formatInfeasibleDiagnostics = ( + graph: RebalanceGraph, + model: LpModel, +): string => { + const diag = diagnoseInfeasible(graph, model); + const near = diagnoseNearMisses(graph); + const nearStr = near.missingPairs.length + ? ` | near-misses: ${near.missingPairs + .slice(0, 6) + .map( + m => + `${m.src}->${m.dest}(${m.category}${m.hint ? 
`:${m.hint}` : ''})`, + ) + .join(', ')}` + : ''; + let example = ''; + if (near.missingPairs.length) { + const m = near.missingPairs[0]; + const ex = examplePathExplain(graph, m.src, m.dest); + if (ex.report.ok) { + example = ` | example: ${ex.path.join('->')} ok`; + } else { + const r = ex.report; + example = ` | example: ${ex.path.join('->')} fails at ${r.src}->${r.dest} (${r.reason}${r.suggestion ? `; ${r.suggestion}` : ''})`; + } + } + return `${diag}${nearStr}${example}`; +}; diff --git a/packages/portfolio-contract/src/jsLPSolver.md b/packages/portfolio-contract/src/jsLPSolver.md new file mode 100644 index 00000000000..46586e26643 --- /dev/null +++ b/packages/portfolio-contract/src/jsLPSolver.md @@ -0,0 +1,351 @@ +jsLPSolver +========== +[A linear programming solver for the rest of us!](https://youtu.be/LbfMmCf5-ds?t=51) + + +What Can I do with it? +----------------------- + +You can solve problems that fit the following fact pattern like this one +from [this](http://math.stackexchange.com/questions/59429/berlin-airlift-linear-optimization-problem) site. + +>On June 24, 1948, the former Soviet Union blocked all land and water routes through East Germany to Berlin. +>A gigantic airlift was organized using American and British planes to supply food, clothing and other supplies +>to more than 2 million people in West Berlin. +> +>The cargo capacity was 30,000 cubic feet for an American plane and 20,000 cubic feet for a British plane. +>To break the Soviet blockade, the Western Allies had to maximize cargo capacity, +>but were subject to the following restrictions: No more than 44 planes could be used. The larger American planes required 16 +>personnel per flight; double that of the requirement for the British planes. The total number of personnel +>available could not exceed 512. The cost of an American flight was $9000 and the cost of a British flight was $5000. +>The total weekly costs could note exceed $300,000. 
+>Find the number of American and British planes that were used to maximize cargo capacity. + + + +So How Would I Do This? +----------------------- +Part of the reason I built this library is that I wanted to do as little thinking / setup as possible +to solve the actual problem. Instead of tinkering with arrays to solve this problem, you would create a +model in a JavaScript object, and solve it through the solver's `solve` function; like this: + +### Install: + + +(in Node) +``` +npm install javascript-lp-solver --save +``` + +(in browser through CDN) +```html + +``` + +(webpack) +```javascript +const webpack = require('webpack'); //to access built-in plugins + +module.exports = { + "mode": "development", + "plugins": [ + new webpack.IgnorePlugin(/(fs|child_process)/), + ] +} +``` + +### Use: + +```javascript +var solver = require("./src/solver"), + results, + model = { + "optimize": "capacity", + "opType": "max", + "constraints": { + "plane": {"max": 44}, + "person": {"max": 512}, + "cost": {"max": 300000} + }, + "variables": { + "brit": { + "capacity": 20000, + "plane": 1, + "person": 8, + "cost": 5000 + }, + "yank": { + "capacity": 30000, + "plane": 1, + "person": 16, + "cost": 9000 + } + }, +}; + +results = solver.Solve(model); +console.log(results); +``` + +which should yield the following: +``` +{feasible: true, brit: 24, yank: 20, result: 1080000} +``` + +What If I Want Only Integers +-------------------- + +Say you live in the real world and partial results aren't realistic, too messy, or generally unsafe. + +> You run a small custom furniture shop and make custom tables and dressers. +> +> Each week you're limited to 300 square feet of wood, 110 hours of labor, +> and 400 square feet of storage. +> +> A table uses 30sf of wood, 5 hours of labor, requires 30sf of storage and has a +> gross profit of $1,200. A dresser uses 20sf of wood, 10 hours of work to put +> together, requires 50 square feet to store and has a gross profit of $1,600. 
+> +> How much of each do you produce to maximize profit, given that partial furniture +> aren't allowed in this dumb world problem? + +```javascript +var solver = require("./src/solver"), + model = { + "optimize": "profit", + "opType": "max", + "constraints": { + "wood": {"max": 300}, + "labor": {"max": 110}, + "storage": {"max": 400} + }, + "variables": { + "table": {"wood": 30, "labor": 5, "profit": 1200, "table": 1, "storage": 30}, + "dresser": {"wood": 20, "labor": 10, "profit": 1600, "dresser": 1, "storage": 50} + }, + "ints": {"table": 1, "dresser": 1} + } + +console.log(solver.Solve(model)); +// {feasible: true, result: 1440-0, table: 8, dresser: 3} +``` + +My problem is HUGE. Can I do this async or something? +-------------------- + +Yes! Or something! + +So its not truly async, but an instance of solver can be easily(?) put in an instance of a web worker. + +*worker.js* +```javascript + +// n.b. Solver connects itself to the global 'self' +// if its available... +// +importScripts("/prod/solver.js"); + +onmessage = function(d){ + var results = solver.Solve(d.data); + postMessage(results); +}; +``` + +*main.html* +```javascript + + var w = new Worker("./worker.js"); + + w.onmessage = function(d){ + // + // do something fun / exciting with our results! + // + console.log(d); + } + + w.postMessage(lp_model); +``` + +How Fast Can It Go? 
+--------------------- + +Random selection of problems of "some" size / interest: + +``` +----------------- +----------------- +LargeFarmMIP [ 100 variables, 35 constraints, 100 integers ] +jsLPSolver: 16.475ms + + +----------------- +----------------- +Monster Problem [ 552 variables, 600 constraints, 0 integers ] +jsLPSolver: 18.142ms + + +----------------- +----------------- +monster_II [ 924 variables, 888 constraints, 112 integers ] +jsLPSolver: 308.026ms + + +----------------- +----------------- +Fancy Stock Cutting Problem [ 31 variables, 5 constraints, 31 integers ] +jsLPSolver: 1.396ms + + +----------------- +----------------- +Vendor Selection [ 1640 variables, 1641 constraints, 0 integers ] +jsLPSolver: 1222.659ms + + +``` + +Neat! What else can I do with it? + + +API / Guide +=============== + +Below is my first pass at describing the various parts of the model, what they do, and other miscellaneous options that might not +be super intuitive. + +As much as possible, I'm trying to make all of the options / functions accessible by changing the JSON model. To me (maybe incorrectly), +it's easier to be able to just call one method to do everything based on the model its given instead of having to hit seperate functions +exposed on the solver itself. + +#### optimize + +This tells the model (wait for it) what to optimize (minimize or maximize). Typically (honestly, always) the thing you're optimizing is an attribute +of a variable. For example, `profit` might be a variable attribute you want to maximize. In this case, your model would look like this: + +```json + { + "optimize": "profit", + "opType": "max", + } +``` + +_MULTI OBJECTIVE OPTIMIZATION_: This is kind of a throwaway function I added because I needed it for something. I don't know if there's a better way to do this, or if it even makes sense, so please take this with a grain of salt. + +Say you have a problem where you want to eat as much "bacon", "cheddar cheese", and "french fries" as possible. 
To do this, set the "optimize" attribute of the model like this: + +```json + "optimize": { + "bacon": "max", + "cheddar cheese": "max", + "french fries": "max" + } +``` + +This will return a result where no single objective can be improved without hurting at least one other objective. It also returns the results of the "child" optimization problems + +#### opType + +This tells the solver how to optimize your problem. Acceptable options are "min" for minimize and "max" for maximize. + +#### variables + +These are the inputs of your problem. For the word problem: + +>How many chairs, tables, and desks do you need to produce given that a chair requires ... + +...chairs, tables, and desks are your variables. You can assign attributes to the variables (size, cost, weight, etc) that you can use to constrain the problem. + +On your model, your variables would look like this: + +```json + "variables": { + "table": {"wood": 30, "labor": 5, "profit": 1200, "storage": 30}, + "dresser": {"wood": 20, "labor": 10, "profit": 1600, "storage": 50} + }, +``` + +#### constraints + +Real world problems don't allow you to use an unlimited number of resources (sad). In order to solve problems like + +>Maximize Profit... + +where resources are limited; constraints come into play. Here is where you put them. (In a normal LP tableau, these are the inequalities). + +Using the above example, say you had at most 300 units of wood, 110 units of labour, and 400 units of storage. To represent this in JSON format, you +would set it up like this: + +```json + "constraints": { + "wood": {"max": 300}, + "labor": {"max": 110}, + "storage": {"max": 400} + }, +``` + +...where for the first constraint, "wood" is the attribute you're setting a constraint on with a "maximum" of 300 units used to solve the the problem. Other options for constraints are "min" (minimum) and "equal" (equal to). 
+ +#### options + +This is a catch-all place to put additional options on the model for the Solver to work with in an attempt to not clutter the "core" of the model too much. + +#### options.timeout (default: none) + +This option is how many milliseconds you want to allow for the solver to try and solve the model you're running. You set it like this: + +```json +"options": { + "timeout": 10000 +} +``` + +N.B. currently, it only works for mixed-integer linear programs + +#### options.tolerance (default: 0) + +For large scale integer problems the solving process can take increasingly long. However, oftentimes the solution to these problems does not have to be the absolute best possible solution, but rather a solution relatively close to the optimal one. In these cases, a variable called tolerance can be specified in the model object. The value assigned to the tolerance variable states that the solver should stop the solution process when the best solution found is within {{options.tolerance}}% of the best theoretical objective value. + +It is set up like this: + +```json +"options": { + "tolerance": 0.05 +} +``` + +#### options.exitOnCycles (default: true) + +Exits when cycles detected + +External Solver Integration +=============================== + +(n.b. this is still very much in progress and subject to change...) + +Basically I want to be able to work with "professional-grade" solver libraries through jsLPSolver; without incorporating hard dependencies / binary builds / etc. + + +## lpsolve + +To use, incorporate the following onto your model: + +```json + "external": { + "solver": "lpsolve", + "binPath": "C:/lpsolve/lp_solve.exe", + "tempName": "C:/temp/out.txt", + "args": [ + "-s2", + "-timeout", + 240 + ] + } +``` + +Basically, its doing the following: + +1. Convert your model to something lpsolve can use +2. Saves your model to a temporary file (hence the `tempName` attribute) +3. 
Runs everything through a command line (`require("child_process").execFile`) against the lpsolve executable (binPath) with whatever arguments you need (args) +4. Scrubs the results +5. Returns a JSON object with the results diff --git a/packages/portfolio-contract/src/network/buildGraph.ts b/packages/portfolio-contract/src/network/buildGraph.ts new file mode 100644 index 00000000000..6b0013a994a --- /dev/null +++ b/packages/portfolio-contract/src/network/buildGraph.ts @@ -0,0 +1,136 @@ +import { Fail } from '@endo/errors'; +import type { NatAmount, Amount } from '@agoric/ertp/src/types.js'; +import { partialMap } from '@agoric/internal/src/js-utils.js'; + +import { buildBaseGraph, type FlowEdge } from '../plan-solve.js'; +import { PoolPlaces, type PoolKey } from '../type-guards.js'; +import type { AssetPlaceRef } from '../type-guards-steps.js'; + +import type { NetworkSpec } from './network-spec.js'; + +/** + * Build a RebalanceGraph from a NetworkSpec. + * Adds intra-chain leaf<->hub edges via buildBaseGraph; then applies inter-hub links. + */ +export const makeGraphFromDefinition = ( + spec: NetworkSpec, + current: Partial>, + target: Partial>, + brand: Amount['brand'], + feeBrand: Amount['brand'], +) => { + // Hubs from spec. + const hubs = new Set(spec.chains.map(c => `@${c.name}`)); + + // PoolKeys whose hub is in spec. Do NOT auto-add hubs. + const knownPoolKeys = Object.keys(PoolPlaces).filter(k => + hubs.has(`@${PoolPlaces[k].chainName}`), + ); + + // Minimal validation: ensure links reference present hubs. + for (const link of spec.links) { + hubs.has(link.src) || + !link.src.startsWith('@') || + Fail`missing link src hub ${link.src}`; + hubs.has(link.dest) || + !link.dest.startsWith('@') || + Fail`missing link dest hub ${link.dest}`; + } + + // Each current/target node must be connected to a hub. + const dynamicNodes = new Set([ + ...Object.keys(current ?? {}), + ...Object.keys(target ?? 
{}), + ]); + const dynErrors: string[] = []; + for (const n of dynamicNodes) { + // Nothing to validate for a local place. + if (n.startsWith('<') || n.startsWith('+')) continue; + if (n.startsWith('@')) { + if (!hubs.has(n)) dynErrors.push(`undeclared hub ${n}`); + } else if (Object.hasOwn(PoolPlaces, n)) { + // Known PoolKey; require its hub to be present. + const hub = `@${PoolPlaces[n as PoolKey].chainName}`; + if (!hubs.has(hub)) { + dynErrors.push(`pool ${n} requires missing hub ${hub}`); + } + } + } + dynErrors.length === 0 || + Fail`NetworkSpec is missing required hubs for dynamic nodes: ${dynErrors}`; + + const placeRefs = new Set([ + ...hubs, + ...spec.pools.map(p => p.pool), + ...knownPoolKeys, + ...(spec.localPlaces ?? []).map(lp => lp.id), + ...dynamicNodes, + ]) as Set; + const graph = buildBaseGraph( + [...placeRefs], + current, + target, + brand, + feeBrand, + ); + if (spec.debug) graph.debug = true; + + // Force the presence of particular edges. + const edges = [...graph.edges] as Array; + const capacityDefault = 9_007_199_254_740_000; // not quite MAX_SAFE_INTEGER + const addOrReplaceEdge = ( + src: AssetPlaceRef, + dest: AssetPlaceRef, + customAttrs?: Omit, + ) => { + (graph.nodes.has(src) && graph.nodes.has(dest)) || + Fail`Graph missing nodes for link ${src}->${dest}`; + + // Remove any existing edge that matches on (src, dest, via?). 
+ for (let i = 0; i < edges.length; i += 1) { + const edge = edges[i]; + if (!edge || edge.src !== src || edge.dest !== dest) continue; + if (customAttrs?.via === undefined || edge.via === customAttrs.via) { + edges[i] = undefined; + } + } + + const dataAttrs = customAttrs || { + capacity: capacityDefault, + variableFee: 1, + fixedFee: 0, + timeFixed: 1, + via: 'local', + }; + edges.push({ id: 'TBD', src, dest, ...dataAttrs }); + }; + + // Ensure intra-Agoric links with 0 fee / 0 time: + // -> +agoric -> @agoric -> + // eslint-disable-next-line github/array-foreach + (['', '+agoric', '@agoric', ''] as AssetPlaceRef[]).forEach( + (dest, i, arr) => { + const src: AssetPlaceRef | undefined = i === 0 ? undefined : arr[i - 1]; + if (!src || !graph.nodes.has(src) || !graph.nodes.has(dest)) return; + addOrReplaceEdge(src, dest); + }, + ); + + // Override the base graph with inter-hub links from spec. + for (const link of spec.links) { + addOrReplaceEdge(link.src, link.dest, { + capacity: Number(link.capacity ?? capacityDefault), + variableFee: link.variableFeeBps ?? 0, + fixedFee: link.flatFee === undefined ? undefined : Number(link.flatFee), + timeFixed: link.timeSec, + via: link.transfer, + feeMode: link.feeMode, + }); + } + + // Force unique sequential edge IDs for avoiding collisions in the solver. + graph.edges = partialMap(edges, edge => (edge ? 
{ ...edge } : undefined)); + for (let i = 0; i < graph.edges.length; i += 1) graph.edges[i].id = `e${i}`; + + return graph; +}; diff --git a/packages/portfolio-contract/src/network/network-spec.ts b/packages/portfolio-contract/src/network/network-spec.ts new file mode 100644 index 00000000000..427ae7436e6 --- /dev/null +++ b/packages/portfolio-contract/src/network/network-spec.ts @@ -0,0 +1,85 @@ +import type { NatValue } from '@agoric/ertp'; +import type { + AxelarChain, + YieldProtocol, + SupportedChain, +} from '@agoric/portfolio-api/src/constants.js'; + +import type { PoolKey } from '../type-guards.js'; +import type { AssetPlaceRef } from '../type-guards-steps.js'; + +// Control and transfer planes +export type ControlProtocol = 'ibc' | 'axelar' | 'local'; +export type TransferProtocol = + | 'ibc' + | 'fastusdc' + | 'cctpReturn' + | 'cctpSlow' + | 'local'; +export type FeeMode = 'toUSDN' | 'gmpCall' | 'gmpTransfer'; + +// Chains (hubs) +export interface ChainSpec { + name: SupportedChain; + chainId?: string; // cosmos chain-id or network id + evmChainId?: number; // EVM numeric chain id if applicable + bech32Prefix?: string; // for Cosmos + axelarKey?: AxelarChain; // Axelar registry key if differs from name + feeDenom?: string; // e.g., 'ubld', 'uusdc' + gasDenom?: string; // if distinct from feeDenom + control: ControlProtocol; // how agoric reaches this chain: 'ibc' (noble) or 'axelar' (EVM) or 'local' (agoric) +} + +// Pools (leaves) +export interface PoolSpec { + pool: PoolKey; // e.g., 'Aave_Arbitrum', 'USDNVault' + chain: SupportedChain; // host chain of the pool + protocol: YieldProtocol; // reuse existing YieldProtocol keys +} + +// Local places: seats (, ) and local accounts (+agoric), with local edge fees +export interface LocalPlaceSpec { + id: AssetPlaceRef; // '' | '' | '+agoric' | PoolKey (treated as local to its hub) + chain: SupportedChain; // typically 'agoric' + // Local edge fee/policy when connecting to the hub on the same chain + 
variableFeeBps?: number; + flatFee?: NatValue; + timeSec?: number; + capacity?: NatValue; + enabled?: boolean; +} + +// Directed inter-hub link +export interface LinkSpec { + src: AssetPlaceRef; + dest: AssetPlaceRef; + + // Fees + variableFeeBps: number; // basis points of amount + flatFee?: NatValue; // minor units in src fee token + + // Performance & limits + timeSec: number; // latency + capacity?: NatValue; // optional throughput limit + min?: NatValue; // optional min transfer size + + // Protocols + transfer: TransferProtocol; // asset transfer mechanism + feeMode?: FeeMode; // how fees apply to transation using this link. See plan-solve.ts + + // Policy / guardrails (optional) + priority?: number; // tie-break hint + enabled?: boolean; // admin toggle +} + +// Overall network definition +export interface NetworkSpec { + debug?: boolean; + environment?: 'dev' | 'test' | 'prod'; + + chains: ChainSpec[]; + pools: PoolSpec[]; + localPlaces?: LocalPlaceSpec[]; + links: LinkSpec[]; +} +export type { PoolKey }; diff --git a/packages/portfolio-contract/src/network/network.prod.ts b/packages/portfolio-contract/src/network/network.prod.ts new file mode 100644 index 00000000000..117358a924d --- /dev/null +++ b/packages/portfolio-contract/src/network/network.prod.ts @@ -0,0 +1,107 @@ +import type { NetworkSpec } from './network-spec.js'; + +// Initial production network in NetworkSpec format +export const PROD_NETWORK: NetworkSpec = { + // Enable debug diagnostics to aid troubleshooting in tests + debug: true, + environment: 'prod', + chains: [ + { name: 'agoric', control: 'local' }, + { name: 'noble', control: 'ibc' }, + { name: 'Arbitrum', control: 'axelar' }, + { name: 'Avalanche', control: 'axelar' }, + { name: 'Ethereum', control: 'axelar' }, + ], + pools: [ + { pool: 'Aave_Arbitrum', chain: 'Arbitrum', protocol: 'Aave' }, + { pool: 'Beefy_re7_Avalanche', chain: 'Avalanche', protocol: 'Beefy' }, + { pool: 'Compound_Ethereum', chain: 'Ethereum', protocol: 
'Compound' }, + { pool: 'USDN', chain: 'noble', protocol: 'USDN' }, + { pool: 'USDNVault', chain: 'noble', protocol: 'USDN' }, + ], + localPlaces: [ + { id: '', chain: 'agoric' }, + { id: '', chain: 'agoric' }, + { id: '+agoric', chain: 'agoric' }, + ], + links: [ + // USDN costs a fee to get into + { src: '@noble', dest: 'USDN', transfer: 'local', variableFeeBps: 5, timeSec: 0, feeMode: 'toUSDN' }, + { src: '@noble', dest: 'USDNVault', transfer: 'local', variableFeeBps: 5, timeSec: 0, feeMode: 'toUSDN' }, + + // CCTP slow + { + src: '@Arbitrum', + dest: '@noble', + transfer: 'cctpSlow', + variableFeeBps: 0, + timeSec: 1080, + }, + { + src: '@Avalanche', + dest: '@noble', + transfer: 'cctpSlow', + variableFeeBps: 0, + timeSec: 1080, + }, + { + src: '@Ethereum', + dest: '@noble', + transfer: 'cctpSlow', + variableFeeBps: 0, + timeSec: 1080, + }, + // CCTP return (fast on return path) + { + src: '@noble', + dest: '@Arbitrum', + transfer: 'cctpReturn', + variableFeeBps: 0, + timeSec: 20, + feeMode: 'gmpTransfer', + }, + { + src: '@noble', + dest: '@Avalanche', + transfer: 'cctpReturn', + variableFeeBps: 0, + timeSec: 20, + feeMode: 'gmpTransfer', + }, + { + src: '@noble', + dest: '@Ethereum', + transfer: 'cctpReturn', + variableFeeBps: 0, + timeSec: 20, + feeMode: 'gmpTransfer', + }, + // Fast USDC (Axelar GMP) + { + src: '@Arbitrum', + dest: '@noble', + transfer: 'fastusdc', + variableFeeBps: 15, + timeSec: 45, + }, + { + src: '@Avalanche', + dest: '@noble', + transfer: 'fastusdc', + variableFeeBps: 15, + timeSec: 45, + }, + { + src: '@Ethereum', + dest: '@noble', + transfer: 'fastusdc', + variableFeeBps: 15, + timeSec: 45, + }, + // IBC between agoric and noble + { src: '@agoric', dest: '@noble', transfer: 'ibc', variableFeeBps: 0, timeSec: 10 }, + { src: '@noble', dest: '@agoric', transfer: 'ibc', variableFeeBps: 0, timeSec: 10 }, + ], +}; + +export default PROD_NETWORK; diff --git a/packages/portfolio-contract/src/network/path.ts 
b/packages/portfolio-contract/src/network/path.ts new file mode 100644 index 00000000000..49976995d96 --- /dev/null +++ b/packages/portfolio-contract/src/network/path.ts @@ -0,0 +1,75 @@ +import type { Amount } from '@agoric/ertp'; +import type { RebalanceGraph, FlowEdge } from '../plan-solve.js'; +import type { AssetPlaceRef, MovementDesc } from '../type-guards-steps.js'; + +/** Weight selector: cheapest => variableFee, fastest => timeFixed, default 1 */ +const edgeWeight = (e: FlowEdge, mode: 'cheapest' | 'fastest') => { + if (mode === 'cheapest') return e.variableFee ?? 0; + if (mode === 'fastest') return e.timeFixed ?? 0; + return 1; +}; + +/** Find shortest(weighted) path using Dijkstra (graph small so OK). */ +export const findPath = ( + graph: RebalanceGraph, + src: AssetPlaceRef, + dest: AssetPlaceRef, + mode: 'cheapest' | 'fastest' = 'cheapest', +): AssetPlaceRef[] => { + if (src === dest) return [src]; + const dist = new Map(); + const prev = new Map(); + for (const n of graph.nodes) dist.set(n, Infinity); + dist.set(src, 0); + const q = new Set(graph.nodes); + while (q.size) { + // pick min dist + let u: string | undefined; + let best = Infinity; + for (const n of q) { + const d = dist.get(n as string) ?? Infinity; + if (d < best) { + best = d; + u = n as string; + } + } + if (!u) break; + q.delete(u as AssetPlaceRef); + if (u === dest) break; + for (const e of graph.edges) { + if (e.src !== u) continue; + const w = edgeWeight(e, mode); + const alt = best + w; + const dv = dist.get(e.dest) ?? 
Infinity; + if (alt < dv) { + dist.set(e.dest, alt); + prev.set(e.dest, u); + } + } + } + if (!prev.has(dest) && src !== dest) throw Error(`no path ${src} -> ${dest}`); + const rev: string[] = [dest]; + let cur: string | undefined = dest; + while (cur && cur !== src) { + cur = prev.get(cur); + if (!cur) break; + rev.push(cur); + } + rev.reverse(); + if (rev[0] !== src) + throw Error(`path reconstruction failed ${src} -> ${dest}`); + return rev as AssetPlaceRef[]; +}; + +export const pathToSteps = ( + path: AssetPlaceRef[], + amount: Amount<'nat'>, + _brand: Amount<'nat'>['brand'], +): MovementDesc[] => { + if (path.length < 2) return []; + const steps: MovementDesc[] = []; + for (let i = 0; i < path.length - 1; i += 1) { + steps.push({ src: path[i], dest: path[i + 1], amount }); + } + return steps; +}; diff --git a/packages/portfolio-contract/src/plan-rebalance.ts b/packages/portfolio-contract/src/plan-rebalance.ts new file mode 100644 index 00000000000..7caee16818c --- /dev/null +++ b/packages/portfolio-contract/src/plan-rebalance.ts @@ -0,0 +1,89 @@ +import { AmountMath } from '@agoric/ertp'; +import type { Brand, NatAmount } from '@agoric/ertp/src/types.js'; +import type { PoolKey } from '@aglocal/portfolio-contract/src/type-guards'; +import type { AssetPlaceRef } from './type-guards-steps.ts'; + +/** + * Compare two nat-based amounts of the same brand. + * XXX Should we extend AmountMath to better support compare/sort? + * ``` + * AmountMath.compare(a, b); // -1 | 0 | 1 | NaN + * AmountMath.sort(records, { by: record => record.amount, direction }) + * ``` + */ +const compareAmounts = (a: NatAmount, b: NatAmount): -1 | 0 | 1 => { + const aValue = a.value; + const bValue = b.value; + // eslint-disable-next-line no-nested-ternary + return aValue > bValue ? 1 : aValue < bValue ? 
1 : 0; +}; + +export interface Transfer { + from: PoolKey; + to: PoolKey; + amount: NatAmount; +} + +export function rebalanceMinCostFlow( + currentBalances: Partial>, + targetAllocations: Partial>, + brand: Brand<'nat'>, +): Transfer[] { + const epsilon = AmountMath.make(brand, 1n); + + const total = Object.values(currentBalances).reduce( + (sum, amount) => AmountMath.add(sum, amount), + AmountMath.makeEmpty(brand), + ); + + const targetBalances = Object.fromEntries( + Object.entries(targetAllocations).map(([pool, basisPoints]) => [ + pool, + AmountMath.make(brand, (total.value * basisPoints) / 10000n), + ]), + ) as Record; + + type PoolDelta = { pool: PoolKey; amount: NatAmount }; + + const surplus: PoolDelta[] = []; + const deficit: PoolDelta[] = []; + + for (const poolKey of Object.keys(targetAllocations)) { + const pool = poolKey as PoolKey; + const current = currentBalances[pool] ?? AmountMath.makeEmpty(brand); + const target = targetBalances[pool]; + + if (AmountMath.isGTE(current, target)) { + const surplusAmount = AmountMath.subtract(current, target); + if (AmountMath.isGTE(surplusAmount, epsilon)) { + surplus.push({ pool, amount: surplusAmount }); + } + } else { + const deficitAmount = AmountMath.subtract(target, current); + if (AmountMath.isGTE(deficitAmount, epsilon)) { + deficit.push({ pool, amount: deficitAmount }); + } + } + } + + const transfers: Transfer[] = []; + + while (surplus.length > 0 && deficit.length > 0) { + surplus.sort((a, b) => compareAmounts(b.amount, a.amount)); + deficit.sort((a, b) => compareAmounts(b.amount, a.amount)); + + const from = surplus[0]; + const to = deficit[0]; + const amount = AmountMath.min(from.amount, to.amount); + + transfers.push({ from: from.pool, to: to.pool, amount }); + + from.amount = AmountMath.subtract(from.amount, amount); + to.amount = AmountMath.subtract(to.amount, amount); + + if (AmountMath.isEmpty(from.amount)) surplus.shift(); + if (AmountMath.isEmpty(to.amount)) deficit.shift(); + } + + return 
transfers; +} diff --git a/packages/portfolio-contract/src/plan-solve.ts b/packages/portfolio-contract/src/plan-solve.ts new file mode 100644 index 00000000000..798fc1c7ff6 --- /dev/null +++ b/packages/portfolio-contract/src/plan-solve.ts @@ -0,0 +1,544 @@ +import type { Amount } from '@agoric/ertp'; +import { AmountMath } from '@agoric/ertp'; +import type { NatAmount } from '@agoric/ertp/src/types.js'; + +import { Fail } from '@endo/errors'; +import jsLPSolver from 'javascript-lp-solver'; +import type { IModel, IModelVariableConstraint } from 'javascript-lp-solver'; +import { + makeTracer, + naturalCompare, + objectMap, + typedEntries, +} from '@agoric/internal'; +import { AxelarChain } from '@agoric/portfolio-api/src/constants.js'; +import { PoolPlaces, type PoolKey } from './type-guards.js'; +import type { AssetPlaceRef, MovementDesc } from './type-guards-steps.js'; +import type { + FeeMode, + NetworkSpec, + TransferProtocol, +} from './network/network-spec.js'; +import { makeGraphFromDefinition } from './network/buildGraph.js'; +import { + preflightValidateNetworkPlan, + formatInfeasibleDiagnostics, +} from './graph-diagnose.js'; + +const replaceOrInit = ( + map: Map, + key: K, + callback: (value: V | undefined, key: K, exists: boolean) => V, +) => { + const old = map.get(key); + const exists = old !== undefined || map.has(key); + map.set(key, callback(old, key, exists)); +}; + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +const trace = makeTracer('solve'); + +// ----------------------------------- Types ----------------------------------- + +/** + * A chain hub node id looks like '@agoric', '@noble', '@Arbitrum', etc. 
+ * Leaf nodes: PoolKey, '', '', '+agoric' + */ + +/** Internal edge representation */ +export interface FlowEdge { + id: string; + src: AssetPlaceRef; + dest: AssetPlaceRef; + capacity: number; // numeric for LP; derived from bigint + variableFee: number; // cost coefficient per unit flow in basis points + fixedFee?: number; // optional fixed cost (cheapest mode) + timeFixed?: number; // optional time cost (fastest mode) + via?: TransferProtocol; // annotation (e.g. 'intra-chain', 'cctp', etc.) + feeMode?: FeeMode; +} + +/** Node supply: positive => must send out; negative => must receive */ +export interface SupplyMap { + [node: string]: number; +} + +/** Graph structure */ +export interface RebalanceGraph { + nodes: Set; + edges: FlowEdge[]; + supplies: SupplyMap; + brand: Amount['brand']; + feeBrand: Amount['brand']; + /** When true, print extra diagnostics on solver failure */ + debug?: boolean; +} + +/** Mode of optimization */ +export type RebalanceMode = 'cheapest' | 'fastest'; + +/** Solver result edge */ +export interface SolvedEdgeFlow { + edge: FlowEdge; + flow: number; + used: boolean; +} + +/** Model shape for javascript-lp-solver */ +export type LpModel = IModel; + +// --- keep existing type declarations above --- + +/** + * ----------------------------------------------------------------------------- + * Scaling Rules (model domain normalization) + * - Amount / Capacity units (native) are scaled by AMOUNT_SCALE (1e3) + * - Variable fee given in native $/unit -> divide by AMOUNT_SCALE + * - Flat fees kept as-is (assumed already small: cents → dollars) + * - Latency (seconds) -> minutes ( divide by 60 ) + * - Big-M: use scaled capacity directly (tight) + * - Treat |value| < FLOW_EPS as zero when decoding + * ----------------------------------------------------------------------------- + */ +const FLOW_EPS = 1e-6; + +/** + * Build base graph with: + * - Hub nodes for each chain discovered in placeRefs (auto-added as '@chain') + * - Leaf nodes for each 
placeRef (except hubs already formatted)
 * - Intra-chain bidirectional edges leaf <-> hub (variableFee=1, timeFixed=1)
 * - Supplies = current - target; if target missing, assume unchanged (target=current)
 */
export const buildBaseGraph = (
  placeRefs: AssetPlaceRef[],
  current: Partial>,
  target: Partial>,
  brand: Amount['brand'],
  feeBrand: Amount['brand'],
): RebalanceGraph => {
  const nodes = new Set();
  const edges: FlowEdge[] = [];
  const supplies: SupplyMap = {};

  // Collect chains needed: every referenced place implies its chain hub.
  for (const ref of placeRefs) {
    nodes.add(ref);
    const chain = chainOf(ref);
    nodes.add(`@${chain}` as AssetPlaceRef);
  }

  // Build supplies (signed deltas)
  for (const node of nodes) {
    const currentVal = current[node]?.value ?? 0n;
    const targetSpecified = Object.prototype.hasOwnProperty.call(target, node);
    const targetVal = targetSpecified ? target[node]!.value : currentVal; // unchanged if unspecified
    const delta = currentVal - targetVal;
    // NOTE: Number(bigint) loses precision beyond MAX_SAFE_INTEGER (2^53-1)
    // For USDC amounts (6 decimals), the largest realistic value would be trillions
    // of dollars, which should be well within safe integer range.
    if (delta !== 0n) supplies[node] = Number(delta);
  }

  // Intra-chain edges (leaf <-> hub)
  let eid = 0;
  const vf = 1; // direct variable fee per unit
  const tf = 1; // time cost unit (seconds or abstract)
  for (const node of nodes) {
    if (isHub(node)) continue;

    const chainName = chainOf(node);
    const hub = `@${chainName}` as AssetPlaceRef;
    if (node === hub) continue;

    // Leaves on Axelar-controlled chains pay a GMP call fee to move funds.
    const feeMode = Object.keys(AxelarChain).includes(chainName)
      ? { feeMode: 'gmpCall' as FeeMode }
      : {};
    // NOTE(review): Omit's type arguments appear lost in transit — confirm.
    const base: Omit = {
      // 2^51 — effectively unbounded while staying well inside safe-integer
      // range for downstream arithmetic.
      capacity: (Number.MAX_SAFE_INTEGER + 1) / 4,
      variableFee: vf,
      fixedFee: 0,
      timeFixed: tf,
      via: 'local',
      ...feeMode,
    };

    // eslint-disable-next-line no-plusplus
    edges.push({ id: `e${eid++}`, src: node, dest: hub, ...base });

    // Skip @agoric → +agoric edge: the staging account only drains, never fills.
    if (node === '+agoric') continue;

    // eslint-disable-next-line no-plusplus
    edges.push({ id: `e${eid++}`, src: hub, dest: node, ...base });
  }

  // Return mutable graph (do NOT harden so we can add inter-chain links later)
  return {
    nodes,
    edges,
    supplies,
    brand,
    feeBrand,
    debug: false,
  } as RebalanceGraph;
};

// ------------------------------ Model Building -------------------------------

type IntVar = Record<
  | `allow_${string}`
  | `through_${string}`
  | `netOut_${string}`
  | 'magnifiedVariableFee'
  | 'weight',
  number
>;
type BinaryVar = Record<
  `allow_${string}` | 'magnifiedFlatFee' | 'timeFixed' | 'weight',
  number
>;
type WeightFns = {
  getPrimaryWeights: (intVar: IntVar, binaryVar: BinaryVar) => number[];
  setWeights: (intVar: IntVar, binaryVar: BinaryVar, epsilon: number) => void;
};

// Per-mode weighting: the primary objective gets full weight and the secondary
// objective is multiplied by a tiny epsilon, approximating a lexicographic
// optimization with a single scalar objective.
const modeFns = new Map(
  typedEntries({
    cheapest: {
      getPrimaryWeights: (intVar, binaryVar) => [
        intVar.magnifiedVariableFee,
        binaryVar.magnifiedFlatFee,
      ],
      setWeights: (intVar, binaryVar, epsilon) => {
        // Fees have full weight; time is weighted by epsilon.
        intVar.weight = intVar.magnifiedVariableFee;
        binaryVar.weight =
          binaryVar.magnifiedFlatFee + binaryVar.timeFixed * epsilon;
      },
    },
    fastest: {
      getPrimaryWeights: (_intVar, binaryVar) => [binaryVar.timeFixed],
      setWeights: (intVar, binaryVar, epsilon) => {
        // Fees are weighted by epsilon; time has full weight.
        intVar.weight = intVar.magnifiedVariableFee * epsilon;
        binaryVar.weight =
          binaryVar.timeFixed + binaryVar.magnifiedFlatFee * epsilon;
      },
    },
  } as Record),
);

/**
 * Build LP/MIP model for javascript-lp-solver.
 */
export const buildLPModel = (
  graph: RebalanceGraph,
  mode: RebalanceMode,
): LpModel => {
  const { getPrimaryWeights, setWeights } =
    modeFns.get(mode) || Fail`unknown mode ${mode}`;

  const intVariables = {} as Record<`via_${string}`, IntVar>;
  const binaryVariables = {} as Record<`pick_${string}`, BinaryVar>;
  const constraints = {} as Record;
  let minPrimaryWeight = Infinity;
  for (const edge of graph.edges) {
    const { id, src, dest } = edge;
    const { capacity, variableFee, fixedFee = 0, timeFixed = 0 } = edge;

    // The numbers in graph.supplies should use the same units as fixedFee, but
    // variableFee is in basis points relative to some scaling of those other
    // values.
    // We also want fee attributes large enough to avoid IEEE 754 rounding
    // issues.
    // Assume that scaling to be 1e6 (i.e., 100 bp = 1% of supplies[key]/1e6)
    // and scale the fee attributes accordingly such that variableFee 100 will
    // contribute a weight of 0.01 for each `via_$edge` atomic unit of payload
    // (i.e., magnified by 1e6 if the scaling is actually 1e6) and fixedFee 1
    // will contribute a weight of 1e6 if the corresponding edge is used (i.e.,
    // also magnified by 1e6 if the scaling is actually 1e6).
    // The solution may be disrupted by either over- or under-weighting
    // variableFee w.r.t. fixedFee, but not otherwise, and we accept the risk.
    // TODO: Define RebalanceGraph['scale'] to eliminate this guesswork.
    const magnifiedVariableFee = variableFee / 10_000;
    const magnifiedFlatFee = fixedFee * 1e6;

    const intVar: IntVar = {
      // A negative value for `allow_${id}` forces the solution to include 1
      // `pick_${id}` (and the corresponding fixed costs) in order to satisfy
      // that attribute's min: 0 constraint below.
      [`allow_${id}`]: -1,
      [`through_${id}`]: 1,
      [`netOut_${src}`]: 1,
      [`netOut_${dest}`]: -1,
      magnifiedVariableFee,
      weight: 0, // increased below
    };
    intVariables[`via_${id}`] = intVar;

    constraints[`allow_${id}`] = { min: 0 };
    constraints[`through_${id}`] = { max: capacity };

    // Big-M: one pick_ unit unlocks effectively unlimited via_ flow.
    const binaryVar = {
      [`allow_${id}`]: Number.MAX_SAFE_INTEGER,
      magnifiedFlatFee,
      timeFixed,
      weight: 0, // increased below
    };
    binaryVariables[`pick_${id}`] = binaryVar;

    // Keep track of the lowest non-zero primary weight for `mode`.
    minPrimaryWeight = Math.min(
      minPrimaryWeight,
      ...getPrimaryWeights(intVar, binaryVar).map(n => n || Infinity),
    );
  }

  // Finalize the weights. Epsilon is far below the smallest primary weight so
  // the secondary objective can only break ties.
  const epsilonWeight = Number.isFinite(minPrimaryWeight)
    ? minPrimaryWeight / 1e6
    : 1e-9;
  for (const { id } of graph.edges) {
    setWeights(
      intVariables[`via_${id}`],
      binaryVariables[`pick_${id}`],
      epsilonWeight,
    );
  }

  // Constrain the net flow from each node (flow conservation vs. its supply).
  for (const node of graph.nodes) {
    const supply = graph.supplies[node] || 0;
    constraints[`netOut_${node}`] = { equal: supply };
  }

  return {
    optimize: 'weight',
    opType: 'min',
    constraints,
    variables: { ...intVariables, ...binaryVariables },
    binaries: objectMap(binaryVariables, () => true),
    ints: objectMap(intVariables, () => true),
  };
};

/**
 * Represent a JSON-serializable object as a spacey single-line literal with
 * identifier-compatible property names unquoted.
 */
const prettyJsonable = (obj: unknown): string => {
  const jsonText = JSON.stringify(obj, null, 1);
  // Capture strings and replace them with JSON-incompatible `#`s.
+ const strings = [] as string[]; + const safe = jsonText.replace(/"(\\.|[^\\"])*":?/g, s => { + strings.push(s); + return '#'; + }); + // Condense the [now guaranteed-insignificant] whitespace. + const singleLine = safe.replace(/\s+/g, ' '); + // Restore the strings, stripping quotes from property names as possible. + const pretty = singleLine.replaceAll('#', () => { + const s = strings.shift() as string; + if (!s.endsWith(':')) return s; + return s.replace(/^"([\p{ID_Start}$_][\p{ID_Continue}$]*)":$/u, '$1:'); + }); + return pretty; +}; + +// solveRebalance: use javascript-lp-solver directly +// This operation is async to allow future use of async solvers if needed +export const solveRebalance = async ( + model: LpModel, + graph: RebalanceGraph, +): Promise => { + const result = jsLPSolver.Solve(model, 1e-6); + if (result.feasible !== true) { + if (graph.debug) { + // Emit richer context only on demand to avoid noisy passing runs + let msg = formatInfeasibleDiagnostics(graph, model); + msg += ` | ${prettyJsonable(result)}`; + console.error('[solver] No feasible solution. Diagnostics:', msg); + throw Fail`No feasible solution: ${msg}`; + } + throw Fail`No feasible solution: ${result}`; + } + const flows: SolvedEdgeFlow[] = []; + for (const edge of graph.edges) { + const { id } = edge; + const flowKey = `via_${id}`; + const flow = Object.hasOwn(result, flowKey) ? result[flowKey] : undefined; + const used = (flow ?? 
0) > FLOW_EPS || result[`pick_${id}`]; + if (used) flows.push({ edge, flow, used: true }); + } + return flows; +}; + +export const rebalanceMinCostFlowSteps = ( + flows: SolvedEdgeFlow[], + graph: RebalanceGraph, +): MovementDesc[] => { + const supplies = new Map( + typedEntries(graph.supplies).filter(([_place, amount]) => amount > 0), + ); + type AnnotatedFlow = SolvedEdgeFlow & { srcChain: string }; + const pendingFlows = new Map( + flows + .filter(f => f.flow > FLOW_EPS) + .map(f => [f.edge.id, { ...f, srcChain: chainOf(f.edge.src) }]), + ); + const prioritized = [] as AnnotatedFlow[]; + + // Maintain last chosen originating chain to group sequential operations. + let lastChain: string | undefined; + + while (pendingFlows.size) { + const candidates = [...pendingFlows.values()].filter( + f => (supplies.get(f.edge.src) || 0) >= f.flow, + ); + + if (!candidates.length) { + // Deadlock mitigation: pick by edge id order regardless of availability. + const sorted = [...pendingFlows.values()].sort((a, b) => + naturalCompare(a.edge.id, b.edge.id), + ); + for (const f of sorted) { + prioritized.push(f); + replaceOrInit(supplies, f.edge.src, (old = 0) => old - f.flow); + replaceOrInit(supplies, f.edge.dest, (old = 0) => old + f.flow); + pendingFlows.delete(f.edge.id); + } + break; + } + + // Prefer continuing with lastChain if possible. + const fromSameChain = lastChain + ? candidates.filter(c => c.srcChain === lastChain) + : undefined; + const chosenGroup = fromSameChain?.length ? fromSameChain : candidates; + + // Pick deterministic smallest edge id within chosen group. 
+ chosenGroup.sort((a, b) => naturalCompare(a.edge.id, b.edge.id)); + const chosen = chosenGroup[0]; + prioritized.push(chosen); + replaceOrInit(supplies, chosen.edge.src, (old = 0) => old - chosen.flow); + replaceOrInit(supplies, chosen.edge.dest, (old = 0) => old + chosen.flow); + pendingFlows.delete(chosen.edge.id); + lastChain = chosen.srcChain; + } + + const steps: MovementDesc[] = prioritized.map(({ edge, flow }) => { + Number.isSafeInteger(flow) || + Fail`flow ${flow} for edge ${edge} is not a safe integer`; + const amount = AmountMath.make(graph.brand, BigInt(flow)); + + let details = {}; + switch (edge.feeMode) { + case 'gmpTransfer': + // TODO: Rather than hard-code, derive from Axelar `estimateGasFee`. + // https://docs.axelar.dev/dev/axelarjs-sdk/axelar-query-api#estimategasfee + details = { fee: AmountMath.make(graph.feeBrand, 30_000_000n) }; + break; + case 'gmpCall': + // TODO: Rather than hard-code, derive from Axelar `estimateGasFee`. + // https://docs.axelar.dev/dev/axelarjs-sdk/axelar-query-api#estimategasfee + details = { fee: AmountMath.make(graph.feeBrand, 30_000_000n) }; + break; + case 'toUSDN': { + // NOTE USDN transfer incurs a fee on output amount in basis points + const usdnOut = + (BigInt(flow) * (10000n - BigInt(edge.variableFee))) / 10000n; + details = { detail: { usdnOut } }; + break; + } + default: + break; + } + + return { src: edge.src, dest: edge.dest, amount, ...details }; + }); + + return harden(steps); +}; + +// -------------------------- Convenience End-to-End --------------------------- + +/** + * Full pipeline (network required): + * 1. build graph from NetworkSpec + * 2. buildModel + * 3. solveRebalance + * 4. 
rebalanceMinCostFlowSteps + */ +export const planRebalanceFlow = async (opts: { + network: NetworkSpec; + current: Partial>; + target: Partial>; + brand: Amount['brand']; + feeBrand: Amount['brand']; + mode?: RebalanceMode; +}) => { + const { network, current, target, brand, feeBrand, mode = 'fastest' } = opts; + // TODO remove "automatic" values that should be static + const graph = makeGraphFromDefinition( + network, + current, + target, + brand, + feeBrand, + ); + + const model = buildLPModel(graph, mode); + let flows; + await null; + try { + flows = await solveRebalance(model, graph); + } catch (err) { + // If the solver says infeasible, try to produce a clearer message + preflightValidateNetworkPlan(network as any, current as any, target as any); + throw err; + } + const steps = rebalanceMinCostFlowSteps(flows, graph); + return harden({ graph, model, flows, steps }); +}; + +const chainOf = (id: AssetPlaceRef): string => { + if (id.startsWith('@')) return id.slice(1); + if (id === '' || id === '' || id === '+agoric') + return 'agoric'; + if (id in PoolPlaces) { + const pk = id as PoolKey; + return PoolPlaces[pk].chainName; + } + // Fallback: syntactic pool id like "Protocol_Chain" => chain + // This enables base graph edges for pools even if not listed in PoolPlaces + const m = /^([A-Za-z0-9]+)_([A-Za-z0-9-]+)$/.exec(id); + if (m) { + return m[2]; + } + throw Fail`Cannot determine chain for ${id}`; +}; +const isHub = (id: AssetPlaceRef): boolean => id.startsWith('@'); + +// ---------------------------- Example (commented) ---------------------------- +/* +Example usage: + +const { steps } = await planRebalanceFlow( + { + nodes: ['Aave_Arbitrum', 'Compound_Arbitrum', 'USDN', '', '@agoric'], + edges, + }, + { + Aave_Arbitrum: AmountMath.make(brand, 1_000n), + Compound_Arbitrum: AmountMath.make(brand, 100n), + }, + { + Aave_Arbitrum: AmountMath.make(brand, 800n), + Compound_Arbitrum: AmountMath.make(brand, 300n), + }, + brand, + 'cheapest', +}); + 
+console.log(steps); +*/ diff --git a/packages/portfolio-contract/src/plan-transfers.ts b/packages/portfolio-contract/src/plan-transfers.ts new file mode 100644 index 00000000000..605dbfe7f83 --- /dev/null +++ b/packages/portfolio-contract/src/plan-transfers.ts @@ -0,0 +1,193 @@ +import { AmountMath } from '@agoric/ertp'; +import type { Amount, Brand, NatAmount, NatValue } from '@agoric/ertp'; +import type { TargetAllocation } from '@aglocal/portfolio-contract/src/type-guards.js'; +import { NonNullish } from '@agoric/internal'; +import type { + YieldProtocol, + AxelarChain, +} from '@agoric/portfolio-api/src/constants.js'; +import { throwRedacted as Fail } from '@endo/errors'; +import { objectMap } from '@endo/patterns'; +import type { MovementDesc } from './type-guards-steps.ts'; +import { planRebalanceFlow } from './plan-solve.js'; +import { PROD_NETWORK } from './network/network.prod.js'; +/** + * Plan deposit transfers based on the target allocation and current balances. + * + * @param deposit - The amount to be deposited. + * @param currentBalances - Current balances for each position. + * @param targetAllocation - Target allocation percentages for each position. + * @returns Planned transfers for each position. + * @deprecated Use the solver in `makePortfolioSteps` instead. 
+ */ +export function planDepositTransfers( + deposit: Amount<'nat'>, + currentBalances: Record>, + targetAllocation: TargetAllocation, +): Record> { + // Validate percentages sum to 100 + const totalPct = Object.values(targetAllocation).reduce((s, p) => s + p, 0n); + if (totalPct !== 100n) + throw Error('Target allocation percentages must sum to 100'); + + const brand = deposit.brand; + const make = (v: bigint) => AmountMath.make(brand, v); + const dep = deposit.value; + if (dep === 0n) return {}; + + // Sum current balances (bigint) + let currentTotal = 0n; + for (const amt of Object.values(currentBalances)) currentTotal += amt.value; + const totalAfter = currentTotal + dep; + + // Compute positive needs only (skip over-target) + const needs: Record = {}; + let sumNeeds = 0n; + for (const [k, pct] of Object.entries(targetAllocation)) { + const targetAbs = (totalAfter * pct) / 100n; + const cur = currentBalances[k]?.value || 0n; + if (cur >= targetAbs) continue; + const need = targetAbs - cur; + if (need > 0n) { + needs[k] = need; + sumNeeds += need; + } + } + if (sumNeeds === 0n) return {}; + + // If deposit covers all needs, allocate fully; otherwise scale proportionally + const transfers: Record> = {}; + if (sumNeeds <= dep) { + for (const [k, need] of Object.entries(needs)) transfers[k] = make(need); + return transfers; + } + for (const [k, need] of Object.entries(needs)) { + const scaled = (need * dep) / sumNeeds; // floor scaling + if (scaled > 0n) transfers[k] = make(scaled); + } + return transfers; +} + +/** + * Build deposit (give) and movement steps to achieve goal amounts per protocol. + * Aggregates deposit at @noble then fan-outs to chain-specific pools. 
 */
export const makePortfolioSteps = async <
  G extends Partial>,
>(
  goal: G,
  opts: {
    evm?: AxelarChain;
    feeBrand?: Brand<'nat'>;
    fees?: Record;
    // NOTE(review): `detail` is accepted here but never referenced anywhere
    // in this function body — confirm whether callers rely on it.
    detail?: { usdnOut: NatValue };
  } = {},
) => {
  Object.values(goal).length > 0 || Fail`empty goal`;
  const { USDN: _ignored, ...evmGoal } = goal;
  const {
    evm = 'Arbitrum',
    feeBrand,
    // Default fees require feeBrand; NonNullish throws if it is absent while
    // any EVM goal exists.
    fees = objectMap(evmGoal, _ => ({
      Account: AmountMath.make(NonNullish(feeBrand), 150n),
      Call: AmountMath.make(NonNullish(feeBrand), 100n),
    })),
  } = opts;

  // Compute total deposit and build current/target for the solver
  const values = Object.values(goal) as NatAmount[];
  const deposit = values.reduce((acc, v) => AmountMath.add(acc, v));
  const brand = deposit.brand;

  /** Map protocol goal -> concrete PoolKey target */
  const target: Partial> = {};
  for (const [proto, amount] of Object.entries(goal) as [
    YieldProtocol,
    NatAmount,
  ][]) {
    if (proto === 'USDN') {
      // eslint-disable-next-line dot-notation
      target['USDNVault'] = amount;
    } else {
      target[`${proto}_${evm}`] = amount;
    }
  }
  // Deposit seat must end empty
  target[''] = AmountMath.make(brand, 0n);

  const current: Partial> = {
    '': deposit,
  };

  // Run the solver to compute movement steps
  const { steps: raw } = await planRebalanceFlow({
    network: PROD_NETWORK,
    current: current as any,
    target: target as any,
    brand,
    feeBrand: brand, // Use same brand for fees in this context
    mode: 'cheapest',
  });

  // Inject USDN detail and EVM fees to match existing behavior/tests
  // (shallow-copy each step so the hardened solver output is not mutated).
  const steps: MovementDesc[] = raw.map(s => ({ ...s }));

  // USDN detail: 99% min-out of requested USDN
  const usdnAmt = (goal as any).USDN as NatAmount | undefined;
  if (usdnAmt) {
    const usdnDetail = { usdnOut: ((usdnAmt.value || 0n) * 99n) / 100n } as {
      usdnOut: NatValue;
    };
    for (const s of steps) {
      if (s.src === '@noble' && s.dest === 'USDNVault') {
        (s as any).detail = usdnDetail;
      }
    }
  }

  // Add fees on noble->EVM (Account) and EVM->Pool (Call)
  // NOTE(review): assumes the solver emits the @noble -> @EVM-hub step
  // immediately followed by the hub -> pool step; an interleaved plan would
  // silently skip fee injection — TODO confirm the solver guarantees this.
  const feeMap = fees as Record<
    string,
    { Account: NatAmount; Call: NatAmount }
  >;
  for (let i = 0; i < steps.length - 1; i += 1) {
    const a = steps[i];
    const b = steps[i + 1];
    if (
      a.src === '@noble' &&
      typeof a.dest === 'string' &&
      a.dest.startsWith('@')
    ) {
      const hub = a.dest; // e.g., '@Arbitrum'
      if (b.src === hub && typeof b.dest === 'string') {
        const m = /^(Aave|Compound)_(.+)$/.exec(b.dest);
        if (m) {
          const proto = m[1] as keyof typeof feeMap;
          const ff = feeMap[proto as string];
          if (ff) {
            (a as any).fee = ff.Account;
            (b as any).fee = ff.Call;
          }
        }
      }
    }
  }

  // Build give with optional aggregated GMP fees (if any)
  const feeValues = Object.values(feeMap ?? {}) as {
    Account: NatAmount;
    Call: NatAmount;
  }[];
  const gmpFee = feeValues.length
    ? feeValues
        .map(f => [f.Account, f.Call])
        .flat()
        .reduce((acc, v) => AmountMath.add(acc, v))
    : undefined;
  const give = {
    Deposit: deposit,
    ...(gmpFee ?
{ GmpFee: gmpFee } : {}), + } as Record>; + + return harden({ give, steps }); +}; diff --git a/packages/portfolio-contract/src/portfolio.flows.ts b/packages/portfolio-contract/src/portfolio.flows.ts index 1a109a0b6f8..ed179996945 100644 --- a/packages/portfolio-contract/src/portfolio.flows.ts +++ b/packages/portfolio-contract/src/portfolio.flows.ts @@ -323,6 +323,7 @@ export const wayFromSrcToDesc = (moveDesc: MovementDesc): Way => { throw Fail`src pos must have account as dest ${q(moveDesc)}`; const poolKey = src as PoolKey; const { protocol } = PoolPlaces[poolKey]; + // TODO move this into metadata const feeRequired = ['Compound', 'Aave', 'Beefy']; moveDesc.fee || !feeRequired.includes(protocol) || diff --git a/packages/portfolio-contract/test/network/buildGraph.test.ts b/packages/portfolio-contract/test/network/buildGraph.test.ts new file mode 100644 index 00000000000..cf56f3899d2 --- /dev/null +++ b/packages/portfolio-contract/test/network/buildGraph.test.ts @@ -0,0 +1,119 @@ +import test from 'ava'; +import { Far } from '@endo/marshal'; +import { AmountMath } from '@agoric/ertp'; +import type { NetworkSpec } from '../../src/network/network-spec.js'; +import { makeGraphFromDefinition } from '../../src/network/buildGraph.js'; +import { planRebalanceFlow } from '../../src/plan-solve.js'; + +const brand = Far('TestBrand') as any; +const feeBrand = Far('TestFeeBrand') as any; +const makeAmt = (v: bigint) => AmountMath.make(brand as any, v); + +test('NetworkSpec minimal validation via builder', t => { + const base: NetworkSpec = { + chains: [ + { name: 'A', control: 'local' }, + { name: 'B', control: 'local' }, + ], + pools: [], + links: [], + localPlaces: [], + } as any; + t.notThrows(() => makeGraphFromDefinition(base, {}, {}, Far('B'), feeBrand)); +}); + +test('makeGraphFromDefinition adds intra-chain edges and appends inter edges with sequential ids', t => { + const net: NetworkSpec = { + chains: [ + { name: 'Arbitrum', control: 'axelar' }, + { name: 'Ethereum', 
control: 'axelar' }, + ], + pools: [ + { pool: 'Aave_Arbitrum', chain: 'Arbitrum', protocol: 'Aave' }, + { pool: 'Compound_Ethereum', chain: 'Ethereum', protocol: 'Compound' }, + ], + localPlaces: [], + links: [ + { + src: '@Arbitrum', + dest: '@Ethereum', + transfer: 'fastusdc', + variableFeeBps: 0, + timeSec: 10, + }, + { + src: '@Ethereum', + dest: '@Arbitrum', + transfer: 'fastusdc', + variableFeeBps: 0, + timeSec: 10, + }, + ], + } as any; + const graph = makeGraphFromDefinition(net, {}, {}, brand, feeBrand); + const leafCount = 2; // Aave_Arbitrum, Compound_Ethereum + const expectedIntra = leafCount * 2; // bidirectional + t.true(graph.edges.length >= expectedIntra + net.links.length); + const interEdges = graph.edges.filter( + e => e.src.startsWith('@') && e.dest.startsWith('@'), + ); + t.is(interEdges.length, net.links.length); + // Edge ids for inter edges should be the last ones appended in order + const sortedIds = interEdges + .map(e => Number(e.id.slice(1))) + .sort((a, b) => a - b); + const minInterId = expectedIntra; // intra edges allocated first + t.true(sortedIds[0] >= minInterId, 'inter edges start after intra edges'); +}); + +test('planRebalanceFlow uses NetworkSpec (legacy links param ignored at type level)', async t => { + const net: NetworkSpec = { + chains: [ + { name: 'Arbitrum', control: 'axelar' }, + { name: 'Avalanche', control: 'axelar' }, + ], + pools: [ + { pool: 'Aave_Arbitrum', chain: 'Arbitrum', protocol: 'Aave' }, + { pool: 'Beefy_re7_Avalanche', chain: 'Avalanche', protocol: 'Beefy' }, + ], + localPlaces: [], + links: [ + { + src: '@Arbitrum', + dest: '@Avalanche', + transfer: 'fastusdc', + variableFeeBps: 0, + timeSec: 10, + }, + { + src: '@Avalanche', + dest: '@Arbitrum', + transfer: 'fastusdc', + variableFeeBps: 0, + timeSec: 10, + }, + ], + } as any; + const current = { + Aave_Arbitrum: makeAmt(50n), + Beefy_re7_Avalanche: makeAmt(0n), + } as any; + const target = { + Aave_Arbitrum: makeAmt(20n), + Beefy_re7_Avalanche: 
makeAmt(30n), + } as any; + const res = await planRebalanceFlow({ + network: net, + current, + target, + brand, + feeBrand, + mode: 'cheapest', + }); + // Ensure only the two provided inter edges (plus intra) exist, not link-derived ones + const hubEdges = res.graph.edges.filter( + e => e.src.startsWith('@') && e.dest.startsWith('@'), + ); + t.is(hubEdges.length, 2); + t.true(res.steps.length > 0); +}); diff --git a/packages/portfolio-contract/test/network/test-network.ts b/packages/portfolio-contract/test/network/test-network.ts new file mode 100644 index 00000000000..27318944b52 --- /dev/null +++ b/packages/portfolio-contract/test/network/test-network.ts @@ -0,0 +1,61 @@ +/* eslint-disable camelcase */ + +import type { SupportedChain } from '@agoric/portfolio-api/src/constants.js'; +import type { NetworkSpec } from '../../src/network/network-spec.js'; +import type { PoolKey } from '../../src/type-guards.js'; +import type { AssetPlaceRef } from '../../src/type-guards-steps.js'; + +// @ts-expect-error TS2322: Type '"Polygon"' is not assignable to type 'SupportedChain'. +const Polygon: SupportedChain = 'Polygon'; + +// @ts-expect-error TS2322: Type '"Compound_Polygon"' is not assignable to type 'PoolKey'. 
const Compound_Polygon: PoolKey = 'Compound_Polygon';

/** Test topology: mirrors the production network plus an unsupported Polygon chain/pool. */
export const TEST_NETWORK: NetworkSpec = {
  debug: true,
  environment: 'test',
  chains: [
    { name: 'agoric', control: 'local' },
    { name: 'noble', control: 'ibc' },
    { name: 'Arbitrum', control: 'axelar' },
    { name: 'Avalanche', control: 'axelar' },
    { name: Polygon, control: 'axelar' },
    { name: 'Ethereum', control: 'axelar' },
  ],
  pools: [
    { pool: 'Aave_Arbitrum', chain: 'Arbitrum', protocol: 'Aave' },
    { pool: 'Beefy_re7_Avalanche', chain: 'Avalanche', protocol: 'Beefy' },
    { pool: 'Compound_Ethereum', chain: 'Ethereum', protocol: 'Compound' },
    { pool: 'Aave_Avalanche', chain: 'Avalanche', protocol: 'Aave' },
    { pool: Compound_Polygon, chain: Polygon, protocol: 'Compound' },
    { pool: 'USDN', chain: 'noble', protocol: 'USDN' },
    { pool: 'USDNVault', chain: 'noble', protocol: 'USDN' },
  ],
  localPlaces: [
    // Agoric seats/accounts
    // NOTE(review): the first two ids appear identical here — likely distinct
    // seat ids whose text was lost in transit; confirm against upstream.
    { id: '', chain: 'agoric' },
    { id: '', chain: 'agoric' },
    { id: '+agoric', chain: 'agoric' },
  ],
  links: [
    // USDN costs a fee to get into
    { src: '@noble', dest: 'USDN', transfer: 'local', variableFeeBps: 5, timeSec: 0, feeMode: 'toUSDN' },
    { src: '@noble', dest: 'USDNVault', transfer: 'local', variableFeeBps: 5, timeSec: 0, feeMode: 'toUSDN' },

    // CCTP slow towards noble
    { src: '@Polygon' as AssetPlaceRef, dest: '@noble', transfer: 'cctpSlow', variableFeeBps: 0, timeSec: 1080 },
    { src: '@Arbitrum', dest: '@noble', transfer: 'cctpSlow', variableFeeBps: 0, timeSec: 1080 },
    { src: '@Avalanche', dest: '@noble', transfer: 'cctpSlow', variableFeeBps: 0, timeSec: 1080 },
    { src: '@Ethereum', dest: '@noble', transfer: 'cctpSlow', variableFeeBps: 0, timeSec: 1080 },
    // Return path
    { src: '@noble', dest: '@Arbitrum', transfer: 'cctpReturn', variableFeeBps: 0, timeSec: 20, feeMode: 'gmpTransfer' },
    { src: '@noble', dest: '@Polygon' as AssetPlaceRef, transfer: 'cctpReturn', variableFeeBps: 0, timeSec: 20, feeMode: 'gmpTransfer' },
    { src: '@noble', dest: '@Avalanche', transfer: 'cctpReturn', variableFeeBps: 0, timeSec: 20, feeMode: 'gmpTransfer' },
    { src: '@noble', dest: '@Ethereum', transfer: 'cctpReturn', variableFeeBps: 0, timeSec: 20, feeMode: 'gmpTransfer' },
    // IBC agoric<->noble
    { src: '@agoric', dest: '@noble', transfer: 'ibc', variableFeeBps: 0, timeSec: 10 },
    { src: '@noble', dest: '@agoric', transfer: 'ibc', variableFeeBps: 0, timeSec: 10 },
  ],
};

export default TEST_NETWORK;
diff --git a/packages/portfolio-contract/test/plan-deposit-transfers.test.ts b/packages/portfolio-contract/test/plan-deposit-transfers.test.ts
new file mode 100644
index 00000000000..736a32a859a
--- /dev/null
+++ b/packages/portfolio-contract/test/plan-deposit-transfers.test.ts
@@ -0,0 +1,135 @@
import test from 'ava';
import { AmountMath, type Brand } from '@agoric/ertp';
import { Far } from '@endo/pass-style';
import type { TargetAllocation } from '@aglocal/portfolio-contract/src/type-guards.js';
import { makeTracer } from '@agoric/internal';
import { planDepositTransfers } from '../src/plan-transfers.ts';

const brand = Far('mock brand') as Brand<'nat'>;
const trace = makeTracer('planDepositTransfers');

test('planDepositTransfers works in a handful of cases', t => {
  const make = value => AmountMath.make(brand, value);

  trace('Test case 1: Empty current balances, equal target allocation');
  const deposit1 = make(1000n);
  const currentBalances1 = {};
  const targetAllocation1: TargetAllocation = {
    USDN: 50n,
    Aave_Arbitrum: 30n,
    Compound_Arbitrum: 20n,
  };

  const result1 = planDepositTransfers(
    deposit1,
    currentBalances1,
    targetAllocation1,
  );

  // 1000 split exactly per the 50/30/20 percentages.
  t.deepEqual(result1, {
    USDN: make(500n),
    Aave_Arbitrum: make(300n),
    Compound_Arbitrum: make(200n),
  });

  trace('Test case 2: Existing balances, need rebalancing');
  const deposit2 = make(500n);
  const currentBalances2 = {
    USDN: make(200n),
    Aave_Arbitrum: make(100n),
    Compound_Arbitrum: make(0n),
  };
const targetAllocation2: TargetAllocation = { + USDN: 40n, + Aave_Arbitrum: 40n, + Compound_Arbitrum: 20n, + }; + + const result2 = planDepositTransfers( + deposit2, + currentBalances2, + targetAllocation2, + ); + trace({ deposit2, targetAllocation2, result2 }); + // Total after deposit: 300 + 500 = 800 + // Targets: USDN=320, Aave=320, Compound=160 + // Transfers needed: USDN=120, Aave=220, Compound=160 + t.deepEqual(result2, { + USDN: make(120n), + Aave_Arbitrum: make(220n), + Compound_Arbitrum: make(160n), + }); + + // Test case 3: Some positions already over-allocated + const deposit3 = make(300n); + const currentBalances3 = { + USDN: make(600n), // already over target + Aave_Arbitrum: make(100n), + Compound_Arbitrum: make(50n), + }; + const targetAllocation3: TargetAllocation = { + USDN: 50n, + Aave_Arbitrum: 30n, + Compound_Arbitrum: 20n, + }; + + const result3 = planDepositTransfers( + deposit3, + currentBalances3, + targetAllocation3, + ); + trace({ currentBalances3, deposit3, targetAllocation3, result3 }); + + // Total after deposit: 750 + 300 = 1050 + // Targets: USDN=525, Aave=315, Compound=210 + // USDN is already over target (600 > 525), so no transfer + // Need transfers: Aave=215, Compound=160, total=375 + // But deposit is only 300, so scale down proportionally: + // Aave: 215 * (300/375) = 172, Compound: 160 * (300/375) = 128 + t.deepEqual(result3, { + Aave_Arbitrum: make(172n), + Compound_Arbitrum: make(128n), + }); + + // Test case 4: Transfer amounts exceed deposit (scaling needed) + const deposit4 = make(100n); + const currentBalances4 = { + USDN: make(0n), + Aave_Arbitrum: make(0n), + Compound_Arbitrum: make(0n), + }; + const targetAllocation4: TargetAllocation = { + USDN: 60n, + Aave_Arbitrum: 30n, + Compound_Arbitrum: 10n, + }; + + const result4 = planDepositTransfers( + deposit4, + currentBalances4, + targetAllocation4, + ); + + // Should allocate proportionally to the 100 deposit + t.deepEqual(result4, { + USDN: make(60n), + Aave_Arbitrum: 
make(30n), + Compound_Arbitrum: make(10n), + }); + + // Test case 5: Single position target + const deposit5 = make(1000n); + const currentBalances5 = { USDN: make(500n) }; + const targetAllocation5: TargetAllocation = { USDN: 100n }; + + const result5 = planDepositTransfers( + deposit5, + currentBalances5, + targetAllocation5, + ); + + // Total after: 1500, target: 1500, current: 500, transfer: 1000 + t.deepEqual(result5, { + USDN: make(1000n), + }); +}); diff --git a/packages/portfolio-contract/test/portfolio.flows.test.ts b/packages/portfolio-contract/test/portfolio.flows.test.ts index 0d48e60f716..7e365691b7e 100644 --- a/packages/portfolio-contract/test/portfolio.flows.test.ts +++ b/packages/portfolio-contract/test/portfolio.flows.test.ts @@ -66,7 +66,7 @@ import { type ProposalType, type StatusFor, } from '../src/type-guards.ts'; -import { makePortfolioSteps } from '../tools/portfolio-actors.ts'; +import { makePortfolioSteps } from '../src/plan-transfers.ts'; import { decodeFunctionCall } from './abi-utils.ts'; import { axelarIdsMock, @@ -473,8 +473,10 @@ test('Noble Dollar Swap, Lock messages', t => { } }); -test('makePortfolioSteps for USDN position', t => { - const actual = makePortfolioSteps({ USDN: make(USDC, 50n * 1_000_000n) }); +test('makePortfolioSteps for USDN position', async t => { + const actual = await makePortfolioSteps({ + USDN: make(USDC, 50n * 1_000_000n), + }); const amount = make(USDC, 50n * 1_000_000n); const detail = { usdnOut: 49500000n }; @@ -489,7 +491,9 @@ test('makePortfolioSteps for USDN position', t => { }); test('open portfolio with USDN position', async t => { - const { give, steps } = makePortfolioSteps({ USDN: make(USDC, 50_000_000n) }); + const { give, steps } = await makePortfolioSteps({ + USDN: make(USDC, 50_000_000n), + }); const { orch, ctx, offer, storage } = mocks({}, give); const { log, seat } = offer; @@ -517,7 +521,7 @@ const openAndTransfer = test.macro( goal: Partial>, makeEvents: () => VTransferIBCEvent[], ) => { 
- const { give, steps } = makePortfolioSteps(goal, { feeBrand: BLD }); + const { give, steps } = await makePortfolioSteps(goal, { feeBrand: BLD }); const { orch, ctx, offer, storage, tapPK } = mocks({}, give); const { log, seat } = offer; @@ -603,7 +607,7 @@ test.skip('reject missing fee before committing anything', t => { }); test('open portfolio with Compound position', async t => { - const { give, steps } = makePortfolioSteps( + const { give, steps } = await makePortfolioSteps( { Compound: make(USDC, 300n) }, { fees: { Compound: { Account: make(BLD, 300n), Call: make(BLD, 100n) } } }, ); @@ -659,7 +663,7 @@ test('handle failure in localTransfer from seat to local account', async t => { }); test('handle failure in IBC transfer', async t => { - const { give, steps } = makePortfolioSteps({ USDN: make(USDC, 100n) }); + const { give, steps } = await makePortfolioSteps({ USDN: make(USDC, 100n) }); const { orch, ctx, offer, storage } = mocks( { transfer: Error('IBC is on the fritz!!') }, give, @@ -683,7 +687,7 @@ test('handle failure in IBC transfer', async t => { }); test('handle failure in executeEncodedTx', async t => { - const { give, steps } = makePortfolioSteps({ USDN: make(USDC, 100n) }); + const { give, steps } = await makePortfolioSteps({ USDN: make(USDC, 100n) }); const { orch, ctx, offer, storage } = mocks( { executeEncodedTx: Error('exec swaplock went kerflewey') }, give, @@ -987,7 +991,7 @@ test('client can move to deposit LCA', async t => { }); test('receiveUpcall returns false if sender is not AXELAR_GMP', async t => { - const { give, steps } = makePortfolioSteps( + const { give, steps } = await makePortfolioSteps( { Compound: make(USDC, 300n) }, { fees: { Compound: { Account: make(BLD, 300n), Call: make(BLD, 100n) } } }, ); @@ -1015,7 +1019,7 @@ test('handle failure in provideCosmosAccount makeAccount', async t => { const amount = make(USDC, 100n); const chainToErr = new Map([['noble', Error('timeout creating ICA')]]); - const { give, steps } = 
makePortfolioSteps({ USDN: amount }); + const { give, steps } = await makePortfolioSteps({ USDN: amount }); const { orch, ctx, offer, storage } = mocks( { makeAccount: chainToErr }, give, @@ -1080,7 +1084,7 @@ test('handle failure in provideCosmosAccount makeAccount', async t => { test('handle failure in provideEVMAccount sendMakeAccountCall', async t => { const unlucky = make(BLD, 13n); - const { give, steps } = makePortfolioSteps( + const { give, steps } = await makePortfolioSteps( { Compound: make(USDC, 300n) }, { fees: { Compound: { Account: unlucky, Call: make(BLD, 100n) } }, @@ -1137,7 +1141,7 @@ test('handle failure in provideEVMAccount sendMakeAccountCall', async t => { } // Recovery attempt - avoid the unlucky 13n fee using same portfolio - const { give: giveGood, steps: stepsGood } = makePortfolioSteps( + const { give: giveGood, steps: stepsGood } = await makePortfolioSteps( { Compound: make(USDC, 300n) }, { fees: { Compound: { Account: make(BLD, 300n), Call: make(BLD, 100n) } } }, ); diff --git a/packages/portfolio-contract/test/rebalance.test.ts b/packages/portfolio-contract/test/rebalance.test.ts new file mode 100644 index 00000000000..8808be80305 --- /dev/null +++ b/packages/portfolio-contract/test/rebalance.test.ts @@ -0,0 +1,608 @@ +import test from 'ava'; +import type { ImplementationFn } from 'ava'; +import { AmountMath } from '@agoric/ertp'; +import type { Brand } from '@agoric/ertp/src/types.js'; +import { objectMap } from '@agoric/internal'; +import type { + SupportedChain, + YieldProtocol, +} from '@agoric/portfolio-api/src/constants.js'; +import { Far } from '@endo/marshal'; +import type { + NetworkSpec, + TransferProtocol, +} from '../src/network/network-spec.js'; +import { planRebalanceFlow } from '../src/plan-solve.js'; +import type { FlowEdge, RebalanceMode } from '../src/plan-solve.js'; +import type { PoolKey } from '../src/type-guards.js'; +import type { AssetPlaceRef } from '../src/type-guards-steps.js'; +import { TEST_NETWORK } from 
'./network/test-network.js'; + +// eslint-disable-next-line no-nested-ternary +const strcmp = (a: string, b: string) => (a > b ? 1 : a < b ? -1 : 0); +const compareFlowEdges = (a: FlowEdge, b: FlowEdge) => + strcmp(a.src.toLowerCase(), b.src.toLowerCase()) || + strcmp(a.dest.toLowerCase(), b.dest.toLowerCase()); + +// Shared Tok brand + helper +const { brand: TOK_BRAND } = (() => ({ brand: Far('USD*') as Brand<'nat'> }))(); +const { brand: FEE_BRAND } = (() => ({ brand: Far('BLD') as Brand<'nat'> }))(); +const token = (v: bigint) => AmountMath.make(TOK_BRAND, v); +const fee = (v: bigint) => AmountMath.make(FEE_BRAND, v); +const ZERO = token(0n); +const fixedFee = fee(30_000_000n); + +// Pools +const A = 'Aave_Arbitrum'; +const B = 'Beefy_re7_Avalanche'; +const C = 'Compound_Ethereum'; + +// Helper to build current map (use shared token) +const balances = (rec: Record) => objectMap(rec, v => token(v)); + +const testWithModes = ( + titlePrefix: string, + modes: RebalanceMode[], + callback: ImplementationFn<[mode: RebalanceMode]>, +) => { + for (const mode of modes) { + test(`${titlePrefix} [${mode}]`, async t => { + await callback(t, mode); + }); + } +}; + +const testWithAllModes = ( + titlePrefix: string, + callback: ImplementationFn<[mode: string]>, +) => testWithModes(titlePrefix, ['cheapest', 'fastest'], callback); + +testWithAllModes('solver simple 2-pool case (A -> B 30)', async (t, mode) => { + const current = balances({ [A]: 80n, [B]: 20n }); + const targetBps = { [A]: 5000n, [B]: 5000n }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: objectMap(targetBps, bps => token((100n * bps) / 10000n)), + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + + t.deepEqual(steps, [ + // leaf -> hub + { src: A, dest: '@Arbitrum', amount: token(30n), fee: fixedFee }, + // hub -> hub legs + { src: '@Arbitrum', dest: '@noble', amount: token(30n) }, + { + src: '@noble', + dest: '@Avalanche', + amount: 
token(30n), + fee: fixedFee, + }, + // hub -> leaf + { src: '@Avalanche', dest: B, amount: token(30n), fee: fixedFee }, + ]); +}); + +testWithAllModes( + 'solver 3-pool rounding (A -> B 33, A -> C 33)', + async (t, mode) => { + const current = balances({ [A]: 100n, [B]: 0n, [C]: 0n }); + const targetBps = { [A]: 3400n, [B]: 3300n, [C]: 3300n }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: objectMap(targetBps, bps => token((100n * bps) / 10000n)), + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + + const amt66 = token(66n); + const amt33 = token(33n); + t.deepEqual(steps, [ + // leaf -> hub (aggregated outflow from A) + { src: A, dest: '@Arbitrum', amount: amt66, fee: fixedFee }, + // hub -> hub aggregated then split + { src: '@Arbitrum', dest: '@noble', amount: amt66 }, + { + src: '@noble', + dest: '@Avalanche', + amount: amt33, + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Ethereum', + amount: amt33, + fee: fixedFee, + }, + // hub -> leaf + { src: '@Avalanche', dest: B, amount: amt33, fee: fixedFee }, + { src: '@Ethereum', dest: C, amount: amt33, fee: fixedFee }, + ]); + }, +); + +testWithAllModes('solver already balanced => no steps', async (t, mode) => { + const current = balances({ [A]: 50n, [B]: 50n }); + const targetBps = { [A]: 5000n, [B]: 5000n }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: objectMap(targetBps, bps => token((100n * bps) / 10000n)), + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, []); +}); + +testWithAllModes('solver all to one (B + C -> A)', async (t, mode) => { + const current = balances({ [A]: 10n, [B]: 20n, [C]: 70n }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { [A]: token(100n), [B]: ZERO, [C]: ZERO }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ 
+ { src: B, dest: '@Avalanche', amount: token(20n), fee: fixedFee }, + { src: '@Avalanche', dest: '@noble', amount: token(20n) }, + { src: C, dest: '@Ethereum', amount: token(70n), fee: fixedFee }, + { src: '@Ethereum', dest: '@noble', amount: token(70n) }, + { + src: '@noble', + dest: '@Arbitrum', + amount: token(90n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(90n), fee: fixedFee }, + ]); +}); + +testWithAllModes( + 'solver distribute from one (A -> B 60, A -> C 40)', + async (t, mode) => { + const current = balances({ [A]: 100n, [B]: 0n, [C]: 0n }); + const target = { [A]: ZERO, [B]: token(60n), [C]: token(40n) }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: A, dest: '@Arbitrum', amount: token(100n), fee: fixedFee }, + { src: '@Arbitrum', dest: '@noble', amount: token(100n) }, + { + src: '@noble', + dest: '@Avalanche', + amount: token(60n), + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Ethereum', + amount: token(40n), + fee: fixedFee, + }, + { src: '@Avalanche', dest: B, amount: token(60n), fee: fixedFee }, + { src: '@Ethereum', dest: C, amount: token(40n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver collect to one (B 30 + C 70 -> A)', + async (t, mode) => { + const current = balances({ [A]: 0n, [B]: 30n, [C]: 70n }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { [A]: token(100n), [B]: ZERO, [C]: ZERO }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: B, dest: '@Avalanche', amount: token(30n), fee: fixedFee }, + { src: '@Avalanche', dest: '@noble', amount: token(30n) }, + { src: C, dest: '@Ethereum', amount: token(70n), fee: fixedFee }, + { src: '@Ethereum', dest: '@noble', amount: token(70n) }, + { + src: '@noble', + dest: '@Arbitrum', + amount: 
token(100n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(100n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver deposit redistribution (+agoric 100 -> A 70, B 30)', + async (t, mode) => { + const current = balances({ '+agoric': 100n, [A]: 0n, [B]: 0n }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { '+agoric': ZERO, [A]: token(70n), [B]: token(30n) }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: '+agoric', dest: '@agoric', amount: token(100n) }, + { src: '@agoric', dest: '@noble', amount: token(100n) }, + { + src: '@noble', + dest: '@Arbitrum', + amount: token(70n), + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Avalanche', + amount: token(30n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(70n), fee: fixedFee }, + { src: '@Avalanche', dest: B, amount: token(30n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver deposit redistribution (Deposit 100 -> A 70, B 30)', + async (t, mode) => { + const current = balances({ '': 100n, [A]: 0n, [B]: 0n }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { '': ZERO, [A]: token(70n), [B]: token(30n) }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: '', dest: '@agoric', amount: token(100n) }, + // TODO dckc should this go through +agoric? 
+ // { src: '+agoric', dest: '@agoric', amount: token(100n) }, + { src: '@agoric', dest: '@noble', amount: token(100n) }, + { + src: '@noble', + dest: '@Arbitrum', + amount: token(70n), + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Avalanche', + amount: token(30n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(70n), fee: fixedFee }, + { src: '@Avalanche', dest: B, amount: token(30n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver withdraw to cash (A 50 + B 30 -> Cash)', + async (t, mode) => { + const current = balances({ [A]: 50n, [B]: 30n, '': 0n }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { [A]: ZERO, [B]: ZERO, '': token(80n) }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: A, dest: '@Arbitrum', amount: token(50n), fee: fixedFee }, + { src: '@Arbitrum', dest: '@noble', amount: token(50n) }, + { src: B, dest: '@Avalanche', amount: token(30n), fee: fixedFee }, + { src: '@Avalanche', dest: '@noble', amount: token(30n) }, + { src: '@noble', dest: '@agoric', amount: token(80n) }, + { src: '@agoric', dest: '', amount: token(80n) }, + ]); + }, +); + +testWithAllModes( + 'solver hub balances into pools (hubs supply -> pool targets)', + async (t, mode) => { + const current = balances({ + [A]: 20n, + [B]: 10n, + [C]: 0n, + '@Arbitrum': 30n, + '@Avalanche': 20n, + '@noble': 20n, + }); + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target: { + [A]: token(50n), + [B]: token(30n), + [C]: token(20n), + '@Arbitrum': ZERO, + '@Avalanche': ZERO, + '@noble': ZERO, + }, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: '@Arbitrum', dest: A, amount: token(30n), fee: fixedFee }, + { src: '@Avalanche', dest: B, amount: token(20n), fee: fixedFee }, + { + src: '@noble', + dest: '@Ethereum', + amount: token(20n), + fee: fixedFee, + 
}, + { src: '@Ethereum', dest: C, amount: token(20n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver deposit split across three pools (Deposit 1000 -> USDN 500, A 300, C 200)', + async (t, mode) => { + // Mirrors planDepositTransfers case 1 proportions + const USDN = 'USDN'; + const current = balances({ + '': 1000n, + [USDN]: 0n, + [A]: 0n, + [C]: 0n, + }); + const target = { + '': ZERO, + [USDN]: token(500n), + [A]: token(300n), + [C]: token(200n), + }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: '', dest: '@agoric', amount: token(1000n) }, + { src: '@agoric', dest: '@noble', amount: token(1000n) }, + { + src: '@noble', + dest: USDN, + amount: token(500n), + detail: { usdnOut: 499n }, + }, + { + src: '@noble', + dest: '@Arbitrum', + amount: token(300n), + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Ethereum', + amount: token(200n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(300n), fee: fixedFee }, + { src: '@Ethereum', dest: C, amount: token(200n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver deposit with existing balances to meet targets', + async (t, mode) => { + // Mirrors planDepositTransfers case 2: existing balances + deposit 500 -> targets + // current: USDN 200, A 100, C 0; deposit 500; targets USDN 320, A 320, C 160 + const USDN = 'USDN'; + const current = balances({ + [USDN]: 200n, + [A]: 100n, + [C]: 0n, + '': 500n, + }); + const target = { + [USDN]: token(320n), + [A]: token(320n), + [C]: token(160n), + '': ZERO, + }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + // Expect deposit 500 to route to fill deficits: USDN 120, A 220, C 160 + t.deepEqual(steps, [ + { src: '', dest: '@agoric', amount: token(500n) 
}, + { src: '@agoric', dest: '@noble', amount: token(500n) }, + { + src: '@noble', + dest: USDN, + amount: token(120n), + detail: { usdnOut: 119n }, + }, + { + src: '@noble', + dest: '@Arbitrum', + amount: token(220n), + fee: fixedFee, + }, + { + src: '@noble', + dest: '@Ethereum', + amount: token(160n), + fee: fixedFee, + }, + { src: '@Arbitrum', dest: A, amount: token(220n), fee: fixedFee }, + { src: '@Ethereum', dest: C, amount: token(160n), fee: fixedFee }, + ]); + }, +); + +testWithAllModes( + 'solver single-target deposit (Deposit 1000 -> USDN 1000)', + async (t, mode) => { + // Mirrors planDepositTransfers case 5: one target asset + const USDN = 'USDN'; + const current = balances({ '': 1000n, [USDN]: 500n }); + const target = { '': ZERO, [USDN]: token(1500n) }; + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.deepEqual(steps, [ + { src: '', dest: '@agoric', amount: token(1000n) }, + { src: '@agoric', dest: '@noble', amount: token(1000n) }, + { + src: '@noble', + dest: USDN, + amount: token(1000n), + detail: { usdnOut: 999n }, + }, + ]); + }, +); + +testWithAllModes( + 'solver leaves unmentioned pools unchanged', + async (t, mode) => { + const current = balances({ [A]: 80n, [B]: 20n, [C]: 7n }); // C present in current + const target = { [A]: token(50n), [B]: token(50n) }; // C omitted from target + const { steps } = await planRebalanceFlow({ + mode: mode as RebalanceMode, + network: TEST_NETWORK, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + + // Identical to the 2-pool case; no steps to/from C + t.deepEqual(steps, [ + { src: A, dest: '@Arbitrum', amount: token(30n), fee: fixedFee }, + { src: '@Arbitrum', dest: '@noble', amount: token(30n) }, + { + src: '@noble', + dest: '@Avalanche', + amount: token(30n), + fee: fixedFee, + }, + { src: '@Avalanche', dest: B, amount: token(30n), fee: fixedFee }, + ]); + }, 
+); + +test.failing('solver differentiates cheapest vs. fastest', async t => { + const network: NetworkSpec = { + debug: true, + environment: 'test', + chains: [ + { name: 'agoric', control: 'local' }, + { name: 'External' as SupportedChain, control: 'ibc' }, + ], + pools: [ + { + pool: 'Sink_External' as PoolKey, + chain: 'external' as SupportedChain, + protocol: 'sink' as YieldProtocol, + }, + ], + localPlaces: [{ id: '+agoric', chain: 'agoric' }], + links: [ + { + src: '@agoric', + dest: '@External' as AssetPlaceRef, + transfer: 'cheap' as TransferProtocol, + variableFeeBps: 5, + timeSec: 60, + feeMode: 'gmpCall', + }, + { + src: '@agoric', + dest: '@External' as AssetPlaceRef, + transfer: 'fast' as TransferProtocol, + variableFeeBps: 6, + timeSec: 59, + feeMode: 'gmpCall', + }, + ], + }; + const current = balances({ '+agoric': 100n, Sink_External: 0n }); + const target = balances({ '+agoric': 0n, Sink_External: 100n }); + + const cheapResult = await planRebalanceFlow({ + mode: 'cheapest', + network, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.like(cheapResult.flows.map(flow => flow.edge).sort(compareFlowEdges), [ + { src: '+agoric', dest: '@agoric', via: 'local' }, + { src: '@agoric', dest: '@External', via: 'cheap' }, + { src: '@External', dest: 'Sink_External', via: 'local' }, + ]); + t.deepEqual(cheapResult.steps, [ + { src: '+agoric', dest: '@agoric', amount: token(100n) }, + { + src: '@agoric', + dest: '@External', + amount: token(100n), + fee: fixedFee, + }, + { src: '@External', dest: 'Sink_External', amount: token(100n) }, + ]); + + const fastResult = await planRebalanceFlow({ + mode: 'fastest', + network, + current, + target, + brand: TOK_BRAND, + feeBrand: FEE_BRAND, + }); + t.like(cheapResult.flows.map(flow => flow.edge).sort(compareFlowEdges), [ + { src: '+agoric', dest: '@agoric', via: 'local' }, + { src: '@agoric', dest: '@External', via: 'fast' }, + { src: '@External', dest: 'Sink_External', via: 'local' }, + ]); + 
t.deepEqual(fastResult.steps, [ + { src: '+agoric', dest: '@agoric', amount: token(100n) }, + { + src: '@agoric', + dest: '@External', + amount: token(100n), + fee: fixedFee, + }, + { src: '@External', dest: 'Sink_External', amount: token(100n) }, + ]); +}); diff --git a/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.md b/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.md index 05aab1c36ad..044010a0d1f 100644 --- a/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.md +++ b/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.md @@ -391,6 +391,20 @@ Generated by [AVA](https://avajs.dev). }, opts: undefined, }, + { + _cap: 'noble11056', + _method: 'executeEncodedTx', + msgs: [ + { + typeUrl: '/noble.swap.v1.MsgSwap', + value: 'Cgpub2JsZTExMDU2EhMKBXV1c2RjEgozMzMzMDAwMDAwGgcSBXV1c2RuIhMKBXV1c2RuEgozMjk5NjcwMDAw', + }, + { + typeUrl: '/noble.dollar.vaults.v1.MsgLock', + value: 'Cgpub2JsZTExMDU2EAEaCjMyOTk2NzAwMDA=', + }, + ], + }, { _cap: 'noble11056', _method: 'depositForBurn', @@ -428,20 +442,6 @@ Generated by [AVA](https://avajs.dev). 
memo: '{"destination_chain":"arbitrum","destination_address":"0x126cf3AC9ea12794Ff50f56727C7C66E26D9C092","payload":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,116,120,49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,202,199,255,168,44,15,67,235,176,252,17,252,211,33,35,236,164,102,38,207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,68,9,94,167,179,0,0,0,0,0,0,0,0,0,0,0,0,135,135,11,202,63,63,214,51,92,63,76,232,57,45,105,53,11,79,164,226,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,198,169,139,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,135,135,11,202,63,63,214,51,92,63,76,232,57,45,105,53,11,79,164,226,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,97,123,160,55,0,0,0,0,0,0,0,0,0,0,0,0,202,199,255,168,44,15,67,235,176,252,17,252,211,33,35,236,164,102,38,207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,198,169,139,64,0,0,0,0,0,0,0,0,0,0,0,0,18,108,243,172,158,161,39,148,255,80,245,103,39,199,198,110,38,217,192,146,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"type":1,"fee":{"amount":"100","recipient":"axelar1aythygn6z5thymj6tmzfwekzh05ewg3l7d6y89"}}', }, }, - { - _cap: 'noble11056', - _method: 'executeEncodedTx', - msgs: [ - { - typeUrl: '/noble.swap.v1.MsgSwap', - value: 
'Cgpub2JsZTExMDU2EhMKBXV1c2RjEgozMzMzMDAwMDAwGgcSBXV1c2RuIhMKBXV1c2RuEgozMjk5NjcwMDAw', - }, - { - typeUrl: '/noble.dollar.vaults.v1.MsgLock', - value: 'Cgpub2JsZTExMDU2EAEaCjMyOTk2NzAwMDA=', - }, - ], - }, { _cap: 'seat', _method: 'exit', @@ -529,8 +529,8 @@ Generated by [AVA](https://avajs.dev). brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: '@Arbitrum', - how: 'CCTP', + dest: 'USDNVault', + how: 'USDN', src: '@noble', }, { @@ -538,18 +538,18 @@ Generated by [AVA](https://avajs.dev). brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: 'Aave_Arbitrum', - how: 'Aave', - src: '@Arbitrum', + dest: '@Arbitrum', + how: 'CCTP', + src: '@noble', }, { amount: { brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: 'USDNVault', - how: 'USDN', - src: '@noble', + dest: 'Aave_Arbitrum', + how: 'Aave', + src: '@Arbitrum', }, ], ], @@ -1744,6 +1744,20 @@ Generated by [AVA](https://avajs.dev). }, opts: undefined, }, + { + _cap: 'noble11056', + _method: 'executeEncodedTx', + msgs: [ + { + typeUrl: '/noble.swap.v1.MsgSwap', + value: 'Cgpub2JsZTExMDU2EhMKBXV1c2RjEgozMzMzMDAwMDAwGgcSBXV1c2RuIhMKBXV1c2RuEgozMjk5NjcwMDAw', + }, + { + typeUrl: '/noble.dollar.vaults.v1.MsgLock', + value: 'Cgpub2JsZTExMDU2EAEaCjMyOTk2NzAwMDA=', + }, + ], + }, { _cap: 'noble11056', _method: 'depositForBurn', @@ -1781,20 +1795,6 @@ Generated by [AVA](https://avajs.dev). 
memo: '{"destination_chain":"arbitrum","destination_address":"0x126cf3AC9ea12794Ff50f56727C7C66E26D9C092","payload":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,116,120,49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,202,199,255,168,44,15,67,235,176,252,17,252,211,33,35,236,164,102,38,207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,68,9,94,167,179,0,0,0,0,0,0,0,0,0,0,0,0,135,135,11,202,63,63,214,51,92,63,76,232,57,45,105,53,11,79,164,226,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,198,169,139,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,135,135,11,202,63,63,214,51,92,63,76,232,57,45,105,53,11,79,164,226,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,97,123,160,55,0,0,0,0,0,0,0,0,0,0,0,0,202,199,255,168,44,15,67,235,176,252,17,252,211,33,35,236,164,102,38,207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,198,169,139,64,0,0,0,0,0,0,0,0,0,0,0,0,18,108,243,172,158,161,39,148,255,80,245,103,39,199,198,110,38,217,192,146,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"type":1,"fee":{"amount":"100","recipient":"axelar1aythygn6z5thymj6tmzfwekzh05ewg3l7d6y89"}}', }, }, - { - _cap: 'noble11056', - _method: 'executeEncodedTx', - msgs: [ - { - typeUrl: '/noble.swap.v1.MsgSwap', - value: 
'Cgpub2JsZTExMDU2EhMKBXV1c2RjEgozMzMzMDAwMDAwGgcSBXV1c2RuIhMKBXV1c2RuEgozMjk5NjcwMDAw', - }, - { - typeUrl: '/noble.dollar.vaults.v1.MsgLock', - value: 'Cgpub2JsZTExMDU2EAEaCjMyOTk2NzAwMDA=', - }, - ], - }, { _cap: 'seat', _method: 'exit', @@ -1882,8 +1882,8 @@ Generated by [AVA](https://avajs.dev). brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: '@Arbitrum', - how: 'CCTP', + dest: 'USDNVault', + how: 'USDN', src: '@noble', }, { @@ -1891,18 +1891,18 @@ Generated by [AVA](https://avajs.dev). brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: 'Aave_Arbitrum', - how: 'Aave', - src: '@Arbitrum', + dest: '@Arbitrum', + how: 'CCTP', + src: '@noble', }, { amount: { brand: Object @Alleged: USDC brand {}, value: 3333000000n, }, - dest: 'USDNVault', - how: 'USDN', - src: '@noble', + dest: 'Aave_Arbitrum', + how: 'Aave', + src: '@Arbitrum', }, ], ], diff --git a/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.snap b/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.snap index a6e63f5744b..62dfdf32b6d 100644 Binary files a/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.snap and b/packages/portfolio-contract/test/snapshots/portfolio.flows.test.ts.snap differ diff --git a/packages/portfolio-contract/tools/portfolio-actors.ts b/packages/portfolio-contract/tools/portfolio-actors.ts index c77ba83ee47..4c624b32a09 100644 --- a/packages/portfolio-contract/tools/portfolio-actors.ts +++ b/packages/portfolio-contract/tools/portfolio-actors.ts @@ -11,29 +11,12 @@ * @see type-guards.ts for the authoritative interface specification */ import { type VstorageKit } from '@agoric/client-utils'; -import { - AmountMath, - type Amount, - type Brand, - type NatAmount, - type NatValue, -} from '@agoric/ertp'; -import { NonNullish } from '@agoric/internal'; +import { AmountMath } from '@agoric/ertp'; import { ROOT_STORAGE_PATH } from '@agoric/orchestration/tools/contract-tests.js'; import type { 
InvitationSpec } from '@agoric/smart-wallet/src/invitations.js'; import type { Instance } from '@agoric/zoe'; -import { Fail } from '@endo/errors'; -import { objectMap } from '@endo/patterns'; import type { ExecutionContext } from 'ava'; -import type { - AxelarChain, - YieldProtocol, -} from '@agoric/portfolio-api/src/constants.js'; import { type start } from '@aglocal/portfolio-contract/src/portfolio.contract.js'; -import type { - AssetPlaceRef, - MovementDesc, -} from '@aglocal/portfolio-contract/src/type-guards-steps.js'; import { makePositionPath, portfolioIdOfPath, @@ -43,8 +26,6 @@ import { type ProposalType, type StatusFor, type PoolKey, - type TargetAllocation, - PoolPlaces, } from '@aglocal/portfolio-contract/src/type-guards.js'; import type { WalletTool } from '@aglocal/portfolio-contract/tools/wallet-offer-tools.js'; @@ -213,172 +194,3 @@ export const makeTrader = ( }); return self; }; - -const { entries, values } = Object; -const { add, make } = AmountMath; -const amountSum = (amounts: A[]) => - amounts.reduce((acc, v) => add(acc, v)); - -export const makePortfolioSteps = < - G extends Partial>, ->( - goal: G, - opts: { - /** XXX assume same chain for Aave and Compound */ - evm?: AxelarChain; - feeBrand?: Brand<'nat'>; - fees?: Record; - detail?: { usdnOut: NatValue }; - } = {}, -) => { - values(goal).length > 0 || Fail`empty goal`; - const { USDN: _1, ...evmGoal } = goal; - const { - evm = 'Arbitrum', - feeBrand, - fees = objectMap(evmGoal, _ => ({ - Account: make(NonNullish(feeBrand), 150n), - Call: make(NonNullish(feeBrand), 100n), - })), - detail = 'USDN' in goal - ? { usdnOut: ((goal.USDN?.value || 0n) * 99n) / 100n } - : undefined, - } = opts; - const steps: MovementDesc[] = []; - - const Deposit = amountSum(values(goal)); - const GmpFee = - values(fees).length > 0 - ? amountSum( - values(fees) - .map(f => [f.Account, f.Call]) - .flat(), - ) - : undefined; - const give = { Deposit, ...(GmpFee ? 
{ GmpFee } : {}) }; - steps.push({ src: '', dest: '@agoric', amount: Deposit }); - steps.push({ src: '@agoric', dest: '@noble', amount: Deposit }); - for (const [p, amount] of entries(goal)) { - switch (p) { - case 'USDN': - steps.push({ src: '@noble', dest: 'USDNVault', amount, detail }); - break; - case 'Aave': - case 'Compound': - // XXX optimize: combine noble->evm steps - steps.push({ - src: '@noble', - dest: `@${evm}`, - amount, - fee: fees[p].Account, - }); - steps.push({ - src: `@${evm}`, - dest: `${p}_${evm}`, - amount, - fee: fees[p].Call, - }); - break; - default: - throw Error('unreachable'); - } - } - - return harden({ give, steps }); -}; - -/** - * Compute a breakdown of `deposit` into amounts - * to send to positions so that the resulting position balances are as close - * to targetAllocation as possible. - */ -export const planDepositTransfers = ( - deposit: NatAmount, - currentBalances: Partial>, - targetAllocation: TargetAllocation, -): Partial> => { - const { brand } = deposit; - const depositValue = deposit.value; - - // Calculate total current value across all positions - const totalCurrent = Object.values(currentBalances).reduce( - (sum, amount) => sum + (amount?.value || 0n), - 0n, - ); - - // Total value after deposit - const totalAfterDeposit = totalCurrent + depositValue; - - // Calculate target amounts for each position - const transfers: Partial> = {}; - - for (const [poolKey, targetPercent] of Object.entries(targetAllocation)) { - const currentAmount = currentBalances[poolKey as PoolKey]?.value || 0n; - const targetAmount = (totalAfterDeposit * BigInt(targetPercent)) / 100n; - const transferAmount = targetAmount - currentAmount; - - if (transferAmount > 0n) { - transfers[poolKey as PoolKey] = make(brand, transferAmount); - } - } - - // Ensure we don't exceed the deposit amount - const totalTransfers = Object.values(transfers).reduce( - (sum, amount) => sum + (amount?.value || 0n), - 0n, - ); - - if (totalTransfers > depositValue) { - // 
Scale down proportionally if we exceed the deposit - for (const [poolKey, amount] of Object.entries(transfers)) { - if (amount) { - transfers[poolKey as PoolKey] = make( - brand, - (amount.value * depositValue) / totalTransfers, - ); - } - } - } - - return transfers; -}; - -export const planTransfer = ( - dest: PoolKey, - amount: NatAmount, - feeBrand: Brand<'nat'>, -): MovementDesc[] => { - const { protocol: p, chainName: evm } = PoolPlaces[dest]; - const steps: MovementDesc[] = []; - - switch (p) { - case 'USDN': { - const detail = { usdnOut: ((amount.value || 0n) * 99n) / 100n }; - console.warn('TODO: client should query exchange rate'); - steps.push({ src: '@noble', dest: 'USDNVault', amount, detail }); - break; - } - case 'Aave': - case 'Compound': - // XXX optimize: combine noble->evm steps - steps.push({ - src: '@noble', - dest: `@${evm}`, - amount, - // TODO: Rather than hard-code, derive from Axelar `estimateGasFee`. - // https://docs.axelar.dev/dev/axelarjs-sdk/axelar-query-api#estimategasfee - fee: make(feeBrand, 30_000_000n), - }); - console.warn('TODO: stop hard-coding fees!'); - steps.push({ - src: `@${evm}`, - dest: `${p}_${evm}`, - amount, - fee: make(feeBrand, 30_000_000n), // KLUDGE. 
- }); - break; - default: - throw Error('unreachable'); - } - return harden(steps); -}; diff --git a/services/ymax-planner/src/plan-deposit.ts b/services/ymax-planner/src/plan-deposit.ts index 9d9dccd0b98..29b1ff2382d 100644 --- a/services/ymax-planner/src/plan-deposit.ts +++ b/services/ymax-planner/src/plan-deposit.ts @@ -6,15 +6,19 @@ import { } from '@aglocal/portfolio-contract/src/type-guards.js'; import type { VstorageKit } from '@agoric/client-utils'; import { AmountMath } from '@agoric/ertp/src/amountMath.js'; -import type { NatAmount } from '@agoric/ertp/src/types.js'; -import { Fail, q } from '@endo/errors'; -import { - makePortfolioQuery, - planDepositTransfers, - planTransfer, -} from '@aglocal/portfolio-contract/tools/portfolio-actors.js'; -import type { CosmosRestClient } from './cosmos-rest-client.js'; +import type { Brand, NatAmount } from '@agoric/ertp/src/types.js'; +import { Fail, q, X } from '@endo/errors'; +import { makePortfolioQuery } from '@aglocal/portfolio-contract/tools/portfolio-actors.js'; +// import { TEST_NETWORK } from '@aglocal/portfolio-contract/test/network/test-network.js'; +import { PROD_NETWORK } from '@aglocal/portfolio-contract/src/network/network.prod.js'; +import { planRebalanceFlow } from '@aglocal/portfolio-contract/src/plan-solve.js'; +import type { + AssetPlaceRef, + MovementDesc, +} from '@aglocal/portfolio-contract/src/type-guards-steps.js'; +import type { NetworkSpec } from '@aglocal/portfolio-contract/src/network/network-spec.js'; import type { Chain, Pool, SpectrumClient } from './spectrum-client.js'; +import type { CosmosRestClient } from './cosmos-rest-client.js'; const getOwn = ( obj: O, @@ -59,6 +63,163 @@ export const getCurrentBalance = async ( } }; +/** + * Compute absolute target balances from an allocation map over PoolKeys. + * Ensures sum(target) == sum(current) + deposit; non-allocated pools target to 0. 
+ */ +export const depositTargetsFromAllocation = ( + amount: NatAmount, + current: Partial>, + allocation: Record, // weights; not optional +): Partial> => { + const brand = amount.brand; + // Base weighted targets for current + delta + const targets = computeWeightedTargets( + brand, + current, + amount.value as bigint, + allocation, + ); + + // Staging account ('+agoric') must end at 0: all staged funds should be fanned out + // to destination accounts/pools as part of this deposit plan. + targets['+agoric'] = AmountMath.make(brand, 0n); + return harden(targets); +}; + +/** + * Compute absolute target balances for a withdraw operation driven by allocation weights. + * Reduces balances across selected pools per weights and increases '' by the withdraw amount. + * Pools not in the allocation remain unchanged. Throws if selected pools do not cover the withdrawal. + */ +export const withdrawTargetsFromAllocation = ( + amount: NatAmount, + current: Partial>, + allocation: Record, +): Partial> => { + const brand = amount.brand; + const withdraw = amount.value; + const targets = computeWeightedTargets(brand, current, -withdraw, allocation); + const currentCash = current['']?.value ?? 0n; + targets[''] = AmountMath.make(brand, currentCash + withdraw); + return harden(targets); +}; + +/** + * Derive weighted targets for allocation keys. Additionally, always zero out hub balances + * (chains; keys starting with '@') that have non-zero current amounts. Returns only entries + * whose values change compared to current. + */ +const computeWeightedTargets = ( + brand: Brand<'nat'>, + current: Partial>, + delta: bigint, + allocation: Record, +): Partial> => { + const totalCurrentAmt = Object.keys(current).reduce( + (acc, k) => (allocation[k] ? 
AmountMath.add(acc, current[k]) : acc), + AmountMath.makeEmpty(brand), + ); + const total = totalCurrentAmt.value + delta; + assert(total >= 0n, X`total after delta must not be negative`); + const entries = Object.entries(allocation) as Array< + [PoolKey, number | bigint] + >; + assert(entries.length > 0, X`empty allocation`); + const SCALE_NUM = 1_000_000; + const weights = entries.map(([k, w]) => { + const wNum = Number(w as any); + assert(Number.isFinite(wNum), X`allocation weight must be a number`); + const wScaled = BigInt(Math.round(wNum * SCALE_NUM)); + return [k, wScaled] as const; + }); + const sumW = weights.reduce((acc, [, w]) => acc + w, 0n); + assert(sumW > 0n, X`allocation weights must sum > 0`); + const draft: Partial> = {}; + let assigned = 0n; + let maxKey = entries[0][0]; + let maxW = -1n as unknown as bigint; + for (const [key, w] of weights) { + if (w > (maxW as bigint)) { + maxW = w as bigint; + maxKey = key; + } + const v = (total * (w as bigint)) / (sumW as bigint); + assigned += v; + draft[key] = AmountMath.make(brand, v); + } + const remainder = total - assigned; + if (remainder !== 0n) { + const cur = draft[maxKey] ?? AmountMath.make(brand, 0n); + draft[maxKey] = AmountMath.add(cur, AmountMath.make(brand, remainder)); + } + const targets: Partial> = { ...draft }; + // Zero hubs (chains) with non-zero current balances + for (const key of Object.keys(current)) { + if (key.startsWith('@')) { + const curAmt = current[key]; + if (curAmt && curAmt.value !== 0n) { + targets[key] = AmountMath.make(brand, 0n); + } + } + } + for (const key of Object.keys(targets)) { + const curV = current[key]?.value ?? 0n; + const nextV = targets[key]!.value; + if (curV === nextV) delete targets[key]; + } + return targets; +}; + +/** + * Plan deposit to absolute target balances using the LP rebalance solver. + * Default mode is 'fastest'. 
+ */ +export const planDepositToTargets = async ( + amount: NatAmount, + current: Partial>, + target: Partial>, // includes all pools + '+agoric' + network: NetworkSpec, + feeBrand: Brand<'nat'>, +): Promise => { + const brand = amount.brand; + // Construct current including the deposit seat + const currentWithDeposit: Partial> = { + ...current, + }; + // NOTE It is important that the only '+agoric' amount that it is allowed to + // include in the solution is the amount provided in this deposit operation. + // The actual balance on '+agoric' may include assets for another operation + // in progress. + const existing = currentWithDeposit['+agoric'] ?? AmountMath.make(brand, 0n); + currentWithDeposit['+agoric'] = AmountMath.add(existing, amount); + // console.log('COMPLETE GRAPH', currentWithDeposit, target, network); + const { steps } = await planRebalanceFlow({ + network, + current: currentWithDeposit as any, + target: target as any, + brand, + feeBrand, + }); + return steps; +}; + +/** + * Plan deposit driven by target allocation weights. + * Computes absolute targets, then calls the amount-based planner above. 
+ */ +export const planDepositToAllocations = async ( + amount: NatAmount, + current: Partial>, + allocation: Record, + network: NetworkSpec, + feeBrand: Brand<'nat'>, +): Promise => { + const targets = depositTargetsFromAllocation(amount, current, allocation); + return planDepositToTargets(amount, current, targets, network, feeBrand); +}; + +// Back-compat utility used by CLI or handlers export const handleDeposit = async ( portfolioKey: `${string}.portfolios.portfolio${number}`, amount: NatAmount, @@ -68,6 +229,7 @@ export const handleDeposit = async ( spectrum: SpectrumClient; cosmosRest: CosmosRestClient; }, + network: NetworkSpec = PROD_NETWORK, ) => { const querier = makePortfolioQuery(powers.readPublished, portfolioKey); const status = await querier.getPortfolioStatus(); @@ -78,7 +240,7 @@ export const handleDeposit = async ( policyVersion, rebalanceCount, } = status; - if (!targetAllocation) return; + if (!targetAllocation) return { policyVersion, rebalanceCount, steps: [] }; const errors = [] as Error[]; const balanceEntries = await Promise.all( positionKeys.map(async (posKey: PoolKey): Promise<[PoolKey, NatAmount]> => { @@ -86,6 +248,7 @@ export const handleDeposit = async ( try { const poolPlaceInfo = getOwn(PoolPlaces, posKey) || Fail`Unknown PoolPlace`; + // TODO there should be a bulk query operation available now const amountValue = await getCurrentBalance( poolPlaceInfo, accountIdByChain, @@ -102,14 +265,14 @@ export const handleDeposit = async ( if (errors.length) { throw AggregateError(errors, 'Could not get balances'); } - const balances = Object.fromEntries(balanceEntries); - const transfers = planDepositTransfers(amount, balances, targetAllocation); - const steps = [ - { src: '+agoric', dest: '@agoric', amount }, - { src: '@agoric', dest: '@noble', amount }, - ...Object.entries(transfers).flatMap(([dest, amt]) => - planTransfer(dest as PoolKey, amt, feeBrand), - ), - ]; + const currentBalances = Object.fromEntries(balanceEntries); + // Use PROD 
network by default; callers may wish to parameterize later + const steps = await planDepositToAllocations( + amount, + currentBalances, + targetAllocation as any, + network, + feeBrand, + ); return { policyVersion, rebalanceCount, steps }; }; diff --git a/services/ymax-planner/test/deposit-tools.test.ts b/services/ymax-planner/test/deposit-tools.test.ts index 4719a39f64a..acfde1f546a 100644 --- a/services/ymax-planner/test/deposit-tools.test.ts +++ b/services/ymax-planner/test/deposit-tools.test.ts @@ -2,14 +2,10 @@ /* eslint-disable max-classes-per-file, class-methods-use-this */ import test from 'ava'; -import { Far } from '@endo/pass-style'; - import type { VstorageKit } from '@agoric/client-utils'; import { AmountMath, type Brand } from '@agoric/ertp'; - -import type { TargetAllocation } from '@aglocal/portfolio-contract/src/type-guards.js'; -import { planDepositTransfers } from '@aglocal/portfolio-contract/tools/portfolio-actors.js'; - +import { Far } from '@endo/pass-style'; +import { TEST_NETWORK } from '@aglocal/portfolio-contract/test/network/test-network.js'; import { CosmosRestClient } from '../src/cosmos-rest-client.ts'; import { handleDeposit } from '../src/plan-deposit.ts'; import { SpectrumClient } from '../src/spectrum-client.ts'; @@ -21,129 +17,23 @@ const feeBrand = Far('fee brand (BLD)') as Brand<'nat'>; const powers = { fetch, setTimeout }; -test('planDepositTransfers works in a handful of cases', t => { - // Test case 1: Empty current balances, equal target allocation - const deposit1 = makeDeposit(1000n); - const currentBalances1 = {}; - const targetAllocation1: TargetAllocation = { - USDN: 50n, - Aave_Arbitrum: 30n, - Compound_Arbitrum: 20n, - }; - - const result1 = planDepositTransfers( - deposit1, - currentBalances1, - targetAllocation1, - ); - - t.deepEqual(result1, { - USDN: makeDeposit(500n), - Aave_Arbitrum: makeDeposit(300n), - Compound_Arbitrum: makeDeposit(200n), - }); - - // Test case 2: Existing balances, need rebalancing - const 
deposit2 = makeDeposit(500n); - const currentBalances2 = { - USDN: makeDeposit(200n), - Aave_Arbitrum: makeDeposit(100n), - Compound_Arbitrum: makeDeposit(0n), - }; - const targetAllocation2: TargetAllocation = { - USDN: 40n, - Aave_Arbitrum: 40n, - Compound_Arbitrum: 20n, - }; - - const result2 = planDepositTransfers( - deposit2, - currentBalances2, - targetAllocation2, - ); - - // Total after deposit: 300 + 500 = 800 - // Targets: USDN=320, Aave=320, Compound=160 - // Transfers needed: USDN=120, Aave=220, Compound=160 - t.deepEqual(result2, { - USDN: makeDeposit(120n), - Aave_Arbitrum: makeDeposit(220n), - Compound_Arbitrum: makeDeposit(160n), - }); - - // Test case 3: Some positions already over-allocated - const deposit3 = makeDeposit(300n); - const currentBalances3 = { - USDN: makeDeposit(600n), // already over target - Aave_Arbitrum: makeDeposit(100n), - Compound_Arbitrum: makeDeposit(50n), - }; - const targetAllocation3: TargetAllocation = { - USDN: 50n, - Aave_Arbitrum: 30n, - Compound_Arbitrum: 20n, - }; - - const result3 = planDepositTransfers( - deposit3, - currentBalances3, - targetAllocation3, - ); - - // Total after deposit: 750 + 300 = 1050 - // Targets: USDN=525, Aave=315, Compound=210 - // USDN is already over target (600 > 525), so no transfer - // Need transfers: Aave=215, Compound=160, total=375 - // But deposit is only 300, so scale down proportionally: - // Aave: 215 * (300/375) = 172, Compound: 160 * (300/375) = 128 - t.deepEqual(result3, { - Aave_Arbitrum: makeDeposit(172n), - Compound_Arbitrum: makeDeposit(128n), - }); - - // Test case 4: Transfer amounts exceed deposit (scaling needed) - const deposit4 = makeDeposit(100n); - const currentBalances4 = { - USDN: makeDeposit(0n), - Aave_Arbitrum: makeDeposit(0n), - Compound_Arbitrum: makeDeposit(0n), - }; - const targetAllocation4: TargetAllocation = { - USDN: 60n, - Aave_Arbitrum: 30n, - Compound_Arbitrum: 10n, - }; - - const result4 = planDepositTransfers( - deposit4, - currentBalances4, - 
targetAllocation4, - ); - - // Should allocate proportionally to the 100 deposit - t.deepEqual(result4, { - USDN: makeDeposit(60n), - Aave_Arbitrum: makeDeposit(30n), - Compound_Arbitrum: makeDeposit(10n), - }); - - // Test case 5: Single position target - const deposit5 = makeDeposit(1000n); - const currentBalances5 = { USDN: makeDeposit(500n) }; - const targetAllocation5: TargetAllocation = { USDN: 100n }; - - const result5 = planDepositTransfers( - deposit5, - currentBalances5, - targetAllocation5, - ); - - // Total after: 1500, target: 1500, current: 500, transfer: 1000 - t.deepEqual(result5, { - USDN: makeDeposit(1000n), - }); -}); - +/** + * Deposit: 1000n + * TargetAllocation: + * USDN: 50% + * Aave_Arbitrum: 30% + * Compound_Arbitrum: 20% + + * CurrentBalance: + * Noble: 200n, + * Aave_Arbitrum: 100n, + * Compound_Arlanbitrum: 50n, + * + * Expected: + * USDN: 675n, +675n, 475n from deposit, 200n Noble + * Aave_Arbitrum: 405n, +305n + * Compound_Arbitrum: 270n, +220n + */ test('handleDeposit works with mocked dependencies', async t => { const deposit = makeDeposit(1000n); const portfolioKey = 'test.portfolios.portfolio1' as const; @@ -247,6 +137,8 @@ test('handleDeposit handles missing targetAllocation gracefully', async t => { Arbitrum: 'arbitrum:test:addr2', }, // No targetAllocation + policyVersion: 4, + rebalanceCount: 0, }; } throw new Error(`Unexpected path: ${path}`); @@ -292,7 +184,7 @@ test('handleDeposit handles missing targetAllocation gracefully', async t => { cosmosRest: mockCosmosRestClient, }); - t.is(result, undefined); + t.deepEqual(result, { policyVersion: 4, rebalanceCount: 0, steps: [] }); }); test('handleDeposit handles different position types correctly', async t => { @@ -303,18 +195,23 @@ test('handleDeposit handles different position types correctly', async t => { const mockReadPublished = async (path: string) => { if (path === portfolioKey) { return { - positionKeys: ['USDN', 'USDNVault', 'Aave_Avalanche', 'Compound_Base'], + 
positionKeys: [ + 'USDN', + 'USDNVault', + 'Aave_Avalanche', + 'Compound_Ethereum', + ], flowCount: 0, accountIdByChain: { noble: 'noble:test:addr1', Avalanche: 'avalanche:test:addr2', - Base: 'base:test:addr3', + Ethereum: 'ethereum:test:addr3', }, targetAllocation: { USDN: 40n, USDNVault: 20n, Aave_Avalanche: 25n, - Compound_Base: 15n, + Compound_Ethereum: 15n, }, }; } @@ -336,7 +233,7 @@ test('handleDeposit handles different position types correctly', async t => { balance: { supplyBalance: 150, borrowAmount: 0 }, }; } - if (chain === 'base' && pool === 'compound') { + if (chain === 'ethereum' && pool === 'compound') { return { pool, chain, @@ -374,10 +271,16 @@ test('handleDeposit handles different position types correctly', async t => { readPublished: mockReadPublished, } as VstorageKit; - const result = await handleDeposit(portfolioKey, deposit, feeBrand, { - readPublished: mockVstorageKit.readPublished, - spectrum: mockSpectrumClient, - cosmosRest: mockCosmosRestClient, - }); + const result = await handleDeposit( + portfolioKey, + deposit, + feeBrand, + { + readPublished: mockVstorageKit.readPublished, + spectrum: mockSpectrumClient, + cosmosRest: mockCosmosRestClient, + }, + TEST_NETWORK, + ); t.snapshot(result?.steps); }); diff --git a/services/ymax-planner/test/snapshots/deposit-tools.test.ts.md b/services/ymax-planner/test/snapshots/deposit-tools.test.ts.md index 1f32bd99e19..c318fc2f22f 100644 --- a/services/ymax-planner/test/snapshots/deposit-tools.test.ts.md +++ b/services/ymax-planner/test/snapshots/deposit-tools.test.ts.md @@ -33,16 +33,16 @@ Generated by [AVA](https://avajs.dev). brand: Object @Alleged: mock brand {}, value: 475n, }, - dest: 'USDNVault', + dest: 'USDN', detail: { - usdnOut: 470n, + usdnOut: 474n, }, src: '@noble', }, { amount: { brand: Object @Alleged: mock brand {}, - value: 305n, + value: 525n, }, dest: '@Arbitrum', fee: { @@ -63,18 +63,6 @@ Generated by [AVA](https://avajs.dev). 
}, src: '@Arbitrum', }, - { - amount: { - brand: Object @Alleged: mock brand {}, - value: 220n, - }, - dest: '@Arbitrum', - fee: { - brand: Object @Alleged: fee brand (BLD) {}, - value: 30000000n, - }, - src: '@noble', - }, { amount: { brand: Object @Alleged: mock brand {}, @@ -114,11 +102,11 @@ Generated by [AVA](https://avajs.dev). { amount: { brand: Object @Alleged: mock brand {}, - value: 430n, + value: 431n, }, - dest: 'USDNVault', + dest: 'USDN', detail: { - usdnOut: 425n, + usdnOut: 430n, }, src: '@noble', }, @@ -148,37 +136,37 @@ Generated by [AVA](https://avajs.dev). { amount: { brand: Object @Alleged: mock brand {}, - value: 306n, + value: 198n, }, - dest: 'Aave_Avalanche', + dest: '@Ethereum', fee: { brand: Object @Alleged: fee brand (BLD) {}, value: 30000000n, }, - src: '@Avalanche', + src: '@noble', }, { amount: { brand: Object @Alleged: mock brand {}, value: 198n, }, - dest: '@Base', + dest: 'Compound_Ethereum', fee: { brand: Object @Alleged: fee brand (BLD) {}, value: 30000000n, }, - src: '@noble', + src: '@Ethereum', }, { amount: { brand: Object @Alleged: mock brand {}, - value: 198n, + value: 306n, }, - dest: 'Compound_Base', + dest: 'Aave_Avalanche', fee: { brand: Object @Alleged: fee brand (BLD) {}, value: 30000000n, }, - src: '@Base', + src: '@Avalanche', }, ] diff --git a/services/ymax-planner/test/snapshots/deposit-tools.test.ts.snap b/services/ymax-planner/test/snapshots/deposit-tools.test.ts.snap index 4cf3f494fca..6e2493e2695 100644 Binary files a/services/ymax-planner/test/snapshots/deposit-tools.test.ts.snap and b/services/ymax-planner/test/snapshots/deposit-tools.test.ts.snap differ diff --git a/yarn.lock b/yarn.lock index 33567299acd..302d8b20235 100644 --- a/yarn.lock +++ b/yarn.lock @@ -218,6 +218,7 @@ __metadata: "@fast-check/ava": "npm:^2.0.1" ava: "npm:^5.3.0" c8: "npm:^10.1.3" + javascript-lp-solver: "npm:^0.4.24" ts-blank-space: "npm:^0.6.2" viem: "npm:^2.31.0" languageName: unknown @@ -11787,6 +11788,13 @@ __metadata: 
languageName: node linkType: hard +"javascript-lp-solver@npm:^0.4.24": + version: 0.4.24 + resolution: "javascript-lp-solver@npm:0.4.24" + checksum: 10c0/447cef1ac521d58d03e35f551c9a392e72a8837c4a0dba32f549f56b830d2a16f90dc823a9307bc2cc89e649a2c53a19fd904d65ebee0c2ad8fe712acdf429a3 + languageName: node + linkType: hard + "jessie.js@npm:0.3.4": version: 0.3.4 resolution: "jessie.js@npm:0.3.4"