diff --git a/.sqlx/query-c279740d623e06b3e3add31a6c15085bde3207756fe914837cef0cd12b864366.json b/.sqlx/query-c279740d623e06b3e3add31a6c15085bde3207756fe914837cef0cd12b864366.json
new file mode 100644
index 00000000..b3729a01
--- /dev/null
+++ b/.sqlx/query-c279740d623e06b3e3add31a6c15085bde3207756fe914837cef0cd12b864366.json
@@ -0,0 +1,38 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n        INSERT INTO bundles (\n            id, \"state\", senders, minimum_base_fee, txn_hashes, \n            txs, reverting_tx_hashes, dropping_tx_hashes, \n            block_number, min_timestamp, max_timestamp,\n            created_at, updated_at\n        )\n        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, NOW(), NOW())\n        ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        {
+          "Custom": {
+            "name": "bundle_state",
+            "kind": {
+              "Enum": [
+                "Ready",
+                "BundleLimit",
+                "AccountLimits",
+                "GlobalLimits",
+                "IncludedInFlashblock",
+                "IncludedInBlock"
+              ]
+            }
+          }
+        },
+        "BpcharArray",
+        "Int8",
+        "BpcharArray",
+        "TextArray",
+        "BpcharArray",
+        "BpcharArray",
+        "Int8",
+        "Int8",
+        "Int8"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "c279740d623e06b3e3add31a6c15085bde3207756fe914837cef0cd12b864366"
+}
diff --git a/.sqlx/query-ca6a250821d4542720578da20aa9cf31e808fa8dcbd701c75246dbdc95c58946.json b/.sqlx/query-ca6a250821d4542720578da20aa9cf31e808fa8dcbd701c75246dbdc95c58946.json
deleted file mode 100644
index ad777006..00000000
--- a/.sqlx/query-ca6a250821d4542720578da20aa9cf31e808fa8dcbd701c75246dbdc95c58946.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n        INSERT INTO bundles (\n            id, senders, minimum_base_fee, txn_hashes, \n            txs, reverting_tx_hashes, dropping_tx_hashes, \n            block_number, min_timestamp, max_timestamp,\n            created_at, updated_at\n        )\n        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW(), NOW())\n        ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Uuid",
-        "BpcharArray",
-        "Int8",
-        "BpcharArray",
-        "TextArray",
-        "BpcharArray",
-        "BpcharArray",
-        "Int8",
-        "Int8",
-        "Int8"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "ca6a250821d4542720578da20aa9cf31e808fa8dcbd701c75246dbdc95c58946"
-}
diff --git a/crates/datastore/migrations/1757444171_create_bundles_table.sql b/crates/datastore/migrations/1757444171_create_bundles_table.sql
index a5fcdf76..7fd2d6fd 100644
--- a/crates/datastore/migrations/1757444171_create_bundles_table.sql
+++ b/crates/datastore/migrations/1757444171_create_bundles_table.sql
@@ -1,6 +1,22 @@
+DO $$
+BEGIN
+    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'bundle_state') THEN
+        CREATE TYPE bundle_state AS ENUM (
+            'Ready',
+            'BundleLimit',
+            'AccountLimits',
+            'GlobalLimits',
+            'IncludedInFlashblock',
+            'IncludedInBlock'
+        );
+    END IF;
+END$$;
+
+
 -- Create bundles table
 CREATE TABLE IF NOT EXISTS bundles (
     id UUID PRIMARY KEY,
+    "state" bundle_state NOT NULL,
     senders CHAR(42)[],
     minimum_base_fee BIGINT, -- todo find a larger type
diff --git a/crates/datastore/src/postgres.rs b/crates/datastore/src/postgres.rs
index 88b75ad0..0fea3b49 100644
--- a/crates/datastore/src/postgres.rs
+++ b/crates/datastore/src/postgres.rs
@@ -11,6 +11,17 @@ use sqlx::PgPool;
 use tracing::info;
 use uuid::Uuid;
 
+#[derive(Debug, Clone, sqlx::Type)]
+#[sqlx(type_name = "bundle_state", rename_all = "PascalCase")]
+pub enum BundleState {
+    Ready,
+    BundleLimit,
+    AccountLimits,
+    GlobalLimits,
+    IncludedInFlashblock,
+    IncludedInBlock,
+}
+
 #[derive(sqlx::FromRow, Debug)]
 struct BundleRow {
     senders: Option<Vec<String>>,
@@ -22,6 +33,7 @@ struct BundleRow {
     block_number: Option<i64>,
     min_timestamp: Option<i64>,
     max_timestamp: Option<i64>,
+    state: BundleState,
 }
 
 /// Filter criteria for selecting bundles
@@ -60,6 +72,7 @@ pub struct BundleWithMetadata {
     pub txn_hashes: Vec<B256>,
     pub senders: Vec<Address>,
     pub min_base_fee: i64,
+    pub state: BundleState,
 }
 
 /// PostgreSQL implementation of the BundleDatastore trait
@@ -137,6 +150,7 @@ impl PostgresDatastore {
             txn_hashes: parsed_txn_hashes?,
             senders: parsed_senders?,
             min_base_fee: row.minimum_base_fee.unwrap_or(0),
+            state: row.state,
         })
     }
 
@@ -198,14 +212,15 @@ impl BundleDatastore for PostgresDatastore {
         sqlx::query!(
             r#"
             INSERT INTO bundles (
-                id, senders, minimum_base_fee, txn_hashes, 
+                id, "state", senders, minimum_base_fee, txn_hashes, 
                 txs, reverting_tx_hashes, dropping_tx_hashes, 
                 block_number, min_timestamp, max_timestamp,
                 created_at, updated_at
             )
-            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW(), NOW())
+            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, NOW(), NOW())
             "#,
             id,
+            BundleState::Ready as BundleState,
             &senders,
             minimum_base_fee,
             &txn_hashes,
@@ -226,7 +241,7 @@
         let result = sqlx::query_as::<_, BundleRow>(
             r#"
             SELECT senders, minimum_base_fee, txn_hashes, txs, reverting_tx_hashes, 
-                   dropping_tx_hashes, block_number, min_timestamp, max_timestamp
+                   dropping_tx_hashes, block_number, min_timestamp, max_timestamp, "state"
             FROM bundles 
             WHERE id = $1
             "#,
@@ -266,7 +281,7 @@
         let rows = sqlx::query_as::<_, BundleRow>(
             r#"
             SELECT senders, minimum_base_fee, txn_hashes, txs, reverting_tx_hashes, 
-                   dropping_tx_hashes, block_number, min_timestamp, max_timestamp
+                   dropping_tx_hashes, block_number, min_timestamp, max_timestamp, "state"
             FROM bundles
             WHERE minimum_base_fee >= $1
             AND (block_number = $2 OR block_number IS NULL OR block_number = 0 OR $2 = 0)
diff --git a/crates/datastore/tests/datastore.rs b/crates/datastore/tests/datastore.rs
index 5e8c6a98..cf2a4af7 100644
--- a/crates/datastore/tests/datastore.rs
+++ b/crates/datastore/tests/datastore.rs
@@ -5,7 +5,7 @@ use testcontainers_modules::{
     postgres,
     testcontainers::{ContainerAsync, runners::AsyncRunner},
 };
-use tips_datastore::postgres::BundleFilter;
+use tips_datastore::postgres::{BundleFilter, BundleState};
 use tips_datastore::{BundleDatastore, PostgresDatastore};
 
 struct TestHarness {
@@ -96,6 +96,10 @@ async fn insert_and_get() -> eyre::Result<()> {
     let metadata = retrieved_bundle_with_metadata.unwrap();
     let retrieved_bundle = &metadata.bundle;
 
+    assert!(
+        matches!(metadata.state, BundleState::Ready),
+        "Bundle should default to Ready state"
+    );
     assert_eq!(retrieved_bundle.txs.len(), test_bundle.txs.len());
     assert_eq!(retrieved_bundle.block_number, test_bundle.block_number);
     assert_eq!(retrieved_bundle.min_timestamp, test_bundle.min_timestamp);
diff --git a/justfile b/justfile
index 811cbe66..e44a77e6 100644
--- a/justfile
+++ b/justfile
@@ -14,7 +14,7 @@ fix:
     cargo fmt --all
     cargo clippy --fix --allow-dirty --allow-staged
     # UI
-    cd ui && npx biome check --fix
+    cd ui && npx biome check --write --unsafe
 
 create-migration name:
     touch crates/datastore/migrations/$(date +%s)_{{ name }}.sql
@@ -72,10 +72,10 @@ start-except programs: stop-all
 
 ### RUN SERVICES ###
 deps-reset:
-    docker compose down && docker compose rm && rm -rf data/ && mkdir -p data/postgres data/kafka data/minio && docker compose up -d
+    COMPOSE_FILE=docker-compose.yml:docker-compose.tips.yml docker compose down && docker compose rm && rm -rf data/ && mkdir -p data/postgres data/kafka data/minio && docker compose up -d
 
 deps:
-    docker compose down && docker compose rm && docker compose up -d
+    COMPOSE_FILE=docker-compose.yml:docker-compose.tips.yml docker compose down && docker compose rm && docker compose up -d
 
 audit:
     cargo run --bin tips-audit
diff --git a/ui/src/app/api/bundles/route.ts b/ui/src/app/api/bundles/route.ts
index eb2a3495..31bdba8a 100644
--- a/ui/src/app/api/bundles/route.ts
+++ b/ui/src/app/api/bundles/route.ts
@@ -4,7 +4,14 @@ import { bundles } from "@/db/schema";
 
 export interface Bundle {
   id: string;
-  txnHashes: string[] | null;
+  txnHashes: string[];
+  state:
+    | "Ready"
+    | "BundleLimit"
+    | "AccountLimits"
+    | "GlobalLimits"
+    | "IncludedInFlashblock"
+    | "IncludedInBlock";
 }
 
 export async function GET() {
@@ -13,6 +20,7 @@
     .select({
       id: bundles.id,
       txnHashes: bundles.txnHashes,
+      state: bundles.state,
     })
     .from(bundles);
 
diff --git a/ui/src/app/bundles/page.tsx b/ui/src/app/bundles/page.tsx
index f67b8df0..16c1c771 100644
--- a/ui/src/app/bundles/page.tsx
+++ b/ui/src/app/bundles/page.tsx
@@ -85,12 +85,33 @@ export default function BundlesPage() {
               href={`/bundles/${bundle.id}`}
               className="block p-3 border rounded-lg bg-white/5 hover:bg-white/10 transition-colors"
             >
-
-              {bundle.id}
-              {" ("}
-              {bundle.txnHashes?.join(", ") || "No transactions"}
-              {")"}
-
+
+              {bundle.id}
+
+
+              {bundle.state}
+
+
+
+              {bundle.txnHashes?.join(", ") || "No transactions"}
+
+
+
           ))}
diff --git a/ui/src/db/relations.ts b/ui/src/db/relations.ts
index 0ed80c74..e69de29b 100644
--- a/ui/src/db/relations.ts
+++ b/ui/src/db/relations.ts
@@ -1,2 +0,0 @@
-import { relations } from "drizzle-orm/relations";
-import {} from "./schema";
diff --git a/ui/src/db/schema.ts b/ui/src/db/schema.ts
index 1f88680a..b5286170 100644
--- a/ui/src/db/schema.ts
+++ b/ui/src/db/schema.ts
@@ -1,15 +1,25 @@
-import { sql } from "drizzle-orm";
 import {
   bigint,
   char,
+  pgEnum,
   pgTable,
   text,
   timestamp,
   uuid,
 } from "drizzle-orm/pg-core";
 
+export const bundleState = pgEnum("bundle_state", [
+  "Ready",
+  "BundleLimit",
+  "AccountLimits",
+  "GlobalLimits",
+  "IncludedInFlashblock",
+  "IncludedInBlock",
+]);
+
 export const bundles = pgTable("bundles", {
   id: uuid().primaryKey().notNull(),
+  state: bundleState().notNull(),
   senders: char({ length: 42 }).array(),
   // You can use { mode: "bigint" } if numbers are exceeding js number limitations
   minimumBaseFee: bigint("minimum_base_fee", { mode: "number" }),