diff --git a/.goreleaser.yml b/.goreleaser.yml index 01fbc95c3..4e18acdaa 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -24,6 +24,14 @@ builds: - amd64 - arm64 +release: + prerelease: auto + extra_files: + - glob: openapi.yaml + footer: | + ## What to do next? + - Read the [documentation](https://docs.formance.com/) + - Join our [Slack server](https://formance.com/slack) archives: - id: "{{.ProjectName}}" diff --git a/internal/storage/ledgerstore/migrations_v1.go b/internal/storage/ledgerstore/migrations_v1.go index acc88ce6c..7deffc3b5 100644 --- a/internal/storage/ledgerstore/migrations_v1.go +++ b/internal/storage/ledgerstore/migrations_v1.go @@ -133,14 +133,14 @@ func batchLogs( stmt, err := sqlTx.PrepareContext(ctx, pq.CopyInSchema( schema, "logs", - "id", "type", "hash", "date", "data", + "ledger", "id", "type", "hash", "date", "data", )) if err != nil { return err } for _, l := range logs { - _, err = stmt.ExecContext(ctx, l.ID, l.Type, l.Hash, l.Date, RawMessage(l.Data)) + _, err = stmt.ExecContext(ctx, schema, l.ID, l.Type, l.Hash, l.Date, RawMessage(l.Data)) if err != nil { return err } diff --git a/internal/storage/migrate_ledger_v1_test.go b/internal/storage/migrate_ledger_v1_test.go new file mode 100644 index 000000000..84926c7b0 --- /dev/null +++ b/internal/storage/migrate_ledger_v1_test.go @@ -0,0 +1,64 @@ +package storage_test + +import ( + "database/sql" + "os" + "path/filepath" + "testing" + + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/ledger/internal/storage/systemstore" + "github.com/formancehq/stack/libs/go-libs/bun/bunconnect" + "github.com/formancehq/stack/libs/go-libs/logging" + "github.com/formancehq/stack/libs/go-libs/pgtesting" + "github.com/stretchr/testify/require" +) + +func TestMigrateLedgerV1(t *testing.T) { + require.NoError(t, pgtesting.CreatePostgresServer()) + t.Cleanup(func() { + require.NoError(t, 
pgtesting.DestroyPostgresServer()) + }) + + db, err := sql.Open("postgres", pgtesting.Server().GetDSN()) + require.NoError(t, err) + + data, err := os.ReadFile(filepath.Join("testdata", "v1-dump.sql")) + require.NoError(t, err) + + _, err = db.Exec(string(data)) + require.NoError(t, err) + + ctx := logging.TestingContext() + + d := driver.New(bunconnect.ConnectionOptions{ + DatabaseSourceName: pgtesting.Server().GetDSN(), + Debug: testing.Verbose(), + Writer: os.Stdout, + }) + require.NoError(t, d.Initialize(ctx)) + + ledgers, err := d.GetSystemStore().ListLedgers(ctx, systemstore.ListLedgersQuery{}) + require.NoError(t, err) + + for _, ledger := range ledgers.Data { + require.NotEmpty(t, ledger.Bucket) + require.Equal(t, ledger.Name, ledger.Bucket) + + bucket, err := d.OpenBucket(ledger.Bucket) + require.NoError(t, err) + require.NoError(t, bucket.Migrate(ctx)) + + store, err := bucket.GetLedgerStore(ledger.Name) + require.NoError(t, err) + + txs, err := store.GetTransactions(ctx, ledgerstore.NewGetTransactionsQuery(ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes]{})) + require.NoError(t, err) + require.NotEmpty(t, txs) + + accounts, err := store.GetAccountsWithVolumes(ctx, ledgerstore.NewGetAccountsQuery(ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes]{})) + require.NoError(t, err) + require.NotEmpty(t, accounts) + } +} diff --git a/internal/storage/systemstore/migrations.go b/internal/storage/systemstore/migrations.go index 11ed945a2..27ab83fc1 100644 --- a/internal/storage/systemstore/migrations.go +++ b/internal/storage/systemstore/migrations.go @@ -34,8 +34,15 @@ func Migrate(ctx context.Context, db bun.IDB) error { Table("ledgers"). ColumnExpr("bucket varchar(255)"). Exec(ctx) - - return errors.Wrap(err, "adding 'bucket' column") + if err != nil { + return errors.Wrap(err, "adding 'bucket' column") + } + _, err = tx.NewUpdate(). + Table("ledgers"). + Set("bucket = ledger"). + Where("1 = 1"). 
+ Exec(ctx) + return errors.Wrap(err, "setting 'bucket' column") } _, err = tx.NewCreateTable(). diff --git a/internal/storage/testdata/v1-dump.sql b/internal/storage/testdata/v1-dump.sql new file mode 100644 index 000000000..f8ce01ee4 --- /dev/null +++ b/internal/storage/testdata/v1-dump.sql @@ -0,0 +1,959 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 13.8 +-- Dumped by pg_dump version 16.1 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: _system; Type: SCHEMA; Schema: - +-- + +CREATE SCHEMA _system; + +-- +-- Name: default; Type: SCHEMA; Schema: - +-- + +CREATE SCHEMA "default"; + +-- +-- Name: public; Type: SCHEMA; Schema: - +-- + +-- *not* creating schema, since initdb creates it + +-- +-- Name: wallets-002; Type: SCHEMA; Schema: - +-- + +CREATE SCHEMA "wallets-002"; + +-- +-- Name: pg_trgm; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public; + + +-- +-- Name: EXTENSION pg_trgm; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION pg_trgm IS 'text similarity measurement and index searching based on trigrams'; + + +-- +-- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; + + +-- +-- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions'; + + +-- +-- Name: compute_hashes(); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".compute_hashes() RETURNS void + LANGUAGE plpgsql + AS $$ DECLARE r record; BEGIN /* Create JSON object manually as it needs to be in canonical form */ FOR r IN (select id, '{"data":' || 
"default".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"","id":' || id || ',"type":"' || type || '"}' as canonical from "default".log) LOOP UPDATE "default".log set hash = (select encode(digest( COALESCE((select '{"data":' || "default".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"' || hash || '","id":' || id || ',"type":"' || type || '"}' from "default".log where id = r.id - 1), 'null') || r.canonical, 'sha256' ), 'hex')) WHERE id = r.id; END LOOP; END $$; + +-- +-- Name: compute_volumes(); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".compute_volumes() RETURNS trigger + LANGUAGE plpgsql + AS $$ DECLARE p record; BEGIN FOR p IN ( SELECT t.postings->>'source' as source, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY source, asset ) LOOP INSERT INTO "default".accounts (address, metadata) VALUES (p.source, '{}') ON CONFLICT DO NOTHING; INSERT INTO "default".volumes (account, asset, input, output) VALUES (p.source, p.asset, 0, p.amount::bigint) ON CONFLICT (account, asset) DO UPDATE SET output = p.amount::bigint + ( SELECT output FROM "default".volumes WHERE account = p.source AND asset = p.asset ); END LOOP; FOR p IN ( SELECT t.postings->>'destination' as destination, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY destination, asset ) LOOP INSERT INTO "default".accounts (address, metadata) VALUES (p.destination, '{}') ON CONFLICT DO NOTHING; INSERT INTO "default".volumes (account, asset, input, output) VALUES (p.destination, 
p.asset, p.amount::bigint, 0) ON CONFLICT (account, asset) DO UPDATE SET input = p.amount::bigint + ( SELECT input FROM "default".volumes WHERE account = p.destination AND asset = p.asset ); END LOOP; RETURN NULL; END $$; + + +-- +-- Name: handle_log_entry(); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".handle_log_entry() RETURNS trigger + LANGUAGE plpgsql + AS $$ BEGIN if NEW.type = 'NEW_TRANSACTION' THEN INSERT INTO "default".transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES ( (NEW.data ->> 'txid')::bigint, (NEW.data ->> 'timestamp')::varchar, CASE WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL ELSE (NEW.data ->> 'reference')::varchar END, (NEW.data ->> 'postings')::jsonb, CASE WHEN (NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END, (NEW.data ->> 'preCommitVolumes')::jsonb, (NEW.data ->> 'postCommitVolumes')::jsonb ); END IF; if NEW.type = 'SET_METADATA' THEN if NEW.data ->> 'targetType' = 'TRANSACTION' THEN UPDATE "default".transactions SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb WHERE id = (NEW.data ->> 'targetId')::bigint; END IF; if NEW.data ->> 'targetType' = 'ACCOUNT' THEN INSERT INTO "default".accounts (address, metadata) VALUES ((NEW.data ->> 'targetId')::varchar, (NEW.data ->> 'metadata')::jsonb) ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; END IF; END IF; RETURN NEW; END; $$; + + +-- +-- Name: is_valid_json(text); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".is_valid_json(p_json text) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN RETURN (p_json::jsonb IS NOT NULL); EXCEPTION WHEN others THEN RETURN false; END; $$; + + +-- +-- Name: meta_compare(jsonb, boolean, text[]); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".meta_compare(metadata jsonb, value boolean, VARIADIC path text[]) RETURNS boolean + 
LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::bool = value::bool; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + +-- +-- Name: meta_compare(jsonb, numeric, text[]); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".meta_compare(metadata jsonb, value numeric, VARIADIC path text[]) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::numeric = value::numeric; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + +-- +-- Name: meta_compare(jsonb, character varying, text[]); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".meta_compare(metadata jsonb, value character varying, VARIADIC path text[]) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path_text(metadata, variadic path)::varchar = value::varchar; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + +-- +-- Name: normaliz(jsonb); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".normaliz(v jsonb) RETURNS text + LANGUAGE plpgsql + AS $$ DECLARE r record; t jsonb; BEGIN if jsonb_typeof(v) = 'object' then return ( SELECT COALESCE('{' || string_agg(keyValue, ',') || '}', '{}') FROM ( SELECT '"' || key || '":' || value as keyValue FROM ( SELECT key, (CASE WHEN "default".is_valid_json((select v ->> key)) THEN (select "default".normaliz((select v ->> key)::jsonb)) ELSE '"' || (select v ->> key) || '"' END) as value FROM ( SELECT jsonb_object_keys(v) as key ) t order by key ) t ) t ); end if; if jsonb_typeof(v) = 'array' then return ( select COALESCE('[' || string_agg(items, ',') || ']', '[]') from ( select "default".normaliz(item) as items from jsonb_array_elements(v) item ) t ); end if; if jsonb_typeof(v) = 
'string' then return v::text; end if; if jsonb_typeof(v) = 'number' then return v::bigint; end if; if jsonb_typeof(v) = 'boolean' then return v::boolean; end if; return ''; END $$; + +-- +-- Name: use_account(jsonb, character varying); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".use_account(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $$ SELECT bool_or(v.value) from ( SELECT "default".use_account_as_source(postings, account) AS value UNION SELECT "default".use_account_as_destination(postings, account) AS value ) v $$; + +-- +-- Name: use_account_as_destination(jsonb, character varying); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".use_account_as_destination(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'destination') ~ ('^' || account || '$') as value) as v; $_$; + +-- +-- Name: use_account_as_source(jsonb, character varying); Type: FUNCTION; Schema: default +-- + +CREATE FUNCTION "default".use_account_as_source(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'source') ~ ('^' || account || '$') as value) as v; $_$; + +-- +-- Name: compute_hashes(); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".compute_hashes() RETURNS void + LANGUAGE plpgsql + AS $$ DECLARE r record; BEGIN /* Create JSON object manually as it needs to be in canonical form */ FOR r IN (select id, '{"data":' || "wallets-002".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"","id":' || id || ',"type":"' || type || '"}' as canonical from "wallets-002".log) LOOP UPDATE "wallets-002".log set hash = (select encode(digest( COALESCE((select '{"data":' || 
"wallets-002".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"' || hash || '","id":' || id || ',"type":"' || type || '"}' from "wallets-002".log where id = r.id - 1), 'null') || r.canonical, 'sha256' ), 'hex')) WHERE id = r.id; END LOOP; END $$; + +-- +-- Name: compute_volumes(); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".compute_volumes() RETURNS trigger + LANGUAGE plpgsql + AS $$ DECLARE p record; BEGIN FOR p IN ( SELECT t.postings->>'source' as source, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY source, asset ) LOOP INSERT INTO "wallets-002".accounts (address, metadata) VALUES (p.source, '{}') ON CONFLICT DO NOTHING; INSERT INTO "wallets-002".volumes (account, asset, input, output) VALUES (p.source, p.asset, 0, p.amount::bigint) ON CONFLICT (account, asset) DO UPDATE SET output = p.amount::bigint + ( SELECT output FROM "wallets-002".volumes WHERE account = p.source AND asset = p.asset ); END LOOP; FOR p IN ( SELECT t.postings->>'destination' as destination, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY destination, asset ) LOOP INSERT INTO "wallets-002".accounts (address, metadata) VALUES (p.destination, '{}') ON CONFLICT DO NOTHING; INSERT INTO "wallets-002".volumes (account, asset, input, output) VALUES (p.destination, p.asset, p.amount::bigint, 0) ON CONFLICT (account, asset) DO UPDATE SET input = p.amount::bigint + ( SELECT input FROM "wallets-002".volumes WHERE account = p.destination AND asset = p.asset ); END LOOP; RETURN NULL; END $$; + +-- +-- Name: handle_log_entry(); Type: 
FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".handle_log_entry() RETURNS trigger + LANGUAGE plpgsql + AS $$ BEGIN if NEW.type = 'NEW_TRANSACTION' THEN INSERT INTO "wallets-002".transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES ( (NEW.data ->> 'txid')::bigint, (NEW.data ->> 'timestamp')::varchar, CASE WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL ELSE (NEW.data ->> 'reference')::varchar END, (NEW.data ->> 'postings')::jsonb, CASE WHEN (NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END, (NEW.data ->> 'preCommitVolumes')::jsonb, (NEW.data ->> 'postCommitVolumes')::jsonb ); END IF; if NEW.type = 'SET_METADATA' THEN if NEW.data ->> 'targetType' = 'TRANSACTION' THEN UPDATE "wallets-002".transactions SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb WHERE id = (NEW.data ->> 'targetId')::bigint; END IF; if NEW.data ->> 'targetType' = 'ACCOUNT' THEN INSERT INTO "wallets-002".accounts (address, metadata) VALUES ((NEW.data ->> 'targetId')::varchar, (NEW.data ->> 'metadata')::jsonb) ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; END IF; END IF; RETURN NEW; END; $$; + +-- +-- Name: is_valid_json(text); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".is_valid_json(p_json text) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN RETURN (p_json::jsonb IS NOT NULL); EXCEPTION WHEN others THEN RETURN false; END; $$; + +-- +-- Name: meta_compare(jsonb, boolean, text[]); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value boolean, VARIADIC path text[]) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::bool = value::bool; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + 
+-- +-- Name: meta_compare(jsonb, numeric, text[]); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value numeric, VARIADIC path text[]) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::numeric = value::numeric; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + +-- +-- Name: meta_compare(jsonb, character varying, text[]); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value character varying, VARIADIC path text[]) RETURNS boolean + LANGUAGE plpgsql IMMUTABLE + AS $$ BEGIN return jsonb_extract_path_text(metadata, variadic path)::varchar = value::varchar; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; + +-- +-- Name: normaliz(jsonb); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".normaliz(v jsonb) RETURNS text + LANGUAGE plpgsql + AS $$ DECLARE r record; t jsonb; BEGIN if jsonb_typeof(v) = 'object' then return ( SELECT COALESCE('{' || string_agg(keyValue, ',') || '}', '{}') FROM ( SELECT '"' || key || '":' || value as keyValue FROM ( SELECT key, (CASE WHEN "wallets-002".is_valid_json((select v ->> key)) THEN (select "wallets-002".normaliz((select v ->> key)::jsonb)) ELSE '"' || (select v ->> key) || '"' END) as value FROM ( SELECT jsonb_object_keys(v) as key ) t order by key ) t ) t ); end if; if jsonb_typeof(v) = 'array' then return ( select COALESCE('[' || string_agg(items, ',') || ']', '[]') from ( select "wallets-002".normaliz(item) as items from jsonb_array_elements(v) item ) t ); end if; if jsonb_typeof(v) = 'string' then return v::text; end if; if jsonb_typeof(v) = 'number' then return v::bigint; end if; if jsonb_typeof(v) = 'boolean' then return v::boolean; end if; return ''; END $$; + +-- +-- Name: 
use_account(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".use_account(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $$ SELECT bool_or(v.value) from ( SELECT "wallets-002".use_account_as_source(postings, account) AS value UNION SELECT "wallets-002".use_account_as_destination(postings, account) AS value ) v $$; + +-- +-- Name: use_account_as_destination(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".use_account_as_destination(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'destination') ~ ('^' || account || '$') as value) as v; $_$; + +-- +-- Name: use_account_as_source(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 +-- + +CREATE FUNCTION "wallets-002".use_account_as_source(postings jsonb, account character varying) RETURNS boolean + LANGUAGE sql + AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'source') ~ ('^' || account || '$') as value) as v; $_$; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: configuration; Type: TABLE; Schema: _system +-- + +CREATE TABLE _system.configuration ( + key character varying(255) NOT NULL, + value text, + addedat timestamp without time zone +); + +-- +-- Name: ledgers; Type: TABLE; Schema: _system +-- + +CREATE TABLE _system.ledgers ( + ledger character varying(255) NOT NULL, + addedat timestamp without time zone +); + +-- +-- Name: accounts; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".accounts ( + address character varying NOT NULL, + metadata jsonb DEFAULT '{}'::jsonb, + address_json jsonb +); + +-- +-- Name: idempotency; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".idempotency ( + key character varying NOT NULL, + 
date character varying, + status_code integer, + headers character varying, + body character varying, + request_hash character varying +); + +-- +-- Name: log; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".log ( + id bigint, + type character varying, + hash character varying, + date timestamp with time zone, + data jsonb +); + +-- +-- Name: log_seq; Type: SEQUENCE; Schema: default +-- + +CREATE SEQUENCE "default".log_seq + START WITH 0 + INCREMENT BY 1 + MINVALUE 0 + NO MAXVALUE + CACHE 1; + +-- +-- Name: mapping; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".mapping ( + mapping_id character varying, + mapping character varying +); + +-- +-- Name: migrations; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".migrations ( + version character varying, + date character varying +); + +-- +-- Name: postings; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".postings ( + txid bigint, + posting_index integer, + source jsonb, + destination jsonb +); + +-- +-- Name: transactions; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".transactions ( + id bigint, + "timestamp" timestamp with time zone, + reference character varying, + hash character varying, + postings jsonb, + metadata jsonb DEFAULT '{}'::jsonb, + pre_commit_volumes jsonb, + post_commit_volumes jsonb +); + +-- +-- Name: volumes; Type: TABLE; Schema: default +-- + +CREATE TABLE "default".volumes ( + account character varying, + asset character varying, + input numeric, + output numeric, + account_json jsonb +); + +-- +-- Name: accounts; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".accounts ( + address character varying NOT NULL, + metadata jsonb DEFAULT '{}'::jsonb, + address_json jsonb +); + +-- +-- Name: idempotency; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".idempotency ( + key character varying NOT NULL, + date character varying, + status_code integer, + headers character varying, + body character varying, + 
request_hash character varying +); + +-- +-- Name: log; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".log ( + id bigint, + type character varying, + hash character varying, + date timestamp with time zone, + data jsonb +); + +-- +-- Name: log_seq; Type: SEQUENCE; Schema: wallets-002 +-- + +CREATE SEQUENCE "wallets-002".log_seq + START WITH 0 + INCREMENT BY 1 + MINVALUE 0 + NO MAXVALUE + CACHE 1; + +-- +-- Name: mapping; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".mapping ( + mapping_id character varying, + mapping character varying +); + +-- +-- Name: migrations; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".migrations ( + version character varying, + date character varying +); + +-- +-- Name: postings; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".postings ( + txid bigint, + posting_index integer, + source jsonb, + destination jsonb +); + +-- +-- Name: transactions; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".transactions ( + id bigint, + "timestamp" timestamp with time zone, + reference character varying, + hash character varying, + postings jsonb, + metadata jsonb DEFAULT '{}'::jsonb, + pre_commit_volumes jsonb, + post_commit_volumes jsonb +); + +-- +-- Name: volumes; Type: TABLE; Schema: wallets-002 +-- + +CREATE TABLE "wallets-002".volumes ( + account character varying, + asset character varying, + input numeric, + output numeric, + account_json jsonb +); + +-- +-- Data for Name: configuration; Type: TABLE DATA; Schema: _system +-- + +INSERT INTO _system.configuration (key, value, addedat) VALUES ('appId', '7f50ba54-cdb1-4e79-a2f7-3e704ce08d08', '2023-12-13 18:16:31'); + + +-- +-- Data for Name: ledgers; Type: TABLE DATA; Schema: _system +-- + +INSERT INTO _system.ledgers (ledger, addedat) VALUES ('wallets-002', '2023-12-13 18:16:35.943038'); +INSERT INTO _system.ledgers (ledger, addedat) VALUES ('default', '2023-12-13 18:21:05.044237'); + + +-- +-- 
Data for Name: accounts; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('world', '{}', '["world"]'); +INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('bank', '{}', '["bank"]'); +INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('bob', '{}', '["bob"]'); +INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('alice', '{"foo": "bar"}', '["alice"]'); + + +-- +-- Data for Name: idempotency; Type: TABLE DATA; Schema: default +-- + + + +-- +-- Data for Name: log; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".log (id, type, hash, date, data) VALUES (0, 'NEW_TRANSACTION', '79fc36b46f2668ee1f682a109765af8e849d11715d078bd361e7b4eb61fadc70', '2023-12-13 18:21:05+00', '{"txid": 0, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bank"}], "reference": "", "timestamp": "2023-12-13T18:21:05Z"}'); +INSERT INTO "default".log (id, type, hash, date, data) VALUES (1, 'NEW_TRANSACTION', 'e493bab4fcce0c281193414ea43a7d34b73c89ac1bb103755e9fb1064d00c0e8', '2023-12-13 18:21:40+00', '{"txid": 1, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bob"}], "reference": "", "timestamp": "2023-12-13T18:21:40Z"}'); +INSERT INTO "default".log (id, type, hash, date, data) VALUES (2, 'NEW_TRANSACTION', '19ac0ffff69a271615ba09c6564f3851ab0fe32e7aabe3ab9083b63501f29332', '2023-12-13 18:21:46+00', '{"txid": 2, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "alice"}], "reference": "", "timestamp": "2023-12-13T18:21:46Z"}'); +INSERT INTO "default".log (id, type, hash, date, data) VALUES (3, 'SET_METADATA', '839800b3bf685903b37240e8a59e1872d29c2ed9715a79c56b86edb5b5b0976f', '2023-12-14 09:30:31+00', '{"metadata": {"foo": "bar"}, "targetId": "alice", "targetType": "ACCOUNT"}'); + + +-- +-- Data for Name: 
mapping; Type: TABLE DATA; Schema: default +-- + + + +-- +-- Data for Name: migrations; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".migrations (version, date) VALUES ('0', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('1', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('2', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('3', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('4', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('5', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('6', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('7', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('8', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('9', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('10', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('11', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('12', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('13', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('14', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('15', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('16', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('17', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('18', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('19', '2023-12-13T18:21:05Z'); +INSERT INTO "default".migrations (version, date) VALUES ('20', '2023-12-13T18:21:05Z'); + + +-- +-- 
Data for Name: postings; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (0, 0, '["world"]', '["bank"]'); +INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (1, 0, '["world"]', '["bob"]'); +INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (2, 0, '["world"]', '["alice"]'); + + +-- +-- Data for Name: transactions; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (0, '2023-12-13 18:21:05+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bank"}]', '{}', '{"bank": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 0, "balance": 0}}}', '{"bank": {"USD/2": {"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 10000, "balance": -10000}}}'); +INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (1, '2023-12-13 18:21:40+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bob"}]', '{}', '{"bob": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 10000, "balance": -10000}}}', '{"bob": {"USD/2": {"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 20000, "balance": -20000}}}'); +INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (2, '2023-12-13 18:21:46+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "alice"}]', '{}', '{"alice": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 20000, "balance": -20000}}}', '{"alice": {"USD/2": 
{"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 30000, "balance": -30000}}}'); + + +-- +-- Data for Name: volumes; Type: TABLE DATA; Schema: default +-- + +INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('bank', 'USD/2', 10000, 0, '["bank"]'); +INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('bob', 'USD/2', 10000, 0, '["bob"]'); +INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('alice', 'USD/2', 10000, 0, '["alice"]'); +INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('world', 'USD/2', 0, 30000, '["world"]'); + + +-- +-- Data for Name: accounts; Type: TABLE DATA; Schema: wallets-002 +-- + +INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('wallets:15b7a366c6e9473f96276803ef585ae9:main', '{"wallets/id": "15b7a366-c6e9-473f-9627-6803ef585ae9", "wallets/name": "wallet1", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:30:48.01540488Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {}, "wallets/balances/name": "main"}', '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); +INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('world', '{}', '["world"]'); +INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('wallets:71e6788ad1954139bec5c3e35ee4a2dc:main', '{"wallets/id": "71e6788a-d195-4139-bec5-c3e35ee4a2dc", "wallets/name": "wallet2", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:32:38.001913219Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {"catgory": "gold"}, "wallets/balances/name": "main"}', '["wallets", "71e6788ad1954139bec5c3e35ee4a2dc", "main"]'); + + +-- +-- Data for Name: idempotency; Type: TABLE DATA; Schema: wallets-002 +-- + + + +-- +-- Data for Name: log; Type: TABLE DATA; Schema: wallets-002 +-- + +INSERT INTO 
"wallets-002".log (id, type, hash, date, data) VALUES (0, 'SET_METADATA', 'c3d4b844838f4feaf0d35f1f37f8eae496b66328a69fc3d73e46a7cd53b231b6', '2023-12-14 09:30:48+00', '{"metadata": {"wallets/id": "15b7a366-c6e9-473f-9627-6803ef585ae9", "wallets/name": "wallet1", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:30:48.01540488Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {}, "wallets/balances/name": "main"}, "targetId": "wallets:15b7a366c6e9473f96276803ef585ae9:main", "targetType": "ACCOUNT"}'); +INSERT INTO "wallets-002".log (id, type, hash, date, data) VALUES (1, 'NEW_TRANSACTION', '1f2d8e75e937cee1c91e0a2696f5fbe59947d77ad568cf45c58a01430acb5f0b', '2023-12-14 09:32:04+00', '{"txid": 0, "metadata": {"wallets/custom_data": {}, "wallets/transaction": "true"}, "postings": [{"asset": "USD/2", "amount": 100, "source": "world", "destination": "wallets:15b7a366c6e9473f96276803ef585ae9:main"}], "reference": "", "timestamp": "2023-12-14T09:32:04Z"}'); +INSERT INTO "wallets-002".log (id, type, hash, date, data) VALUES (2, 'SET_METADATA', '3665750bbbe64e79c4631927e9399a8c7f817b55d572ef41cfd9714bd679db7d', '2023-12-14 09:32:38+00', '{"metadata": {"wallets/id": "71e6788a-d195-4139-bec5-c3e35ee4a2dc", "wallets/name": "wallet2", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:32:38.001913219Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {"catgory": "gold"}, "wallets/balances/name": "main"}, "targetId": "wallets:71e6788ad1954139bec5c3e35ee4a2dc:main", "targetType": "ACCOUNT"}'); + + +-- +-- Data for Name: mapping; Type: TABLE DATA; Schema: wallets-002 +-- + + + +-- +-- Data for Name: migrations; Type: TABLE DATA; Schema: wallets-002 +-- + +INSERT INTO "wallets-002".migrations (version, date) VALUES ('0', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('1', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('2', '2023-12-13T18:16:36Z'); 
+INSERT INTO "wallets-002".migrations (version, date) VALUES ('3', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('4', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('5', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('6', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('7', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('8', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('9', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('10', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('11', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('12', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('13', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('14', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('15', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('16', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('17', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('18', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('19', '2023-12-13T18:16:36Z'); +INSERT INTO "wallets-002".migrations (version, date) VALUES ('20', '2023-12-13T18:16:36Z'); + + +-- +-- Data for Name: postings; Type: TABLE DATA; Schema: wallets-002 +-- + +INSERT INTO "wallets-002".postings (txid, posting_index, source, destination) VALUES (0, 0, '["world"]', '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); + + +-- +-- Data for Name: transactions; Type: TABLE DATA; Schema: wallets-002 +-- + 
+INSERT INTO "wallets-002".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (0, '2023-12-14 09:32:04+00', NULL, NULL, '[{"asset": "USD/2", "amount": 100, "source": "world", "destination": "wallets:15b7a366c6e9473f96276803ef585ae9:main"}]', '{"wallets/custom_data": {}, "wallets/transaction": "true"}', '{"world": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "wallets:15b7a366c6e9473f96276803ef585ae9:main": {"USD/2": {"input": 0, "output": 0, "balance": 0}}}', '{"world": {"USD/2": {"input": 0, "output": 100, "balance": -100}}, "wallets:15b7a366c6e9473f96276803ef585ae9:main": {"USD/2": {"input": 100, "output": 0, "balance": 100}}}'); + + +-- +-- Data for Name: volumes; Type: TABLE DATA; Schema: wallets-002 +-- + +INSERT INTO "wallets-002".volumes (account, asset, input, output, account_json) VALUES ('world', 'USD/2', 0, 100, '["world"]'); +INSERT INTO "wallets-002".volumes (account, asset, input, output, account_json) VALUES ('wallets:15b7a366c6e9473f96276803ef585ae9:main', 'USD/2', 100, 0, '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); + + +-- +-- Name: log_seq; Type: SEQUENCE SET; Schema: default +-- + +SELECT pg_catalog.setval('"default".log_seq', 0, false); + + +-- +-- Name: log_seq; Type: SEQUENCE SET; Schema: wallets-002 +-- + +SELECT pg_catalog.setval('"wallets-002".log_seq', 0, false); + + +-- +-- Name: configuration configuration_pkey; Type: CONSTRAINT; Schema: _system +-- + +ALTER TABLE ONLY _system.configuration + ADD CONSTRAINT configuration_pkey PRIMARY KEY (key); + + +-- +-- Name: ledgers ledgers_pkey; Type: CONSTRAINT; Schema: _system +-- + +ALTER TABLE ONLY _system.ledgers + ADD CONSTRAINT ledgers_pkey PRIMARY KEY (ledger); + + +-- +-- Name: accounts accounts_address_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".accounts + ADD CONSTRAINT accounts_address_key UNIQUE (address); + + +-- +-- Name: idempotency idempotency_pkey; Type: CONSTRAINT; 
Schema: default +-- + +ALTER TABLE ONLY "default".idempotency + ADD CONSTRAINT idempotency_pkey PRIMARY KEY (key); + + +-- +-- Name: log log_id_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".log + ADD CONSTRAINT log_id_key UNIQUE (id); + + +-- +-- Name: mapping mapping_mapping_id_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".mapping + ADD CONSTRAINT mapping_mapping_id_key UNIQUE (mapping_id); + + +-- +-- Name: migrations migrations_version_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".migrations + ADD CONSTRAINT migrations_version_key UNIQUE (version); + + +-- +-- Name: transactions transactions_id_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".transactions + ADD CONSTRAINT transactions_id_key UNIQUE (id); + + +-- +-- Name: transactions transactions_reference_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".transactions + ADD CONSTRAINT transactions_reference_key UNIQUE (reference); + + +-- +-- Name: volumes volumes_account_asset_key; Type: CONSTRAINT; Schema: default +-- + +ALTER TABLE ONLY "default".volumes + ADD CONSTRAINT volumes_account_asset_key UNIQUE (account, asset); + + +-- +-- Name: accounts accounts_address_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".accounts + ADD CONSTRAINT accounts_address_key UNIQUE (address); + + +-- +-- Name: idempotency idempotency_pkey; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".idempotency + ADD CONSTRAINT idempotency_pkey PRIMARY KEY (key); + + +-- +-- Name: log log_id_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".log + ADD CONSTRAINT log_id_key UNIQUE (id); + + +-- +-- Name: mapping mapping_mapping_id_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".mapping + ADD CONSTRAINT mapping_mapping_id_key UNIQUE (mapping_id); + + +-- +-- Name: migrations 
migrations_version_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".migrations + ADD CONSTRAINT migrations_version_key UNIQUE (version); + + +-- +-- Name: transactions transactions_id_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".transactions + ADD CONSTRAINT transactions_id_key UNIQUE (id); + + +-- +-- Name: transactions transactions_reference_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".transactions + ADD CONSTRAINT transactions_reference_key UNIQUE (reference); + + +-- +-- Name: volumes volumes_account_asset_key; Type: CONSTRAINT; Schema: wallets-002 +-- + +ALTER TABLE ONLY "wallets-002".volumes + ADD CONSTRAINT volumes_account_asset_key UNIQUE (account, asset); + + +-- +-- Name: accounts_address_json; Type: INDEX; Schema: default +-- + +CREATE INDEX accounts_address_json ON "default".accounts USING gin (address_json); + + +-- +-- Name: accounts_array_length; Type: INDEX; Schema: default +-- + +CREATE INDEX accounts_array_length ON "default".accounts USING btree (jsonb_array_length(address_json)); + + +-- +-- Name: postings_addresses; Type: INDEX; Schema: default +-- + +CREATE INDEX postings_addresses ON "default".transactions USING gin (postings); + + +-- +-- Name: postings_array_length_dst; Type: INDEX; Schema: default +-- + +CREATE INDEX postings_array_length_dst ON "default".postings USING btree (jsonb_array_length(destination)); + + +-- +-- Name: postings_array_length_src; Type: INDEX; Schema: default +-- + +CREATE INDEX postings_array_length_src ON "default".postings USING btree (jsonb_array_length(source)); + + +-- +-- Name: postings_dest; Type: INDEX; Schema: default +-- + +CREATE INDEX postings_dest ON "default".postings USING gin (destination); + + +-- +-- Name: postings_src; Type: INDEX; Schema: default +-- + +CREATE INDEX postings_src ON "default".postings USING gin (source); + + +-- +-- Name: postings_txid; Type: INDEX; Schema: default +-- + 
+CREATE INDEX postings_txid ON "default".postings USING btree (txid); + + +-- +-- Name: volumes_account_json; Type: INDEX; Schema: default +-- + +CREATE INDEX volumes_account_json ON "default".volumes USING gin (account_json); + + +-- +-- Name: volumes_array_length; Type: INDEX; Schema: default +-- + +CREATE INDEX volumes_array_length ON "default".volumes USING btree (jsonb_array_length(account_json)); + + +-- +-- Name: accounts_address_json; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX accounts_address_json ON "wallets-002".accounts USING gin (address_json); + + +-- +-- Name: accounts_array_length; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX accounts_array_length ON "wallets-002".accounts USING btree (jsonb_array_length(address_json)); + + +-- +-- Name: postings_addresses; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_addresses ON "wallets-002".transactions USING gin (postings); + + +-- +-- Name: postings_array_length_dst; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_array_length_dst ON "wallets-002".postings USING btree (jsonb_array_length(destination)); + + +-- +-- Name: postings_array_length_src; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_array_length_src ON "wallets-002".postings USING btree (jsonb_array_length(source)); + + +-- +-- Name: postings_dest; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_dest ON "wallets-002".postings USING gin (destination); + + +-- +-- Name: postings_src; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_src ON "wallets-002".postings USING gin (source); + + +-- +-- Name: postings_txid; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX postings_txid ON "wallets-002".postings USING btree (txid); + + +-- +-- Name: volumes_account_json; Type: INDEX; Schema: wallets-002 +-- + +CREATE INDEX volumes_account_json ON "wallets-002".volumes USING gin (account_json); + + +-- +-- Name: volumes_array_length; Type: INDEX; Schema: wallets-002 +-- + 
+CREATE INDEX volumes_array_length ON "wallets-002".volumes USING btree (jsonb_array_length(account_json)); + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/libs/collectionutils/map.go b/libs/collectionutils/map.go index 5fde51e1c..54d0eb475 100644 --- a/libs/collectionutils/map.go +++ b/libs/collectionutils/map.go @@ -1,5 +1,7 @@ package collectionutils +import "fmt" + func Keys[K comparable, V any](m map[K]V) []K { ret := make([]K, 0) for k := range m { @@ -7,3 +9,19 @@ func Keys[K comparable, V any](m map[K]V) []K { } return ret } + +func ConvertMap[K comparable, FROM any, TO any](m map[K]FROM, mapper func(v FROM) TO) map[K]TO { + ret := make(map[K]TO) + for k, from := range m { + ret[k] = mapper(from) + } + return ret +} + +func ToAny[V any](v V) any { + return v +} + +func ToFmtString[V any](v any) string { + return fmt.Sprint(v) +} diff --git a/openapi.yaml b/openapi.yaml index 422b0d0a4..dfe056e66 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -7,6 +7,7 @@ paths: /_info: get: tags: + - Ledger - Server summary: Show server information operationId: getInfo @@ -55,6 +56,7 @@ paths: summary: Count the accounts from a ledger operationId: countAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -77,7 +79,8 @@ paths: explode: true schema: type: object - properties: {} + additionalProperties: true + example: metadata[key]=value1&metadata[a.nested.key]=value2 responses: "200": description: OK @@ -98,6 +101,7 @@ paths: description: List accounts from a ledger, sorted by address in descending order. operationId: listAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -118,6 +122,20 @@ paths: minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. 
+ example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return accounts after given address, in descending order. @@ -138,6 +156,7 @@ paths: schema: type: object additionalProperties: true + example: metadata[key]=value1&metadata[a.nested.key]=value2 - name: balance in: query description: Filter accounts by their balance (default operator is gte) @@ -146,6 +165,7 @@ paths: format: int64 example: 2400 - name: balanceOperator + x-speakeasy-ignore: true in: query description: | Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. @@ -159,6 +179,23 @@ paths: - e - ne example: gte + - name: balance_operator + x-speakeasy-ignore: true + in: query + description: | + Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. + Deprecated, please use `balanceOperator` instead. + schema: + type: string + enum: + - gte + - lte + - gt + - lt + - e + - ne + example: gte + deprecated: true - name: cursor in: query description: | @@ -169,6 +206,18 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. 
+ schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -187,6 +236,7 @@ paths: summary: Get account by its address operationId: getAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -225,6 +275,7 @@ paths: summary: Add metadata to an account operationId: addMetadataToAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -256,6 +307,18 @@ paths: "204": description: No Content content: {} + "400": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + "404": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' default: description: Error content: @@ -265,6 +328,7 @@ paths: /{ledger}/mapping: get: tags: + - Ledger - Mapping operationId: getMapping summary: Get the mapping of a ledger @@ -291,6 +355,7 @@ paths: $ref: '#/components/schemas/ErrorResponse' put: tags: + - Ledger - Mapping operationId: updateMapping summary: Update the mapping of a ledger @@ -325,6 +390,7 @@ paths: post: deprecated: true tags: + - Ledger - Script operationId: runScript summary: Execute a Numscript @@ -366,6 +432,7 @@ paths: /{ledger}/stats: get: tags: + - Ledger - Stats operationId: readStats summary: Get statistics from a ledger @@ -395,6 +462,7 @@ paths: /{ledger}/transactions: head: tags: + - Ledger - Transactions summary: Count the transactions from a ledger operationId: countTransactions @@ -438,6 +506,17 @@ paths: schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. + The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. 
+ schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -446,6 +525,17 @@ paths: schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: metadata in: query description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. @@ -454,6 +544,7 @@ paths: schema: type: object properties: {} + example: metadata[key]=value1&metadata[a.nested.key]=value2 responses: "200": description: OK @@ -471,6 +562,7 @@ paths: $ref: '#/components/schemas/ErrorResponse' get: tags: + - Ledger - Transactions summary: List transactions from a ledger description: List transactions from a ledger, sorted by txid in descending order. @@ -494,12 +586,26 @@ paths: minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. + example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return transactions after given txid (in descending order). schema: type: string - example: "1234" + example: 1234 - name: reference in: query description: Find transactions by reference field. @@ -532,6 +638,17 @@ paths: schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. 
+ The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -540,6 +657,17 @@ paths: schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: cursor in: query description: | @@ -550,6 +678,19 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. + schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true - name: metadata in: query description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. 
@@ -573,6 +714,7 @@ paths: $ref: '#/components/schemas/ErrorResponse' post: tags: + - Ledger - Transactions summary: Create a new transaction to a ledger operationId: createTransaction @@ -607,6 +749,12 @@ paths: application/json: schema: $ref: '#/components/schemas/TransactionsResponse' + "400": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' default: description: Error content: @@ -616,6 +764,7 @@ paths: /{ledger}/transactions/{txid}: get: tags: + - Ledger - Transactions summary: Get transaction from a ledger by its ID operationId: getTransaction @@ -652,6 +801,7 @@ paths: /{ledger}/transactions/{txid}/metadata: post: tags: + - Ledger - Transactions summary: Set the metadata of a transaction by its ID operationId: addMetadataOnTransaction @@ -669,7 +819,7 @@ paths: required: true schema: type: integer - format: int64 + format: bigint minimum: 0 example: 1234 requestBody: @@ -691,6 +841,7 @@ paths: /{ledger}/transactions/{txid}/revert: post: tags: + - Ledger - Transactions operationId: revertTransaction summary: Revert a ledger transaction by its ID @@ -733,6 +884,7 @@ paths: /{ledger}/transactions/batch: post: tags: + - Ledger - Transactions summary: Create a new batch of transactions to a ledger operationId: CreateTransactions @@ -766,6 +918,7 @@ paths: /{ledger}/balances: get: tags: + - Ledger - Balances summary: Get the balances from a ledger's account operationId: getBalances @@ -799,6 +952,18 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: |- + Parameter used in pagination requests. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + Deprecated, please use `cursor` instead. 
+ schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -815,6 +980,7 @@ paths: /{ledger}/aggregate/balances: get: tags: + - Ledger - Balances summary: Get the aggregated balances from selected accounts operationId: getBalancesAggregated @@ -848,6 +1014,7 @@ paths: /{ledger}/logs: get: tags: + - Ledger - Logs summary: List the logs from a ledger description: List the logs from a ledger, sorted by ID in descending order. @@ -871,12 +1038,26 @@ paths: minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. + example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return the logs after a given ID. (in descending order). schema: type: string - example: "1234" + example: 1234 - name: startTime in: query description: | @@ -885,6 +1066,17 @@ paths: schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. + The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -893,6 +1085,17 @@ paths: schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. 
+ schema: + type: string + format: date-time + deprecated: true - name: cursor in: query description: | @@ -903,6 +1106,19 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. + schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -2068,6 +2284,11 @@ components: metadata: type: object additionalProperties: true + example: + admin: true + a: + nested: + key: value AccountWithVolumesAndBalances: type: object required: @@ -2082,18 +2303,13 @@ components: metadata: type: object additionalProperties: true - volumes: - type: object - additionalProperties: - type: object - additionalProperties: - type: integer - format: bigint - minimum: 0 example: - COIN: - input: 100 - output: 0 + admin: true + a: + nested: + key: value + volumes: + $ref: '#/components/schemas/Volumes' balances: type: object additionalProperties: @@ -2306,7 +2522,7 @@ components: - SET_METADATA data: type: object - properties: {} + additionalProperties: true hash: type: string example: 9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e @@ -2404,7 +2620,8 @@ components: ErrorResponse: type: object required: - - error_code + - errorCode + - errorMessage properties: errorCode: $ref: '#/components/schemas/ErrorsEnum' diff --git a/openapi/v1.yaml b/openapi/v1.yaml index 20bc37028..b97420e07 100644 --- a/openapi/v1.yaml +++ b/openapi/v1.yaml @@ -8,6 +8,7 @@ paths: /_info: get: tags: + - Ledger - Server summary: Show server information operationId: getInfo @@ -58,6 +59,7 @@ paths: summary: Count the accounts 
from a ledger operationId: countAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -80,7 +82,8 @@ paths: explode: true schema: type: object - properties: {} + additionalProperties: true + example: metadata[key]=value1&metadata[a.nested.key]=value2 responses: "200": description: OK @@ -102,6 +105,7 @@ paths: description: List accounts from a ledger, sorted by address in descending order. operationId: listAccounts tags: + - Ledger - Accounts parameters: - name: ledger @@ -122,6 +126,20 @@ paths: minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. + example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return accounts after given address, in descending order. @@ -142,6 +160,7 @@ paths: schema: type: object additionalProperties: true + example: metadata[key]=value1&metadata[a.nested.key]=value2 - name: balance in: query description: Filter accounts by their balance (default operator is gte) @@ -150,13 +169,25 @@ paths: format: int64 example: 2400 - name: balanceOperator + x-speakeasy-ignore: true + in: query + description: | + Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. + schema: + type: string + enum: [gte, lte, gt, lt, e, ne] + example: gte + - name: balance_operator + x-speakeasy-ignore: true in: query description: | Operator used for the filtering of balances can be greater than/equal, less than/equal, greater than, less than, equal or not. + Deprecated, please use `balanceOperator` instead. 
schema: type: string enum: [gte, lte, gt, lt, e, ne] example: gte + deprecated: true - name: cursor in: query description: | @@ -167,6 +198,18 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. + schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -186,6 +229,7 @@ paths: summary: Get account by its address operationId: getAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -225,6 +269,7 @@ paths: summary: Add metadata to an account operationId: addMetadataToAccount tags: + - Ledger - Accounts parameters: - name: ledger @@ -256,6 +301,21 @@ paths: "204": description: No Content content: {} + + "400": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + + "404": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + default: description: Error content: @@ -266,6 +326,7 @@ paths: /{ledger}/mapping: get: tags: + - Ledger - Mapping operationId: getMapping summary: Get the mapping of a ledger @@ -293,6 +354,7 @@ paths: put: tags: + - Ledger - Mapping operationId: updateMapping summary: Update the mapping of a ledger @@ -328,6 +390,7 @@ paths: post: deprecated: true tags: + - Ledger - Script operationId: runScript summary: Execute a Numscript @@ -371,6 +434,7 @@ paths: /{ledger}/stats: get: tags: + - Ledger - Stats operationId: readStats summary: Get statistics from a ledger @@ -401,6 +465,7 @@ paths: /{ledger}/transactions: head: tags: + - Ledger - Transactions summary: Count the 
transactions from a ledger operationId: countTransactions @@ -445,6 +510,17 @@ paths: schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. + The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -453,6 +529,17 @@ paths: schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: metadata in: query description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. @@ -461,6 +548,7 @@ paths: schema: type: object properties: { } + example: metadata[key]=value1&metadata[a.nested.key]=value2 responses: "200": description: OK @@ -479,6 +567,7 @@ paths: get: tags: + - Ledger - Transactions summary: List transactions from a ledger description: List transactions from a ledger, sorted by txid in descending order. @@ -502,13 +591,27 @@ paths: minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. + example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return transactions after given txid (in descending order). 
schema: type: string - example: "1234" + example: 1234 - name: reference in: query description: Find transactions by reference field. @@ -542,6 +645,17 @@ paths: schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. + The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -550,6 +664,17 @@ paths: schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: cursor in: query description: | @@ -560,6 +685,19 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. + schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true - name: metadata in: query description: Filter transactions by metadata key value pairs. Nested objects can be used as seen in the example below. 
@@ -584,6 +722,7 @@ paths: post: tags: + - Ledger - Transactions summary: Create a new transaction to a ledger operationId: createTransaction @@ -618,6 +757,14 @@ paths: application/json: schema: $ref: '#/components/schemas/TransactionsResponse' + + "400": + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorResponse' + default: description: Error content: @@ -628,6 +775,7 @@ paths: /{ledger}/transactions/{txid}: get: tags: + - Ledger - Transactions summary: Get transaction from a ledger by its ID operationId: getTransaction @@ -665,6 +813,7 @@ paths: /{ledger}/transactions/{txid}/metadata: post: tags: + - Ledger - Transactions summary: Set the metadata of a transaction by its ID operationId: addMetadataOnTransaction @@ -682,7 +831,7 @@ paths: required: true schema: type: integer - format: int64 + format: bigint minimum: 0 example: 1234 requestBody: @@ -705,6 +854,7 @@ paths: /{ledger}/transactions/{txid}/revert: post: tags: + - Ledger - Transactions operationId: revertTransaction summary: Revert a ledger transaction by its ID @@ -748,6 +898,7 @@ paths: /{ledger}/transactions/batch: post: tags: + - Ledger - Transactions summary: Create a new batch of transactions to a ledger operationId: CreateTransactions @@ -782,6 +933,7 @@ paths: /{ledger}/balances: get: tags: + - Ledger - Balances summary: Get the balances from a ledger's account operationId: getBalances @@ -816,6 +968,18 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: |- + Parameter used in pagination requests. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + Deprecated, please use `cursor` instead. 
+ schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -833,6 +997,7 @@ /{ledger}/aggregate/balances: get: tags: + - Ledger - Balances summary: Get the aggregated balances from selected accounts operationId: getBalancesAggregated @@ -868,6 +1033,7 @@ /{ledger}/logs: get: tags: + - Ledger - Logs summary: List the logs from a ledger description: List the logs from a ledger, sorted by ID in descending order. @@ -891,13 +1057,27 @@ minimum: 1 maximum: 1000 default: 15 + - name: page_size + x-speakeasy-ignore: true + in: query + description: | + The maximum number of results to return per page. + Deprecated, please use `pageSize` instead. + example: 100 + schema: + type: integer + format: int64 + minimum: 1 + maximum: 1000 + default: 15 + deprecated: true - name: after in: query description: Pagination cursor, will return the logs after a given ID. (in descending order). schema: type: string - example: "1234" + example: "1234" - name: startTime in: query description: | @@ -906,6 +1086,17 @@ schema: type: string format: date-time + - name: start_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred after this timestamp. + The format is RFC3339 and is inclusive (for example, "2023-01-02T15:04:01Z" includes the first second of 4th minute). + Deprecated, please use `startTime` instead. + schema: + type: string + format: date-time + deprecated: true - name: endTime in: query description: | @@ -914,6 +1105,17 @@ schema: type: string format: date-time + - name: end_time + x-speakeasy-ignore: true + in: query + description: | + Filter transactions that occurred before this timestamp. + The format is RFC3339 and is exclusive (for example, "2023-01-02T15:04:01Z" excludes the first second of 4th minute). + Deprecated, please use `endTime` instead. 
+ schema: + type: string + format: date-time + deprecated: true - name: cursor in: query description: | @@ -924,6 +1126,19 @@ paths: schema: type: string example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + - name: pagination_token + x-speakeasy-ignore: true + in: query + description: | + Parameter used in pagination requests. Maximum page size is set to 15. + Set to the value of next for the next page of results. + Set to the value of previous for the previous page of results. + No other parameters can be set when this parameter is set. + Deprecated, please use `cursor` instead. + schema: + type: string + example: aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== + deprecated: true responses: "200": description: OK @@ -1152,6 +1367,7 @@ components: metadata: type: object additionalProperties: true + example: { admin: true, a: { nested: { key: value}} } AccountWithVolumesAndBalances: type: object @@ -1167,15 +1383,9 @@ components: metadata: type: object additionalProperties: true + example: { admin: true, a: { nested: { key: value}} } volumes: - type: object - additionalProperties: - type: object - additionalProperties: - type: integer - format: bigint - minimum: 0 - example: { COIN: { input: 100, output: 0 } } + $ref: '#/components/schemas/Volumes' balances: type: object additionalProperties: @@ -1388,7 +1598,7 @@ components: - SET_METADATA data: type: object - properties: {} + additionalProperties: true hash: type: string example: "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e" @@ -1495,7 +1705,8 @@ components: ErrorResponse: type: object required: - - error_code + - errorCode + - errorMessage properties: errorCode: $ref: '#/components/schemas/ErrorsEnum'