diff --git a/.github/workflows/deploy-libre-testnet.yaml b/.github/workflows/deploy-libre-testnet.yaml
index c2ac86dc..c3fb83cd 100644
--- a/.github/workflows/deploy-libre-testnet.yaml
+++ b/.github/workflows/deploy-libre-testnet.yaml
@@ -85,6 +85,8 @@ jobs:
           HAPI_EOS_STATE_HISTORY_PLUGIN_ENDPOINT: 'ws://api-node.libre-testnet:8080'
           HAPI_EOS_MISSED_BLOCKS_ENABLED: 'false'
           HAPI_EOS_BLOCK_HISTORY_DAYS: 90
+          HAPI_EOS_MAX_CPU_BLOCK: 100000
+          HAPI_EOS_MAX_NET_BLOCK: 1048576
           HAPI_EOS_API_CHAIN_ID: b64646740308df2ee06c6b72f34c0f7fa066d940e831f752db2006fcc2b78dee
           HAPI_EOS_BASE_ACCOUNT: ${{ secrets.HAPI_EOS_BASE_ACCOUNT }}
           HAPI_EOS_BASE_ACCOUNT_PASSWORD: ${{ secrets.HAPI_EOS_BASE_ACCOUNT_PASSWORD }}
@@ -107,6 +109,7 @@ jobs:
           HAPI_SYNC_PRODUCER_CPU_INTERVAL: '6'
           HAPI_SYNC_PRODUCER_INFO_INTERVAL: '1'
           HAPI_SYNC_SCHEDULE_HISTORY_INTERVAL: 86400
+          HAPI_SYNC_STATS_INTERVAL: 3600
           HAPI_EOS_EXCHANGE_RATE_API: 'https://dashboard-api.libre.org/exchange-rates'
           HAPI_COINGECKO_API_TOKEN_ID: LIBRE
           HAPI_REWARDS_TOKEN: LIBRE
diff --git a/docker-compose.yaml b/docker-compose.yaml
index e2fe5a50..621bc166 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -69,6 +69,8 @@ services:
       HAPI_EOSRATE_GET_STATS_USER: '${HAPI_EOSRATE_GET_STATS_USER}'
      HAPI_EOSRATE_GET_STATS_PASSWORD: '${HAPI_EOSRATE_GET_STATS_PASSWORD}'
      HAPI_EOS_BLOCK_HISTORY_DAYS: '${HAPI_EOS_BLOCK_HISTORY_DAYS}'
+      HAPI_EOS_MAX_CPU_BLOCK: '${HAPI_EOS_MAX_CPU_BLOCK}'
+      HAPI_EOS_MAX_NET_BLOCK: '${HAPI_EOS_MAX_NET_BLOCK}'
      HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}'
   hasura:
     container_name: '${STAGE}-${APP_NAME}-hasura'
diff --git a/hapi/src/config/eos.config.js b/hapi/src/config/eos.config.js
index 493dc873..d61c547d 100644
--- a/hapi/src/config/eos.config.js
+++ b/hapi/src/config/eos.config.js
@@ -60,5 +60,7 @@ module.exports = {
   rewardsToken: process.env.HAPI_REWARDS_TOKEN,
   eosRateUrl: process.env.HAPI_EOSRATE_GET_STATS_URL,
   eosRateUser: process.env.HAPI_EOSRATE_GET_STATS_USER,
-  eosRatePassword: process.env.HAPI_EOSRATE_GET_STATS_PASSWORD
+  eosRatePassword: process.env.HAPI_EOSRATE_GET_STATS_PASSWORD,
+  maxBlockNetUsage: parseInt(process.env.HAPI_EOS_MAX_NET_BLOCK) || 1048576,
+  maxBlockCpuUsage: parseInt(process.env.HAPI_EOS_MAX_CPU_BLOCK) || 100000
 }
diff --git a/hapi/src/config/workers.config.js b/hapi/src/config/workers.config.js
index 2ecfd79a..2940ff3d 100644
--- a/hapi/src/config/workers.config.js
+++ b/hapi/src/config/workers.config.js
@@ -6,7 +6,7 @@ module.exports = {
     process.env.HAPI_SYNC_PRODUCER_INFO_INTERVAL || 1
   ),
   cpuWorkerInterval: parseInt(process.env.HAPI_SYNC_PRODUCER_CPU_INTERVAL),
-  syncStatsInterval: parseInt(process.env.HAPI_SYNC_STATS_INTERVAL || 60),
+  syncStatsInterval: parseInt(process.env.HAPI_SYNC_STATS_INTERVAL || 3600),
   syncExchangeRate: parseInt(process.env.HAPI_SYNC_EXCHANGE_RATE || 86400),
   syncScheduleHistoryInterval: parseInt(
     process.env.HAPI_SYNC_SCHEDULE_HISTORY_INTERVAL || 0
diff --git a/hapi/src/services/missed-blocks.service.js b/hapi/src/services/missed-blocks.service.js
index f522f063..f8394999 100644
--- a/hapi/src/services/missed-blocks.service.js
+++ b/hapi/src/services/missed-blocks.service.js
@@ -113,8 +113,7 @@ const getBlocksInRange = async (start, end) => {
     SELECT
       schedule_version,
       producer,
-      block_num,
-      block_id
+      block_num
     FROM
       block_history
     WHERE
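The two new limits land in three places (the workflow env above, docker-compose.yaml, and eos.config.js), and the config module is what the sync code actually reads. A minimal sketch of how those values resolve at startup — the names and defaults come from eos.config.js above, the units are an assumption, the rest is illustrative:

```js
// Sketch of the parseInt-with-fallback pattern used in eos.config.js above.
// HAPI_EOS_MAX_CPU_BLOCK is presumably the per-block CPU budget in microseconds
// and HAPI_EOS_MAX_NET_BLOCK the per-block NET budget in bytes (assumed units).
const maxBlockCpuUsage = parseInt(process.env.HAPI_EOS_MAX_CPU_BLOCK) || 100000
const maxBlockNetUsage = parseInt(process.env.HAPI_EOS_MAX_NET_BLOCK) || 1048576

// parseInt(undefined) is NaN, which is falsy, so an unset variable falls back
// to the default; note an explicit '0' would fall back as well.
console.log({ maxBlockCpuUsage, maxBlockNetUsage })
```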
diff --git a/hapi/src/services/state-history-plugin.service.js b/hapi/src/services/state-history-plugin.service.js
index 6e5912bb..3f8c805b 100644
--- a/hapi/src/services/state-history-plugin.service.js
+++ b/hapi/src/services/state-history-plugin.service.js
@@ -4,7 +4,7 @@ const { Serialize } = require('eosjs')
 const statsService = require('./stats.service')
 
 const { eosConfig } = require('../config')
-const { hasuraUtil, sleepFor } = require('../utils')
+const { hasuraUtil, sleepFor, eosUtil } = require('../utils')
 
 let types
 let ws
@@ -13,7 +13,6 @@ const getLastBlockNumInDatabase = async () => {
   const query = `
     query {
      blocks: block_history(limit: 1, order_by: {block_num: desc}, where: {producer: {_neq: "NULL"}}) {
-        id
        block_num
      }
    }
@@ -23,18 +22,16 @@ const getLastBlockNumInDatabase = async () => {
   return data?.blocks?.length > 0 ? data.blocks[0].block_num : 0
 }
 
-const saveBlockHistory = async payload => {
-  const mutation = `
-    mutation ($payload: block_history_insert_input!) {
-      block: insert_block_history_one(object: $payload, on_conflict: {constraint: block_history_block_num_key, update_columns: [producer,schedule_version,block_id,timestamp,transactions_length]}) {
-        id
+const saveBlocks = async (blocks) => {
+  const upsertMutation = `
+    mutation ($blocks: [block_history_insert_input!]!) {
+      insert_block_history(objects: $blocks, on_conflict: {constraint: block_history_pkey, update_columns: [block_num,producer,schedule_version,timestamp,transactions_length,cpu_usage,net_usage]}) {
+        affected_rows,
       }
-    }
+      }
   `
-  const data = await hasuraUtil.request(mutation, { payload })
-
-  return data.block
+  await hasuraUtil.request(upsertMutation, { blocks })
 }
 
 const deserialize = (type, array) => {
@@ -66,15 +63,13 @@ const serialize = (type, value) => {
   return buffer.asUint8Array()
 }
 
-const send = async message => {
+const send = async (message) => {
   if (ws.readyState === 1) {
     return ws.send(message)
   }
 
   console.log('waiting for ready state before send message')
-  await sleepFor(1)
-
-  return send(message)
+  ws.close()
 }
 
 const requestBlocks = (requestArgs = {}) => {
@@ -84,7 +79,7 @@ const requestBlocks = (requestArgs = {}) => {
       {
         start_block_num: 0,
         end_block_num: 4294967295,
-        max_messages_in_flight: 1000,
+        max_messages_in_flight: 1,
         have_positions: [],
         fetch_block: true,
         irreversible_only: false,
@@ -96,7 +91,9 @@ const requestBlocks = (requestArgs = {}) => {
   )
 }
 
-const handleBlocksResult = async data => {
+let blocksData = []
+
+const handleBlocksResult = async (data) => {
   try {
     if (!data.block || !data.block.length) {
       send(
@@ -114,15 +111,31 @@ const handleBlocksResult = async data => {
       prev_block: data.prev_block
     }
 
-    await saveBlockHistory({
+    const usage = block?.transactions?.reduce(
+      (total, current) => {
+        total.cpu_usage +=
+          (current.cpu_usage_us / eosConfig.maxBlockCpuUsage) * 100 || 0
+        total.net_usage +=
+          (current.net_usage_words / eosConfig.maxBlockNetUsage) * 100 || 0
+        return total
+      },
+      { net_usage: 0, cpu_usage: 0 }
+    )
+
+    blocksData.push({
       producer: block.producer,
       schedule_version: block.schedule_version,
-      block_id: block.this_block.block_id,
       block_num: block.this_block.block_num,
       transactions_length: block.transactions.length,
-      timestamp: block.timestamp
+      timestamp: block.timestamp,
+      ...usage
     })
 
+    if (blocksData.length === 50) {
+      await saveBlocks(blocksData)
+      blocksData = []
+    }
+
     await statsService.udpateStats({ last_block_at: block.timestamp })
     send(
       serialize('request', ['get_blocks_ack_request_v0', { num_messages: 1 }])
@@ -149,12 +162,31 @@ const cleanOldBlocks = async () => {
   await hasuraUtil.request(mutation, { date })
 }
 
+const getStartBlockNum = async () => {
+  const startBlockNum = await getLastBlockNumInDatabase()
+
+  if (startBlockNum === 0) {
+    const info = await eosUtil.getInfo()
+    const LIB = info?.last_irreversible_block_num
+    const days = eosConfig.keepBlockHistoryForDays
+    const date = new Date()
+
+    date.setSeconds(date.getSeconds() - 60 * 60 * 24 * days)
+
+    const estimatedBlockNum = Math.ceil(LIB - ((new Date() - date) / 1000) * 2)
+
+    return estimatedBlockNum > 0 ? estimatedBlockNum : 0
+  }
+
+  return startBlockNum
+}
+
 const init = async () => {
   if (!eosConfig.stateHistoryPluginEndpoint) {
     return
   }
 
-  const startBlockNum = await getLastBlockNumInDatabase()
+  const startBlockNum = await getStartBlockNum()
 
   ws = new WebSocket(eosConfig.stateHistoryPluginEndpoint, {
     perMessageDeflate: false,
@@ -165,32 +197,33 @@ const init = async () => {
     console.log('🚀 Connected to state_history_plugin socket')
   })
 
-  ws.on('message', data => {
-    try {
-      if (!types) {
-        const abi = JSON.parse(data)
-        types = Serialize.getTypesFromAbi(Serialize.createInitialTypes(), abi)
-        requestBlocks({ start_block_num: startBlockNum })
+  ws.on('message', (data) => {
+    if (!types) {
+      const abi = JSON.parse(data)
+      types = Serialize.getTypesFromAbi(Serialize.createInitialTypes(), abi)
+      requestBlocks({ start_block_num: startBlockNum })
 
-        return
-      }
+      return
+    }
 
-      const [type, response] = deserialize('result', data)
+    const [type, response] = deserialize('result', data)
 
-      switch (type) {
-        case 'get_blocks_result_v0':
-          handleBlocksResult(response)
-          break
-        default:
-          console.log(`unsupported result ${type}`)
-          break
-      }
-    } catch (error) {
-      console.log(`ws message error: ${error.message}`)
+    switch (type) {
+      case 'get_blocks_result_v0':
+        handleBlocksResult(response)
+        break
+      default:
+        console.log(`unsupported result ${type}`)
+        break
     }
   })
 
-  ws.on('error', error => console.error(error))
+  ws.on('error', (error) => console.error('STATE HISTORY PLUGIN', error))
+
+  ws.on('close', async () => {
+    await sleepFor(60)
+    init()
+  })
 }
 
 module.exports = {
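The core of the handler change above is the per-block usage roll-up: each transaction's cpu_usage_us and net_usage_words are expressed as a percentage of the configured per-block maximums and summed, and the resulting cpu_usage / net_usage travel with the row that saveBlocks upserts. A condensed sketch of that reduce outside the WebSocket handler — the field names mirror the state history payload used above, the sample block and config values are made up:

```js
// Condensed sketch of the aggregation handleBlocksResult performs above; the
// config values mirror the defaults in eos.config.js, the sample block is invented.
const eosConfig = { maxBlockCpuUsage: 100000, maxBlockNetUsage: 1048576 }

const blockUsage = (block) =>
  (block.transactions || []).reduce(
    (total, current) => {
      // Each transaction contributes its share of the block budget, in percent;
      // `|| 0` guards against a missing usage field producing NaN.
      total.cpu_usage += (current.cpu_usage_us / eosConfig.maxBlockCpuUsage) * 100 || 0
      total.net_usage += (current.net_usage_words / eosConfig.maxBlockNetUsage) * 100 || 0

      return total
    },
    { net_usage: 0, cpu_usage: 0 }
  )

// Hypothetical block with two transactions: 1500 µs + 500 µs of CPU.
console.log(
  blockUsage({
    transactions: [
      { cpu_usage_us: 1500, net_usage_words: 16 },
      { cpu_usage_us: 500, net_usage_words: 8 }
    ]
  })
) // -> { net_usage: ~0.0023, cpu_usage: 2 }
```

Rows are buffered in blocksData and flushed through saveBlocks in batches of 50, so the sync issues one Hasura upsert per 50 blocks instead of one mutation per block, while max_messages_in_flight: 1 together with the explicit ack request keeps the plugin from flooding the socket during a write.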
diff --git a/hapi/src/services/stats.service.js b/hapi/src/services/stats.service.js
index 1c634d5d..2319a6fe 100644
--- a/hapi/src/services/stats.service.js
+++ b/hapi/src/services/stats.service.js
@@ -3,7 +3,6 @@ const { StatusCodes } = require('http-status-codes')
 const moment = require('moment')
 
 const { hasuraUtil, sequelizeUtil, sleepFor, eosUtil } = require('../utils')
-const transactionService = require('./transactions.service')
 
 const STAT_ID = 'bceb5b75-6cb9-45af-9735-5389e0664847'
 
@@ -152,7 +151,6 @@ const getStats = async () => {
         last_block_at
         tps_all_time_high
         missed_blocks
-        transaction_history
         updated_at
         created_at
       }
@@ -163,38 +161,6 @@ const getStats = async () => {
   return data.stat
 }
 
-const formatTransactionHistory = async () => {
-  let txrHistory = {}
-  const intervals = [
-    '3 Hours',
-    '6 Hours',
-    '12 Hours',
-    '1 Day',
-    '4 Days',
-    '7 Days',
-    '14 Days',
-    '1 Month',
-    '2 Months',
-    '3 Months',
-    '6 Months',
-    '1 Year'
-  ]
-
-  const stats = await getStats()
-
-  if (!stats) return
-
-  for (const interval of intervals) {
-    const data = await transactionService.getTransactions(interval)
-
-    txrHistory = { ...txrHistory, [interval]: data }
-  }
-
-  await udpateStats({
-    transaction_history: txrHistory
-  })
-}
-
 const getCurrentMissedBlock = async () => {
   let lastBlockAt = null
   let data = null
@@ -386,7 +352,6 @@ const syncTPSAllTimeHigh = async () => {
       interval.value as datetime,
       sum(block_history.transactions_length) as transactions_count,
       array_to_string(array_agg(block_history.block_num), ',') as blocks
-
     FROM
       interval
     INNER JOIN
@@ -491,6 +456,5 @@ module.exports = {
   getBlockDistribution,
   getStats,
   udpateStats,
-  getCurrentMissedBlock,
-  formatTransactionHistory
+  getCurrentMissedBlock
 }
diff --git a/hapi/src/services/transactions.service.js b/hapi/src/services/transactions.service.js
index d83057cf..edf95a73 100644
--- a/hapi/src/services/transactions.service.js
+++ b/hapi/src/services/transactions.service.js
@@ -13,7 +13,9 @@ const getTransactions = async (range = '3 Hours') => {
     SELECT
       interval.value as datetime,
-      sum(block_history.transactions_length)::integer as transactions_count
+      avg(block_history.transactions_length)::integer as transactions_count,
+      avg(block_history.cpu_usage)::numeric(5,2) as cpu,
+      avg(block_history.net_usage)::numeric(6,3) as net
     FROM
       interval
     LEFT JOIN
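With the SELECT above, each datetime bucket now reports the average transactions per block together with average cpu and net percentages written by the state history sync, rather than a per-bucket sum. A hedged usage sketch — only the column aliases datetime, transactions_count, cpu and net come from the query; the call site and printed rows are assumptions:

```js
// Hypothetical consumer of getTransactions; assumes it resolves to an array of
// row objects shaped by the SELECT above (datetime, transactions_count, cpu, net).
const transactionsService = require('./transactions.service')

const printRecentUsage = async () => {
  const rows = await transactionsService.getTransactions('3 Hours')

  for (const row of rows || []) {
    // cpu/net are averages of the per-block percentages, cast to
    // numeric(5,2) and numeric(6,3) by the query.
    console.log(`${row.datetime} avg trx/block=${row.transactions_count} cpu=${row.cpu}% net=${row.net}%`)
  }
}

printRecentUsage().catch(console.error)
```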
diff --git a/hapi/src/utils/get-granularity-from-range.js b/hapi/src/utils/get-granularity-from-range.js
index 66bde1cb..d0f651cf 100644
--- a/hapi/src/utils/get-granularity-from-range.js
+++ b/hapi/src/utils/get-granularity-from-range.js
@@ -8,6 +8,7 @@ const getGranularityFromRange = range => {
       granularity = 'minute'
       break
     case '1 Day':
+    case '2 Days':
     case '4 Days':
     case '7 Days':
     case '14 Days':
diff --git a/hasura/metadata/actions.graphql b/hasura/metadata/actions.graphql
index 4461801a..0b70dfb6 100644
--- a/hasura/metadata/actions.graphql
+++ b/hasura/metadata/actions.graphql
@@ -80,6 +80,8 @@ type CPUBenchmark {
 type Transaction {
   datetime: String
   transactions_count: Int
+  cpu: Float
+  net: Float
 }
 
 type ProducersSummary {
diff --git a/hasura/metadata/databases/default/tables/public_block_history.yaml b/hasura/metadata/databases/default/tables/public_block_history.yaml
index 01b835ab..5c0f34e5 100644
--- a/hasura/metadata/databases/default/tables/public_block_history.yaml
+++ b/hasura/metadata/databases/default/tables/public_block_history.yaml
@@ -1,17 +1,16 @@
 table:
-  schema: public
   name: block_history
+  schema: public
 select_permissions:
   - role: guest
     permission:
       columns:
-        - id
-        - block_id
         - block_num
-        - transactions_length
         - timestamp
-        - created_at
-        - updated_at
+        - transactions_length
         - producer
+        - cpu_usage
+        - net_usage
         - schedule_version
+        - created_at
       filter: {}
diff --git a/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql
new file mode 100644
index 00000000..f757f01b
--- /dev/null
+++ b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql
@@ -0,0 +1,9 @@
+-- Could not auto-generate a down migration.
+-- Please write an appropriate down migration for the SQL below:
+-- CREATE OR REPLACE FUNCTION public.block_history_by_producer(since timestamp with time zone)
+--  RETURNS SETOF block_history_by_producer_type
+--  LANGUAGE sql
+--  IMMUTABLE STRICT
+-- AS $function$
+--   SELECT gen_random_uuid() as id, producer, count(1) AS "blocks" FROM block_history WHERE "timestamp" >= since GROUP BY producer;
+-- $function$;
diff --git a/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql
new file mode 100644
index 00000000..44eef123
--- /dev/null
+++ b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql
@@ -0,0 +1,7 @@
+CREATE OR REPLACE FUNCTION public.block_history_by_producer(since timestamp with time zone)
+ RETURNS SETOF block_history_by_producer_type
+ LANGUAGE sql
+ IMMUTABLE STRICT
+AS $function$
+  SELECT gen_random_uuid() as id, producer, count(1) AS "blocks" FROM block_history WHERE "timestamp" >= since GROUP BY producer;
+$function$;
diff --git a/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql b/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql
new file mode 100644
index 00000000..66c0a138
--- /dev/null
+++ b/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql
@@ -0,0 +1,3 @@
+-- Could not auto-generate a down migration.
+-- Please write an appropriate down migration for the SQL below:
+-- DROP table "public"."block_history";
diff --git a/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql b/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql
new file mode 100644
index 00000000..a577fb1f
--- /dev/null
+++ b/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql
@@ -0,0 +1 @@
+DROP table "public"."block_history";
diff --git a/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql b/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql
new file mode 100644
index 00000000..ebb08f73
--- /dev/null
+++ b/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql
@@ -0,0 +1 @@
+DROP TABLE "public"."block_history";
diff --git a/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql b/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql
new file mode 100644
index 00000000..aee836d0
--- /dev/null
+++ b/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql
@@ -0,0 +1 @@
+CREATE TABLE "public"."block_history" ("block_num" integer NOT NULL, "timestamp" timestamp with time zone NOT NULL, "transactions_length" int2 NOT NULL, "producer" character varying(12) NOT NULL, "cpu_usage" Numeric(5,2) NOT NULL, "net_usage" Numeric(6,3) NOT NULL, "schedule_version" integer NOT NULL, "created_at" timestamptz NOT NULL DEFAULT now(), PRIMARY KEY ("block_num") , UNIQUE ("block_num"));
diff --git a/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql
new file mode 100644
index 00000000..76e1458e
--- /dev/null
+++ b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql
@@ -0,0 +1 @@
+DROP INDEX IF EXISTS "public"."block_history_timestamp_index";
diff --git a/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql
new file mode 100644
index 00000000..9600cc81
--- /dev/null
+++ b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql
@@ -0,0 +1,2 @@
+CREATE INDEX "block_history_timestamp_index" on
+  "public"."block_history" using btree ("timestamp");
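These migrations rebuild block_history around block_num as the primary key (the constraint the batched upsert above targets) and keep only the columns the dashboard reads, so the table never holds more than the HAPI_EOS_BLOCK_HISTORY_DAYS retention window. That is also why getStartBlockNum in state-history-plugin.service.js estimates a starting block instead of replaying from genesis when the table is empty. A worked simplification of that estimate, assuming the nominal 0.5 s block time (2 blocks per second) the service relies on — the LIB and retention values below are made up:

```js
// Simplified version of the getStartBlockNum estimate above: walk back from the
// last irreversible block by (retention window in seconds) * 2 blocks/second.
const estimateStartBlock = (lastIrreversibleBlockNum, keepBlockHistoryForDays) => {
  const retentionSeconds = 60 * 60 * 24 * keepBlockHistoryForDays
  const estimated = Math.ceil(lastIrreversibleBlockNum - retentionSeconds * 2)

  return estimated > 0 ? estimated : 0
}

// Example: LIB = 250,000,000 and a 90 day window.
// 90 days = 7,776,000 s -> 15,552,000 blocks back -> start at 234,448,000.
console.log(estimateStartBlock(250000000, 90)) // 234448000
```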
diff --git a/kubernetes/configmap-dashboard.yaml b/kubernetes/configmap-dashboard.yaml
index 8cd1f83c..e6149c69 100644
--- a/kubernetes/configmap-dashboard.yaml
+++ b/kubernetes/configmap-dashboard.yaml
@@ -97,6 +97,8 @@ data:
   HAPI_EOSRATE_GET_STATS_USER: '${HAPI_EOSRATE_GET_STATS_USER}'
   HAPI_EOSRATE_GET_STATS_PASSWORD: '${HAPI_EOSRATE_GET_STATS_PASSWORD}'
   HAPI_EOS_BLOCK_HISTORY_DAYS: '${HAPI_EOS_BLOCK_HISTORY_DAYS}'
+  HAPI_EOS_MAX_CPU_BLOCK: '${HAPI_EOS_MAX_CPU_BLOCK}'
+  HAPI_EOS_MAX_NET_BLOCK: '${HAPI_EOS_MAX_NET_BLOCK}'
   HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}'
 ---
 apiVersion: v1
diff --git a/webapp/src/gql/transaction.gql.js b/webapp/src/gql/transaction.gql.js
index c72a637f..670650cd 100644
--- a/webapp/src/gql/transaction.gql.js
+++ b/webapp/src/gql/transaction.gql.js
@@ -5,14 +5,8 @@ export const TRANSACTION_QUERY = gql`
     transactions(range: $range) {
       datetime
       transactions_count
-    }
-  }
-`
-
-export const TRANSACTION_HISTORY_QUERY = gql`
-  query getTrxHistoryStats {
-    trxHistory: stat(limit: 1) {
-      transaction_history
+      cpu
+      net
     }
   }
 `
diff --git a/webapp/src/routes/Home/TransactionInfo.js b/webapp/src/routes/Home/TransactionInfo.js
index f5660492..efcfccfd 100644
--- a/webapp/src/routes/Home/TransactionInfo.js
+++ b/webapp/src/routes/Home/TransactionInfo.js
@@ -28,11 +28,7 @@ const useStyles = makeStyles(styles)
 
 const options = ['Live (30s)', ...rangeOptions]
 
-const TransactionInfo = ({
-  t,
-  startTrackingInfo,
-  stopTrackingInfo,
-}) => {
+const TransactionInfo = ({ t, startTrackingInfo, stopTrackingInfo }) => {
   const classes = useStyles()
   const theme = useTheme()
   const [{ tps, tpb }] = useSharedState()
@@ -62,8 +58,8 @@ const TransactionInfo = ({
     for (let index = 0; index < tpb.length; index++) {
       trxPerBlock.push({
         name: `Block: ${tpb[index].blocks.join()}`,
-        cpu: formatWithThousandSeparator(tpb[index].cpu,2),
-        net: formatWithThousandSeparator(tpb[index].net,3),
+        cpu: formatWithThousandSeparator(tpb[index].cpu, 2),
+        net: formatWithThousandSeparator(tpb[index].net, 3),
         y: tpb[index].transactions,
         x: index > 0 ? index / 2 : index,
       })
@@ -72,8 +68,8 @@ const TransactionInfo = ({
     for (let index = 0; index < tps.length; index++) {
       trxPerSecond.push({
         name: `Blocks: ${tps[index].blocks.join(', ')}`,
-        cpu: formatWithThousandSeparator(tps[index].cpu,2),
-        net: formatWithThousandSeparator(tps[index].net,3),
+        cpu: formatWithThousandSeparator(tps[index].cpu, 2),
+        net: formatWithThousandSeparator(tps[index].net, 3),
         y: tps[index].transactions,
         x: index,
       })
@@ -120,18 +116,35 @@ const TransactionInfo = ({
       return
     }
 
-    const intervalGraphicData = data.transactions.map((transactionHistory) => {
-      return [
-        new Date(transactionHistory.datetime).getTime(),
-        transactionHistory.transactions_count || 0,
-      ]
-    })
+    const { trxPerBlock, trxPerSecond } = data.transactions.reduce(
+      (history, transactionHistory) => {
+        history.trxPerBlock.push({
+          cpu: transactionHistory.cpu || 0,
+          net: transactionHistory.net || 0,
+          y: transactionHistory.transactions_count || 0,
+          x: new Date(transactionHistory.datetime).getTime(),
+        })
+
+        history.trxPerSecond.push({
+          y: transactionHistory.transactions_count * 2 || 0,
+          x: new Date(transactionHistory.datetime).getTime(),
+        })
+
+        return history
+      },
+      { trxPerBlock: [], trxPerSecond: [] },
+    )
 
     setGraphicData([
+      {
+        name: t('transactionsPerSecond'),
+        color: theme.palette.secondary.main,
+        data: trxPerSecond,
+      },
       {
         name: t('transactionsPerBlock'),
         color: '#00C853',
-        data: intervalGraphicData,
+        data: trxPerBlock,
       },
     ])
     // eslint-disable-next-line
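On the webapp side, the historical ranges now build two chart series in a single pass over data.transactions: the per-block series carries the new cpu and net averages alongside each point, and the per-second series approximates throughput as transactions_count * 2, since transactions_count is an average per block and the chain produces roughly two blocks per second. A standalone sketch of that reduce — the input rows are invented, only the field names match the TRANSACTION_QUERY result:

```js
// Standalone sketch of the reduce added to TransactionInfo.js above; the rows
// are made up, the field names come from TRANSACTION_QUERY (datetime,
// transactions_count, cpu, net).
const rows = [
  { datetime: '2023-07-03T12:00:00Z', transactions_count: 4, cpu: 1.25, net: 0.004 },
  { datetime: '2023-07-03T12:01:00Z', transactions_count: 6, cpu: 2.5, net: 0.007 }
]

const { trxPerBlock, trxPerSecond } = rows.reduce(
  (history, row) => {
    const x = new Date(row.datetime).getTime()

    history.trxPerBlock.push({ cpu: row.cpu || 0, net: row.net || 0, y: row.transactions_count || 0, x })
    // transactions_count is an average per block; at ~2 blocks per second this
    // doubles into an approximate transactions-per-second value.
    history.trxPerSecond.push({ y: row.transactions_count * 2 || 0, x })

    return history
  },
  { trxPerBlock: [], trxPerSecond: [] }
)

console.log(trxPerSecond) // [{ y: 8, x: 1688385600000 }, { y: 12, x: 1688385660000 }]
```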