From 4c03fa3d681bb201b3a5e7cb722fc231d2138e9b Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Tue, 27 Jun 2023 09:19:31 -0600 Subject: [PATCH 01/17] fix(webapp): add historic transactions per second --- webapp/src/routes/Home/TransactionInfo.js | 45 +++++++++++++++-------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/webapp/src/routes/Home/TransactionInfo.js b/webapp/src/routes/Home/TransactionInfo.js index f5660492..1f2e580b 100644 --- a/webapp/src/routes/Home/TransactionInfo.js +++ b/webapp/src/routes/Home/TransactionInfo.js @@ -28,11 +28,7 @@ const useStyles = makeStyles(styles) const options = ['Live (30s)', ...rangeOptions] -const TransactionInfo = ({ - t, - startTrackingInfo, - stopTrackingInfo, -}) => { +const TransactionInfo = ({ t, startTrackingInfo, stopTrackingInfo }) => { const classes = useStyles() const theme = useTheme() const [{ tps, tpb }] = useSharedState() @@ -62,8 +58,8 @@ const TransactionInfo = ({ for (let index = 0; index < tpb.length; index++) { trxPerBlock.push({ name: `Block: ${tpb[index].blocks.join()}`, - cpu: formatWithThousandSeparator(tpb[index].cpu,2), - net: formatWithThousandSeparator(tpb[index].net,3), + cpu: formatWithThousandSeparator(tpb[index].cpu, 2), + net: formatWithThousandSeparator(tpb[index].net, 3), y: tpb[index].transactions, x: index > 0 ? index / 2 : index, }) @@ -72,8 +68,8 @@ const TransactionInfo = ({ for (let index = 0; index < tps.length; index++) { trxPerSecond.push({ name: `Blocks: ${tps[index].blocks.join(', ')}`, - cpu: formatWithThousandSeparator(tps[index].cpu,2), - net: formatWithThousandSeparator(tps[index].net,3), + cpu: formatWithThousandSeparator(tps[index].cpu, 2), + net: formatWithThousandSeparator(tps[index].net, 3), y: tps[index].transactions, x: index, }) @@ -120,18 +116,35 @@ const TransactionInfo = ({ return } - const intervalGraphicData = data.transactions.map((transactionHistory) => { - return [ - new Date(transactionHistory.datetime).getTime(), - transactionHistory.transactions_count || 0, - ] - }) + const { trxPerBlock, trxPerSecond } = data.transactions.reduce( + (history, transactionHistory) => { + history.trxPerBlock.push([ + new Date(transactionHistory.datetime).getTime(), + transactionHistory.transactions_count || 0, + ]) + + history.trxPerSecond.push([ + new Date(transactionHistory.datetime).getTime(), + transactionHistory.transactions_count * 2 || 0, + ]) + + return history + }, + { trxPerBlock: [], trxPerSecond: [] }, + ) + + console.log(trxPerSecond) setGraphicData([ + { + name: t('transactionsPerSecond'), + color: theme.palette.secondary.main, + data: trxPerSecond, + }, { name: t('transactionsPerBlock'), color: '#00C853', - data: intervalGraphicData, + data: trxPerBlock, }, ]) // eslint-disable-next-line From 4b90fc4a9c13392562913336008270c27e0a3cc0 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Tue, 27 Jun 2023 09:20:14 -0600 Subject: [PATCH 02/17] fix(hapi): query the average of transactions instead of the sum --- hapi/src/services/transactions.service.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi/src/services/transactions.service.js b/hapi/src/services/transactions.service.js index d83057cf..909cc9cd 100644 --- a/hapi/src/services/transactions.service.js +++ b/hapi/src/services/transactions.service.js @@ -13,7 +13,7 @@ const getTransactions = async (range = '3 Hours') => { SELECT interval.value as datetime, - sum(block_history.transactions_length)::integer as transactions_count + avg(block_history.transactions_length)::integer as transactions_count FROM 
interval LEFT JOIN From 6af85b67a7654d80a73d4246a8f22954c5536b6f Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Thu, 29 Jun 2023 10:22:29 -0600 Subject: [PATCH 03/17] fix(hapi): increase interval to sync the stats --- hapi/src/config/workers.config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi/src/config/workers.config.js b/hapi/src/config/workers.config.js index 2ecfd79a..2940ff3d 100644 --- a/hapi/src/config/workers.config.js +++ b/hapi/src/config/workers.config.js @@ -6,7 +6,7 @@ module.exports = { process.env.HAPI_SYNC_PRODUCER_INFO_INTERVAL || 1 ), cpuWorkerInterval: parseInt(process.env.HAPI_SYNC_PRODUCER_CPU_INTERVAL), - syncStatsInterval: parseInt(process.env.HAPI_SYNC_STATS_INTERVAL || 60), + syncStatsInterval: parseInt(process.env.HAPI_SYNC_STATS_INTERVAL || 3600), syncExchangeRate: parseInt(process.env.HAPI_SYNC_EXCHANGE_RATE || 86400), syncScheduleHistoryInterval: parseInt( process.env.HAPI_SYNC_SCHEDULE_HISTORY_INTERVAL || 0 From b14c7dbdec17379f93f4fdd47d142f3f0ace2dbb Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Thu, 29 Jun 2023 10:23:21 -0600 Subject: [PATCH 04/17] fix(workflows): add HAPI_SYNC_STATS_INTERVAL in the libre testnet configuration --- .github/workflows/deploy-libre-testnet.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/deploy-libre-testnet.yaml b/.github/workflows/deploy-libre-testnet.yaml index c2ac86dc..7b63dfd4 100644 --- a/.github/workflows/deploy-libre-testnet.yaml +++ b/.github/workflows/deploy-libre-testnet.yaml @@ -107,6 +107,7 @@ jobs: HAPI_SYNC_PRODUCER_CPU_INTERVAL: '6' HAPI_SYNC_PRODUCER_INFO_INTERVAL: '1' HAPI_SYNC_SCHEDULE_HISTORY_INTERVAL: 86400 + HAPI_SYNC_STATS_INTERVAL: 3600 HAPI_EOS_EXCHANGE_RATE_API: 'https://dashboard-api.libre.org/exchange-rates' HAPI_COINGECKO_API_TOKEN_ID: LIBRE HAPI_REWARDS_TOKEN: LIBRE From 1cbc08db43203d03b922c754ab5ab91f248c2425 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 10:49:02 -0600 Subject: [PATCH 05/17] chore(hapi): remove unused fields --- hapi/src/services/missed-blocks.service.js | 3 +- hapi/src/services/stats.service.js | 37 +------------------- hapi/src/utils/get-granularity-from-range.js | 1 + 3 files changed, 3 insertions(+), 38 deletions(-) diff --git a/hapi/src/services/missed-blocks.service.js b/hapi/src/services/missed-blocks.service.js index f522f063..f8394999 100644 --- a/hapi/src/services/missed-blocks.service.js +++ b/hapi/src/services/missed-blocks.service.js @@ -113,8 +113,7 @@ const getBlocksInRange = async (start, end) => { SELECT schedule_version, producer, - block_num, - block_id + block_num FROM block_history WHERE diff --git a/hapi/src/services/stats.service.js b/hapi/src/services/stats.service.js index 1c634d5d..330a72f9 100644 --- a/hapi/src/services/stats.service.js +++ b/hapi/src/services/stats.service.js @@ -152,7 +152,6 @@ const getStats = async () => { last_block_at tps_all_time_high missed_blocks - transaction_history updated_at created_at } @@ -163,38 +162,6 @@ const getStats = async () => { return data.stat } -const formatTransactionHistory = async () => { - let txrHistory = {} - const intervals = [ - '3 Hours', - '6 Hours', - '12 Hours', - '1 Day', - '4 Days', - '7 Days', - '14 Days', - '1 Month', - '2 Months', - '3 Months', - '6 Months', - '1 Year' - ] - - const stats = await getStats() - - if (!stats) return - - for (const interval of intervals) { - const data = await transactionService.getTransactions(interval) - - txrHistory = { ...txrHistory, [interval]: data } - } - - await udpateStats({ - 
transaction_history: txrHistory - }) -} - const getCurrentMissedBlock = async () => { let lastBlockAt = null let data = null @@ -386,7 +353,6 @@ const syncTPSAllTimeHigh = async () => { interval.value as datetime, sum(block_history.transactions_length) as transactions_count, array_to_string(array_agg(block_history.block_num), ',') as blocks - FROM interval INNER JOIN @@ -491,6 +457,5 @@ module.exports = { getBlockDistribution, getStats, udpateStats, - getCurrentMissedBlock, - formatTransactionHistory + getCurrentMissedBlock } diff --git a/hapi/src/utils/get-granularity-from-range.js b/hapi/src/utils/get-granularity-from-range.js index 66bde1cb..d0f651cf 100644 --- a/hapi/src/utils/get-granularity-from-range.js +++ b/hapi/src/utils/get-granularity-from-range.js @@ -8,6 +8,7 @@ const getGranularityFromRange = range => { granularity = 'minute' break case '1 Day': + case '2 Days': case '4 Days': case '7 Days': case '14 Days': From 707a1a5fd172840d70a110ba51fb84a37b413ba3 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 10:53:40 -0600 Subject: [PATCH 06/17] chore(webapp): remove unused query --- webapp/src/gql/transaction.gql.js | 8 -------- 1 file changed, 8 deletions(-) diff --git a/webapp/src/gql/transaction.gql.js b/webapp/src/gql/transaction.gql.js index c72a637f..160df907 100644 --- a/webapp/src/gql/transaction.gql.js +++ b/webapp/src/gql/transaction.gql.js @@ -8,11 +8,3 @@ export const TRANSACTION_QUERY = gql` } } ` - -export const TRANSACTION_HISTORY_QUERY = gql` - query getTrxHistoryStats { - trxHistory: stat(limit: 1) { - transaction_history - } - } -` From d28b3b6bf60d51738e7e1b2125162cc17062a7d3 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 12:00:06 -0600 Subject: [PATCH 07/17] chore(hasura): remove id from block_history_by_producer function --- .../down.sql | 9 +++++++++ .../up.sql | 7 +++++++ 2 files changed, 16 insertions(+) create mode 100644 hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql create mode 100644 hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql diff --git a/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql new file mode 100644 index 00000000..f757f01b --- /dev/null +++ b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/down.sql @@ -0,0 +1,9 @@ +-- Could not auto-generate a down migration. 
+-- Please write an appropriate down migration for the SQL below: +-- CREATE OR REPLACE FUNCTION public.block_history_by_producer(since timestamp with time zone) +-- RETURNS SETOF block_history_by_producer_type +-- LANGUAGE sql +-- IMMUTABLE STRICT +-- AS $function$ +-- SELECT gen_random_uuid() as id, producer, count(1) AS "blocks" FROM block_history WHERE "timestamp" >= since GROUP BY producer; +-- $function$; diff --git a/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql new file mode 100644 index 00000000..44eef123 --- /dev/null +++ b/hasura/migrations/default/1688407028433_remove_id_in_block_history_by_producer_query/up.sql @@ -0,0 +1,7 @@ +CREATE OR REPLACE FUNCTION public.block_history_by_producer(since timestamp with time zone) + RETURNS SETOF block_history_by_producer_type + LANGUAGE sql + IMMUTABLE STRICT +AS $function$ + SELECT gen_random_uuid() as id, producer, count(1) AS "blocks" FROM block_history WHERE "timestamp" >= since GROUP BY producer; +$function$; From 870a19ae7a88fcfb39926bd634b776e11da05290 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 13:27:05 -0600 Subject: [PATCH 08/17] perf(hasura): change the schema of the block_history table --- .../default/tables/public_block_history.yaml | 11 +++++------ .../down.sql | 3 +++ .../up.sql | 1 + .../down.sql | 1 + .../up.sql | 1 + .../down.sql | 1 + .../up.sql | 2 ++ 7 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql create mode 100644 hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql create mode 100644 hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql create mode 100644 hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql create mode 100644 hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql create mode 100644 hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql diff --git a/hasura/metadata/databases/default/tables/public_block_history.yaml b/hasura/metadata/databases/default/tables/public_block_history.yaml index 01b835ab..5c0f34e5 100644 --- a/hasura/metadata/databases/default/tables/public_block_history.yaml +++ b/hasura/metadata/databases/default/tables/public_block_history.yaml @@ -1,17 +1,16 @@ table: - schema: public name: block_history + schema: public select_permissions: - role: guest permission: columns: - - id - - block_id - block_num - - transactions_length - timestamp - - created_at - - updated_at + - transactions_length - producer + - cpu_usage + - net_usage - schedule_version + - created_at filter: {} diff --git a/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql b/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql new file mode 100644 index 00000000..66c0a138 --- /dev/null +++ b/hasura/migrations/default/1688407287288_drop_table_public_block_history/down.sql @@ -0,0 +1,3 @@ +-- Could not auto-generate a down migration. 
+-- Please write an appropriate down migration for the SQL below: +-- DROP table "public"."block_history"; diff --git a/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql b/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql new file mode 100644 index 00000000..a577fb1f --- /dev/null +++ b/hasura/migrations/default/1688407287288_drop_table_public_block_history/up.sql @@ -0,0 +1 @@ +DROP table "public"."block_history"; diff --git a/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql b/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql new file mode 100644 index 00000000..ebb08f73 --- /dev/null +++ b/hasura/migrations/default/1688407733672_create_table_public_block_history/down.sql @@ -0,0 +1 @@ +DROP TABLE "public"."block_history"; diff --git a/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql b/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql new file mode 100644 index 00000000..aee836d0 --- /dev/null +++ b/hasura/migrations/default/1688407733672_create_table_public_block_history/up.sql @@ -0,0 +1 @@ +CREATE TABLE "public"."block_history" ("block_num" integer NOT NULL, "timestamp" timestamp with time zone NOT NULL, "transactions_length" int2 NOT NULL, "producer" character varying(12) NOT NULL, "cpu_usage" Numeric(5,2) NOT NULL, "net_usage" Numeric(6,3) NOT NULL, "schedule_version" integer NOT NULL, "created_at" timestamptz NOT NULL DEFAULT now(), PRIMARY KEY ("block_num") , UNIQUE ("block_num")); diff --git a/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql new file mode 100644 index 00000000..76e1458e --- /dev/null +++ b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "public"."block_history_timestamp_index"; diff --git a/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql new file mode 100644 index 00000000..9600cc81 --- /dev/null +++ b/hasura/migrations/default/1688407936132_create_index_block_history_timestamp_index/up.sql @@ -0,0 +1,2 @@ +CREATE INDEX "block_history_timestamp_index" on + "public"."block_history" using btree ("timestamp"); From 6adc74ca28c8e5ec4fe9a2748121b904c9418a20 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 14:12:43 -0600 Subject: [PATCH 09/17] chore(hasura): add net and cpu usage to Transaction type --- hasura/metadata/actions.graphql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hasura/metadata/actions.graphql b/hasura/metadata/actions.graphql index 4461801a..0b70dfb6 100644 --- a/hasura/metadata/actions.graphql +++ b/hasura/metadata/actions.graphql @@ -80,6 +80,8 @@ type CPUBenchmark { type Transaction { datetime: String transactions_count: Int + cpu: Float + net: Float } type ProducersSummary { From a0def824f8d4f016726288898e9b0c16b0c95b04 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 14:14:13 -0600 Subject: [PATCH 10/17] feat(hapi): return cpu and net average in getTransactions function --- hapi/src/services/transactions.service.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hapi/src/services/transactions.service.js b/hapi/src/services/transactions.service.js index 
909cc9cd..edf95a73 100644 --- a/hapi/src/services/transactions.service.js +++ b/hapi/src/services/transactions.service.js @@ -13,7 +13,9 @@ const getTransactions = async (range = '3 Hours') => { SELECT interval.value as datetime, - avg(block_history.transactions_length)::integer as transactions_count + avg(block_history.transactions_length)::integer as transactions_count, + avg(block_history.cpu_usage)::numeric(5,2) as cpu, + avg(block_history.net_usage)::numeric(6,3) as net FROM interval LEFT JOIN From 2c59d83a12b7182256e2729307322b9efc5cc0db Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 14:15:37 -0600 Subject: [PATCH 11/17] feat(webapp): add cpu and net utilization to transaction per block history --- webapp/src/gql/transaction.gql.js | 2 ++ webapp/src/routes/Home/TransactionInfo.js | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/webapp/src/gql/transaction.gql.js b/webapp/src/gql/transaction.gql.js index 160df907..670650cd 100644 --- a/webapp/src/gql/transaction.gql.js +++ b/webapp/src/gql/transaction.gql.js @@ -5,6 +5,8 @@ export const TRANSACTION_QUERY = gql` transactions(range: $range) { datetime transactions_count + cpu + net } } ` diff --git a/webapp/src/routes/Home/TransactionInfo.js b/webapp/src/routes/Home/TransactionInfo.js index 1f2e580b..efcfccfd 100644 --- a/webapp/src/routes/Home/TransactionInfo.js +++ b/webapp/src/routes/Home/TransactionInfo.js @@ -118,23 +118,23 @@ const TransactionInfo = ({ t, startTrackingInfo, stopTrackingInfo }) => { const { trxPerBlock, trxPerSecond } = data.transactions.reduce( (history, transactionHistory) => { - history.trxPerBlock.push([ - new Date(transactionHistory.datetime).getTime(), - transactionHistory.transactions_count || 0, - ]) + history.trxPerBlock.push({ + cpu: transactionHistory.cpu || 0, + net: transactionHistory.net || 0, + y: transactionHistory.transactions_count || 0, + x: new Date(transactionHistory.datetime).getTime(), + }) - history.trxPerSecond.push([ - new Date(transactionHistory.datetime).getTime(), - transactionHistory.transactions_count * 2 || 0, - ]) + history.trxPerSecond.push({ + y: transactionHistory.transactions_count * 2 || 0, + x: new Date(transactionHistory.datetime).getTime(), + }) return history }, { trxPerBlock: [], trxPerSecond: [] }, ) - console.log(trxPerSecond) - setGraphicData([ { name: t('transactionsPerSecond'), From 34daec840bb0e0f21db70ae8b447c78833a32435 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 15:08:51 -0600 Subject: [PATCH 12/17] fix(hapi): fix block history - Use a simple batch processing to avoid saving blocks individually - Start again the websocket connection after a minute it has failed - Add cpu and net utilization in the block data --- hapi/src/config/eos.config.js | 4 +- .../services/state-history-plugin.service.js | 73 ++++++++++++++----- 2 files changed, 59 insertions(+), 18 deletions(-) diff --git a/hapi/src/config/eos.config.js b/hapi/src/config/eos.config.js index 493dc873..d61c547d 100644 --- a/hapi/src/config/eos.config.js +++ b/hapi/src/config/eos.config.js @@ -60,5 +60,7 @@ module.exports = { rewardsToken: process.env.HAPI_REWARDS_TOKEN, eosRateUrl: process.env.HAPI_EOSRATE_GET_STATS_URL, eosRateUser: process.env.HAPI_EOSRATE_GET_STATS_USER, - eosRatePassword: process.env.HAPI_EOSRATE_GET_STATS_PASSWORD + eosRatePassword: process.env.HAPI_EOSRATE_GET_STATS_PASSWORD, + maxBlockNetUsage: parseInt(process.env.HAPI_EOS_MAX_NET_BLOCK) || 1048576, + maxBlockCpuUsage: 
parseInt(process.env.HAPI_EOS_MAX_CPU_BLOCK) || 100000 } diff --git a/hapi/src/services/state-history-plugin.service.js b/hapi/src/services/state-history-plugin.service.js index 6e5912bb..ab97e31d 100644 --- a/hapi/src/services/state-history-plugin.service.js +++ b/hapi/src/services/state-history-plugin.service.js @@ -4,7 +4,7 @@ const { Serialize } = require('eosjs') const statsService = require('./stats.service') const { eosConfig } = require('../config') -const { hasuraUtil, sleepFor } = require('../utils') +const { hasuraUtil, sleepFor, eosUtil } = require('../utils') let types let ws @@ -13,7 +13,6 @@ const getLastBlockNumInDatabase = async () => { const query = ` query { blocks: block_history(limit: 1, order_by: {block_num: desc}, where: {producer: {_neq: "NULL"}}) { - id block_num } } @@ -23,18 +22,16 @@ const getLastBlockNumInDatabase = async () => { return data?.blocks?.length > 0 ? data.blocks[0].block_num : 0 } -const saveBlockHistory = async payload => { - const mutation = ` - mutation ($payload: block_history_insert_input!) { - block: insert_block_history_one(object: $payload, on_conflict: {constraint: block_history_block_num_key, update_columns: [producer,schedule_version,block_id,timestamp,transactions_length]}) { - id +const saveBlocks = async blocks => { + const upsertMutation = ` + mutation ($blocks: [block_history_insert_input!]!) { + insert_block_history(objects: $blocks, on_conflict: {constraint: block_history_pkey, update_columns: [block_num,producer,schedule_version,timestamp,transactions_length,cpu_usage,net_usage]}) { + affected_rows, } - } + } ` - const data = await hasuraUtil.request(mutation, { payload }) - - return data.block + await hasuraUtil.request(upsertMutation, { blocks }) } const deserialize = (type, array) => { @@ -84,7 +81,7 @@ const requestBlocks = (requestArgs = {}) => { { start_block_num: 0, end_block_num: 4294967295, - max_messages_in_flight: 1000, + max_messages_in_flight: 1, have_positions: [], fetch_block: true, irreversible_only: false, @@ -96,7 +93,9 @@ const requestBlocks = (requestArgs = {}) => { ) } -const handleBlocksResult = async data => { +let blocksData = [] + +const handleBlocksResult = async (data) => { try { if (!data.block || !data.block.length) { send( @@ -114,15 +113,31 @@ const handleBlocksResult = async data => { prev_block: data.prev_block } - await saveBlockHistory({ + const usage = block?.transactions?.reduce( + (total, current) => { + total.cpu_usage += + (current.cpu_usage_us / eosConfig.maxBlockCpuUsage) * 100 || 0 + total.net_usage += + (current.net_usage_words / eosConfig.maxBlockNetUsage) * 100 || 0 + return total + }, + { net_usage: 0, cpu_usage: 0 } + ) + + blocksData.push({ producer: block.producer, schedule_version: block.schedule_version, - block_id: block.this_block.block_id, block_num: block.this_block.block_num, transactions_length: block.transactions.length, - timestamp: block.timestamp + timestamp: block.timestamp, + ...usage }) + if (blocksData.length === 50) { + await saveBlocks(blocksData) + blocksData = [] + } + await statsService.udpateStats({ last_block_at: block.timestamp }) send( serialize('request', ['get_blocks_ack_request_v0', { num_messages: 1 }]) @@ -149,12 +164,31 @@ const cleanOldBlocks = async () => { await hasuraUtil.request(mutation, { date }) } +const getStartBlockNum = async () => { + let startBlockNum = await getLastBlockNumInDatabase() + + if (startBlockNum === 0) { + const info = await eosUtil.getInfo() + const LIB = info?.last_irreversible_block_num + const days = 
eosConfig.keepBlockHistoryForDays + const date = new Date() + + date.setSeconds(date.getSeconds() - 60 * 60 * 24 * days) + + const estimatedBlockNum = Math.ceil(LIB - ((new Date() - date) / 1000) * 2) + + return estimatedBlockNum > 0 ? estimatedBlockNum : 0 + } + + return startBlockNum +} + const init = async () => { if (!eosConfig.stateHistoryPluginEndpoint) { return } - const startBlockNum = await getLastBlockNumInDatabase() + const startBlockNum = await getStartBlockNum() ws = new WebSocket(eosConfig.stateHistoryPluginEndpoint, { perMessageDeflate: false, @@ -191,6 +225,11 @@ const init = async () => { }) ws.on('error', error => console.error(error)) + + ws.on('close', async () => { + await sleepFor(60) + init() + }) } module.exports = { From 71eb92285ddbc87c7ce9ae26fa69d6d8b37a5351 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 15:16:25 -0600 Subject: [PATCH 13/17] chore(kubernetes): add env variables to the max cpu and net utilization config --- docker-compose.yaml | 4 +++- kubernetes/configmap-dashboard.yaml | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index e2fe5a50..58b3aa11 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -69,7 +69,9 @@ services: HAPI_EOSRATE_GET_STATS_USER: '${HAPI_EOSRATE_GET_STATS_USER}' HAPI_EOSRATE_GET_STATS_PASSWORD: '${HAPI_EOSRATE_GET_STATS_PASSWORD}' HAPI_EOS_BLOCK_HISTORY_DAYS: '${HAPI_EOS_BLOCK_HISTORY_DAYS}' - HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}' + HAPI_EOS_MAX_CPU_BLOCK: '${HAPI_EOS_MAX_CPU_BLOCK}' + HAPI_EOS_MAX_NET_BLOCK: '${HAPI_EOS_MAX_NET_BLOCK}' + HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}' hasura: container_name: '${STAGE}-${APP_NAME}-hasura' image: hasura/graphql-engine:v2.16.0.cli-migrations-v3 diff --git a/kubernetes/configmap-dashboard.yaml b/kubernetes/configmap-dashboard.yaml index 8cd1f83c..e6149c69 100644 --- a/kubernetes/configmap-dashboard.yaml +++ b/kubernetes/configmap-dashboard.yaml @@ -97,6 +97,8 @@ data: HAPI_EOSRATE_GET_STATS_USER: '${HAPI_EOSRATE_GET_STATS_USER}' HAPI_EOSRATE_GET_STATS_PASSWORD: '${HAPI_EOSRATE_GET_STATS_PASSWORD}' HAPI_EOS_BLOCK_HISTORY_DAYS: '${HAPI_EOS_BLOCK_HISTORY_DAYS}' + HAPI_EOS_MAX_CPU_BLOCK: '${HAPI_EOS_MAX_CPU_BLOCK}' + HAPI_EOS_MAX_NET_BLOCK: '${HAPI_EOS_MAX_NET_BLOCK}' HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}' --- apiVersion: v1 From 87b90ccd30eb6929afc482436b6b8b425b6f8322 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Mon, 3 Jul 2023 15:17:23 -0600 Subject: [PATCH 14/17] chore(workflows): add max cpu and net utilization for libre testnet --- .github/workflows/deploy-libre-testnet.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/deploy-libre-testnet.yaml b/.github/workflows/deploy-libre-testnet.yaml index 7b63dfd4..c3fb83cd 100644 --- a/.github/workflows/deploy-libre-testnet.yaml +++ b/.github/workflows/deploy-libre-testnet.yaml @@ -85,6 +85,8 @@ jobs: HAPI_EOS_STATE_HISTORY_PLUGIN_ENDPOINT: 'ws://api-node.libre-testnet:8080' HAPI_EOS_MISSED_BLOCKS_ENABLED: 'false' HAPI_EOS_BLOCK_HISTORY_DAYS: 90 + HAPI_EOS_MAX_CPU_BLOCK: 100000 + HAPI_EOS_MAX_NET_BLOCK: 1048576 HAPI_EOS_API_CHAIN_ID: b64646740308df2ee06c6b72f34c0f7fa066d940e831f752db2006fcc2b78dee HAPI_EOS_BASE_ACCOUNT: ${{ secrets.HAPI_EOS_BASE_ACCOUNT }} HAPI_EOS_BASE_ACCOUNT_PASSWORD: ${{ secrets.HAPI_EOS_BASE_ACCOUNT_PASSWORD }} From 8a57f076e5dc7efb56e7541e19956cc9ba8491a7 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Tue, 4 Jul 2023 
09:57:07 -0600 Subject: [PATCH 15/17] chore(hapi): close ws connection if it is not ready when try to send a message --- .../services/state-history-plugin.service.js | 38 ++++++++----------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/hapi/src/services/state-history-plugin.service.js b/hapi/src/services/state-history-plugin.service.js index ab97e31d..ab940cc7 100644 --- a/hapi/src/services/state-history-plugin.service.js +++ b/hapi/src/services/state-history-plugin.service.js @@ -69,9 +69,7 @@ const send = async message => { } console.log('waiting for ready state before send message') - await sleepFor(1) - - return send(message) + ws.close() } const requestBlocks = (requestArgs = {}) => { @@ -200,31 +198,27 @@ const init = async () => { }) ws.on('message', data => { - try { - if (!types) { - const abi = JSON.parse(data) - types = Serialize.getTypesFromAbi(Serialize.createInitialTypes(), abi) - requestBlocks({ start_block_num: startBlockNum }) + if (!types) { + const abi = JSON.parse(data) + types = Serialize.getTypesFromAbi(Serialize.createInitialTypes(), abi) + requestBlocks({ start_block_num: startBlockNum }) - return - } + return + } - const [type, response] = deserialize('result', data) + const [type, response] = deserialize('result', data) - switch (type) { - case 'get_blocks_result_v0': - handleBlocksResult(response) - break - default: - console.log(`unsupported result ${type}`) - break - } - } catch (error) { - console.log(`ws message error: ${error.message}`) + switch (type) { + case 'get_blocks_result_v0': + handleBlocksResult(response) + break + default: + console.log(`unsupported result ${type}`) + break } }) - ws.on('error', error => console.error(error)) + ws.on('error', error => console.error('STATE HISTORY PLUGIN',error)) ws.on('close', async () => { await sleepFor(60) From 94f34533ad26d5c369d5693b74e5b62e31ed54d3 Mon Sep 17 00:00:00 2001 From: codefactor-io Date: Tue, 4 Jul 2023 16:27:33 +0000 Subject: [PATCH 16/17] [CodeFactor] Apply fixes --- hapi/src/services/state-history-plugin.service.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/hapi/src/services/state-history-plugin.service.js b/hapi/src/services/state-history-plugin.service.js index ab940cc7..3f8c805b 100644 --- a/hapi/src/services/state-history-plugin.service.js +++ b/hapi/src/services/state-history-plugin.service.js @@ -22,7 +22,7 @@ const getLastBlockNumInDatabase = async () => { return data?.blocks?.length > 0 ? data.blocks[0].block_num : 0 } -const saveBlocks = async blocks => { +const saveBlocks = async (blocks) => { const upsertMutation = ` mutation ($blocks: [block_history_insert_input!]!) 
{ insert_block_history(objects: $blocks, on_conflict: {constraint: block_history_pkey, update_columns: [block_num,producer,schedule_version,timestamp,transactions_length,cpu_usage,net_usage]}) { @@ -63,7 +63,7 @@ const serialize = (type, value) => { return buffer.asUint8Array() } -const send = async message => { +const send = async (message) => { if (ws.readyState === 1) { return ws.send(message) } @@ -163,7 +163,7 @@ const cleanOldBlocks = async () => { } const getStartBlockNum = async () => { - let startBlockNum = await getLastBlockNumInDatabase() + const startBlockNum = await getLastBlockNumInDatabase() if (startBlockNum === 0) { const info = await eosUtil.getInfo() @@ -197,7 +197,7 @@ const init = async () => { console.log('🚀 Connected to state_history_plugin socket') }) - ws.on('message', data => { + ws.on('message', (data) => { if (!types) { const abi = JSON.parse(data) types = Serialize.getTypesFromAbi(Serialize.createInitialTypes(), abi) @@ -218,7 +218,7 @@ const init = async () => { } }) - ws.on('error', error => console.error('STATE HISTORY PLUGIN',error)) + ws.on('error', (error) => console.error('STATE HISTORY PLUGIN', error)) ws.on('close', async () => { await sleepFor(60) From 33fd01edfd2b1b2a655a6f33ba25c12898fa4219 Mon Sep 17 00:00:00 2001 From: Torresmorah Date: Tue, 4 Jul 2023 10:29:09 -0600 Subject: [PATCH 17/17] fix: format code --- docker-compose.yaml | 2 +- hapi/src/services/stats.service.js | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 58b3aa11..621bc166 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -71,7 +71,7 @@ services: HAPI_EOS_BLOCK_HISTORY_DAYS: '${HAPI_EOS_BLOCK_HISTORY_DAYS}' HAPI_EOS_MAX_CPU_BLOCK: '${HAPI_EOS_MAX_CPU_BLOCK}' HAPI_EOS_MAX_NET_BLOCK: '${HAPI_EOS_MAX_NET_BLOCK}' - HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}' + HAPI_EOS_MISSED_BLOCKS_ENABLED: '${HAPI_EOS_MISSED_BLOCKS_ENABLED}' hasura: container_name: '${STAGE}-${APP_NAME}-hasura' image: hasura/graphql-engine:v2.16.0.cli-migrations-v3 diff --git a/hapi/src/services/stats.service.js b/hapi/src/services/stats.service.js index 330a72f9..2319a6fe 100644 --- a/hapi/src/services/stats.service.js +++ b/hapi/src/services/stats.service.js @@ -3,7 +3,6 @@ const { StatusCodes } = require('http-status-codes') const moment = require('moment') const { hasuraUtil, sequelizeUtil, sleepFor, eosUtil } = require('../utils') -const transactionService = require('./transactions.service') const STAT_ID = 'bceb5b75-6cb9-45af-9735-5389e0664847'
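
Taken together, PATCH 02, PATCH 10 and PATCH 11 make the historic chart data flow roughly as in the sketch below: getTransactions now returns per-interval averages (transactions_count, cpu, net), and the webapp doubles the per-block average to estimate transactions per second, on the assumption of one block every 0.5 s (the `* 2` factor in TransactionInfo.js). This is a minimal illustration of that reduce step only; the helper name toChartSeries is illustrative and does not exist in the codebase.

    // Sketch (assumption): shape the rows returned by the transactions query
    // (datetime, transactions_count, cpu, net) into the two chart series that
    // TransactionInfo.js renders. The * 2 factor assumes a 0.5 s block interval.
    const toChartSeries = (transactions) =>
      transactions.reduce(
        (history, row) => {
          const x = new Date(row.datetime).getTime()

          // transactions per block, with the averaged cpu/net utilization attached
          history.trxPerBlock.push({
            x,
            y: row.transactions_count || 0,
            cpu: row.cpu || 0,
            net: row.net || 0,
          })

          // estimated transactions per second: twice the per-block average
          history.trxPerSecond.push({
            x,
            y: row.transactions_count * 2 || 0,
          })

          return history
        },
        { trxPerBlock: [], trxPerSecond: [] },
      )

PATCH 12's getStartBlockNum applies the same two-blocks-per-second assumption in reverse: when the database is empty it walks back from the last irreversible block by keepBlockHistoryForDays × 86,400 × 2 blocks to pick an estimated starting block for the state history stream.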