2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion nym-node-status-api/nym-node-status-agent/run.sh
@@ -3,7 +3,7 @@
set -eu
export ENVIRONMENT=${ENVIRONMENT:-"mainnet"}

probe_git_ref="nym-vpn-core-v1.10.0"
probe_git_ref="nym-vpn-core-v1.4.0"
Contributor comment: downgrade?


crate_root=$(dirname $(realpath "$0"))
monorepo_root=$(realpath "${crate_root}/../..")
2 changes: 1 addition & 1 deletion nym-node-status-api/nym-node-status-api/Cargo.toml
@@ -3,7 +3,7 @@

[package]
name = "nym-node-status-api"
version = "3.1.1"
version = "3.1.2"
authors.workspace = true
repository.workspace = true
homepage.workspace = true
7 changes: 7 additions & 0 deletions nym-node-status-api/nym-node-status-api/src/cli/mod.rs
@@ -73,6 +73,13 @@ pub(crate) struct Cli {
#[arg(value_delimiter = ',')]
pub(crate) agent_key_list: Vec<String>,

#[clap(
long,
default_value_t = 10,
env = "NYM_NODE_STATUS_API_PACKET_STATS_MAX_CONCURRENT_TASKS"
)]
pub(crate) packet_stats_max_concurrent_tasks: usize,

/// https://github.com/ipinfo/rust
#[clap(long, env = "IPINFO_API_TOKEN")]
pub(crate) ipinfo_api_token: String,
11 changes: 9 additions & 2 deletions nym-node-status-api/nym-node-status-api/src/db/models.rs
@@ -14,7 +14,7 @@ use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use std::str::FromStr;
use strum_macros::{EnumString, FromRepr};
use time::{Date, OffsetDateTime};
use time::{Date, OffsetDateTime, UtcDateTime};
use utoipa::ToSchema;

macro_rules! serialize_opt_to_value {
@@ -362,7 +362,7 @@ impl TryFrom<GatewaySessionsRecord> for http::models::SessionStats {
}
}

#[derive(strum_macros::Display)]
#[derive(strum_macros::Display, Clone)]
pub(crate) enum ScrapeNodeKind {
LegacyMixnode { mix_id: i64 },
MixingNymNode { node_id: i64 },
@@ -520,3 +520,10 @@ pub struct NodeStats {
pub packets_sent: i64,
pub packets_dropped: i64,
}

pub struct InsertStatsRecord {
pub node_kind: ScrapeNodeKind,
pub timestamp_utc: UtcDateTime,
pub unix_timestamp: i64,
pub stats: NodeStats,
}
@@ -6,7 +6,7 @@ use crate::{
DbPool,
},
http::models::Gateway,
mixnet_scraper::helpers::NodeDescriptionResponse,
node_scraper::helpers::NodeDescriptionResponse,
};
use futures_util::TryStreamExt;
use sqlx::{pool::PoolConnection, Sqlite};
@@ -10,7 +10,7 @@ use crate::{
DbPool,
},
http::models::{DailyStats, Mixnode},
mixnet_scraper::helpers::NodeDescriptionResponse,
node_scraper::helpers::NodeDescriptionResponse,
};

pub(crate) async fn update_mixnodes(
@@ -19,7 +19,7 @@ pub(crate) use nym_nodes::{
get_described_node_bond_info, get_node_self_description, update_nym_nodes,
};
pub(crate) use packet_stats::{
get_raw_node_stats, insert_daily_node_stats, insert_node_packet_stats,
batch_store_packet_stats, get_raw_node_stats, insert_daily_node_stats_uncommitted,
};
pub(crate) use scraper::{get_nodes_for_scraping, insert_scraped_node_description};
pub(crate) use summary::{get_summary, get_summary_history};
@@ -13,7 +13,7 @@ use crate::{
models::{NymNodeDto, NymNodeInsertRecord},
DbPool,
},
mixnet_scraper::helpers::NodeDescriptionResponse,
node_scraper::helpers::NodeDescriptionResponse,
};

pub(crate) async fn get_all_nym_nodes(pool: &DbPool) -> anyhow::Result<Vec<NymNodeDto>> {
@@ -1,17 +1,70 @@
use crate::db::{
models::{NodeStats, ScrapeNodeKind, ScraperNodeInfo},
DbPool,
use crate::{
db::{
models::{InsertStatsRecord, NodeStats, ScrapeNodeKind},
DbPool,
},
node_scraper::helpers::update_daily_stats_uncommitted,
utils::now_utc,
};
use anyhow::Result;
use sqlx::Transaction;
use std::sync::Arc;
use tokio::sync::Mutex;
use tracing::{info, instrument};

pub(crate) async fn insert_node_packet_stats(
#[instrument(level = "info", skip_all)]
pub(crate) async fn batch_store_packet_stats(
pool: &DbPool,
results: Arc<Mutex<Vec<InsertStatsRecord>>>,
) -> anyhow::Result<()> {
let results_iter = results.lock().await;
info!(
"📊 ⏳ Storing {} packet stats into the DB",
results_iter.len()
);
let started_at = now_utc();

let mut tx = pool
.begin()
.await
.map_err(|err| anyhow::anyhow!("Failed to begin transaction: {err}"))?;

for stats_record in &(*results_iter) {
insert_node_packet_stats_uncommitted(
&mut tx,
&stats_record.node_kind,
&stats_record.stats,
stats_record.unix_timestamp,
)
.await?;

update_daily_stats_uncommitted(
&mut tx,
&stats_record.node_kind,
stats_record.timestamp_utc,
&stats_record.stats,
)
.await?;
}

tx.commit()
.await
.inspect(|_| {
let elapsed = now_utc() - started_at;
info!(
"📊 ☑️ Packet stats successfully committed to DB (took {}s)",
Contributor comment: nit: use humantime::format_duration which is more readable, especially for smaller values. I think it will be better than say 0.1234567s : ) (a possible version is sketched after this function)

elapsed.as_seconds_f32()
);
})
.map_err(|err| anyhow::anyhow!("Failed to commit: {err}"))
}
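A minimal sketch of what the reviewer's suggestion could look like, assuming humantime is added as a dependency and that `elapsed` here is a `time::Duration` whose `unsigned_abs()` yields a `std::time::Duration`; this is illustrative, not part of the PR:

// Hypothetical follow-up to the review comment above.
let elapsed = now_utc() - started_at;
info!(
    // humantime renders e.g. "1s 234ms" instead of a raw float of seconds
    "📊 ☑️ Packet stats successfully committed to DB (took {})",
    humantime::format_duration(elapsed.unsigned_abs())
);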

async fn insert_node_packet_stats_uncommitted(
tx: &mut Transaction<'static, sqlx::Sqlite>,
node_kind: &ScrapeNodeKind,
stats: &NodeStats,
timestamp_utc: i64,
) -> Result<()> {
let mut conn = pool.acquire().await?;

match node_kind {
ScrapeNodeKind::LegacyMixnode { mix_id } => {
sqlx::query!(
@@ -26,7 +79,7 @@ pub(crate) async fn insert_node_packet_stats(
stats.packets_sent,
stats.packets_dropped,
)
.execute(&mut *conn)
.execute(tx.as_mut())
.await?;
}
ScrapeNodeKind::MixingNymNode { node_id }
@@ -43,7 +96,7 @@
stats.packets_sent,
stats.packets_dropped,
)
.execute(&mut *conn)
.execute(tx.as_mut())
.await?;
}
}
@@ -52,12 +105,10 @@
}

pub(crate) async fn get_raw_node_stats(
pool: &DbPool,
node: &ScraperNodeInfo,
tx: &mut Transaction<'static, sqlx::Sqlite>,
node_kind: &ScrapeNodeKind,
) -> Result<Option<NodeStats>> {
let mut conn = pool.acquire().await?;

let packets = match node.node_kind {
let packets = match node_kind {
// if no packets are found, it's fine to assume 0 because that's also
// SQL default value if none provided
ScrapeNodeKind::LegacyMixnode { mix_id } => {
@@ -75,7 +126,7 @@ pub(crate) async fn get_raw_node_stats(
"#,
mix_id
)
.fetch_optional(&mut *conn)
.fetch_optional(tx.as_mut())
.await?
}
ScrapeNodeKind::MixingNymNode { node_id }
@@ -94,23 +145,21 @@ pub(crate) async fn get_raw_node_stats(
"#,
node_id
)
.fetch_optional(&mut *conn)
.fetch_optional(tx.as_mut())
.await?
}
};

Ok(packets)
}

pub(crate) async fn insert_daily_node_stats(
pool: &DbPool,
node: &ScraperNodeInfo,
pub(crate) async fn insert_daily_node_stats_uncommitted(
tx: &mut Transaction<'static, sqlx::Sqlite>,
node_kind: &ScrapeNodeKind,
date_utc: &str,
packets: NodeStats,
) -> Result<()> {
let mut conn = pool.acquire().await?;

match node.node_kind {
match node_kind {
ScrapeNodeKind::LegacyMixnode { mix_id } => {
let total_stake = sqlx::query_scalar!(
r#"
@@ -121,7 +170,7 @@ pub(crate) async fn insert_daily_node_stats(
"#,
mix_id
)
.fetch_one(&mut *conn)
.fetch_one(tx.as_mut())
.await?;

sqlx::query!(
@@ -144,7 +193,7 @@ pub(crate) async fn insert_daily_node_stats(
packets.packets_sent,
packets.packets_dropped,
)
.execute(&mut *conn)
.execute(tx.as_mut())
.await?;
}
ScrapeNodeKind::MixingNymNode { node_id }
@@ -158,7 +207,7 @@ pub(crate) async fn insert_daily_node_stats(
"#,
node_id
)
.fetch_one(&mut *conn)
.fetch_one(tx.as_mut())
.await?;

sqlx::query!(
@@ -181,7 +230,7 @@ pub(crate) async fn insert_daily_node_stats(
packets.packets_sent,
packets.packets_dropped,
)
.execute(&mut *conn)
.execute(tx.as_mut())
.await?;
}
}
@@ -7,7 +7,7 @@ use crate::{
},
DbPool,
},
mixnet_scraper::helpers::NodeDescriptionResponse,
node_scraper::helpers::NodeDescriptionResponse,
utils::now_utc,
};
use anyhow::Result;
@@ -160,11 +160,11 @@ async fn submit_testrun(
.map(unix_timestamp_to_utc_rfc3339)
.unwrap_or_else(|| String::from("never"));
tracing::info!(
"✅ Testrun row_id {} for gateway {} complete (last assigned {}, created at {})",
gateway_id = gw_identity,
last_assigned = last_assigned,
created_at = created_at,
"✅ Testrun row_id {} for gateway complete",
assigned_testrun.id,
gw_identity,
last_assigned,
created_at
);

Ok(StatusCode::CREATED)
14 changes: 11 additions & 3 deletions nym-node-status-api/nym-node-status-api/src/main.rs
@@ -9,7 +9,7 @@ mod cli;
mod db;
mod http;
mod logging;
mod mixnet_scraper;
mod metrics_scraper;
mod monitor;
mod node_scraper;
mod testruns;
Expand All @@ -35,7 +35,14 @@ async fn main() -> anyhow::Result<()> {
let db_pool = storage.pool_owned();

// Start the node scraper
let scraper = mixnet_scraper::Scraper::new(storage.pool_owned());
let scraper = node_scraper::DescriptionScraper::new(storage.pool_owned());
tokio::spawn(async move {
scraper.start().await;
});
let scraper = node_scraper::PacketScraper::new(
Contributor comment: the nittest of nits, but can you rename this guy? at first glance, before finishing my morning coffee, I got quite confused by having two things defined as scraper (a possible rename is sketched after the spawn block below)

storage.pool_owned(),
args.packet_stats_max_concurrent_tasks,
);
tokio::spawn(async move {
scraper.start().await;
});
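A possible way to address the naming nit, keeping the construction arguments from this diff and only renaming the local bindings; the new names are illustrative, not part of the PR:

// Distinct bindings for the two background tasks (names are hypothetical).
let description_scraper = node_scraper::DescriptionScraper::new(storage.pool_owned());
tokio::spawn(async move {
    description_scraper.start().await;
});

let packet_scraper = node_scraper::PacketScraper::new(
    storage.pool_owned(),
    args.packet_stats_max_concurrent_tasks,
);
tokio::spawn(async move {
    packet_scraper.start().await;
});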
@@ -74,7 +81,8 @@ async fn main() -> anyhow::Result<()> {

let db_pool_scraper = storage.pool_owned();
tokio::spawn(async move {
node_scraper::spawn_in_background(db_pool_scraper, args_clone.nym_api_client_timeout).await;
metrics_scraper::spawn_in_background(db_pool_scraper, args_clone.nym_api_client_timeout)
.await;
tracing::info!("Started metrics scraper task");
});

Expand Down