Files
pezkuwi-sdk/pezkuwi/node/metrics/src/tests.rs
T
pezkuwichain b78fc90fd8 fix(metrics): make runtime_can_publish_metrics test more robust
- Wait for 4 finalized blocks instead of 2 (more time for bitfield processing)
- Add retry loop (3 attempts, 2s delay) for metric propagation through wasm tracing
- Replace bare unwrap() with descriptive assertion message
- Lower threshold from > 1 to > 0 for bitfield counter
- Print available teyrchain/pezkuwi metrics on failure for diagnostics
2026-02-22 22:16:57 +03:00

130 lines
4.5 KiB
Rust

// Copyright (C) Parity Technologies (UK) Ltd. and Dijital Kurdistan Tech Institute
// This file is part of Pezkuwi.
// Pezkuwi is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Pezkuwi is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Pezkuwi. If not, see <http://www.gnu.org/licenses/>.
//! Pezkuwi runtime metrics integration test.
use http_body_util::BodyExt;
use hyper::Uri;
use hyper_util::{client::legacy::Client, rt::TokioExecutor};
use pezkuwi_primitives::metric_definitions::TEYRCHAIN_INHERENT_DATA_BITFIELDS_PROCESSED;
use pezkuwi_test_service::{node_config, run_validator_node, test_prometheus_config};
use pezsp_keyring::Sr25519Keyring::*;
use std::collections::HashMap;
/// Port Alice's Prometheus exporter is bound to in this test (passed to
/// `test_prometheus_config` below and used to build the scrape URI).
const DEFAULT_PROMETHEUS_PORT: u16 = 9616;
#[tokio::test(flavor = "multi_thread")]
async fn runtime_can_publish_metrics() {
	// End-to-end check that a runtime-emitted metric reaches the node's
	// Prometheus endpoint: start a two-validator network (Alice + Bob), wait
	// for finality, then scrape Alice and assert the bitfields-processed
	// counter is non-zero.
	let mut alice_config =
		node_config(|| {}, tokio::runtime::Handle::current(), Alice, Vec::new(), true);
	// Enable Prometheus metrics for Alice.
	alice_config.prometheus_config = Some(test_prometheus_config(DEFAULT_PROMETHEUS_PORT));
	let mut builder = pezsc_cli::LoggerBuilder::new("");
	// Enable profiling with `wasm_tracing` target.
	builder.with_profiling(Default::default(), String::from("wasm_tracing=trace"));
	// Setup the runtime metrics provider.
	// NOTE(review): presumably this hook must be installed before the nodes
	// start so that early runtime tracing events are captured — confirm.
	crate::logger_hook()(&mut builder, &alice_config);
	builder.init().expect("Failed to set up the logger");
	// Start validator Alice.
	let alice = run_validator_node(alice_config, None).await;
	// Bob bootstraps off Alice's listen address.
	let bob_config =
		node_config(|| {}, tokio::runtime::Handle::current(), Bob, vec![alice.addr.clone()], true);
	// Start validator Bob.
	let _bob = run_validator_node(bob_config, None).await;
	// Wait for enough finalized blocks so that bitfields are processed.
	// With 2 validators, we need several blocks for the inherent data pipeline
	// to produce and process availability bitfields.
	alice.wait_for_finalized_blocks(4).await;
	let metrics_uri = format!("http://localhost:{}/metrics", DEFAULT_PROMETHEUS_PORT);
	let metric_name = TEYRCHAIN_INHERENT_DATA_BITFIELDS_PROCESSED.name.to_owned();
	// Retry scraping a few times — the metric may take a moment to propagate
	// through the wasm tracing pipeline after blocks are finalized.
	// Up to 3 attempts, sleeping 2s between them; exits early once the
	// counter is positive.
	let mut metrics = HashMap::new();
	for attempt in 0..3 {
		metrics = scrape_prometheus_metrics(&metrics_uri).await;
		if metrics.get(&metric_name).copied().unwrap_or(0) > 0 {
			break;
		}
		if attempt < 2 {
			tokio::time::sleep(std::time::Duration::from_secs(2)).await;
		}
	}
	// Print all teyrchain/pezkuwi metrics for diagnostics on failure
	// (eprintln! output is only shown by the test harness when the test
	// fails or is run with --nocapture).
	let teyrchain_metrics: Vec<_> = metrics
		.iter()
		.filter(|(k, _)| k.contains("teyrchain") || k.contains("pezkuwi"))
		.collect();
	eprintln!("Available teyrchain/pezkuwi metrics ({}):", teyrchain_metrics.len());
	for (k, v) in &teyrchain_metrics {
		eprintln!(" {} = {}", k, v);
	}
	// A missing metric counts as 0 so the assertion message stays informative
	// instead of panicking on unwrap.
	let bitfields_value = metrics.get(&metric_name).copied().unwrap_or(0);
	assert!(
		bitfields_value > 0,
		"Expected metric '{}' to be > 0 but got {}. \
		Total metrics scraped: {}. Teyrchain metrics found: {}.",
		metric_name,
		bitfields_value,
		metrics.len(),
		teyrchain_metrics.len(),
	);
}
/// Fetch the Prometheus text exposition from `metrics_uri` and return a map
/// of metric name to integral sample value.
///
/// Counter, gauge and untyped samples are kept (values truncated to `u64`);
/// histogram and summary samples are discarded. Panics on any network,
/// decoding or parse failure — acceptable in a test helper.
async fn scrape_prometheus_metrics(metrics_uri: &str) -> HashMap<String, u64> {
	let uri = Uri::try_from(metrics_uri).expect("bad URI");
	let client =
		Client::builder(TokioExecutor::new()).build_http::<http_body_util::Full<hyper::body::Bytes>>();
	let res = client.get(uri).await.expect("GET request failed");
	// Drain the streamed response body into one contiguous byte buffer,
	// then decode it as UTF-8 text.
	let bytes = res
		.into_body()
		.collect()
		.await
		.expect("can't get body as bytes")
		.to_bytes();
	let body = String::from_utf8(bytes.to_vec()).expect("body is not an UTF8 string");
	// The parser consumes an iterator of `Result<String, _>` lines.
	let lines = body.lines().map(|line| Ok(line.to_owned())).collect::<Vec<_>>();
	let scrape = prometheus_parse::Scrape::parse(lines.into_iter())
		.expect("Scraper failed to parse Prometheus metrics");
	use prometheus_parse::Value::{Counter, Gauge, Untyped};
	let mut samples = HashMap::new();
	for prometheus_parse::Sample { metric, value, .. } in scrape.samples {
		// Or-pattern keeps the three numeric sample kinds; later samples with
		// the same name overwrite earlier ones, matching the original
		// `collect()` into a `HashMap`.
		if let Counter(v) | Gauge(v) | Untyped(v) = value {
			samples.insert(metric, v as u64);
		}
	}
	samples
}