diff --git a/.editorconfig b/.editorconfig index 9fb23ce694..f0735cedfb 100644 --- a/.editorconfig +++ b/.editorconfig @@ -11,3 +11,6 @@ trim_trailing_whitespace = true insert_final_newline = true indent_style = space indent_size = 4 + +[*.yml] +indent_size = 2 diff --git a/.github/workflows/build-substrate.yml b/.github/workflows/build-substrate.yml index 4ab008fb85..4cadcc7200 100644 --- a/.github/workflows/build-substrate.yml +++ b/.github/workflows/build-substrate.yml @@ -41,4 +41,4 @@ jobs: name: nightly-substrate-binary path: target/release/substrate-node retention-days: 2 - if-no-files-found: error \ No newline at end of file + if-no-files-found: error diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 640c2716fe..2add781112 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -5,7 +5,6 @@ on: # Run at 8am every day, well after the new binary is built - cron: "0 8 * * *" - env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index acc812341a..fdcc2e659f 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -1,25 +1,25 @@ name: Rust on: - push: - # Run jobs when commits are pushed to - # master or release-like branches: - branches: - - master - pull_request: - # Run jobs for any external PR that wants - # to merge to master, too: - branches: - - master + push: + # Run jobs when commits are pushed to + # master or release-like branches: + branches: + - master + pull_request: + # Run jobs for any external PR that wants + # to merge to master, too: + branches: + - master concurrency: - group: ${{ github.ref }}-${{ github.workflow }} - cancel-in-progress: true + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true env: - CARGO_TERM_COLOR: always - # Increase wasm test timeout from 20 seconds (default) to 1 minute. 
- WASM_BINDGEN_TEST_TIMEOUT: 60 + CARGO_TERM_COLOR: always + # Increase wasm test timeout from 20 seconds (default) to 1 minute. + WASM_BINDGEN_TEST_TIMEOUT: 60 jobs: clippy: diff --git a/.github/workflows/update-artifacts.yml b/.github/workflows/update-artifacts.yml new file mode 100644 index 0000000000..fefed66a3d --- /dev/null +++ b/.github/workflows/update-artifacts.yml @@ -0,0 +1,56 @@ +name: Update Artifacts + +on: + workflow_dispatch: # Allows manual triggering + schedule: + - cron: "0 0 * * 1" # weekly on Monday at 00:00 UTC + +concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + +jobs: + check: + name: Renew Artifacts + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + # We run this (up-to-date) node locally to fetch metadata from it for the artifacts + - name: Use substrate-node binary + uses: ./.github/workflows/actions/use-substrate + + - name: Install Rust stable toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Rust Cache + uses: Swatinem/rust-cache@3cf7f8cc28d1b4e7d01e3783be10a97d55d483c8 # v2.7.1 + + # This starts a substrate node and runs a few subxt cli child processes to fetch metadata from it and generate code. + # In particular it generates: + # - 4 metadata (*.scale) files in the `artifacts` directory + # - a polkadot.rs file from the full metadata that is checked in integration tests + # - a polkadot.json in the `artifacts/demo_chain_specs` directory + - name: Fetch Artifacts + run: cargo build --bin artifacts + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v5 + with: + base: master + branch: update-artifacts + commit-message: Update Artifacts (auto-generated) + branch-suffix: timestamp + title: Update Artifacts (auto-generated) + body: | + This PR updates the artifacts by fetching fresh metadata from a substrate node. 
+ It also recreates the polkadot.rs file used in the integration tests. + It was created automatically by a Weekly GitHub Action Cronjob. diff --git a/Cargo.lock b/Cargo.lock index 71312952ec..2e5a5bb427 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -331,6 +331,13 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +[[package]] +name = "artifacts" +version = "0.33.0" +dependencies = [ + "substrate-runner", +] + [[package]] name = "assert_matches" version = "1.5.0" diff --git a/Cargo.toml b/Cargo.toml index 8bd8eb21a2..d52a1126e2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ members = [ "metadata", "signer", "subxt", + "scripts/artifacts" ] # We exclude any crates that would depend on non mutually diff --git a/scripts/artifacts.sh b/scripts/artifacts.sh deleted file mode 100755 index f201e7cf81..0000000000 --- a/scripts/artifacts.sh +++ /dev/null @@ -1,26 +0,0 @@ -# # Generates the 3 metadata files in `/artifacts` and the polkadot.rs file for integration tests -# -# This script is to be run from the root of the repository: `scripts/artifacts.sh` -# -# It expects a local polkadot node to be running a JSON-RPC HTTP server at 127.0.0.1:9933 -# A local polkadot node can be run via: -# ``` -# git clone https://github.com/paritytech/polkadot.git -# cd polkadot -# cargo build --release -# ./target/release/polkadot --dev --tmp -# ``` - -# get the full metadata -cargo run --bin subxt metadata --version 15 > artifacts/polkadot_metadata_full.scale -# use it to generate polkadot.rs -cargo run --bin subxt codegen --file artifacts/polkadot_metadata_full.scale | rustfmt > testing/integration-tests/src/full_client/codegen/polkadot.rs -# generate a metadata file that only contains a few pallets that we need for our examples. 
-cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets "Balances,Staking,System,Multisig,Timestamp,ParaInherent" > artifacts/polkadot_metadata_small.scale -# generate a metadata file that contains no pallets -cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets "" > artifacts/polkadot_metadata_tiny.scale -# generate a metadata file that only contains some custom metadata -cargo run --bin generate-custom-metadata > artifacts/metadata_with_custom_values.scale - -# Generate the polkadot chain spec. -cargo run --features chain-spec-pruning --bin subxt chain-spec --url wss://rpc.polkadot.io:443 --output-file artifacts/demo_chain_specs/polkadot.json --state-root-hash --remove-substitutes diff --git a/scripts/artifacts/.gitignore b/scripts/artifacts/.gitignore new file mode 100644 index 0000000000..c41cc9e35e --- /dev/null +++ b/scripts/artifacts/.gitignore @@ -0,0 +1 @@ +/target \ No newline at end of file diff --git a/scripts/artifacts/Cargo.lock b/scripts/artifacts/Cargo.lock new file mode 100644 index 0000000000..d1fb446b78 --- /dev/null +++ b/scripts/artifacts/Cargo.lock @@ -0,0 +1,14 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "artifacts" +version = "0.1.0" +dependencies = [ + "substrate-runner", +] + +[[package]] +name = "substrate-runner" +version = "0.33.0" diff --git a/scripts/artifacts/Cargo.toml b/scripts/artifacts/Cargo.toml new file mode 100644 index 0000000000..b06ea9da56 --- /dev/null +++ b/scripts/artifacts/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "artifacts" +authors.workspace = true +edition.workspace = true +version.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true +documentation.workspace = true +homepage.workspace = true +description = "Internal tool to regenerate artifacts" + +[dependencies] +substrate-runner = { workspace = true } diff --git a/scripts/artifacts/src/main.rs b/scripts/artifacts/src/main.rs new file mode 100644 index 0000000000..fa3ecb4315 --- /dev/null +++ b/scripts/artifacts/src/main.rs @@ -0,0 +1,89 @@ +use std::{ + fs::File, + process::{Command, Stdio}, +}; + +use substrate_runner::SubstrateNode; + +/// A Script to generate artifacts that are used in the integration tests. +/// +/// Run with `cargo run --bin artifacts` from the root of the repository. 
+fn main() { + let mut node_builder = SubstrateNode::builder(); + node_builder.binary_paths(["substrate-node", "substrate"]); + + // Spawn the node and retrieve a ws URL to it: + let proc = node_builder + .spawn() + .map_err(|e| e.to_string()) + .expect("Could not spawn node"); + let node_url = format!("ws://127.0.0.1:{}", proc.ws_port()); + + // Get the full metadata from the spawned substrate node + Command::make(&format!( + "cargo run --bin subxt metadata --version 15 --url {node_url}" + )) + .out("artifacts/polkadot_metadata_full.scale"); + + // Use it to generate polkadot.rs + Command::make("cargo run --bin subxt codegen --file artifacts/polkadot_metadata_full.scale") + .pipe("rustfmt") + .out("testing/integration-tests/src/full_client/codegen/polkadot.rs"); + + // Generate a metadata file that only contains a few pallets that we need for our examples. + Command::make(r#"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets "Balances,Staking,System,Multisig,Timestamp,ParaInherent""#) + .out("artifacts/polkadot_metadata_small.scale"); + + // Generate a metadata file that contains no pallets + Command::make(r#"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets """#) + .out("artifacts/polkadot_metadata_tiny.scale"); + + // Generate a metadata file that only contains some custom metadata + Command::make("cargo run --bin generate-custom-metadata") + .out("artifacts/metadata_with_custom_values.scale"); + + // Generate the polkadot chain spec. + Command::make("cargo run --features chain-spec-pruning --bin subxt chain-spec --url wss://rpc.polkadot.io:443 --output-file artifacts/demo_chain_specs/polkadot.json --state-root-hash --remove-substitutes").spawn().unwrap().wait().unwrap(); +} + +trait CommandT { + /// Creates a new command, parsing the arg_string provided. + fn make(arg_string: &str) -> Self; + + /// Pipes the output of the current command to the next command. 
+ fn pipe(self, arg_string: &str) -> Self; + + /// Writes bytes from stdout to a new file at path. + fn out(self, path: &str); +} + +impl CommandT for Command { + fn make(arg_string: &str) -> Self { + // Note: simple space splitting, no fancy parsing of e.g. quotes surrounding whitespace. + let mut parts = arg_string.split(' '); + let program = parts.next().expect("no program in command string"); + let mut command = Command::new(program); + for e in parts { + command.arg(e); + } + command + } + + fn pipe(mut self, arg_string: &str) -> Self { + // execute self + let old_cmd = self.stdout(Stdio::piped()).spawn().unwrap(); + let mut next_cmd = Self::make(arg_string); + next_cmd.stdin(Stdio::from(old_cmd.stdout.unwrap())); + next_cmd + } + + fn out(mut self, path: &str) { + dbg!(path); + let file = File::create(path).unwrap(); + self.stdout(Stdio::from(file)) + .spawn() + .unwrap() + .wait() + .unwrap(); + } +} diff --git a/subxt/src/backend/mod.rs b/subxt/src/backend/mod.rs index 3fb9782c53..3db06818b3 100644 --- a/subxt/src/backend/mod.rs +++ b/subxt/src/backend/mod.rs @@ -310,7 +310,7 @@ pub enum TransactionStatus { /// Number of peers it's been broadcast to. num_peers: u32, }, - /// Transaciton is no longer in a best block. + /// Transaction is no longer in a best block. NoLongerInBestBlock, /// Transaction has been included in block with given hash. InBestBlock { diff --git a/subxt/src/backend/unstable/follow_stream_driver.rs b/subxt/src/backend/unstable/follow_stream_driver.rs index 4a38c32e7a..d32c98a104 100644 --- a/subxt/src/backend/unstable/follow_stream_driver.rs +++ b/subxt/src/backend/unstable/follow_stream_driver.rs @@ -241,7 +241,7 @@ impl Shared { } } - // Keep our buffer of ready/block events uptodate: + // Keep our buffer of ready/block events up-to-date: match item { FollowStreamMsg::Ready(sub_id) => { // Set new subscription ID when it comes in. 
diff --git a/subxt/src/backend/unstable/follow_stream_unpin.rs b/subxt/src/backend/unstable/follow_stream_unpin.rs index 00214cb2fe..ca00e37690 100644 --- a/subxt/src/backend/unstable/follow_stream_unpin.rs +++ b/subxt/src/backend/unstable/follow_stream_unpin.rs @@ -212,7 +212,7 @@ impl Stream for FollowStreamUnpin { FollowStreamMsg::Event(FollowEvent::Stop) } - // These events aren't intresting; we just forward them on: + // These events aren't interesting; we just forward them on: FollowStreamMsg::Event(FollowEvent::OperationBodyDone(details)) => { FollowStreamMsg::Event(FollowEvent::OperationBodyDone(details)) } @@ -370,7 +370,7 @@ impl FollowStreamUnpin { // Any new futures pushed above need polling to start. We could // just wait for the next stream event, but let's wake the task to - // have it polled sooner, just incase it's slow to receive things. + // have it polled sooner, just in case it's slow to receive things. waker.wake_by_ref(); } } diff --git a/subxt/src/backend/unstable/rpc_methods.rs b/subxt/src/backend/unstable/rpc_methods.rs index ca1a38691b..331cb5459b 100644 --- a/subxt/src/backend/unstable/rpc_methods.rs +++ b/subxt/src/backend/unstable/rpc_methods.rs @@ -542,7 +542,7 @@ pub enum StorageResultType { ClosestDescendantMerkleValue(Bytes), } -/// The method respose of `chainHead_body`, `chainHead_call` and `chainHead_storage`. +/// The method response of `chainHead_body`, `chainHead_call` and `chainHead_storage`. #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(tag = "result")] diff --git a/subxt/src/client/light_client/builder.rs b/subxt/src/client/light_client/builder.rs index ce0f7c6aca..2733fd4bb4 100644 --- a/subxt/src/client/light_client/builder.rs +++ b/subxt/src/client/light_client/builder.rs @@ -248,13 +248,36 @@ async fn build_client_from_rpc( /// Fetch the chain spec from the URL. 
#[cfg(feature = "jsonrpsee")] async fn fetch_url(url: impl AsRef) -> Result { - use jsonrpsee::core::client::ClientT; + use jsonrpsee::core::client::{ClientT, SubscriptionClientT}; + use jsonrpsee::rpc_params; + use serde_json::value::RawValue; + let client = jsonrpsee_helpers::client(url.as_ref()).await?; - client + let result = client .request("sync_state_genSyncSpec", jsonrpsee::rpc_params![true]) .await - .map_err(|err| Error::Rpc(crate::error::RpcError::ClientError(Box::new(err)))) + .map_err(|err| Error::Rpc(crate::error::RpcError::ClientError(Box::new(err))))?; + + // Subscribe to the finalized heads of the chain. + let mut subscription = SubscriptionClientT::subscribe::, _>( + &client, + "chain_subscribeFinalizedHeads", + rpc_params![], + "chain_unsubscribeFinalizedHeads", + ) + .await + .map_err(|err| Error::Rpc(crate::error::RpcError::ClientError(Box::new(err))))?; + + // We must ensure that the finalized block of the chain is not the block included + // in the chainSpec. + // This is a temporary workaround for: https://github.com/smol-dot/smoldot/issues/1562. + // The first finalized block that is received might be the one + // included in the chainSpec. Decoding the chainSpec for this purpose is too complex. + let _ = subscription.next().await; + let _ = subscription.next().await; + + Ok(result) } cfg_jsonrpsee_native! { diff --git a/subxt/src/config/default_extrinsic_params.rs b/subxt/src/config/default_extrinsic_params.rs index 880591e7f0..dce83853bf 100644 --- a/subxt/src/config/default_extrinsic_params.rs +++ b/subxt/src/config/default_extrinsic_params.rs @@ -100,7 +100,7 @@ impl DefaultExtrinsicParamsBuilder { self } - /// Provide a tip to the block auther using the token denominated by the `asset_id` provided. This + /// Provide a tip to the block author using the token denominated by the `asset_id` provided. 
This /// is not applicable on chains which don't use the `ChargeAssetTxPayment` signed extension; in this /// case, no tip will be given. pub fn tip_of(mut self, tip: u128, asset_id: T::AssetId) -> Self {