Hashing proc macro utils (#9875)

* hashing macro

* fmt

* use in easy place, and fix blake sizes

* fix

* Fixes, docs.
Allow ident as input.

* fix doc tests

* update error in test (nmapkey and key are same type).

* hashing crates under sp_core

* Doc updates and format.

* use all existing hashing functions.

* return array of u8

* Update primitives/core/hashing/proc-macro/src/impls.rs

Co-authored-by: Bastian Köcher <bkchr@users.noreply.github.com>

* ToTokens for an array of u8

* fix

* re

* Improve impls

* complete doc tests

* fmt

* fix doctest format

* fix ui test (nmap key type alias)

Co-authored-by: Bastian Köcher <bkchr@users.noreply.github.com>
Co-authored-by: Bastian Köcher <info@kchr.de>
This commit is contained in:
cheme
2021-11-02 10:35:23 +01:00
committed by GitHub
parent 098815948a
commit 471e9cfbf1
14 changed files with 547 additions and 143 deletions
@@ -0,0 +1,124 @@
// This file is part of Substrate.
// Copyright (C) 2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use quote::quote;
use syn::parse::{Parse, ParseStream};
use proc_macro::TokenStream;
/// Bytes parsed from a single macro argument: an array literal of `u8`,
/// an identifier (its UTF-8 name bytes), or a byte-string literal.
pub(super) struct InputBytes(pub Vec<u8>);
/// All comma-separated arguments of a macro invocation, one byte vector
/// per argument.
pub(super) struct MultipleInputBytes(pub Vec<Vec<u8>>);
impl MultipleInputBytes {
	/// Concatenate all parsed arguments into a single byte vector.
	///
	/// Returns an empty vector when the macro was invoked with no argument.
	pub(super) fn concatenated(self) -> Vec<u8> {
		let mut parts = self.0.into_iter();
		// Reuse the first argument's allocation as the accumulator;
		// an absent first argument yields an empty `Vec`.
		let mut result = parts.next().unwrap_or_default();
		for part in parts {
			result.extend(part);
		}
		result
	}
}
impl Parse for InputBytes {
fn parse(input: ParseStream) -> syn::Result<Self> {
match syn::ExprArray::parse(input) {
Ok(array) => {
let mut bytes = Vec::<u8>::new();
for expr in array.elems.iter() {
match expr {
syn::Expr::Lit(lit) => match &lit.lit {
syn::Lit::Int(b) => bytes.push(b.base10_parse()?),
syn::Lit::Byte(b) => bytes.push(b.value()),
_ =>
return Err(syn::Error::new(
input.span(),
"Expected array of u8 elements.".to_string(),
)),
},
_ =>
return Err(syn::Error::new(
input.span(),
"Expected array of u8 elements.".to_string(),
)),
}
}
return Ok(InputBytes(bytes))
},
Err(_e) => (),
}
// use rust names as a vec of their utf8 bytecode.
match syn::Ident::parse(input) {
Ok(ident) => return Ok(InputBytes(ident.to_string().as_bytes().to_vec())),
Err(_e) => (),
}
Ok(InputBytes(syn::LitByteStr::parse(input)?.value()))
}
}
impl Parse for MultipleInputBytes {
	/// Parse a comma-separated (optionally trailing-comma) list of byte
	/// arguments, each one parsed as an [`InputBytes`].
	fn parse(input: ParseStream) -> syn::Result<Self> {
		let parsed =
			syn::punctuated::Punctuated::<InputBytes, syn::token::Comma>::parse_terminated(input)?;
		let mut groups = Vec::with_capacity(parsed.len());
		for item in parsed {
			groups.push(item.0);
		}
		Ok(MultipleInputBytes(groups))
	}
}
/// Expand to the xxHash 64-bit digest of `bytes` as an array-literal token stream.
pub(super) fn twox_64(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::twox_64(&bytes))
}
/// Expand to the xxHash 128-bit digest of `bytes` as an array-literal token stream.
pub(super) fn twox_128(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::twox_128(&bytes))
}
/// Expand to the Blake2b 512-bit digest of `bytes` as an array-literal token stream.
pub(super) fn blake2b_512(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::blake2_512(&bytes))
}
/// Expand to the Blake2b 256-bit digest of `bytes` as an array-literal token stream.
pub(super) fn blake2b_256(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::blake2_256(&bytes))
}
/// Expand to the Blake2b 64-bit digest of `bytes` as an array-literal token stream.
pub(super) fn blake2b_64(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::blake2_64(&bytes))
}
/// Expand to the Keccak 256-bit digest of `bytes` as an array-literal token stream.
pub(super) fn keccak_256(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::keccak_256(&bytes))
}
/// Expand to the Keccak 512-bit digest of `bytes` as an array-literal token stream.
pub(super) fn keccak_512(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::keccak_512(&bytes))
}
/// Expand to the SHA-2 256-bit digest of `bytes` as an array-literal token stream.
pub(super) fn sha2_256(bytes: Vec<u8>) -> TokenStream {
	bytes_to_array(sp_core_hashing::sha2_256(&bytes))
}
/// Render an iterator of bytes as the token stream of an array literal,
/// e.g. `[ 1u8, 2u8 ]`, ready to be returned from a proc macro.
fn bytes_to_array(bytes: impl IntoIterator<Item = u8>) -> TokenStream {
	let elements = bytes.into_iter();
	let tokens = quote!([ #( #elements ),* ]);
	tokens.into()
}
@@ -0,0 +1,129 @@
// This file is part of Substrate.
// Copyright (C) 2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Macros to calculate constant hash bytes result.
//!
//! Macros from this crate apply a specific hash function to their input.
//! Input can be literal byte array as `b"content"` or array of bytes
//! as `[1, 2, 3]`.
//! A Rust identifier can also be used; in that case its UTF-8 string
//! byte representation is hashed: for instance if the ident is `MyStruct`,
//! then `b"MyStruct"` will be hashed.
//! If multiple comma-separated arguments are passed, they are concatenated
//! and then hashed.
//!
//! Examples:
//!
//! ```rust
//! assert_eq!(
//! sp_core_hashing_proc_macro::blake2b_256!(b"test"),
//! sp_core_hashing::blake2_256(b"test"),
//! );
//! assert_eq!(
//! sp_core_hashing_proc_macro::blake2b_256!([1u8]),
//! sp_core_hashing::blake2_256(&[1u8]),
//! );
//! assert_eq!(
//! sp_core_hashing_proc_macro::blake2b_256!([1, 2, 3]),
//! sp_core_hashing::blake2_256(&[1, 2, 3]),
//! );
//! assert_eq!(
//! sp_core_hashing_proc_macro::blake2b_256!(identifier),
//! sp_core_hashing::blake2_256(b"identifier"),
//! );
//! assert_eq!(
//! sp_core_hashing_proc_macro::blake2b_256!(identifier, b"/string"),
//! sp_core_hashing::blake2_256(b"identifier/string"),
//! );
//! ```
mod impls;
use impls::MultipleInputBytes;
use proc_macro::TokenStream;
/// Apply a Blake2 64-bit hash of bytes parameter, outputs a `[u8; 8]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn blake2b_64(input: TokenStream) -> TokenStream {
	impls::blake2b_64(syn::parse_macro_input!(input as MultipleInputBytes).concatenated())
}
/// Apply a Blake2 256-bit hash of bytes parameter, outputs a `[u8; 32]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn blake2b_256(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::blake2b_256(bytes)
}
/// Apply a Blake2 512-bit hash of bytes parameter, outputs a `[u8; 64]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn blake2b_512(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::blake2b_512(bytes)
}
/// Apply a XX 64-bit hash on its bytes parameter, outputs a `[u8; 8]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn twox_64(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::twox_64(bytes)
}
/// Apply a XX 128-bit hash on its bytes parameter, outputs a `[u8; 16]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn twox_128(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::twox_128(bytes)
}
/// Apply a keccak 256-bit hash on its bytes parameter, outputs a `[u8; 32]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn keccak_256(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::keccak_256(bytes)
}
/// Apply a keccak 512-bit hash on its bytes parameter, outputs a `[u8; 64]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn keccak_512(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::keccak_512(bytes)
}
/// Apply a sha2 256-bit hash on its bytes parameter, outputs a `[u8; 32]`.
/// Multiple inputs are concatenated before hashing.
/// Input can be identifier (name of identifier as bytes is used), byte string or
/// array of bytes.
#[proc_macro]
pub fn sha2_256(input: TokenStream) -> TokenStream {
	let bytes = syn::parse_macro_input!(input as MultipleInputBytes).concatenated();
	impls::sha2_256(bytes)
}