feat: initialize Kurdistan SDK - independent fork of Polkadot SDK
This commit is contained in:
@@ -0,0 +1,195 @@
|
||||
// Copyright (C) Parity Technologies (UK) Ltd.
|
||||
// This file is part of Pezkuwi.
|
||||
|
||||
// Pezkuwi is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Pezkuwi is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Pezkuwi. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#![deny(unused_crate_dependencies)]
|
||||
#![deny(missing_docs)]
|
||||
#![deny(clippy::dbg_macro)]
|
||||
|
||||
//! Generative part of `tracing-gum`. See `tracing-gum` for usage documentation.
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::{quote, ToTokens};
|
||||
use syn::{parse2, parse_quote, punctuated::Punctuated, Result, Token};
|
||||
|
||||
mod types;
|
||||
|
||||
use self::types::*;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// Print an error message.
|
||||
#[proc_macro]
|
||||
pub fn error(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
gum(item, Level::Error)
|
||||
}
|
||||
|
||||
/// Print a warning level message.
|
||||
#[proc_macro]
|
||||
pub fn warn(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
gum(item, Level::Warn)
|
||||
}
|
||||
|
||||
/// Print a warning or debug level message depending on their frequency
|
||||
#[proc_macro]
|
||||
pub fn warn_if_frequent(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let ArgsIfFrequent { freq, max_rate, rest } = parse2(item.into()).unwrap();
|
||||
|
||||
let freq_expr = freq.expr;
|
||||
let max_rate_expr = max_rate.expr;
|
||||
let debug: proc_macro2::TokenStream = gum(rest.clone().into(), Level::Debug).into();
|
||||
let warn: proc_macro2::TokenStream = gum(rest.into(), Level::Warn).into();
|
||||
|
||||
let stream = quote! {
|
||||
if #freq_expr .is_frequent(#max_rate_expr) {
|
||||
#warn
|
||||
} else {
|
||||
#debug
|
||||
}
|
||||
};
|
||||
|
||||
stream.into()
|
||||
}
|
||||
|
||||
/// Print a info level message.
|
||||
#[proc_macro]
|
||||
pub fn info(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
gum(item, Level::Info)
|
||||
}
|
||||
|
||||
/// Print a debug level message.
|
||||
#[proc_macro]
|
||||
pub fn debug(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
gum(item, Level::Debug)
|
||||
}
|
||||
|
||||
/// Print a trace level message.
|
||||
#[proc_macro]
|
||||
pub fn trace(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
gum(item, Level::Trace)
|
||||
}
|
||||
|
||||
/// One-size-fits all internal implementation that produces the actual code.
|
||||
pub(crate) fn gum(item: proc_macro::TokenStream, level: Level) -> proc_macro::TokenStream {
|
||||
let item: TokenStream = item.into();
|
||||
|
||||
let res = expander::Expander::new("gum")
|
||||
.add_comment("Generated overseer code by `gum::warn!(..)`".to_owned())
|
||||
// `dry=true` until rust-analyzer can selectively disable features so it's
|
||||
// not all red squiggles. Originally: `!cfg!(feature = "expand")`
|
||||
// ISSUE: https://github.com/rust-lang/rust-analyzer/issues/11777
|
||||
.dry(true)
|
||||
.verbose(false)
|
||||
.fmt(expander::Edition::_2021)
|
||||
.maybe_write_to_out_dir(impl_gum2(item, level))
|
||||
.expect("Expander does not fail due to IO in OUT_DIR. qed");
|
||||
|
||||
res.unwrap_or_else(|err| err.to_compile_error()).into()
|
||||
}
|
||||
|
||||
/// Does the actual parsing and token generation based on `proc_macro2` types.
|
||||
///
|
||||
/// Required for unit tests.
|
||||
pub(crate) fn impl_gum2(orig: TokenStream, level: Level) -> Result<TokenStream> {
|
||||
let args: Args = parse2(orig)?;
|
||||
|
||||
let krate = support_crate();
|
||||
let span = Span::call_site();
|
||||
|
||||
let Args { target, comma, mut values, fmt } = args;
|
||||
|
||||
// find a value or alias called `candidate_hash`.
|
||||
let maybe_candidate_hash = values.iter_mut().find(|value| value.as_ident() == "candidate_hash");
|
||||
|
||||
if let Some(kv) = maybe_candidate_hash {
|
||||
let (ident, rhs_expr, replace_with) = match kv {
|
||||
Value::Alias(alias) => {
|
||||
let ValueWithAliasIdent { alias, marker, expr, .. } = alias.clone();
|
||||
(
|
||||
alias.clone(),
|
||||
expr.to_token_stream(),
|
||||
Some(Value::Value(ValueWithFormatMarker {
|
||||
marker,
|
||||
ident: alias,
|
||||
dot: None,
|
||||
inner: Punctuated::new(),
|
||||
})),
|
||||
)
|
||||
},
|
||||
Value::Value(value) => (value.ident.clone(), value.ident.to_token_stream(), None),
|
||||
};
|
||||
|
||||
// we generate a local value with the same alias name
|
||||
// so replace the expr with just a value
|
||||
if let Some(replace_with) = replace_with {
|
||||
let _old = std::mem::replace(kv, replace_with);
|
||||
};
|
||||
|
||||
// Inject the addition `traceID = % trace_id` identifier
|
||||
// while maintaining trailing comma semantics.
|
||||
let had_trailing_comma = values.trailing_punct();
|
||||
if !had_trailing_comma {
|
||||
values.push_punct(Token);
|
||||
}
|
||||
|
||||
values.push_value(parse_quote! {
|
||||
traceID = % trace_id
|
||||
});
|
||||
if had_trailing_comma {
|
||||
values.push_punct(Token);
|
||||
}
|
||||
|
||||
Ok(quote! {
|
||||
if #krate :: enabled!(#target #comma #level) {
|
||||
use ::std::ops::Deref;
|
||||
|
||||
// create a scoped let binding of something that `deref`s to
|
||||
// `Hash`.
|
||||
let value = #rhs_expr;
|
||||
let value = &value;
|
||||
let value: & #krate:: Hash = value.deref();
|
||||
// Do the `deref` to `Hash` and convert to a `TraceIdentifier`.
|
||||
let #ident: #krate:: Hash = * value;
|
||||
let trace_id = #krate:: hash_to_trace_identifier ( #ident );
|
||||
#krate :: event!(
|
||||
#target #comma #level, #values #fmt
|
||||
)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Ok(quote! {
|
||||
#krate :: event!(
|
||||
#target #comma #level, #values #fmt
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the support crate path.
|
||||
fn support_crate() -> TokenStream {
|
||||
let support_crate_name = if cfg!(test) {
|
||||
quote! {crate}
|
||||
} else {
|
||||
use proc_macro_crate::{crate_name, FoundCrate};
|
||||
let crate_name = crate_name("tracing-gum")
|
||||
.expect("Support crate `tracing-gum` is present in `Cargo.toml`. qed");
|
||||
match crate_name {
|
||||
FoundCrate::Itself => quote! {crate},
|
||||
FoundCrate::Name(name) => Ident::new(&name, Span::call_site()).to_token_stream(),
|
||||
}
|
||||
};
|
||||
support_crate_name
|
||||
}
|
||||
@@ -0,0 +1,208 @@
|
||||
// Copyright (C) Parity Technologies (UK) Ltd.
|
||||
// This file is part of Pezkuwi.
|
||||
|
||||
// Pezkuwi is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Pezkuwi is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Pezkuwi. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#![allow(clippy::dbg_macro)]
|
||||
|
||||
use super::*;
|
||||
|
||||
use assert_matches::assert_matches;
|
||||
use quote::quote;
|
||||
|
||||
#[test]
fn smoke() {
	// A representative call with target, aliased values, format string and
	// trailing args must parse and generate successfully.
	let input = quote! {
		target: "xyz",
		x = Foo::default(),
		z = ?Game::new(),
		"Foo {p} x {q}",
		p,
		q,
	};
	assert_matches!(impl_gum2(input, Level::Warn), Ok(_));
}
|
||||
|
||||
mod roundtrip {
	use super::*;

	/// Assert that parsing `$ts` as `$whatty` is lossless: print the typed
	/// value back to tokens, re-parse it, and require both passes to print
	/// identically.
	macro_rules! roundtrip {
		($whatty:ty | $ts:expr) => {
			let input = $ts;
			assert_matches!(
				::syn::parse2::<$whatty>(input),
				Ok(typed) => {
					// First print-back of the parsed value.
					let downgraded = dbg!(typed.to_token_stream());
					// Re-parse the printed tokens and compare textual forms.
					assert_matches!(::syn::parse2::<$whatty>(downgraded),
					Ok(reparsed) => {
						assert_eq!(
							dbg!(typed.into_token_stream().to_string()),
							reparsed.into_token_stream().to_string(),
						)
					});
				}
			);
		}
	}

	// `target: <lit>` survives a parse → print → parse cycle.
	#[test]
	fn u_target() {
		roundtrip! {Target | quote! {target: "foo" } };
	}

	// All three marker forms: `?`, `%`, and absent.
	#[test]
	fn u_format_marker() {
		roundtrip! {FormatMarker | quote! {?} };
		roundtrip! {FormatMarker | quote! {%} };
		roundtrip! {FormatMarker | quote! {} };
	}

	// Aliased values with and without format markers.
	#[test]
	fn u_value_w_alias() {
		roundtrip! {Value | quote! {x = y} };
		roundtrip! {Value | quote! {f = f} };
		roundtrip! {Value | quote! {ff = ?ff} };
		roundtrip! {Value | quote! {fff = %fff} };
	}

	// Bare, marker-prefixed values — both via `Value` and directly via
	// `ValueWithFormatMarker`.
	#[test]
	fn u_value_bare_w_format_marker() {
		roundtrip! {Value | quote! {?q} };
		roundtrip! {Value | quote! {%etcpp} };

		roundtrip! {ValueWithFormatMarker | quote! {?q} };
		roundtrip! {ValueWithFormatMarker | quote! {%etcpp} };
	}

	// Dotted field-access chains after the leading identifier.
	#[test]
	fn u_value_bare_w_field_access() {
		roundtrip! {ValueWithFormatMarker | quote! {a.b} };
		roundtrip! {ValueWithFormatMarker | quote! {a.b.cdef.ghij} };
		roundtrip! {ValueWithFormatMarker | quote! {?a.b.c} };
	}

	// Full argument lists: with target, with aliased candidate_hash, and
	// format-string-only.
	#[test]
	fn u_args() {
		roundtrip! {Args | quote! {target: "yes", k=?v, candidate_hash, "But why? {a}", a} };
		roundtrip! {Args | quote! {target: "also", candidate_hash = ?c_hash, "But why?"} };
		roundtrip! {Args | quote! {"Nope? {}", candidate_hash} };
	}

	#[test]
	fn e2e() {
		roundtrip! {Args | quote! {target: "yes", k=?v, candidate_hash, "But why? {a}", a} };
		roundtrip! {Args | quote! {target: "also", candidate_hash = ?c_hash, "But why?"} };
		roundtrip! {Args | quote! { "Nope? But yes {}", candidate_hash} };
	}

	// `candidate_hash` aliased to an expression: generation must succeed
	// (output inspected via `dbg!`, no structural assertion).
	#[test]
	fn sample_w_candidate_hash_aliased() {
		dbg!(impl_gum2(
			quote! {
				target: "bar",
				a = a,
				candidate_hash = %Hash::repeat_byte(0xF0),
				b = ?Y::default(),
				c = ?a,
				"xxx"
			},
			Level::Info
		)
		.unwrap()
		.to_string());
	}

	// `candidate_hash = ?candidate_hash` — a redundant self-alias — must
	// still generate.
	#[test]
	fn sample_w_candidate_hash_aliased_unnecessary() {
		assert_matches!(impl_gum2(
			quote! {
				"bar",
				a = a,
				candidate_hash = ?candidate_hash,
				b = ?Y::default(),
				c = ?a,
				"xxx {} {}",
				a,
				a,
			},
			Level::Info
		), Ok(x) => {
			dbg!(x.to_string())
		});
	}

	// Format string without further format arguments.
	#[test]
	fn no_fmt_str_args() {
		assert_matches!(impl_gum2(
			quote! {
				target: "bar",
				a = a,
				candidate_hash = ?candidate_hash,
				b = ?Y::default(),
				c = a,
				"xxx",
			},
			Level::Trace
		), Ok(x) => {
			dbg!(x.to_string())
		});
	}

	// No format string at all — values only.
	#[test]
	fn no_fmt_str() {
		assert_matches!(impl_gum2(
			quote! {
				target: "bar",
				a = a,
				candidate_hash = ?candidate_hash,
				b = ?Y::default(),
				c = a,
			},
			Level::Trace
		), Ok(x) => {
			dbg!(x.to_string())
		});
	}

	// A bare field access (`?y.x`) used as a key-value.
	#[test]
	fn field_member_as_kv() {
		assert_matches!(impl_gum2(
			quote! {
				target: "z",
				?y.x,
			},
			Level::Info
		), Ok(x) => {
			dbg!(x.to_string())
		});
	}

	// Deeply nested field access chain as a key-value.
	#[test]
	fn nested_field_member_as_kv() {
		assert_matches!(impl_gum2(
			quote! {
				target: "z",
				?a.b.c.d.e.f.g,
			},
			Level::Info
		), Ok(x) => {
			dbg!(x.to_string())
		});
	}
}
|
||||
@@ -0,0 +1,382 @@
|
||||
// Copyright (C) Parity Technologies (UK) Ltd.
|
||||
// This file is part of Pezkuwi.
|
||||
|
||||
// Pezkuwi is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Pezkuwi is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Pezkuwi. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use super::*;
|
||||
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
Token,
|
||||
};
|
||||
|
||||
/// Custom keywords recognized by the argument parsers in this module.
pub(crate) mod kw {
	// `target: <expr>` prefix, see `Target`.
	syn::custom_keyword!(target);
	// `freq: <expr>` argument of `warn_if_frequent!`, see `Freq`.
	syn::custom_keyword!(freq);
	// `max_rate: <expr>` argument of `warn_if_frequent!`, see `MaxRate`.
	syn::custom_keyword!(max_rate);
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct Target {
|
||||
kw: kw::target,
|
||||
colon: Token![:],
|
||||
expr: syn::Expr,
|
||||
}
|
||||
|
||||
impl Parse for Target {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Self { kw: input.parse()?, colon: input.parse()?, expr: input.parse()? })
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Target {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let kw = &self.kw;
|
||||
let colon = &self.colon;
|
||||
let expr = &self.expr;
|
||||
tokens.extend(quote! {
|
||||
#kw #colon #expr
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum FormatMarker {
|
||||
Questionmark(Token![?]),
|
||||
Percentage(Token![%]),
|
||||
None,
|
||||
}
|
||||
|
||||
impl Parse for FormatMarker {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let lookahead = input.lookahead1();
|
||||
if lookahead.peek(Token![?]) {
|
||||
input.parse().map(Self::Questionmark)
|
||||
} else if lookahead.peek(Token![%]) {
|
||||
input.parse().map(Self::Percentage)
|
||||
} else {
|
||||
Ok(Self::None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for FormatMarker {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.extend(match self {
|
||||
Self::Percentage(p) => p.to_token_stream(),
|
||||
Self::Questionmark(q) => q.to_token_stream(),
|
||||
Self::None => TokenStream::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct ValueWithAliasIdent {
|
||||
pub alias: Ident,
|
||||
pub eq: Token![=],
|
||||
pub marker: FormatMarker,
|
||||
pub expr: syn::Expr,
|
||||
}
|
||||
|
||||
impl Parse for ValueWithAliasIdent {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Self {
|
||||
alias: input.parse()?,
|
||||
eq: input.parse()?,
|
||||
marker: input.parse()?,
|
||||
expr: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ValueWithAliasIdent {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let alias = &self.alias;
|
||||
let eq = &self.eq;
|
||||
let marker = &self.marker;
|
||||
let expr = &self.expr;
|
||||
tokens.extend(quote! {
|
||||
#alias #eq #marker #expr
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A bare value — optionally prefixed by a format marker — with an optional
/// `.`-separated chain of field accesses, e.g. `?a.b.c`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ValueWithFormatMarker {
	// `?`, `%`, or no marker.
	pub marker: FormatMarker,
	// Leading identifier of the access chain.
	pub ident: Ident,
	// The first dot, present iff any field accesses follow.
	pub dot: Option<Token![.]>,
	// The members after the first dot, `.`-separated.
	pub inner: Punctuated<syn::Member, Token![.]>,
}

impl Parse for ValueWithFormatMarker {
	fn parse(input: ParseStream) -> Result<Self> {
		let marker = input.parse::<FormatMarker>()?;
		let ident = input.parse::<syn::Ident>()?;

		let mut inner = Punctuated::<syn::Member, Token![.]>::new();

		let lookahead = input.lookahead1();
		let dot = if lookahead.peek(Token![.]) {
			let dot = Some(input.parse::<Token![.]>()?);

			// Consume `member (. member)*` — each iteration parses one
			// member, then keeps going only while another dot follows.
			loop {
				let member = input.parse::<syn::Member>()?;
				inner.push_value(member);

				let lookahead = input.lookahead1();
				if !lookahead.peek(Token![.]) {
					break;
				}

				let token = input.parse::<Token![.]>()?;
				inner.push_punct(token);
			}

			dot
		} else {
			None
		};
		Ok(Self { marker, ident, dot, inner })
	}
}

impl ToTokens for ValueWithFormatMarker {
	fn to_tokens(&self, tokens: &mut TokenStream) {
		// Emit the pieces back verbatim, in source order.
		let marker = &self.marker;
		let ident = &self.ident;
		let dot = &self.dot;
		let inner = &self.inner;
		tokens.extend(quote! {
			#marker #ident #dot #inner
		})
	}
}
|
||||
|
||||
/// A value as passed to the macro, appearing _before_ the format string.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
||||
pub(crate) enum Value {
|
||||
Alias(ValueWithAliasIdent),
|
||||
Value(ValueWithFormatMarker),
|
||||
}
|
||||
|
||||
impl Value {
|
||||
pub fn as_ident(&self) -> &Ident {
|
||||
match self {
|
||||
Self::Alias(alias) => &alias.alias,
|
||||
Self::Value(value) => &value.ident,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Value {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.fork().parse::<ValueWithAliasIdent>().is_ok() {
|
||||
input.parse().map(Self::Alias)
|
||||
} else if input.fork().parse::<ValueWithFormatMarker>().is_ok() {
|
||||
input.parse().map(Self::Value)
|
||||
} else {
|
||||
Err(syn::Error::new(Span::call_site(), "Neither value nor aliased value."))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Value {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.extend(match self {
|
||||
Self::Alias(alias) => quote! { #alias },
|
||||
Self::Value(value) => quote! { #value },
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Defines the token stream consisting of a format string and it's arguments.
|
||||
///
|
||||
/// Attention: Currently the correctness of the arguments is not checked as part
|
||||
/// of the parsing logic.
|
||||
/// It would be possible to use `parse_fmt_str:2.0.0`
|
||||
/// to do so and possibly improve the error message here - for the time being
|
||||
/// it's not clear if this yields any practical benefits, and is hence
|
||||
/// left for future consideration.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct FmtGroup {
|
||||
pub format_str: syn::LitStr,
|
||||
pub maybe_comma: Option<Token![,]>,
|
||||
pub rest: TokenStream,
|
||||
}
|
||||
|
||||
impl Parse for FmtGroup {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let format_str = input
|
||||
.parse()
|
||||
.map_err(|e| syn::Error::new(e.span(), "Expected format specifier"))?;
|
||||
|
||||
let (maybe_comma, rest) = if input.peek(Token![,]) {
|
||||
let comma = input.parse::<Token![,]>()?;
|
||||
let rest = input.parse()?;
|
||||
(Some(comma), rest)
|
||||
} else {
|
||||
(None, TokenStream::new())
|
||||
};
|
||||
|
||||
if !input.is_empty() {
|
||||
return Err(syn::Error::new(input.span(), "Unexpected data, expected closing `)`."));
|
||||
}
|
||||
|
||||
Ok(Self { format_str, maybe_comma, rest })
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for FmtGroup {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let format_str = &self.format_str;
|
||||
let maybe_comma = &self.maybe_comma;
|
||||
let rest = &self.rest;
|
||||
|
||||
tokens.extend(quote! { #format_str #maybe_comma #rest });
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct Freq {
|
||||
kw: kw::freq,
|
||||
colon: Token![:],
|
||||
pub expr: syn::Expr,
|
||||
}
|
||||
|
||||
impl Parse for Freq {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Self { kw: input.parse()?, colon: input.parse()?, expr: input.parse()? })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct MaxRate {
|
||||
kw: kw::max_rate,
|
||||
colon: Token![:],
|
||||
pub expr: syn::Expr,
|
||||
}
|
||||
|
||||
impl Parse for MaxRate {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Self { kw: input.parse()?, colon: input.parse()?, expr: input.parse()? })
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ArgsIfFrequent {
|
||||
pub freq: Freq,
|
||||
pub max_rate: MaxRate,
|
||||
pub rest: TokenStream,
|
||||
}
|
||||
|
||||
impl Parse for ArgsIfFrequent {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let freq = input.parse()?;
|
||||
let _: Token![,] = input.parse()?;
|
||||
let max_rate = input.parse()?;
|
||||
let _: Token![,] = input.parse()?;
|
||||
let rest = input.parse()?;
|
||||
|
||||
Ok(Self { freq, max_rate, rest })
|
||||
}
|
||||
}
|
||||
|
||||
/// Full set of arguments as provided to the `gum::warn!` call.
#[derive(Debug, Clone)]
pub(crate) struct Args {
	// Optional `target: <expr>` prefix.
	pub target: Option<Target>,
	// Comma separating the target from the rest; present iff `target` is.
	pub comma: Option<Token![,]>,
	// Key-value / bare values appearing before the format string.
	pub values: Punctuated<Value, Token![,]>,
	// Optional trailing format string plus its arguments.
	pub fmt: Option<FmtGroup>,
}

impl Parse for Args {
	fn parse(input: ParseStream) -> Result<Self> {
		// An optional `target: …,` prefix; the comma is mandatory when the
		// target is present.
		let lookahead = input.lookahead1();
		let (target, comma) = if lookahead.peek(kw::target) {
			let target = input.parse()?;
			let comma = input.parse::<Token![,]>()?;
			(Some(target), Some(comma))
		} else {
			(None, None)
		};

		// Greedily consume comma-separated `Value`s. A failed speculative
		// parse on a fork, or a missing comma, ends the list without
		// consuming from the real stream.
		let mut values = Punctuated::new();
		loop {
			if input.fork().parse::<Value>().is_ok() {
				values.push_value(input.parse::<Value>()?);
			} else {
				break;
			}
			if input.peek(Token![,]) {
				values.push_punct(input.parse::<Token![,]>()?);
			} else {
				break;
			}
		}

		// A format group is only parsed when input remains AND the value
		// list is empty or ended with a comma (i.e. the last `Value` did not
		// already consume up to the end).
		let fmt = if values.empty_or_trailing() && !input.is_empty() {
			let fmt = input.parse::<FmtGroup>()?;
			Some(fmt)
		} else {
			None
		};

		Ok(Self { target, comma, values, fmt })
	}
}

impl ToTokens for Args {
	fn to_tokens(&self, tokens: &mut TokenStream) {
		// Emit the pieces back verbatim, in source order.
		let target = &self.target;
		let comma = &self.comma;
		let values = &self.values;
		let fmt = &self.fmt;
		tokens.extend(quote! {
			#target #comma #values #fmt
		})
	}
}
|
||||
|
||||
/// Support tracing levels, passed to `tracing::event!`
|
||||
///
|
||||
/// Note: Not parsed from the input stream, but implicitly defined
|
||||
/// by the macro name, i.e. `level::debug!` is `Level::Debug`.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum Level {
|
||||
Error,
|
||||
Warn,
|
||||
Info,
|
||||
Debug,
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl ToTokens for Level {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let span = Span::call_site();
|
||||
let variant = match self {
|
||||
Self::Error => Ident::new("ERROR", span),
|
||||
Self::Warn => Ident::new("WARN", span),
|
||||
Self::Info => Ident::new("INFO", span),
|
||||
Self::Debug => Ident::new("DEBUG", span),
|
||||
Self::Trace => Ident::new("TRACE", span),
|
||||
};
|
||||
let krate = support_crate();
|
||||
tokens.extend(quote! {
|
||||
#krate :: Level :: #variant
|
||||
})
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user