diff --git a/serde/src/de/mod.rs b/serde/src/de/mod.rs
index e90ced27..a590f4c6 100644
--- a/serde/src/de/mod.rs
+++ b/serde/src/de/mod.rs
@@ -1011,6 +1011,12 @@ pub trait Deserializer<'de>: Sized {
     fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>;
+
+    /// Returns whether the serialized data is human readable or not.
+    ///
+    /// Some formats are not intended to be human readable. For these formats
+    /// a type being serialized may opt to serialize into a more compact form.
+    fn is_human_readable(&self) -> bool { true }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
diff --git a/serde/src/ser/mod.rs b/serde/src/ser/mod.rs
index 4356f930..fd7daf05 100644
--- a/serde/src/ser/mod.rs
+++ b/serde/src/ser/mod.rs
@@ -1363,6 +1363,12 @@ pub trait Serializer: Sized {
     fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
     where
         T: Display;
+
+    /// Returns whether the data format is human readable or not.
+    ///
+    /// Some formats are not intended to be human readable. For these formats
+    /// a type being serialized may opt to serialize into a more compact form.
+    fn is_human_readable(&self) -> bool { true }
 }
 
 /// Returned from `Serializer::serialize_seq`.
diff --git a/serde_test/src/assert.rs b/serde_test/src/assert.rs
index 2af2ebf5..31eb622f 100644
--- a/serde_test/src/assert.rs
+++ b/serde_test/src/assert.rs
@@ -84,7 +84,17 @@ pub fn assert_ser_tokens<T>(value: &T, tokens: &[Token])
 where
     T: Serialize,
 {
-    let mut ser = Serializer::new(tokens);
+    assert_ser_tokens_readable(value, tokens, true)
+}
+
+/// Asserts that `value` serializes to the given `tokens`.
+///
+/// See: `assert_ser_tokens`
+pub fn assert_ser_tokens_readable<T>(value: &T, tokens: &[Token], human_readable: bool)
+where
+    T: Serialize,
+{
+    let mut ser = Serializer::readable(tokens, human_readable);
     match value.serialize(&mut ser) {
         Ok(_) => {}
         Err(err) => panic!("value failed to serialize: {}", err),
@@ -183,7 +193,14 @@ pub fn assert_de_tokens<'de, T>(value: &T, tokens: &'de [Token])
 where
     T: Deserialize<'de> + PartialEq + Debug,
 {
-    let mut de = Deserializer::new(tokens);
+    assert_de_tokens_readable(value, tokens, true)
+}
+
+pub fn assert_de_tokens_readable<'de, T>(value: &T, tokens: &'de [Token], human_readable: bool)
+where
+    T: Deserialize<'de> + PartialEq + Debug,
+{
+    let mut de = Deserializer::readable(tokens, human_readable);
     match T::deserialize(&mut de) {
         Ok(v) => assert_eq!(v, *value),
         Err(e) => panic!("tokens failed to deserialize: {}", e),
diff --git a/serde_test/src/de.rs b/serde_test/src/de.rs
index 9e465a73..4b0bc8ab 100644
--- a/serde_test/src/de.rs
+++ b/serde_test/src/de.rs
@@ -16,6 +16,7 @@ use token::Token;
 #[derive(Debug)]
 pub struct Deserializer<'de> {
     tokens: &'de [Token],
+    is_human_readable: bool,
 }
 
 macro_rules! assert_next_token {
@@ -48,7 +49,11 @@ macro_rules! end_of_tokens {
 
 impl<'de> Deserializer<'de> {
     pub fn new(tokens: &'de [Token]) -> Self {
-        Deserializer { tokens: tokens }
+        Deserializer::readable(tokens, true)
+    }
+
+    pub fn readable(tokens: &'de [Token], is_human_readable: bool) -> Self {
+        Deserializer { tokens: tokens, is_human_readable: is_human_readable }
     }
 
     fn peek_token_opt(&self) -> Option<Token> {
@@ -364,6 +369,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
             _ => self.deserialize_any(visitor),
         }
     }
+
+    fn is_human_readable(&self) -> bool {
+        self.is_human_readable
+    }
 }
 
 //////////////////////////////////////////////////////////////////////////
diff --git a/serde_test/src/lib.rs b/serde_test/src/lib.rs
index 391837d1..0ccf578a 100644
--- a/serde_test/src/lib.rs
+++ b/serde_test/src/lib.rs
@@ -168,8 +168,8 @@ mod token;
 mod assert;
 
 pub use token::Token;
-pub use assert::{assert_tokens, assert_ser_tokens, assert_ser_tokens_error, assert_de_tokens,
-                 assert_de_tokens_error};
+pub use assert::{assert_tokens, assert_ser_tokens, assert_ser_tokens_error, assert_ser_tokens_readable,
+                 assert_de_tokens, assert_de_tokens_error, assert_de_tokens_readable};
 
 // Not public API.
 #[doc(hidden)]
diff --git a/serde_test/src/ser.rs b/serde_test/src/ser.rs
index 31ba72ee..c7d64d29 100644
--- a/serde_test/src/ser.rs
+++ b/serde_test/src/ser.rs
@@ -15,12 +15,17 @@ use token::Token;
 #[derive(Debug)]
 pub struct Serializer<'a> {
     tokens: &'a [Token],
+    is_human_readable: bool,
 }
 
 impl<'a> Serializer<'a> {
     /// Creates the serializer.
     pub fn new(tokens: &'a [Token]) -> Self {
-        Serializer { tokens: tokens }
+        Serializer::readable(tokens, true)
+    }
+
+    pub fn readable(tokens: &'a [Token], is_human_readable: bool) -> Self {
+        Serializer { tokens: tokens, is_human_readable: is_human_readable }
     }
 
     /// Pulls the next token off of the serializer, ignoring it.
@@ -282,6 +287,10 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
             Ok(Variant { ser: self, end: Token::StructVariantEnd })
         }
     }
+
+    fn is_human_readable(&self) -> bool {
+        self.is_human_readable
+    }
 }
 
 pub struct Variant<'s, 'a: 's> {
diff --git a/test_suite/tests/test_de.rs b/test_suite/tests/test_de.rs
index 9fbfed46..e1a015e2 100644
--- a/test_suite/tests/test_de.rs
+++ b/test_suite/tests/test_de.rs
@@ -28,7 +28,7 @@ extern crate fnv;
 use self::fnv::FnvHasher;
 
 extern crate serde_test;
-use self::serde_test::{Token, assert_de_tokens, assert_de_tokens_error};
+use self::serde_test::{Token, assert_de_tokens, assert_de_tokens_error, assert_de_tokens_readable};
 
 #[macro_use]
 mod macros;
@@ -1078,3 +1078,39 @@ declare_error_tests! {
         "invalid type: sequence, expected unit struct UnitStruct",
     }
 }
+
+#[derive(Debug, PartialEq)]
+struct CompactBinary((u8, u8));
+
+impl<'de> serde::Deserialize<'de> for CompactBinary {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        if deserializer.is_human_readable() {
+            <(u8, u8)>::deserialize(deserializer).map(CompactBinary)
+        } else {
+            <&[u8]>::deserialize(deserializer).map(|bytes| {
+                CompactBinary((bytes[0], bytes[1]))
+            })
+        }
+    }
+}
+
+#[test]
+fn test_human_readable() {
+    assert_de_tokens(
+        &CompactBinary((1, 2)),
+        &[
+            Token::Tuple { len: 2},
+            Token::U8(1),
+            Token::U8(2),
+            Token::TupleEnd,
+        ],
+    );
+    assert_de_tokens_readable(
+        &CompactBinary((1, 2)),
+        &[Token::BorrowedBytes(&[1, 2])],
+        false,
+    );
+}
diff --git a/test_suite/tests/test_ser.rs b/test_suite/tests/test_ser.rs
index 9dfb7053..a623e5d6 100644
--- a/test_suite/tests/test_ser.rs
+++ b/test_suite/tests/test_ser.rs
@@ -23,7 +23,8 @@ use std::str;
 
 extern crate serde;
 extern crate serde_test;
-use self::serde_test::{Token, assert_ser_tokens, assert_ser_tokens_error};
+use self::serde_test::{Token, assert_ser_tokens, assert_ser_tokens_error,
+                       assert_ser_tokens_readable};
 
 extern crate fnv;
 use self::fnv::FnvHasher;
@@ -474,3 +475,26 @@ fn test_enum_skipped() {
         "the enum variant Enum::SkippedMap cannot be serialized",
     );
 }
+
+struct CompactBinary(String);
+
+impl serde::Serialize for CompactBinary {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer
+    {
+        if serializer.is_human_readable() {
+            serializer.serialize_str(&self.0)
+        } else {
+            serializer.serialize_bytes(self.0.as_bytes())
+        }
+    }
+}
+
+#[test]
+fn test_human_readable() {
+    let value = CompactBinary("test".to_string());
+    assert_ser_tokens(&value, &[Token::String("test")]);
+
+    assert_ser_tokens_readable(&value, &[Token::Bytes(b"test")], false);
+}