Merge pull request #204 from JohnHeitmann/master

Implemented disallow_unknown
This commit is contained in:
Erick Tryzelaar
2016-01-16 13:32:20 -08:00
8 changed files with 366 additions and 10 deletions
+67 -1
View File
@@ -1,6 +1,13 @@
use std::default;
use token::{Token, assert_tokens, assert_ser_tokens, assert_de_tokens};
use token::{
Error,
Token,
assert_tokens,
assert_ser_tokens,
assert_de_tokens,
assert_de_tokens_error
};
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Default {
@@ -9,6 +16,12 @@ struct Default {
a2: i32,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
// With `deny_unknown_fields`, deserialization reports an error when the
// input contains a field name not declared on this struct, instead of
// silently skipping it (see `test_ignore_unknown` below).
#[serde(deny_unknown_fields)]
struct DisallowUnknown {
    a1: i32,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Rename {
a1: i32,
@@ -86,6 +99,59 @@ fn test_default() {
);
}
#[test]
// Exercises both sides of the unknown-field policy:
// * `Default` (no serde attribute) must silently skip unknown map entries,
//   including ones whose values are nested sequences;
// * `DisallowUnknown` (`deny_unknown_fields`) must fail with
//   `UnknownFieldError` naming the offending key.
fn test_ignore_unknown() {
    // 'Default' allows unknown. Basic smoke test of ignore...
    assert_de_tokens(
        &Default { a1: 1, a2: 2},
        vec![
            // Unknown keys appear before, between, and after the real
            // fields to cover every skipping position.
            Token::StructStart("Default", Some(5)),
            Token::MapSep,
            Token::Str("whoops1"),
            Token::I32(2),

            Token::MapSep,
            Token::Str("a1"),
            Token::I32(1),

            // An unknown key whose value is a whole sequence: the
            // deserializer must skip the nested tokens, not just one.
            Token::MapSep,
            Token::Str("whoops2"),
            Token::SeqStart(Some(1)),
            Token::SeqSep,
            Token::I32(2),
            Token::SeqEnd,

            Token::MapSep,
            Token::Str("a2"),
            Token::I32(2),

            Token::MapSep,
            Token::Str("whoops3"),
            Token::I32(2),
            Token::MapEnd,
        ]
    );

    // `deny_unknown_fields` turns the same situation into a hard error
    // carrying the unknown key's name.
    assert_de_tokens_error::<DisallowUnknown>(
        vec![
            Token::StructStart("DisallowUnknown", Some(2)),
            Token::MapSep,
            Token::Str("a1"),
            Token::I32(1),

            Token::MapSep,
            Token::Str("whoops"),
            Token::I32(2),
            Token::MapEnd,
        ],
        Error::UnknownFieldError("whoops".to_owned())
    );
}
#[test]
fn test_rename() {
assert_tokens(
+5 -1
View File
@@ -7,7 +7,7 @@ use num::rational::Ratio;
use serde::de::{Deserializer, Visitor};
use token::{Token, assert_de_tokens};
use token::{Error, Token, assert_de_tokens, assert_de_tokens_ignore};
//////////////////////////////////////////////////////////////////////////
@@ -39,7 +39,11 @@ macro_rules! declare_test {
#[test]
fn $name() {
$(
// Test ser/de roundtripping
assert_de_tokens(&$value, $tokens);
// Test that the tokens are ignorable
assert_de_tokens_ignore($tokens);
)+
}
}
+53 -2
View File
@@ -310,7 +310,7 @@ impl<'a, I> ser::Serializer for Serializer<I>
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug)]
enum Error {
pub enum Error {
SyntaxError,
EndOfStreamError,
UnknownFieldError(String),
@@ -644,7 +644,7 @@ impl<'a, I> de::MapVisitor for DeserializerMapVisitor<'a, I>
match self.de.tokens.peek() {
Some(&Token::MapSep) => {
self.de.tokens.next();
self.len = self.len.map(|len| len - 1);
self.len = self.len.map(|len| if len > 0 { len - 1} else { 0 });
Ok(Some(try!(de::Deserialize::deserialize(self.de))))
}
Some(&Token::MapEnd) => Ok(None),
@@ -799,6 +799,57 @@ pub fn assert_de_tokens<T>(value: &T, tokens: Vec<Token<'static>>)
assert_eq!(de.tokens.next(), None);
}
// Expect an error deserializing tokens into a T
pub fn assert_de_tokens_error<T>(tokens: Vec<Token<'static>>, error: Error)
where T: de::Deserialize + PartialEq + fmt::Debug,
{
let mut de = Deserializer::new(tokens.into_iter());
let v: Result<T, Error> = de::Deserialize::deserialize(&mut de);
assert_eq!(v.as_ref(), Err(&error));
}
/// Tests that `ignorable_tokens` can be skipped when it appears as the
/// value of an unknown key inside an otherwise normal struct's map.
///
/// The stream is spliced in as the value of an extra `"ignored"` entry of
/// an `IgnoreBase` map; deserialization must still yield `IgnoreBase { a: 1 }`
/// and consume every token.
pub fn assert_de_tokens_ignore(ignorable_tokens: Vec<Token<'static>>) {
    #[derive(PartialEq, Debug, Deserialize)]
    struct IgnoreBase {
        a: i32,
    }

    // Build the full stream in place: prologue, the tokens under test as
    // the value of the unknown "ignored" key, then the closing MapEnd.
    let mut tokens: Vec<Token<'static>> = vec![
        Token::MapStart(Some(2)),
        Token::MapSep,
        Token::Str("a"),
        Token::I32(1),
        Token::MapSep,
        Token::Str("ignored")
    ];
    tokens.extend(ignorable_tokens.into_iter());
    tokens.push(Token::MapEnd);

    let mut de = Deserializer::new(tokens.into_iter());
    let v: Result<IgnoreBase, Error> = de::Deserialize::deserialize(&mut de);

    // We run this test on every token stream for convenience, but
    // some token streams don't make sense embedded as a map value,
    // so we ignore those. SyntaxError is the real sign of trouble.
    if let Err(Error::UnexpectedToken(_)) = v {
        return;
    }

    assert_eq!(v, Ok(IgnoreBase { a: 1 }));
    // Every token must have been consumed.
    assert_eq!(de.tokens.next(), None);
}
pub fn assert_tokens<T>(value: &T, tokens: Vec<Token<'static>>)
where T: ser::Serialize + de::Deserialize + PartialEq + fmt::Debug,
{