Compare commits

...

34 Commits

Author SHA1 Message Date
David Tolnay 4de20bd48d Release 1.0.59 2018-05-21 03:51:32 -07:00
David Tolnay 922fadf7e3 Merge pull request #1270 from serde-rs/transparent
Transparent attribute to specify that representation is the same as its only field
2018-05-20 22:17:07 -07:00
David Tolnay 6bbc415fdf Resolve conflicts between transparent and proc-macro2 upgrade 2018-05-20 21:57:23 -07:00
David Tolnay b13875dd97 Add compile-fail tests for transparent error messages 2018-05-20 21:55:50 -07:00
David Tolnay ac1b25e91d Improve error messages related to transparent 2018-05-20 21:55:48 -07:00
David Tolnay 1335f85213 Test transparent attribute 2018-05-20 21:55:21 -07:00
David Tolnay 0a4d536253 Implement transparent deserialize 2018-05-20 21:55:20 -07:00
David Tolnay 7dba1e303d Implement transparent serialize 2018-05-20 21:55:19 -07:00
David Tolnay 0ea9d73fdf Validate use of serde(transparent) 2018-05-20 21:55:18 -07:00
David Tolnay a64aaeeb3b Parse serde(transparent) container attribute 2018-05-20 21:55:16 -07:00
David Tolnay 320897679b Merge pull request #1273 from serde-rs/up
Update to proc-macro2 0.4
2018-05-20 21:54:07 -07:00
David Tolnay 3d5141a2f1 Update to proc-macro2 0.4 2018-05-20 20:55:14 -07:00
David Tolnay 656ea96c65 Remove reminders about flatten in a sequence 2018-05-20 12:42:40 -07:00
David Tolnay 5302482596 Simplify deserialize_seq_in_place 2018-05-20 12:40:35 -07:00
David Tolnay 7ada27014d Track field index in internal AST 2018-05-20 12:40:28 -07:00
David Tolnay 4fa2a50f62 Format with rustfmt 0.7.0 2018-05-19 17:33:30 -07:00
David Tolnay 0c5f20c148 Release 1.0.58 2018-05-19 17:30:39 -07:00
David Tolnay aa2bbb4704 Merge pull request #1269 from serde-rs/with
Fix generated code for deserializing untagged newtype variant
2018-05-19 17:30:30 -07:00
David Tolnay 16d1265e17 Fix generated code for deserializing untagged newtype variant 2018-05-19 17:20:14 -07:00
David Tolnay f09320b293 Remove unused methods on FromPrimitive trait 2018-05-19 16:29:25 -07:00
David Tolnay 3b4803115b Release 1.0.57 2018-05-18 23:31:33 -07:00
David Tolnay fa5f0f4541 Remove EnumSet from documentation
These impls were removed in Serde 0.9.6.
2018-05-18 21:26:23 -07:00
David Tolnay 4b7f55bd42 Merge pull request #1265 from serde-rs/nonzero
Remove impls for NonZero<T>
2018-05-18 21:16:03 -07:00
David Tolnay 593bcb087d Remove impls for NonZero<T> 2018-05-18 21:06:14 -07:00
David Tolnay f58000cb41 Release 1.0.56 2018-05-18 12:37:06 -07:00
David Tolnay 01b86d5ce4 Merge pull request #1259 from serde-rs/build
Build script that does nothing
2018-05-18 12:35:13 -07:00
David Tolnay c80f9238d7 Link to i128 announcement 2018-05-18 12:34:36 -07:00
David Tolnay 62850bf832 Disable nightly proc-macro build 2018-05-18 12:24:03 -07:00
David Tolnay 9f114548f4 Revert "Use version_check crate instead of handcrafted version parsing"
This reverts commit 8890061f82.
2018-05-18 11:48:05 -07:00
Oliver Schneider 8890061f82 Use version_check crate instead of handcrafted version parsing 2018-05-18 14:41:40 +02:00
David Tolnay 2c05518810 Build script that does nothing
Eventually we will want a build script that enables Serde impls for i128
and u128. As a first step here is a build script that does nothing to
see whether we can roll this out without breaking anyone's workflow,
without having a supported feature at stake in the event that it needs
to be rolled back.
2018-05-15 14:41:38 -07:00
David Tolnay 4aeb0df88f Add a button to clarify any other type of issue is welcome 2018-05-12 11:27:14 -07:00
David Tolnay 6550231a51 Release 1.0.55 2018-05-12 09:47:43 -07:00
David Tolnay ea0012fc5a Support deserializing bytes as the flattened identifier 2018-05-12 09:44:04 -07:00
33 changed files with 650 additions and 425 deletions
+7
View File
@@ -0,0 +1,7 @@
---
name: Anything else!
about: Whatever is on your mind
---
+3 -2
View File
@@ -1,6 +1,6 @@
[package] [package]
name = "serde" name = "serde"
version = "1.0.54" # remember to update html_root_url version = "1.0.59" # remember to update html_root_url
authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"] authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
description = "A generic serialization/deserialization framework" description = "A generic serialization/deserialization framework"
@@ -10,7 +10,8 @@ documentation = "https://docs.serde.rs/serde/"
keywords = ["serde", "serialization", "no_std"] keywords = ["serde", "serialization", "no_std"]
categories = ["encoding"] categories = ["encoding"]
readme = "README.md" readme = "README.md"
include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] include = ["Cargo.toml", "build.rs", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
build = "build.rs"
[badges] [badges]
travis-ci = { repository = "serde-rs/serde" } travis-ci = { repository = "serde-rs/serde" }
+41
View File
@@ -0,0 +1,41 @@
use std::env;
use std::process::Command;
use std::str::{self, FromStr};
/// Extract the minor version number from `rustc --version` output,
/// e.g. `"rustc 1.26.0 (a77568041 2018-05-07)"` -> `Some(26)`.
/// Returns `None` if the string does not have the expected shape.
fn rustc_minor_version(version: &str) -> Option<u32> {
    // Splitting "rustc 1.26.0 ..." on '.' yields "rustc 1", "26", "0 ...".
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    let minor = pieces.next()?;
    u32::from_str(minor).ok()
}

/// Build script: detect whether the compiler supports 128-bit integers
/// and emit a `cfg` flag for the crate to use.
///
/// Every failure path bails out quietly (no cfg emitted) — this is a
/// best-effort probe, and the build must not break on unusual toolchains.
fn main() {
    // Cargo sets RUSTC to the compiler it is using for this build.
    let rustc = match env::var_os("RUSTC") {
        Some(rustc) => rustc,
        None => return,
    };

    let output = match Command::new(rustc).arg("--version").output() {
        Ok(output) => output,
        Err(_) => return,
    };

    let version = match str::from_utf8(&output.stdout) {
        Ok(version) => version,
        Err(_) => return,
    };

    // 128-bit integers stabilized in Rust 1.26:
    // https://blog.rust-lang.org/2018/05/10/Rust-1.26.html
    if let Some(minor) = rustc_minor_version(version) {
        if minor >= 26 {
            println!("cargo:rustc-cfg=integer128");
        }
    }
}
-26
View File
@@ -39,12 +39,10 @@ macro_rules! uint_to {
} }
pub trait FromPrimitive: Sized { pub trait FromPrimitive: Sized {
fn from_isize(n: isize) -> Option<Self>;
fn from_i8(n: i8) -> Option<Self>; fn from_i8(n: i8) -> Option<Self>;
fn from_i16(n: i16) -> Option<Self>; fn from_i16(n: i16) -> Option<Self>;
fn from_i32(n: i32) -> Option<Self>; fn from_i32(n: i32) -> Option<Self>;
fn from_i64(n: i64) -> Option<Self>; fn from_i64(n: i64) -> Option<Self>;
fn from_usize(n: usize) -> Option<Self>;
fn from_u8(n: u8) -> Option<Self>; fn from_u8(n: u8) -> Option<Self>;
fn from_u16(n: u16) -> Option<Self>; fn from_u16(n: u16) -> Option<Self>;
fn from_u32(n: u32) -> Option<Self>; fn from_u32(n: u32) -> Option<Self>;
@@ -54,10 +52,6 @@ pub trait FromPrimitive: Sized {
macro_rules! impl_from_primitive_for_int { macro_rules! impl_from_primitive_for_int {
($t:ident) => { ($t:ident) => {
impl FromPrimitive for $t { impl FromPrimitive for $t {
#[inline]
fn from_isize(n: isize) -> Option<Self> {
int_to_int!($t, n)
}
#[inline] #[inline]
fn from_i8(n: i8) -> Option<Self> { fn from_i8(n: i8) -> Option<Self> {
int_to_int!($t, n) int_to_int!($t, n)
@@ -75,10 +69,6 @@ macro_rules! impl_from_primitive_for_int {
int_to_int!($t, n) int_to_int!($t, n)
} }
#[inline] #[inline]
fn from_usize(n: usize) -> Option<Self> {
uint_to!($t, n)
}
#[inline]
fn from_u8(n: u8) -> Option<Self> { fn from_u8(n: u8) -> Option<Self> {
uint_to!($t, n) uint_to!($t, n)
} }
@@ -101,10 +91,6 @@ macro_rules! impl_from_primitive_for_int {
macro_rules! impl_from_primitive_for_uint { macro_rules! impl_from_primitive_for_uint {
($t:ident) => { ($t:ident) => {
impl FromPrimitive for $t { impl FromPrimitive for $t {
#[inline]
fn from_isize(n: isize) -> Option<Self> {
int_to_uint!($t, n)
}
#[inline] #[inline]
fn from_i8(n: i8) -> Option<Self> { fn from_i8(n: i8) -> Option<Self> {
int_to_uint!($t, n) int_to_uint!($t, n)
@@ -122,10 +108,6 @@ macro_rules! impl_from_primitive_for_uint {
int_to_uint!($t, n) int_to_uint!($t, n)
} }
#[inline] #[inline]
fn from_usize(n: usize) -> Option<Self> {
uint_to!($t, n)
}
#[inline]
fn from_u8(n: u8) -> Option<Self> { fn from_u8(n: u8) -> Option<Self> {
uint_to!($t, n) uint_to!($t, n)
} }
@@ -148,10 +130,6 @@ macro_rules! impl_from_primitive_for_uint {
macro_rules! impl_from_primitive_for_float { macro_rules! impl_from_primitive_for_float {
($t:ident) => { ($t:ident) => {
impl FromPrimitive for $t { impl FromPrimitive for $t {
#[inline]
fn from_isize(n: isize) -> Option<Self> {
Some(n as Self)
}
#[inline] #[inline]
fn from_i8(n: i8) -> Option<Self> { fn from_i8(n: i8) -> Option<Self> {
Some(n as Self) Some(n as Self)
@@ -169,10 +147,6 @@ macro_rules! impl_from_primitive_for_float {
Some(n as Self) Some(n as Self)
} }
#[inline] #[inline]
fn from_usize(n: usize) -> Option<Self> {
Some(n as Self)
}
#[inline]
fn from_u8(n: u8) -> Option<Self> { fn from_u8(n: u8) -> Option<Self> {
Some(n as Self) Some(n as Self)
} }
+2 -19
View File
@@ -1411,7 +1411,8 @@ impl<'de> Visitor<'de> for OsStringVisitor {
use std::os::windows::ffi::OsStringExt; use std::os::windows::ffi::OsStringExt;
match try!(data.variant()) { match try!(data.variant()) {
(OsStringKind::Windows, v) => v.newtype_variant::<Vec<u16>>() (OsStringKind::Windows, v) => v
.newtype_variant::<Vec<u16>>()
.map(|vec| OsString::from_wide(&vec)), .map(|vec| OsString::from_wide(&vec)),
(OsStringKind::Unix, _) => Err(Error::custom( (OsStringKind::Unix, _) => Err(Error::custom(
"cannot deserialize Unix OS string on Windows", "cannot deserialize Unix OS string on Windows",
@@ -2005,24 +2006,6 @@ where
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
#[cfg(feature = "unstable")]
#[allow(deprecated)]
impl<'de, T> Deserialize<'de> for NonZero<T>
where
T: Deserialize<'de> + Zeroable,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = try!(Deserialize::deserialize(deserializer));
match NonZero::new(value) {
Some(nonzero) => Ok(nonzero),
None => Err(Error::custom("expected a non-zero value")),
}
}
}
macro_rules! nonzero_integers { macro_rules! nonzero_integers {
( $( $T: ty, )+ ) => { ( $( $T: ty, )+ ) => {
$( $(
-2
View File
@@ -84,7 +84,6 @@
//! - LinkedList\<T\> //! - LinkedList\<T\>
//! - VecDeque\<T\> //! - VecDeque\<T\>
//! - Vec\<T\> //! - Vec\<T\>
//! - EnumSet\<T\> (unstable)
//! - **Zero-copy types**: //! - **Zero-copy types**:
//! - &str //! - &str
//! - &[u8] //! - &[u8]
@@ -98,7 +97,6 @@
//! - Path //! - Path
//! - PathBuf //! - PathBuf
//! - Range\<T\> //! - Range\<T\>
//! - NonZero\<T\> (unstable, deprecated)
//! - num::NonZero* (unstable) //! - num::NonZero* (unstable)
//! - **Net types**: //! - **Net types**:
//! - IpAddr //! - IpAddr
+2 -6
View File
@@ -79,14 +79,14 @@
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
// Serde types in rustdoc of other crates get linked to here. // Serde types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/serde/1.0.54")] #![doc(html_root_url = "https://docs.rs/serde/1.0.59")]
// Support using Serde without the standard library! // Support using Serde without the standard library!
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
// Unstable functionality only if the user asks for it. For tracking and // Unstable functionality only if the user asks for it. For tracking and
// discussion of these features please refer to this issue: // discussion of these features please refer to this issue:
// //
// https://github.com/serde-rs/serde/issues/812 // https://github.com/serde-rs/serde/issues/812
#![cfg_attr(feature = "unstable", feature(nonzero, specialization))] #![cfg_attr(feature = "unstable", feature(specialization))]
#![cfg_attr(feature = "alloc", feature(alloc))] #![cfg_attr(feature = "alloc", feature(alloc))]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))] #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Whitelisted clippy lints // Whitelisted clippy lints
@@ -213,10 +213,6 @@ mod lib {
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use std::time::{Duration, SystemTime, UNIX_EPOCH}; pub use std::time::{Duration, SystemTime, UNIX_EPOCH};
#[cfg(feature = "unstable")]
#[allow(deprecated)]
pub use core::nonzero::{NonZero, Zeroable};
#[cfg(feature = "unstable")] #[cfg(feature = "unstable")]
pub use core::num::{NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize}; pub use core::num::{NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize};
} }
+15 -9
View File
@@ -276,6 +276,8 @@ mod content {
match *self { match *self {
Content::Str(x) => Some(x), Content::Str(x) => Some(x),
Content::String(ref x) => Some(x), Content::String(ref x) => Some(x),
Content::Bytes(x) => str::from_utf8(x).ok(),
Content::ByteBuf(ref x) => str::from_utf8(x).ok(),
_ => None, _ => None,
} }
} }
@@ -1423,6 +1425,8 @@ mod content {
match self.content { match self.content {
Content::String(v) => visitor.visit_string(v), Content::String(v) => visitor.visit_string(v),
Content::Str(v) => visitor.visit_borrowed_str(v), Content::Str(v) => visitor.visit_borrowed_str(v),
Content::ByteBuf(v) => visitor.visit_byte_buf(v),
Content::Bytes(v) => visitor.visit_borrowed_bytes(v),
_ => Err(self.invalid_type(&visitor)), _ => Err(self.invalid_type(&visitor)),
} }
} }
@@ -2123,6 +2127,8 @@ mod content {
match *self.content { match *self.content {
Content::String(ref v) => visitor.visit_str(v), Content::String(ref v) => visitor.visit_str(v),
Content::Str(v) => visitor.visit_borrowed_str(v), Content::Str(v) => visitor.visit_borrowed_str(v),
Content::ByteBuf(ref v) => visitor.visit_bytes(v),
Content::Bytes(v) => visitor.visit_borrowed_bytes(v),
_ => Err(self.invalid_type(&visitor)), _ => Err(self.invalid_type(&visitor)),
} }
} }
@@ -2317,7 +2323,8 @@ mod content {
T: de::DeserializeSeed<'de>, T: de::DeserializeSeed<'de>,
{ {
match self.iter.next() { match self.iter.next() {
Some(value) => seed.deserialize(ContentRefDeserializer::new(value)) Some(value) => seed
.deserialize(ContentRefDeserializer::new(value))
.map(Some), .map(Some),
None => Ok(None), None => Ok(None),
} }
@@ -2815,14 +2822,13 @@ where
// about. In case we do not know which fields we want, we take them all. // about. In case we do not know which fields we want, we take them all.
let use_item = match *item { let use_item = match *item {
None => false, None => false,
Some((ref c, _)) => { Some((ref c, _)) => c.as_str().map_or(self.fields.is_none(), |key| {
c.as_str() match self.fields {
.map_or(self.fields.is_none(), |key| match self.fields { None => true,
None => true, Some(fields) if fields.contains(&key) => true,
Some(fields) if fields.contains(&key) => true, _ => false,
_ => false, }
}) }),
}
}; };
if use_item { if use_item {
+2 -1
View File
@@ -947,7 +947,8 @@ mod content {
where where
T: Serialize, T: Serialize,
{ {
let key = self.key let key = self
.key
.take() .take()
.expect("serialize_value called before serialize_key"); .expect("serialize_value called before serialize_key");
let value = try!(value.serialize(ContentSerializer::<E>::new())); let value = try!(value.serialize(ContentSerializer::<E>::new()));
+2 -15
View File
@@ -408,20 +408,6 @@ where
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
#[cfg(feature = "unstable")]
#[allow(deprecated)]
impl<T> Serialize for NonZero<T>
where
T: Serialize + Zeroable + Clone,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.clone().get().serialize(serializer)
}
}
macro_rules! nonzero_integers { macro_rules! nonzero_integers {
( $( $T: ident, )+ ) => { ( $( $T: ident, )+ ) => {
$( $(
@@ -549,7 +535,8 @@ impl Serialize for SystemTime {
S: Serializer, S: Serializer,
{ {
use super::SerializeStruct; use super::SerializeStruct;
let duration_since_epoch = self.duration_since(UNIX_EPOCH) let duration_since_epoch = self
.duration_since(UNIX_EPOCH)
.expect("SystemTime must be later than UNIX_EPOCH"); .expect("SystemTime must be later than UNIX_EPOCH");
let mut state = try!(serializer.serialize_struct("SystemTime", 2)); let mut state = try!(serializer.serialize_struct("SystemTime", 2));
try!(state.serialize_field("secs_since_epoch", &duration_since_epoch.as_secs())); try!(state.serialize_field("secs_since_epoch", &duration_since_epoch.as_secs()));
-2
View File
@@ -81,7 +81,6 @@
//! - LinkedList\<T\> //! - LinkedList\<T\>
//! - VecDeque\<T\> //! - VecDeque\<T\>
//! - Vec\<T\> //! - Vec\<T\>
//! - EnumSet\<T\> (unstable)
//! - **FFI types**: //! - **FFI types**:
//! - CStr //! - CStr
//! - CString //! - CString
@@ -93,7 +92,6 @@
//! - Path //! - Path
//! - PathBuf //! - PathBuf
//! - Range\<T\> //! - Range\<T\>
//! - NonZero\<T\> (unstable, deprecated)
//! - num::NonZero* (unstable) //! - num::NonZero* (unstable)
//! - **Net types**: //! - **Net types**:
//! - IpAddr //! - IpAddr
+4 -4
View File
@@ -1,6 +1,6 @@
[package] [package]
name = "serde_derive" name = "serde_derive"
version = "1.0.54" # remember to update html_root_url version = "1.0.59" # remember to update html_root_url
authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"] authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]" description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]"
@@ -23,9 +23,9 @@ name = "serde_derive"
proc-macro = true proc-macro = true
[dependencies] [dependencies]
proc-macro2 = "0.3" proc-macro2 = "0.4"
quote = "0.5.2" quote = "0.6"
syn = { version = "0.13", features = ["visit"] } syn = { version = "0.14", features = ["visit"] }
[dev-dependencies] [dev-dependencies]
serde = { version = "1.0", path = "../serde" } serde = { version = "1.0", path = "../serde" }
+15 -13
View File
@@ -55,7 +55,8 @@ pub fn with_where_predicates_from_fields(
generics: &syn::Generics, generics: &syn::Generics,
from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>, from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>,
) -> syn::Generics { ) -> syn::Generics {
let predicates = cont.data let predicates = cont
.data
.all_fields() .all_fields()
.flat_map(|field| from_field(&field.attrs)) .flat_map(|field| from_field(&field.attrs))
.flat_map(|predicates| predicates.to_vec()); .flat_map(|predicates| predicates.to_vec());
@@ -139,9 +140,9 @@ pub fn with_bound(
} }
} }
if path.leading_colon.is_none() && path.segments.len() == 1 { if path.leading_colon.is_none() && path.segments.len() == 1 {
let id = path.segments[0].ident; let id = &path.segments[0].ident;
if self.all_type_params.contains(&id) { if self.all_type_params.contains(id) {
self.relevant_type_params.insert(id); self.relevant_type_params.insert(id.clone());
} }
} }
visit::visit_path(self, path); visit::visit_path(self, path);
@@ -156,7 +157,7 @@ pub fn with_bound(
fn visit_macro(&mut self, _mac: &'ast syn::Macro) {} fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
} }
let all_type_params = generics.type_params().map(|param| param.ident).collect(); let all_type_params = generics.type_params().map(|param| param.ident.clone()).collect();
let mut visitor = FindTyParams { let mut visitor = FindTyParams {
all_type_params: all_type_params, all_type_params: all_type_params,
@@ -184,7 +185,7 @@ pub fn with_bound(
let associated_type_usage = visitor.associated_type_usage; let associated_type_usage = visitor.associated_type_usage;
let new_predicates = generics let new_predicates = generics
.type_params() .type_params()
.map(|param| param.ident) .map(|param| param.ident.clone())
.filter(|id| relevant_type_params.contains(id)) .filter(|id| relevant_type_params.contains(id))
.map(|id| syn::TypePath { .map(|id| syn::TypePath {
qself: None, qself: None,
@@ -246,7 +247,7 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen
let bound = syn::Lifetime::new(lifetime, Span::call_site()); let bound = syn::Lifetime::new(lifetime, Span::call_site());
let def = syn::LifetimeDef { let def = syn::LifetimeDef {
attrs: Vec::new(), attrs: Vec::new(),
lifetime: bound, lifetime: bound.clone(),
colon_token: None, colon_token: None,
bounds: Punctuated::new(), bounds: Punctuated::new(),
}; };
@@ -256,10 +257,10 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen
.chain(generics.params.iter().cloned().map(|mut param| { .chain(generics.params.iter().cloned().map(|mut param| {
match param { match param {
syn::GenericParam::Lifetime(ref mut param) => { syn::GenericParam::Lifetime(ref mut param) => {
param.bounds.push(bound); param.bounds.push(bound.clone());
} }
syn::GenericParam::Type(ref mut param) => { syn::GenericParam::Type(ref mut param) => {
param.bounds.push(syn::TypeParamBound::Lifetime(bound)); param.bounds.push(syn::TypeParamBound::Lifetime(bound.clone()));
} }
syn::GenericParam::Const(_) => {} syn::GenericParam::Const(_) => {}
} }
@@ -279,23 +280,24 @@ fn type_of_item(cont: &Container) -> syn::Type {
path: syn::Path { path: syn::Path {
leading_colon: None, leading_colon: None,
segments: vec![syn::PathSegment { segments: vec![syn::PathSegment {
ident: cont.ident, ident: cont.ident.clone(),
arguments: syn::PathArguments::AngleBracketed( arguments: syn::PathArguments::AngleBracketed(
syn::AngleBracketedGenericArguments { syn::AngleBracketedGenericArguments {
colon2_token: None, colon2_token: None,
lt_token: Default::default(), lt_token: Default::default(),
args: cont.generics args: cont
.generics
.params .params
.iter() .iter()
.map(|param| match *param { .map(|param| match *param {
syn::GenericParam::Type(ref param) => { syn::GenericParam::Type(ref param) => {
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath { syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
qself: None, qself: None,
path: param.ident.into(), path: param.ident.clone().into(),
})) }))
} }
syn::GenericParam::Lifetime(ref param) => { syn::GenericParam::Lifetime(ref param) => {
syn::GenericArgument::Lifetime(param.lifetime) syn::GenericArgument::Lifetime(param.lifetime.clone())
} }
syn::GenericParam::Const(_) => { syn::GenericParam::Const(_) => {
panic!("Serde does not support const generics yet"); panic!("Serde does not support const generics yet");
+145 -130
View File
@@ -6,8 +6,8 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use proc_macro2::{Literal, Span}; use proc_macro2::{Literal, Span, TokenStream};
use quote::{ToTokens, Tokens}; use quote::ToTokens;
use syn::punctuated::Punctuated; use syn::punctuated::Punctuated;
use syn::spanned::Spanned; use syn::spanned::Spanned;
use syn::{self, Ident, Index, Member}; use syn::{self, Ident, Index, Member};
@@ -15,19 +15,19 @@ use syn::{self, Ident, Index, Member};
use bound; use bound;
use fragment::{Expr, Fragment, Match, Stmts}; use fragment::{Expr, Fragment, Match, Stmts};
use internals::ast::{Container, Data, Field, Style, Variant}; use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{attr, Ctxt}; use internals::{attr, Ctxt, Derive};
use pretend; use pretend;
use try; use try;
use std::collections::BTreeSet; use std::collections::BTreeSet;
pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<Tokens, String> { pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<TokenStream, String> {
let ctxt = Ctxt::new(); let ctxt = Ctxt::new();
let cont = Container::from_ast(&ctxt, input); let cont = Container::from_ast(&ctxt, input, Derive::Deserialize);
precondition(&ctxt, &cont); precondition(&ctxt, &cont);
try!(ctxt.check()); try!(ctxt.check());
let ident = cont.ident; let ident = &cont.ident;
let params = Parameters::new(&cont); let params = Parameters::new(&cont);
let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(&params); let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(&params);
let dummy_const = Ident::new( let dummy_const = Ident::new(
@@ -130,10 +130,10 @@ struct Parameters {
impl Parameters { impl Parameters {
fn new(cont: &Container) -> Self { fn new(cont: &Container) -> Self {
let local = cont.ident; let local = cont.ident.clone();
let this = match cont.attrs.remote() { let this = match cont.attrs.remote() {
Some(remote) => remote.clone(), Some(remote) => remote.clone(),
None => cont.ident.into(), None => cont.ident.clone().into(),
}; };
let borrowed = borrowed_lifetimes(cont); let borrowed = borrowed_lifetimes(cont);
let generics = build_generics(cont, &borrowed); let generics = build_generics(cont, &borrowed);
@@ -150,8 +150,8 @@ impl Parameters {
/// Type name to use in error messages and `&'static str` arguments to /// Type name to use in error messages and `&'static str` arguments to
/// various Deserializer methods. /// various Deserializer methods.
fn type_name(&self) -> &str { fn type_name(&self) -> String {
self.this.segments.last().unwrap().value().ident.as_ref() self.this.segments.last().unwrap().value().ident.to_string()
} }
} }
@@ -203,7 +203,8 @@ fn build_generics(cont: &Container, borrowed: &BorrowedLifetimes) -> syn::Generi
fn needs_deserialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool { fn needs_deserialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool {
!field.skip_deserializing() && field.deserialize_with().is_none() && field.de_bound().is_none() !field.skip_deserializing() && field.deserialize_with().is_none() && field.de_bound().is_none()
&& variant.map_or(true, |variant| { && variant.map_or(true, |variant| {
!variant.skip_deserializing() && variant.deserialize_with().is_none() !variant.skip_deserializing()
&& variant.deserialize_with().is_none()
&& variant.de_bound().is_none() && variant.de_bound().is_none()
}) })
} }
@@ -268,21 +269,17 @@ fn borrowed_lifetimes(cont: &Container) -> BorrowedLifetimes {
} }
fn deserialize_body(cont: &Container, params: &Parameters) -> Fragment { fn deserialize_body(cont: &Container, params: &Parameters) -> Fragment {
if let Some(type_from) = cont.attrs.type_from() { if cont.attrs.transparent() {
deserialize_transparent(cont, params)
} else if let Some(type_from) = cont.attrs.type_from() {
deserialize_from(type_from) deserialize_from(type_from)
} else if let attr::Identifier::No = cont.attrs.identifier() { } else if let attr::Identifier::No = cont.attrs.identifier() {
match cont.data { match cont.data {
Data::Enum(ref variants) => deserialize_enum(params, variants, &cont.attrs), Data::Enum(ref variants) => deserialize_enum(params, variants, &cont.attrs),
Data::Struct(Style::Struct, ref fields) => { Data::Struct(Style::Struct, ref fields) => {
if fields.iter().any(|field| field.ident.is_none()) {
panic!("struct has unnamed fields");
}
deserialize_struct(None, params, fields, &cont.attrs, None, &Untagged::No) deserialize_struct(None, params, fields, &cont.attrs, None, &Untagged::No)
} }
Data::Struct(Style::Tuple, ref fields) | Data::Struct(Style::Newtype, ref fields) => { Data::Struct(Style::Tuple, ref fields) | Data::Struct(Style::Newtype, ref fields) => {
if fields.iter().any(|field| field.ident.is_some()) {
panic!("tuple struct has named fields");
}
deserialize_tuple(None, params, fields, &cont.attrs, None) deserialize_tuple(None, params, fields, &cont.attrs, None)
} }
Data::Struct(Style::Unit, _) => deserialize_unit_struct(params, &cont.attrs), Data::Struct(Style::Unit, _) => deserialize_unit_struct(params, &cont.attrs),
@@ -303,8 +300,11 @@ fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option<St
// deserialize_in_place for remote derives. // deserialize_in_place for remote derives.
assert!(!params.has_getter); assert!(!params.has_getter);
if cont.attrs.type_from().is_some() || cont.attrs.identifier().is_some() if cont.attrs.transparent()
|| cont.data || cont.attrs.type_from().is_some()
|| cont.attrs.identifier().is_some()
|| cont
.data
.all_fields() .all_fields()
.all(|f| f.attrs.deserialize_with().is_some()) .all(|f| f.attrs.deserialize_with().is_some())
{ {
@@ -348,6 +348,41 @@ fn deserialize_in_place_body(_cont: &Container, _params: &Parameters) -> Option<
None None
} }
fn deserialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
let fields = match cont.data {
Data::Struct(_, ref fields) => fields,
Data::Enum(_) => unreachable!(),
};
let this = &params.this;
let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap();
let path = match transparent_field.attrs.deserialize_with() {
Some(path) => quote!(#path),
None => quote!(_serde::Deserialize::deserialize),
};
let assign = fields.iter().map(|field| {
let member = &field.member;
if field as *const Field == transparent_field as *const Field {
quote!(#member: __transparent)
} else {
let value = match *field.attrs.default() {
attr::Default::Default => quote!(_serde::export::Default::default()),
attr::Default::Path(ref path) => quote!(#path()),
attr::Default::None => quote!(_serde::export::PhantomData),
};
quote!(#member: #value)
}
});
quote_block! {
_serde::export::Result::map(
#path(__deserializer),
|__transparent| #this { #(#assign),* })
}
}
fn deserialize_from(type_from: &syn::Type) -> Fragment { fn deserialize_from(type_from: &syn::Type) -> Fragment {
quote_block! { quote_block! {
_serde::export::Result::map( _serde::export::Result::map(
@@ -386,11 +421,11 @@ fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fra
} }
fn deserialize_tuple( fn deserialize_tuple(
variant_ident: Option<syn::Ident>, variant_ident: Option<&syn::Ident>,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Option<Tokens>, deserializer: Option<TokenStream>,
) -> Fragment { ) -> Fragment {
let this = &params.this; let this = &params.this;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
@@ -403,7 +438,7 @@ fn deserialize_tuple(
// and use an `Into` conversion to get the remote type. If there are no // and use an `Into` conversion to get the remote type. If there are no
// getters then construct the target type directly. // getters then construct the target type directly.
let construct = if params.has_getter { let construct = if params.has_getter {
let local = params.local; let local = &params.local;
quote!(#local) quote!(#local)
} else { } else {
quote!(#this) quote!(#this)
@@ -411,7 +446,7 @@ fn deserialize_tuple(
let is_enum = variant_ident.is_some(); let is_enum = variant_ident.is_some();
let type_path = match variant_ident { let type_path = match variant_ident {
Some(variant_ident) => quote!(#construct::#variant_ident), Some(ref variant_ident) => quote!(#construct::#variant_ident),
None => construct, None => construct,
}; };
let expecting = match variant_ident { let expecting = match variant_ident {
@@ -490,7 +525,7 @@ fn deserialize_tuple_in_place(
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Option<Tokens>, deserializer: Option<TokenStream>,
) -> Fragment { ) -> Fragment {
let this = &params.this; let this = &params.this;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
@@ -574,7 +609,7 @@ fn deserialize_tuple_in_place(
} }
fn deserialize_seq( fn deserialize_seq(
type_path: &Tokens, type_path: &TokenStream,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
is_struct: bool, is_struct: bool,
@@ -583,8 +618,6 @@ fn deserialize_seq(
) -> Fragment { ) -> Fragment {
let vars = (0..fields.len()).map(field_i as fn(_) -> _); let vars = (0..fields.len()).map(field_i as fn(_) -> _);
// XXX: do we need an error for flattening here?
let deserialized_count = fields let deserialized_count = fields
.iter() .iter()
.filter(|field| !field.attrs.skip_deserializing()) .filter(|field| !field.attrs.skip_deserializing())
@@ -635,7 +668,7 @@ fn deserialize_seq(
}); });
let mut result = if is_struct { let mut result = if is_struct {
let names = fields.iter().map(|f| &f.ident); let names = fields.iter().map(|f| &f.member);
quote! { quote! {
#type_path { #( #names: #vars ),* } #type_path { #( #names: #vars ),* }
} }
@@ -680,10 +713,6 @@ fn deserialize_seq_in_place(
cattrs: &attr::Container, cattrs: &attr::Container,
expecting: &str, expecting: &str,
) -> Fragment { ) -> Fragment {
let vars = (0..fields.len()).map(field_i as fn(_) -> _);
// XXX: do we need an error for flattening here?
let deserialized_count = fields let deserialized_count = fields
.iter() .iter()
.filter(|field| !field.attrs.skip_deserializing()) .filter(|field| !field.attrs.skip_deserializing())
@@ -695,57 +724,47 @@ fn deserialize_seq_in_place(
}; };
let mut index_in_seq = 0usize; let mut index_in_seq = 0usize;
let write_values = vars.clone() let write_values = fields.iter().map(|field| {
.zip(fields) let member = &field.member;
.enumerate()
.map(|(field_index, (_, field))| {
// If there's no field name, assume we're a tuple-struct and use a numeric index
let field_name = field.ident.map(Member::Named).unwrap_or_else(|| {
Member::Unnamed(Index {
index: field_index as u32,
span: Span::call_site(),
})
});
if field.attrs.skip_deserializing() { if field.attrs.skip_deserializing() {
let default = Expr(expr_is_missing(field, cattrs)); let default = Expr(expr_is_missing(field, cattrs));
quote! { quote! {
self.place.#field_name = #default; self.place.#member = #default;
} }
} else { } else {
let return_invalid_length = quote! { let return_invalid_length = quote! {
return _serde::export::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting)); return _serde::export::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting));
}; };
let write = match field.attrs.deserialize_with() { let write = match field.attrs.deserialize_with() {
None => { None => {
quote! { quote! {
if let _serde::export::None = try!(_serde::de::SeqAccess::next_element_seed(&mut __seq, if let _serde::export::None = try!(_serde::de::SeqAccess::next_element_seed(&mut __seq,
_serde::private::de::InPlaceSeed(&mut self.place.#field_name))) _serde::private::de::InPlaceSeed(&mut self.place.#member)))
{ {
#return_invalid_length #return_invalid_length
}
} }
} }
Some(path) => { }
let (wrapper, wrapper_ty) = Some(path) => {
wrap_deserialize_field_with(params, field.ty, path); let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path);
quote!({ quote!({
#wrapper #wrapper
match try!(_serde::de::SeqAccess::next_element::<#wrapper_ty>(&mut __seq)) { match try!(_serde::de::SeqAccess::next_element::<#wrapper_ty>(&mut __seq)) {
_serde::export::Some(__wrap) => { _serde::export::Some(__wrap) => {
self.place.#field_name = __wrap.value; self.place.#member = __wrap.value;
} }
_serde::export::None => { _serde::export::None => {
#return_invalid_length #return_invalid_length
} }
} }
}) })
} }
}; };
index_in_seq += 1; index_in_seq += 1;
write write
} }
}); });
let this = &params.this; let this = &params.this;
let (_, ty_generics, _) = params.generics.split_for_impl(); let (_, ty_generics, _) = params.generics.split_for_impl();
@@ -770,7 +789,7 @@ fn deserialize_seq_in_place(
} }
} }
fn deserialize_newtype_struct(type_path: &Tokens, params: &Parameters, field: &Field) -> Tokens { fn deserialize_newtype_struct(type_path: &TokenStream, params: &Parameters, field: &Field) -> TokenStream {
let delife = params.borrowed.de_lifetime(); let delife = params.borrowed.de_lifetime();
let field_ty = field.ty; let field_ty = field.ty;
@@ -808,7 +827,7 @@ fn deserialize_newtype_struct(type_path: &Tokens, params: &Parameters, field: &F
} }
#[cfg(feature = "deserialize_in_place")] #[cfg(feature = "deserialize_in_place")]
fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> Tokens { fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> TokenStream {
// We do not generate deserialize_in_place if every field has a deserialize_with. // We do not generate deserialize_in_place if every field has a deserialize_with.
assert!(field.attrs.deserialize_with().is_none()); assert!(field.attrs.deserialize_with().is_none());
@@ -831,11 +850,11 @@ enum Untagged {
} }
fn deserialize_struct( fn deserialize_struct(
variant_ident: Option<syn::Ident>, variant_ident: Option<&syn::Ident>,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Option<Tokens>, deserializer: Option<TokenStream>,
untagged: &Untagged, untagged: &Untagged,
) -> Fragment { ) -> Fragment {
let is_enum = variant_ident.is_some(); let is_enum = variant_ident.is_some();
@@ -849,14 +868,14 @@ fn deserialize_struct(
// and use an `Into` conversion to get the remote type. If there are no // and use an `Into` conversion to get the remote type. If there are no
// getters then construct the target type directly. // getters then construct the target type directly.
let construct = if params.has_getter { let construct = if params.has_getter {
let local = params.local; let local = &params.local;
quote!(#local) quote!(#local)
} else { } else {
quote!(#this) quote!(#this)
}; };
let type_path = match variant_ident { let type_path = match variant_ident {
Some(variant_ident) => quote!(#construct::#variant_ident), Some(ref variant_ident) => quote!(#construct::#variant_ident),
None => construct, None => construct,
}; };
let expecting = match variant_ident { let expecting = match variant_ident {
@@ -985,7 +1004,7 @@ fn deserialize_struct_in_place(
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Option<Tokens>, deserializer: Option<TokenStream>,
) -> Option<Fragment> { ) -> Option<Fragment> {
let is_enum = variant_ident.is_some(); let is_enum = variant_ident.is_some();
@@ -1370,7 +1389,7 @@ fn deserialize_adjacently_tagged_enum(
.filter(|&(_, variant)| !variant.attrs.skip_deserializing() && is_unit(variant)) .filter(|&(_, variant)| !variant.attrs.skip_deserializing() && is_unit(variant))
.map(|(i, variant)| { .map(|(i, variant)| {
let variant_index = field_i(i); let variant_index = field_i(i);
let variant_ident = variant.ident; let variant_ident = &variant.ident;
quote! { quote! {
__Field::#variant_index => _serde::export::Ok(#this::#variant_ident), __Field::#variant_index => _serde::export::Ok(#this::#variant_ident),
} }
@@ -1634,7 +1653,7 @@ fn deserialize_externally_tagged_variant(
}; };
} }
let variant_ident = variant.ident; let variant_ident = &variant.ident;
match variant.style { match variant.style {
Style::Unit => { Style::Unit => {
@@ -1665,19 +1684,19 @@ fn deserialize_internally_tagged_variant(
params: &Parameters, params: &Parameters,
variant: &Variant, variant: &Variant,
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Tokens, deserializer: TokenStream,
) -> Fragment { ) -> Fragment {
if variant.attrs.deserialize_with().is_some() { if variant.attrs.deserialize_with().is_some() {
return deserialize_untagged_variant(params, variant, cattrs, deserializer); return deserialize_untagged_variant(params, variant, cattrs, deserializer);
} }
let variant_ident = variant.ident; let variant_ident = &variant.ident;
match variant.style { match variant.style {
Style::Unit => { Style::Unit => {
let this = &params.this; let this = &params.this;
let type_name = params.type_name(); let type_name = params.type_name();
let variant_name = variant.ident.as_ref(); let variant_name = variant.ident.to_string();
quote_block! { quote_block! {
try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name))); try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name)));
_serde::export::Ok(#this::#variant_ident) _serde::export::Ok(#this::#variant_ident)
@@ -1705,7 +1724,7 @@ fn deserialize_untagged_variant(
params: &Parameters, params: &Parameters,
variant: &Variant, variant: &Variant,
cattrs: &attr::Container, cattrs: &attr::Container,
deserializer: Tokens, deserializer: TokenStream,
) -> Fragment { ) -> Fragment {
if let Some(path) = variant.attrs.deserialize_with() { if let Some(path) = variant.attrs.deserialize_with() {
let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path); let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path);
@@ -1716,13 +1735,13 @@ fn deserialize_untagged_variant(
}; };
} }
let variant_ident = variant.ident; let variant_ident = &variant.ident;
match variant.style { match variant.style {
Style::Unit => { Style::Unit => {
let this = &params.this; let this = &params.this;
let type_name = params.type_name(); let type_name = params.type_name();
let variant_name = variant.ident.as_ref(); let variant_name = variant.ident.to_string();
quote_expr! { quote_expr! {
match _serde::Deserializer::deserialize_any( match _serde::Deserializer::deserialize_any(
#deserializer, #deserializer,
@@ -1758,7 +1777,7 @@ fn deserialize_untagged_variant(
} }
fn deserialize_externally_tagged_newtype_variant( fn deserialize_externally_tagged_newtype_variant(
variant_ident: syn::Ident, variant_ident: &syn::Ident,
params: &Parameters, params: &Parameters,
field: &Field, field: &Field,
) -> Fragment { ) -> Fragment {
@@ -1785,10 +1804,10 @@ fn deserialize_externally_tagged_newtype_variant(
} }
fn deserialize_untagged_newtype_variant( fn deserialize_untagged_newtype_variant(
variant_ident: syn::Ident, variant_ident: &syn::Ident,
params: &Parameters, params: &Parameters,
field: &Field, field: &Field,
deserializer: &Tokens, deserializer: &TokenStream,
) -> Fragment { ) -> Fragment {
let this = &params.this; let this = &params.this;
let field_ty = field.ty; let field_ty = field.ty;
@@ -1802,10 +1821,8 @@ fn deserialize_untagged_newtype_variant(
} }
Some(path) => { Some(path) => {
quote_block! { quote_block! {
let __value: #field_ty = _serde::export::Result::map( let __value: _serde::export::Result<#field_ty, _> = #path(#deserializer);
#path(#deserializer), _serde::export::Result::map(__value, #this::#variant_ident)
#this::#variant_ident);
__value
} }
} }
} }
@@ -1887,7 +1904,7 @@ fn deserialize_custom_identifier(
let this = quote!(#this); let this = quote!(#this);
let (ordinary, fallthrough) = if let Some(last) = variants.last() { let (ordinary, fallthrough) = if let Some(last) = variants.last() {
let last_ident = last.ident; let last_ident = &last.ident;
if last.attrs.other() { if last.attrs.other() {
let ordinary = &variants[..variants.len() - 1]; let ordinary = &variants[..variants.len() - 1];
let fallthrough = quote!(_serde::export::Ok(#this::#last_ident)); let fallthrough = quote!(_serde::export::Ok(#this::#last_ident));
@@ -1910,7 +1927,7 @@ fn deserialize_custom_identifier(
let names_idents: Vec<_> = ordinary let names_idents: Vec<_> = ordinary
.iter() .iter()
.map(|variant| (variant.attrs.name().deserialize_name(), variant.ident)) .map(|variant| (variant.attrs.name().deserialize_name(), variant.ident.clone()))
.collect(); .collect();
let names = names_idents.iter().map(|&(ref name, _)| name); let names = names_idents.iter().map(|&(ref name, _)| name);
@@ -1963,10 +1980,10 @@ fn deserialize_custom_identifier(
} }
fn deserialize_identifier( fn deserialize_identifier(
this: &Tokens, this: &TokenStream,
fields: &[(String, Ident)], fields: &[(String, Ident)],
is_variant: bool, is_variant: bool,
fallthrough: Option<Tokens>, fallthrough: Option<TokenStream>,
collect_other_fields: bool, collect_other_fields: bool,
) -> Fragment { ) -> Fragment {
let field_strs = fields.iter().map(|&(ref name, _)| name); let field_strs = fields.iter().map(|&(ref name, _)| name);
@@ -2220,7 +2237,7 @@ fn deserialize_identifier(
} }
fn deserialize_struct_as_struct_visitor( fn deserialize_struct_as_struct_visitor(
struct_path: &Tokens, struct_path: &TokenStream,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
@@ -2249,7 +2266,7 @@ fn deserialize_struct_as_struct_visitor(
} }
fn deserialize_struct_as_map_visitor( fn deserialize_struct_as_map_visitor(
struct_path: &Tokens, struct_path: &TokenStream,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
@@ -2269,7 +2286,7 @@ fn deserialize_struct_as_map_visitor(
} }
fn deserialize_map( fn deserialize_map(
struct_path: &Tokens, struct_path: &TokenStream,
params: &Parameters, params: &Parameters,
fields: &[Field], fields: &[Field],
cattrs: &attr::Container, cattrs: &attr::Container,
@@ -2424,12 +2441,12 @@ fn deserialize_map(
}; };
let result = fields_names.iter().map(|&(field, ref name)| { let result = fields_names.iter().map(|&(field, ref name)| {
let ident = field.ident.expect("struct contains unnamed fields"); let member = &field.member;
if field.attrs.skip_deserializing() { if field.attrs.skip_deserializing() {
let value = Expr(expr_is_missing(field, cattrs)); let value = Expr(expr_is_missing(field, cattrs));
quote!(#ident: #value) quote!(#member: #value)
} else { } else {
quote!(#ident: #name) quote!(#member: #name)
} }
}); });
@@ -2534,19 +2551,19 @@ fn deserialize_map_in_place(
.filter(|&&(field, _)| !field.attrs.skip_deserializing()) .filter(|&&(field, _)| !field.attrs.skip_deserializing())
.map(|&(field, ref name)| { .map(|&(field, ref name)| {
let deser_name = field.attrs.name().deserialize_name(); let deser_name = field.attrs.name().deserialize_name();
let field_name = field.ident; let member = &field.member;
let visit = match field.attrs.deserialize_with() { let visit = match field.attrs.deserialize_with() {
None => { None => {
quote! { quote! {
try!(_serde::de::MapAccess::next_value_seed(&mut __map, _serde::private::de::InPlaceSeed(&mut self.place.#field_name))) try!(_serde::de::MapAccess::next_value_seed(&mut __map, _serde::private::de::InPlaceSeed(&mut self.place.#member)))
} }
} }
Some(path) => { Some(path) => {
let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path);
quote!({ quote!({
#wrapper #wrapper
self.place.#field_name = try!(_serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map)).value self.place.#member = try!(_serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map)).value
}) })
} }
}; };
@@ -2598,7 +2615,8 @@ fn deserialize_map_in_place(
let missing_expr = expr_is_missing(field, cattrs); let missing_expr = expr_is_missing(field, cattrs);
// If missing_expr unconditionally returns an error, don't try // If missing_expr unconditionally returns an error, don't try
// to assign its value to self.place. // to assign its value to self.place.
if field.attrs.default().is_none() && cattrs.default().is_none() if field.attrs.default().is_none()
&& cattrs.default().is_none()
&& field.attrs.deserialize_with().is_some() && field.attrs.deserialize_with().is_some()
{ {
let missing_expr = Stmts(missing_expr); let missing_expr = Stmts(missing_expr);
@@ -2608,11 +2626,11 @@ fn deserialize_map_in_place(
} }
} }
} else { } else {
let field_name = field.ident; let member = &field.member;
let missing_expr = Expr(missing_expr); let missing_expr = Expr(missing_expr);
quote! { quote! {
if !#name { if !#name {
self.place.#field_name = #missing_expr; self.place.#member = #missing_expr;
}; };
} }
} }
@@ -2656,9 +2674,9 @@ fn field_i(i: usize) -> Ident {
/// in a trait to prevent it from accessing the internal `Deserialize` state. /// in a trait to prevent it from accessing the internal `Deserialize` state.
fn wrap_deserialize_with( fn wrap_deserialize_with(
params: &Parameters, params: &Parameters,
value_ty: &Tokens, value_ty: &TokenStream,
deserialize_with: &syn::ExprPath, deserialize_with: &syn::ExprPath,
) -> (Tokens, Tokens) { ) -> (TokenStream, TokenStream) {
let this = &params.this; let this = &params.this;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params); split_with_de_lifetime(params);
@@ -2694,7 +2712,7 @@ fn wrap_deserialize_field_with(
params: &Parameters, params: &Parameters,
field_ty: &syn::Type, field_ty: &syn::Type,
deserialize_with: &syn::ExprPath, deserialize_with: &syn::ExprPath,
) -> (Tokens, Tokens) { ) -> (TokenStream, TokenStream) {
wrap_deserialize_with(params, &quote!(#field_ty), deserialize_with) wrap_deserialize_with(params, &quote!(#field_ty), deserialize_with)
} }
@@ -2702,9 +2720,9 @@ fn wrap_deserialize_variant_with(
params: &Parameters, params: &Parameters,
variant: &Variant, variant: &Variant,
deserialize_with: &syn::ExprPath, deserialize_with: &syn::ExprPath,
) -> (Tokens, Tokens, Tokens) { ) -> (TokenStream, TokenStream, TokenStream) {
let this = &params.this; let this = &params.this;
let variant_ident = variant.ident; let variant_ident = &variant.ident;
let field_tys = variant.fields.iter().map(|field| field.ty); let field_tys = variant.fields.iter().map(|field| field.ty);
let (wrapper, wrapper_ty) = let (wrapper, wrapper_ty) =
@@ -2718,18 +2736,15 @@ fn wrap_deserialize_variant_with(
}); });
let unwrap_fn = match variant.style { let unwrap_fn = match variant.style {
Style::Struct if variant.fields.len() == 1 => { Style::Struct if variant.fields.len() == 1 => {
let field_ident = variant.fields[0].ident.unwrap(); let member = &variant.fields[0].member;
quote! { quote! {
|__wrap| #this::#variant_ident { #field_ident: __wrap.value } |__wrap| #this::#variant_ident { #member: __wrap.value }
} }
} }
Style::Struct => { Style::Struct => {
let field_idents = variant let members = variant.fields.iter().map(|field| &field.member);
.fields
.iter()
.map(|field| field.ident.as_ref().unwrap());
quote! { quote! {
|__wrap| #this::#variant_ident { #(#field_idents: __wrap.value.#field_access),* } |__wrap| #this::#variant_ident { #(#members: __wrap.value.#field_access),* }
} }
} }
Style::Tuple => quote! { Style::Tuple => quote! {
@@ -2759,8 +2774,8 @@ fn expr_is_missing(field: &Field, cattrs: &attr::Container) -> Fragment {
match *cattrs.default() { match *cattrs.default() {
attr::Default::Default | attr::Default::Path(_) => { attr::Default::Default | attr::Default::Path(_) => {
let ident = field.ident; let member = &field.member;
return quote_expr!(__default.#ident); return quote_expr!(__default.#member);
} }
attr::Default::None => { /* below */ } attr::Default::None => { /* below */ }
} }
@@ -2787,7 +2802,7 @@ struct DeImplGenerics<'a>(&'a Parameters);
struct InPlaceImplGenerics<'a>(&'a Parameters); struct InPlaceImplGenerics<'a>(&'a Parameters);
impl<'a> ToTokens for DeImplGenerics<'a> { impl<'a> ToTokens for DeImplGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut TokenStream) {
let mut generics = self.0.generics.clone(); let mut generics = self.0.generics.clone();
if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() { if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() {
generics.params = Some(syn::GenericParam::Lifetime(de_lifetime)) generics.params = Some(syn::GenericParam::Lifetime(de_lifetime))
@@ -2802,7 +2817,7 @@ impl<'a> ToTokens for DeImplGenerics<'a> {
#[cfg(feature = "deserialize_in_place")] #[cfg(feature = "deserialize_in_place")]
impl<'a> ToTokens for InPlaceImplGenerics<'a> { impl<'a> ToTokens for InPlaceImplGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut TokenStream) {
let place_lifetime = place_lifetime(); let place_lifetime = place_lifetime();
let mut generics = self.0.generics.clone(); let mut generics = self.0.generics.clone();
@@ -2810,12 +2825,12 @@ impl<'a> ToTokens for InPlaceImplGenerics<'a> {
for param in &mut generics.params { for param in &mut generics.params {
match *param { match *param {
syn::GenericParam::Lifetime(ref mut param) => { syn::GenericParam::Lifetime(ref mut param) => {
param.bounds.push(place_lifetime.lifetime); param.bounds.push(place_lifetime.lifetime.clone());
} }
syn::GenericParam::Type(ref mut param) => { syn::GenericParam::Type(ref mut param) => {
param param
.bounds .bounds
.push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime)); .push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime.clone()));
} }
syn::GenericParam::Const(_) => {} syn::GenericParam::Const(_) => {}
} }
@@ -2847,7 +2862,7 @@ struct DeTypeGenerics<'a>(&'a Parameters);
struct InPlaceTypeGenerics<'a>(&'a Parameters); struct InPlaceTypeGenerics<'a>(&'a Parameters);
impl<'a> ToTokens for DeTypeGenerics<'a> { impl<'a> ToTokens for DeTypeGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut TokenStream) {
let mut generics = self.0.generics.clone(); let mut generics = self.0.generics.clone();
if self.0.borrowed.de_lifetime_def().is_some() { if self.0.borrowed.de_lifetime_def().is_some() {
let def = syn::LifetimeDef { let def = syn::LifetimeDef {
@@ -2868,7 +2883,7 @@ impl<'a> ToTokens for DeTypeGenerics<'a> {
#[cfg(feature = "deserialize_in_place")] #[cfg(feature = "deserialize_in_place")]
impl<'a> ToTokens for InPlaceTypeGenerics<'a> { impl<'a> ToTokens for InPlaceTypeGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut TokenStream) {
let mut generics = self.0.generics.clone(); let mut generics = self.0.generics.clone();
generics.params = Some(syn::GenericParam::Lifetime(place_lifetime())) generics.params = Some(syn::GenericParam::Lifetime(place_lifetime()))
.into_iter() .into_iter()
+9 -8
View File
@@ -6,15 +6,16 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use quote::{ToTokens, Tokens}; use quote::ToTokens;
use syn::token; use syn::token;
use proc_macro2::TokenStream;
pub enum Fragment { pub enum Fragment {
/// Tokens that can be used as an expression. /// Tokens that can be used as an expression.
Expr(Tokens), Expr(TokenStream),
/// Tokens that can be used inside a block. The surrounding curly braces are /// Tokens that can be used inside a block. The surrounding curly braces are
/// not part of these tokens. /// not part of these tokens.
Block(Tokens), Block(TokenStream),
} }
macro_rules! quote_expr { macro_rules! quote_expr {
@@ -33,7 +34,7 @@ macro_rules! quote_block {
/// Block fragments in curly braces. /// Block fragments in curly braces.
pub struct Expr(pub Fragment); pub struct Expr(pub Fragment);
impl ToTokens for Expr { impl ToTokens for Expr {
fn to_tokens(&self, out: &mut Tokens) { fn to_tokens(&self, out: &mut TokenStream) {
match self.0 { match self.0 {
Fragment::Expr(ref expr) => expr.to_tokens(out), Fragment::Expr(ref expr) => expr.to_tokens(out),
Fragment::Block(ref block) => { Fragment::Block(ref block) => {
@@ -46,7 +47,7 @@ impl ToTokens for Expr {
/// Interpolate a fragment as the statements of a block. /// Interpolate a fragment as the statements of a block.
pub struct Stmts(pub Fragment); pub struct Stmts(pub Fragment);
impl ToTokens for Stmts { impl ToTokens for Stmts {
fn to_tokens(&self, out: &mut Tokens) { fn to_tokens(&self, out: &mut TokenStream) {
match self.0 { match self.0 {
Fragment::Expr(ref expr) => expr.to_tokens(out), Fragment::Expr(ref expr) => expr.to_tokens(out),
Fragment::Block(ref block) => block.to_tokens(out), Fragment::Block(ref block) => block.to_tokens(out),
@@ -58,7 +59,7 @@ impl ToTokens for Stmts {
/// involves putting a comma after expressions and curly braces around blocks. /// involves putting a comma after expressions and curly braces around blocks.
pub struct Match(pub Fragment); pub struct Match(pub Fragment);
impl ToTokens for Match { impl ToTokens for Match {
fn to_tokens(&self, out: &mut Tokens) { fn to_tokens(&self, out: &mut TokenStream) {
match self.0 { match self.0 {
Fragment::Expr(ref expr) => { Fragment::Expr(ref expr) => {
expr.to_tokens(out); expr.to_tokens(out);
@@ -71,8 +72,8 @@ impl ToTokens for Match {
} }
} }
impl AsRef<Tokens> for Fragment { impl AsRef<TokenStream> for Fragment {
fn as_ref(&self) -> &Tokens { fn as_ref(&self) -> &TokenStream {
match *self { match *self {
Fragment::Expr(ref expr) => expr, Fragment::Expr(ref expr) => expr,
Fragment::Block(ref block) => block, Fragment::Block(ref block) => block,
+11 -8
View File
@@ -8,7 +8,7 @@
use internals::attr; use internals::attr;
use internals::check; use internals::check;
use internals::Ctxt; use internals::{Ctxt, Derive};
use syn; use syn;
use syn::punctuated::Punctuated; use syn::punctuated::Punctuated;
@@ -32,7 +32,7 @@ pub struct Variant<'a> {
} }
pub struct Field<'a> { pub struct Field<'a> {
pub ident: Option<syn::Ident>, pub member: syn::Member,
pub attrs: attr::Field, pub attrs: attr::Field,
pub ty: &'a syn::Type, pub ty: &'a syn::Type,
pub original: &'a syn::Field, pub original: &'a syn::Field,
@@ -47,7 +47,7 @@ pub enum Style {
} }
impl<'a> Container<'a> { impl<'a> Container<'a> {
pub fn from_ast(cx: &Ctxt, item: &'a syn::DeriveInput) -> Container<'a> { pub fn from_ast(cx: &Ctxt, item: &'a syn::DeriveInput, derive: Derive) -> Container<'a> {
let mut attrs = attr::Container::from_ast(cx, item); let mut attrs = attr::Container::from_ast(cx, item);
let mut data = match item.data { let mut data = match item.data {
@@ -86,13 +86,13 @@ impl<'a> Container<'a> {
attrs.mark_has_flatten(); attrs.mark_has_flatten();
} }
let item = Container { let mut item = Container {
ident: item.ident, ident: item.ident.clone(),
attrs: attrs, attrs: attrs,
data: data, data: data,
generics: &item.generics, generics: &item.generics,
}; };
check::check(cx, &item); check::check(cx, &mut item, derive);
item item
} }
} }
@@ -124,7 +124,7 @@ fn enum_from_ast<'a>(
let (style, fields) = let (style, fields) =
struct_from_ast(cx, &variant.fields, Some(&attrs), container_default); struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
Variant { Variant {
ident: variant.ident, ident: variant.ident.clone(),
attrs: attrs, attrs: attrs,
style: style, style: style,
fields: fields, fields: fields,
@@ -166,7 +166,10 @@ fn fields_from_ast<'a>(
.iter() .iter()
.enumerate() .enumerate()
.map(|(i, field)| Field { .map(|(i, field)| Field {
ident: field.ident, member: match field.ident {
Some(ref ident) => syn::Member::Named(ident.clone()),
None => syn::Member::Unnamed(i.into()),
},
attrs: attr::Field::from_ast(cx, i, field, attrs, container_default), attrs: attr::Field::from_ast(cx, i, field, attrs, container_default),
ty: &field.ty, ty: &field.ty,
original: field, original: field,
+78 -54
View File
@@ -105,6 +105,7 @@ impl Name {
/// Represents container (e.g. struct) attribute information /// Represents container (e.g. struct) attribute information
pub struct Container { pub struct Container {
name: Name, name: Name,
transparent: bool,
deny_unknown_fields: bool, deny_unknown_fields: bool,
default: Default, default: Default,
rename_all: RenameRule, rename_all: RenameRule,
@@ -181,6 +182,7 @@ impl Container {
pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self { pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self {
let mut ser_name = Attr::none(cx, "rename"); let mut ser_name = Attr::none(cx, "rename");
let mut de_name = Attr::none(cx, "rename"); let mut de_name = Attr::none(cx, "rename");
let mut transparent = BoolAttr::none(cx, "transparent");
let mut deny_unknown_fields = BoolAttr::none(cx, "deny_unknown_fields"); let mut deny_unknown_fields = BoolAttr::none(cx, "deny_unknown_fields");
let mut default = Attr::none(cx, "default"); let mut default = Attr::none(cx, "default");
let mut rename_all = Attr::none(cx, "rename_all"); let mut rename_all = Attr::none(cx, "rename_all");
@@ -200,7 +202,7 @@ impl Container {
match meta_item { match meta_item {
// Parse `#[serde(rename = "foo")]` // Parse `#[serde(rename = "foo")]`
Meta(NameValue(ref m)) if m.ident == "rename" => { Meta(NameValue(ref m)) if m.ident == "rename" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
ser_name.set(s.value()); ser_name.set(s.value());
de_name.set(s.value()); de_name.set(s.value());
} }
@@ -216,7 +218,7 @@ impl Container {
// Parse `#[serde(rename_all = "foo")]` // Parse `#[serde(rename_all = "foo")]`
Meta(NameValue(ref m)) if m.ident == "rename_all" => { Meta(NameValue(ref m)) if m.ident == "rename_all" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
match RenameRule::from_str(&s.value()) { match RenameRule::from_str(&s.value()) {
Ok(rename_rule) => rename_all.set(rename_rule), Ok(rename_rule) => rename_all.set(rename_rule),
Err(()) => cx.error(format!( Err(()) => cx.error(format!(
@@ -228,13 +230,18 @@ impl Container {
} }
} }
// Parse `#[serde(transparent)]`
Meta(Word(ref word)) if word == "transparent" => {
transparent.set_true();
}
// Parse `#[serde(deny_unknown_fields)]` // Parse `#[serde(deny_unknown_fields)]`
Meta(Word(word)) if word == "deny_unknown_fields" => { Meta(Word(ref word)) if word == "deny_unknown_fields" => {
deny_unknown_fields.set_true(); deny_unknown_fields.set_true();
} }
// Parse `#[serde(default)]` // Parse `#[serde(default)]`
Meta(Word(word)) if word == "default" => match item.data { Meta(Word(ref word)) if word == "default" => match item.data {
syn::Data::Struct(syn::DataStruct { syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(_), fields: syn::Fields::Named(_),
.. ..
@@ -249,7 +256,7 @@ impl Container {
// Parse `#[serde(default = "...")]` // Parse `#[serde(default = "...")]`
Meta(NameValue(ref m)) if m.ident == "default" => { Meta(NameValue(ref m)) if m.ident == "default" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
match item.data { match item.data {
syn::Data::Struct(syn::DataStruct { syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(_), fields: syn::Fields::Named(_),
@@ -268,7 +275,7 @@ impl Container {
// Parse `#[serde(bound = "D: Serialize")]` // Parse `#[serde(bound = "D: Serialize")]`
Meta(NameValue(ref m)) if m.ident == "bound" => { Meta(NameValue(ref m)) if m.ident == "bound" => {
if let Ok(where_predicates) = if let Ok(where_predicates) =
parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
{ {
ser_bound.set(where_predicates.clone()); ser_bound.set(where_predicates.clone());
de_bound.set(where_predicates); de_bound.set(where_predicates);
@@ -284,7 +291,7 @@ impl Container {
} }
// Parse `#[serde(untagged)]` // Parse `#[serde(untagged)]`
Meta(Word(word)) if word == "untagged" => match item.data { Meta(Word(ref word)) if word == "untagged" => match item.data {
syn::Data::Enum(_) => { syn::Data::Enum(_) => {
untagged.set_true(); untagged.set_true();
} }
@@ -295,7 +302,7 @@ impl Container {
// Parse `#[serde(tag = "type")]` // Parse `#[serde(tag = "type")]`
Meta(NameValue(ref m)) if m.ident == "tag" => { Meta(NameValue(ref m)) if m.ident == "tag" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
match item.data { match item.data {
syn::Data::Enum(_) => { syn::Data::Enum(_) => {
internal_tag.set(s.value()); internal_tag.set(s.value());
@@ -309,7 +316,7 @@ impl Container {
// Parse `#[serde(content = "c")]` // Parse `#[serde(content = "c")]`
Meta(NameValue(ref m)) if m.ident == "content" => { Meta(NameValue(ref m)) if m.ident == "content" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
match item.data { match item.data {
syn::Data::Enum(_) => { syn::Data::Enum(_) => {
content.set(s.value()); content.set(s.value());
@@ -324,23 +331,23 @@ impl Container {
// Parse `#[serde(from = "Type")] // Parse `#[serde(from = "Type")]
Meta(NameValue(ref m)) if m.ident == "from" => { Meta(NameValue(ref m)) if m.ident == "from" => {
if let Ok(from_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) { if let Ok(from_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) {
type_from.set_opt(Some(from_ty)); type_from.set_opt(Some(from_ty));
} }
} }
// Parse `#[serde(into = "Type")] // Parse `#[serde(into = "Type")]
Meta(NameValue(ref m)) if m.ident == "into" => { Meta(NameValue(ref m)) if m.ident == "into" => {
if let Ok(into_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) { if let Ok(into_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) {
type_into.set_opt(Some(into_ty)); type_into.set_opt(Some(into_ty));
} }
} }
// Parse `#[serde(remote = "...")]` // Parse `#[serde(remote = "...")]`
Meta(NameValue(ref m)) if m.ident == "remote" => { Meta(NameValue(ref m)) if m.ident == "remote" => {
if let Ok(path) = parse_lit_into_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_path(cx, &m.ident, &m.lit) {
if is_primitive_path(&path, "Self") { if is_primitive_path(&path, "Self") {
remote.set(item.ident.into()); remote.set(item.ident.clone().into());
} else { } else {
remote.set(path); remote.set(path);
} }
@@ -348,12 +355,12 @@ impl Container {
} }
// Parse `#[serde(field_identifier)]` // Parse `#[serde(field_identifier)]`
Meta(Word(word)) if word == "field_identifier" => { Meta(Word(ref word)) if word == "field_identifier" => {
field_identifier.set_true(); field_identifier.set_true();
} }
// Parse `#[serde(variant_identifier)]` // Parse `#[serde(variant_identifier)]`
Meta(Word(word)) if word == "variant_identifier" => { Meta(Word(ref word)) if word == "variant_identifier" => {
variant_identifier.set_true(); variant_identifier.set_true();
} }
@@ -376,6 +383,7 @@ impl Container {
serialize: ser_name.get().unwrap_or_else(|| item.ident.to_string()), serialize: ser_name.get().unwrap_or_else(|| item.ident.to_string()),
deserialize: de_name.get().unwrap_or_else(|| item.ident.to_string()), deserialize: de_name.get().unwrap_or_else(|| item.ident.to_string()),
}, },
transparent: transparent.get(),
deny_unknown_fields: deny_unknown_fields.get(), deny_unknown_fields: deny_unknown_fields.get(),
default: default.get().unwrap_or(Default::None), default: default.get().unwrap_or(Default::None),
rename_all: rename_all.get().unwrap_or(RenameRule::None), rename_all: rename_all.get().unwrap_or(RenameRule::None),
@@ -398,6 +406,10 @@ impl Container {
&self.rename_all &self.rename_all
} }
pub fn transparent(&self) -> bool {
self.transparent
}
pub fn deny_unknown_fields(&self) -> bool { pub fn deny_unknown_fields(&self) -> bool {
self.deny_unknown_fields self.deny_unknown_fields
} }
@@ -556,7 +568,7 @@ impl Variant {
match meta_item { match meta_item {
// Parse `#[serde(rename = "foo")]` // Parse `#[serde(rename = "foo")]`
Meta(NameValue(ref m)) if m.ident == "rename" => { Meta(NameValue(ref m)) if m.ident == "rename" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
ser_name.set(s.value()); ser_name.set(s.value());
de_name.set(s.value()); de_name.set(s.value());
} }
@@ -572,7 +584,7 @@ impl Variant {
// Parse `#[serde(rename_all = "foo")]` // Parse `#[serde(rename_all = "foo")]`
Meta(NameValue(ref m)) if m.ident == "rename_all" => { Meta(NameValue(ref m)) if m.ident == "rename_all" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
match RenameRule::from_str(&s.value()) { match RenameRule::from_str(&s.value()) {
Ok(rename_rule) => rename_all.set(rename_rule), Ok(rename_rule) => rename_all.set(rename_rule),
Err(()) => cx.error(format!( Err(()) => cx.error(format!(
@@ -585,30 +597,30 @@ impl Variant {
} }
// Parse `#[serde(skip)]` // Parse `#[serde(skip)]`
Meta(Word(word)) if word == "skip" => { Meta(Word(ref word)) if word == "skip" => {
skip_serializing.set_true(); skip_serializing.set_true();
skip_deserializing.set_true(); skip_deserializing.set_true();
} }
// Parse `#[serde(skip_deserializing)]` // Parse `#[serde(skip_deserializing)]`
Meta(Word(word)) if word == "skip_deserializing" => { Meta(Word(ref word)) if word == "skip_deserializing" => {
skip_deserializing.set_true(); skip_deserializing.set_true();
} }
// Parse `#[serde(skip_serializing)]` // Parse `#[serde(skip_serializing)]`
Meta(Word(word)) if word == "skip_serializing" => { Meta(Word(ref word)) if word == "skip_serializing" => {
skip_serializing.set_true(); skip_serializing.set_true();
} }
// Parse `#[serde(other)]` // Parse `#[serde(other)]`
Meta(Word(word)) if word == "other" => { Meta(Word(ref word)) if word == "other" => {
other.set_true(); other.set_true();
} }
// Parse `#[serde(bound = "D: Serialize")]` // Parse `#[serde(bound = "D: Serialize")]`
Meta(NameValue(ref m)) if m.ident == "bound" => { Meta(NameValue(ref m)) if m.ident == "bound" => {
if let Ok(where_predicates) = if let Ok(where_predicates) =
parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
{ {
ser_bound.set(where_predicates.clone()); ser_bound.set(where_predicates.clone());
de_bound.set(where_predicates); de_bound.set(where_predicates);
@@ -625,7 +637,7 @@ impl Variant {
// Parse `#[serde(with = "...")]` // Parse `#[serde(with = "...")]`
Meta(NameValue(ref m)) if m.ident == "with" => { Meta(NameValue(ref m)) if m.ident == "with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
let mut ser_path = path.clone(); let mut ser_path = path.clone();
ser_path ser_path
.path .path
@@ -643,14 +655,14 @@ impl Variant {
// Parse `#[serde(serialize_with = "...")]` // Parse `#[serde(serialize_with = "...")]`
Meta(NameValue(ref m)) if m.ident == "serialize_with" => { Meta(NameValue(ref m)) if m.ident == "serialize_with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
serialize_with.set(path); serialize_with.set(path);
} }
} }
// Parse `#[serde(deserialize_with = "...")]` // Parse `#[serde(deserialize_with = "...")]`
Meta(NameValue(ref m)) if m.ident == "deserialize_with" => { Meta(NameValue(ref m)) if m.ident == "deserialize_with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
deserialize_with.set(path); deserialize_with.set(path);
} }
} }
@@ -764,6 +776,7 @@ pub struct Field {
borrowed_lifetimes: BTreeSet<syn::Lifetime>, borrowed_lifetimes: BTreeSet<syn::Lifetime>,
getter: Option<syn::ExprPath>, getter: Option<syn::ExprPath>,
flatten: bool, flatten: bool,
transparent: bool,
} }
/// Represents the default to use for a field when deserializing. /// Represents the default to use for a field when deserializing.
@@ -777,7 +790,6 @@ pub enum Default {
} }
impl Default { impl Default {
#[cfg(feature = "deserialize_in_place")]
pub fn is_none(&self) -> bool { pub fn is_none(&self) -> bool {
match *self { match *self {
Default::None => true, Default::None => true,
@@ -828,7 +840,7 @@ impl Field {
match meta_item { match meta_item {
// Parse `#[serde(rename = "foo")]` // Parse `#[serde(rename = "foo")]`
Meta(NameValue(ref m)) if m.ident == "rename" => { Meta(NameValue(ref m)) if m.ident == "rename" => {
if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
ser_name.set(s.value()); ser_name.set(s.value());
de_name.set(s.value()); de_name.set(s.value());
} }
@@ -843,57 +855,57 @@ impl Field {
} }
// Parse `#[serde(default)]` // Parse `#[serde(default)]`
Meta(Word(word)) if word == "default" => { Meta(Word(ref word)) if word == "default" => {
default.set(Default::Default); default.set(Default::Default);
} }
// Parse `#[serde(default = "...")]` // Parse `#[serde(default = "...")]`
Meta(NameValue(ref m)) if m.ident == "default" => { Meta(NameValue(ref m)) if m.ident == "default" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
default.set(Default::Path(path)); default.set(Default::Path(path));
} }
} }
// Parse `#[serde(skip_serializing)]` // Parse `#[serde(skip_serializing)]`
Meta(Word(word)) if word == "skip_serializing" => { Meta(Word(ref word)) if word == "skip_serializing" => {
skip_serializing.set_true(); skip_serializing.set_true();
} }
// Parse `#[serde(skip_deserializing)]` // Parse `#[serde(skip_deserializing)]`
Meta(Word(word)) if word == "skip_deserializing" => { Meta(Word(ref word)) if word == "skip_deserializing" => {
skip_deserializing.set_true(); skip_deserializing.set_true();
} }
// Parse `#[serde(skip)]` // Parse `#[serde(skip)]`
Meta(Word(word)) if word == "skip" => { Meta(Word(ref word)) if word == "skip" => {
skip_serializing.set_true(); skip_serializing.set_true();
skip_deserializing.set_true(); skip_deserializing.set_true();
} }
// Parse `#[serde(skip_serializing_if = "...")]` // Parse `#[serde(skip_serializing_if = "...")]`
Meta(NameValue(ref m)) if m.ident == "skip_serializing_if" => { Meta(NameValue(ref m)) if m.ident == "skip_serializing_if" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
skip_serializing_if.set(path); skip_serializing_if.set(path);
} }
} }
// Parse `#[serde(serialize_with = "...")]` // Parse `#[serde(serialize_with = "...")]`
Meta(NameValue(ref m)) if m.ident == "serialize_with" => { Meta(NameValue(ref m)) if m.ident == "serialize_with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
serialize_with.set(path); serialize_with.set(path);
} }
} }
// Parse `#[serde(deserialize_with = "...")]` // Parse `#[serde(deserialize_with = "...")]`
Meta(NameValue(ref m)) if m.ident == "deserialize_with" => { Meta(NameValue(ref m)) if m.ident == "deserialize_with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
deserialize_with.set(path); deserialize_with.set(path);
} }
} }
// Parse `#[serde(with = "...")]` // Parse `#[serde(with = "...")]`
Meta(NameValue(ref m)) if m.ident == "with" => { Meta(NameValue(ref m)) if m.ident == "with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
let mut ser_path = path.clone(); let mut ser_path = path.clone();
ser_path ser_path
.path .path
@@ -912,7 +924,7 @@ impl Field {
// Parse `#[serde(bound = "D: Serialize")]` // Parse `#[serde(bound = "D: Serialize")]`
Meta(NameValue(ref m)) if m.ident == "bound" => { Meta(NameValue(ref m)) if m.ident == "bound" => {
if let Ok(where_predicates) = if let Ok(where_predicates) =
parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
{ {
ser_bound.set(where_predicates.clone()); ser_bound.set(where_predicates.clone());
de_bound.set(where_predicates); de_bound.set(where_predicates);
@@ -928,7 +940,7 @@ impl Field {
} }
// Parse `#[serde(borrow)]` // Parse `#[serde(borrow)]`
Meta(Word(word)) if word == "borrow" => { Meta(Word(ref word)) if word == "borrow" => {
if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) { if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) {
borrowed_lifetimes.set(borrowable); borrowed_lifetimes.set(borrowable);
} }
@@ -937,7 +949,7 @@ impl Field {
// Parse `#[serde(borrow = "'a + 'b")]` // Parse `#[serde(borrow = "'a + 'b")]`
Meta(NameValue(ref m)) if m.ident == "borrow" => { Meta(NameValue(ref m)) if m.ident == "borrow" => {
if let Ok(lifetimes) = if let Ok(lifetimes) =
parse_lit_into_lifetimes(cx, m.ident.as_ref(), &m.lit) parse_lit_into_lifetimes(cx, &m.ident, &m.lit)
{ {
if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) { if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) {
for lifetime in &lifetimes { for lifetime in &lifetimes {
@@ -955,13 +967,13 @@ impl Field {
// Parse `#[serde(getter = "...")]` // Parse `#[serde(getter = "...")]`
Meta(NameValue(ref m)) if m.ident == "getter" => { Meta(NameValue(ref m)) if m.ident == "getter" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
getter.set(path); getter.set(path);
} }
} }
// Parse `#[serde(flatten)]` // Parse `#[serde(flatten)]`
Meta(Word(word)) if word == "flatten" => { Meta(Word(ref word)) if word == "flatten" => {
flatten.set_true(); flatten.set_true();
} }
@@ -1066,6 +1078,7 @@ impl Field {
borrowed_lifetimes: borrowed_lifetimes, borrowed_lifetimes: borrowed_lifetimes,
getter: getter.get(), getter: getter.get(),
flatten: flatten.get(), flatten: flatten.get(),
transparent: false,
} }
} }
@@ -1125,6 +1138,14 @@ impl Field {
pub fn flatten(&self) -> bool { pub fn flatten(&self) -> bool {
self.flatten self.flatten
} }
pub fn transparent(&self) -> bool {
self.transparent
}
pub fn mark_transparent(&mut self) {
self.transparent = true;
}
} }
type SerAndDe<T> = (Option<T>, Option<T>); type SerAndDe<T> = (Option<T>, Option<T>);
@@ -1137,21 +1158,22 @@ fn get_ser_and_de<'a, T, F>(
) -> Result<SerAndDe<T>, ()> ) -> Result<SerAndDe<T>, ()>
where where
T: 'a, T: 'a,
F: Fn(&Ctxt, &str, &str, &'a syn::Lit) -> Result<T, ()>, F: Fn(&Ctxt, &Ident, &Ident, &'a syn::Lit) -> Result<T, ()>,
{ {
let mut ser_meta = Attr::none(cx, attr_name); let mut ser_meta = Attr::none(cx, attr_name);
let mut de_meta = Attr::none(cx, attr_name); let mut de_meta = Attr::none(cx, attr_name);
let attr_name = Ident::new(attr_name, Span::call_site());
for meta in metas { for meta in metas {
match *meta { match *meta {
Meta(NameValue(ref meta)) if meta.ident == "serialize" => { Meta(NameValue(ref meta)) if meta.ident == "serialize" => {
if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) { if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) {
ser_meta.set(v); ser_meta.set(v);
} }
} }
Meta(NameValue(ref meta)) if meta.ident == "deserialize" => { Meta(NameValue(ref meta)) if meta.ident == "deserialize" => {
if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) { if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) {
de_meta.set(v); de_meta.set(v);
} }
} }
@@ -1200,8 +1222,8 @@ pub fn get_serde_meta_items(attr: &syn::Attribute) -> Option<Vec<syn::NestedMeta
fn get_lit_str<'a>( fn get_lit_str<'a>(
cx: &Ctxt, cx: &Ctxt,
attr_name: &str, attr_name: &Ident,
meta_item_name: &str, meta_item_name: &Ident,
lit: &'a syn::Lit, lit: &'a syn::Lit,
) -> Result<&'a syn::LitStr, ()> { ) -> Result<&'a syn::LitStr, ()> {
if let syn::Lit::Str(ref lit) = *lit { if let syn::Lit::Str(ref lit) = *lit {
@@ -1215,7 +1237,7 @@ fn get_lit_str<'a>(
} }
} }
fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::Path, ()> { fn parse_lit_into_path(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result<syn::Path, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
parse_lit_str(string) parse_lit_str(string)
.map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value()))) .map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
@@ -1223,7 +1245,7 @@ fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn
fn parse_lit_into_expr_path( fn parse_lit_into_expr_path(
cx: &Ctxt, cx: &Ctxt,
attr_name: &str, attr_name: &Ident,
lit: &syn::Lit, lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> { ) -> Result<syn::ExprPath, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
@@ -1233,8 +1255,8 @@ fn parse_lit_into_expr_path(
fn parse_lit_into_where( fn parse_lit_into_where(
cx: &Ctxt, cx: &Ctxt,
attr_name: &str, attr_name: &Ident,
meta_item_name: &str, meta_item_name: &Ident,
lit: &syn::Lit, lit: &syn::Lit,
) -> Result<Vec<syn::WherePredicate>, ()> { ) -> Result<Vec<syn::WherePredicate>, ()> {
let string = try!(get_lit_str(cx, attr_name, meta_item_name, lit)); let string = try!(get_lit_str(cx, attr_name, meta_item_name, lit));
@@ -1249,7 +1271,7 @@ fn parse_lit_into_where(
.map_err(|err| cx.error(err)) .map_err(|err| cx.error(err))
} }
fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::Type, ()> { fn parse_lit_into_ty(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result<syn::Type, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
parse_lit_str(string).map_err(|_| { parse_lit_str(string).map_err(|_| {
@@ -1265,7 +1287,7 @@ fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::
// lifetimes separated by `+`. // lifetimes separated by `+`.
fn parse_lit_into_lifetimes( fn parse_lit_into_lifetimes(
cx: &Ctxt, cx: &Ctxt,
attr_name: &str, attr_name: &Ident,
lit: &syn::Lit, lit: &syn::Lit,
) -> Result<BTreeSet<syn::Lifetime>, ()> { ) -> Result<BTreeSet<syn::Lifetime>, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
@@ -1286,7 +1308,7 @@ fn parse_lit_into_lifetimes(
if let Ok(BorrowedLifetimes(lifetimes)) = parse_lit_str(string) { if let Ok(BorrowedLifetimes(lifetimes)) = parse_lit_str(string) {
let mut set = BTreeSet::new(); let mut set = BTreeSet::new();
for lifetime in lifetimes { for lifetime in lifetimes {
if !set.insert(lifetime) { if !set.insert(lifetime.clone()) {
cx.error(format!("duplicate borrowed lifetime `{}`", lifetime)); cx.error(format!("duplicate borrowed lifetime `{}`", lifetime));
} }
} }
@@ -1426,7 +1448,9 @@ fn is_primitive_type(ty: &syn::Type, primitive: &str) -> bool {
} }
fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool { fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool {
path.leading_colon.is_none() && path.segments.len() == 1 && path.segments[0].ident == primitive path.leading_colon.is_none()
&& path.segments.len() == 1
&& path.segments[0].ident == primitive
&& path.segments[0].arguments.is_empty() && path.segments[0].arguments.is_empty()
} }
+84 -15
View File
@@ -8,17 +8,19 @@
use internals::ast::{Container, Data, Field, Style}; use internals::ast::{Container, Data, Field, Style};
use internals::attr::{EnumTag, Identifier}; use internals::attr::{EnumTag, Identifier};
use internals::Ctxt; use internals::{Ctxt, Derive};
use syn::{Member, Type};
/// Cross-cutting checks that require looking at more than a single attrs /// Cross-cutting checks that require looking at more than a single attrs
/// object. Simpler checks should happen when parsing and building the attrs. /// object. Simpler checks should happen when parsing and building the attrs.
pub fn check(cx: &Ctxt, cont: &Container) { pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
check_getter(cx, cont); check_getter(cx, cont);
check_flatten(cx, cont); check_flatten(cx, cont);
check_identifier(cx, cont); check_identifier(cx, cont);
check_variant_skip_attrs(cx, cont); check_variant_skip_attrs(cx, cont);
check_internal_tag_field_name_conflict(cx, cont); check_internal_tag_field_name_conflict(cx, cont);
check_adjacent_tag_conflict(cx, cont); check_adjacent_tag_conflict(cx, cont);
check_transparent(cx, cont, derive);
} }
/// Getters are only allowed inside structs (not enums) with the `remote` /// Getters are only allowed inside structs (not enums) with the `remote`
@@ -171,17 +173,14 @@ fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
)); ));
} }
for (i, field) in variant.fields.iter().enumerate() { for field in &variant.fields {
let ident = field let member = member_message(&field.member);
.ident
.as_ref()
.map_or_else(|| format!("{}", i), |ident| format!("`{}`", ident));
if field.attrs.skip_serializing() { if field.attrs.skip_serializing() {
cx.error(format!( cx.error(format!(
"variant `{}` cannot have both #[serde(serialize_with)] and \ "variant `{}` cannot have both #[serde(serialize_with)] and \
a field {} marked with #[serde(skip_serializing)]", a field {} marked with #[serde(skip_serializing)]",
variant.ident, ident variant.ident, member
)); ));
} }
@@ -189,7 +188,7 @@ fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
cx.error(format!( cx.error(format!(
"variant `{}` cannot have both #[serde(serialize_with)] and \ "variant `{}` cannot have both #[serde(serialize_with)] and \
a field {} marked with #[serde(skip_serializing_if)]", a field {} marked with #[serde(skip_serializing_if)]",
variant.ident, ident variant.ident, member
)); ));
} }
} }
@@ -204,17 +203,14 @@ fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
)); ));
} }
for (i, field) in variant.fields.iter().enumerate() { for field in &variant.fields {
if field.attrs.skip_deserializing() { if field.attrs.skip_deserializing() {
let ident = field let member = member_message(&field.member);
.ident
.as_ref()
.map_or_else(|| format!("{}", i), |ident| format!("`{}`", ident));
cx.error(format!( cx.error(format!(
"variant `{}` cannot have both #[serde(deserialize_with)] \ "variant `{}` cannot have both #[serde(deserialize_with)] \
and a field {} marked with #[serde(skip_deserializing)]", and a field {} marked with #[serde(skip_deserializing)]",
variant.ident, ident variant.ident, member
)); ));
} }
} }
@@ -282,3 +278,76 @@ fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
cx.error(message); cx.error(message);
} }
} }
/// Enums and unit structs cannot be transparent.
fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
if !cont.attrs.transparent() {
return;
}
if cont.attrs.type_from().is_some() {
cx.error("#[serde(transparent)] is not allowed with #[serde(from = \"...\")]");
}
if cont.attrs.type_into().is_some() {
cx.error("#[serde(transparent)] is not allowed with #[serde(into = \"...\")]");
}
let fields = match cont.data {
Data::Enum(_) => {
cx.error("#[serde(transparent)] is not allowed on an enum");
return;
}
Data::Struct(Style::Unit, _) => {
cx.error("#[serde(transparent)] is not allowed on a unit struct");
return;
}
Data::Struct(_, ref mut fields) => fields,
};
let mut transparent_field = None;
for field in fields {
if allow_transparent(field, derive) {
if transparent_field.is_some() {
cx.error("#[serde(transparent)] requires struct to have at most one transparent field");
return;
}
transparent_field = Some(field);
}
}
match transparent_field {
Some(transparent_field) => transparent_field.attrs.mark_transparent(),
None => match derive {
Derive::Serialize => {
cx.error("#[serde(transparent)] requires at least one field that is not skipped");
}
Derive::Deserialize => {
cx.error("#[serde(transparent)] requires at least one field that is neither skipped nor has a default");
}
}
}
}
fn member_message(member: &Member) -> String {
match *member {
Member::Named(ref ident) => format!("`{}`", ident),
Member::Unnamed(ref i) => i.index.to_string(),
}
}
fn allow_transparent(field: &Field, derive: Derive) -> bool {
if let Type::Path(ref ty) = *field.ty {
if let Some(seg) = ty.path.segments.last() {
if seg.into_value().ident == "PhantomData" {
return false;
}
}
}
match derive {
Derive::Serialize => !field.attrs.skip_serializing(),
Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(),
}
}
+6
View File
@@ -14,3 +14,9 @@ pub use self::ctxt::Ctxt;
mod case; mod case;
mod check; mod check;
#[derive(Copy, Clone)]
pub enum Derive {
Serialize,
Deserialize,
}
+1 -1
View File
@@ -22,7 +22,7 @@
//! //!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html //! [https://serde.rs/derive.html]: https://serde.rs/derive.html
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.54")] #![doc(html_root_url = "https://docs.rs/serde_derive/1.0.59")]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))] #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Whitelisted clippy lints // Whitelisted clippy lints
#![cfg_attr( #![cfg_attr(
+13 -14
View File
@@ -1,5 +1,4 @@
use proc_macro2::Span; use proc_macro2::{Span, TokenStream};
use quote::Tokens;
use syn::Ident; use syn::Ident;
use internals::ast::{Container, Data, Field, Style}; use internals::ast::{Container, Data, Field, Style};
@@ -21,7 +20,7 @@ use internals::ast::{Container, Data, Field, Style};
// 8 | enum EnumDef { V } // 8 | enum EnumDef { V }
// | ^ // | ^
// //
pub fn pretend_used(cont: &Container) -> Tokens { pub fn pretend_used(cont: &Container) -> TokenStream {
let pretend_fields = pretend_fields_used(cont); let pretend_fields = pretend_fields_used(cont);
let pretend_variants = pretend_variants_used(cont); let pretend_variants = pretend_variants_used(cont);
@@ -49,8 +48,8 @@ pub fn pretend_used(cont: &Container) -> Tokens {
// The `ref` is important in case the user has written a Drop impl on their // The `ref` is important in case the user has written a Drop impl on their
// type. Rust does not allow destructuring a struct or enum that has a Drop // type. Rust does not allow destructuring a struct or enum that has a Drop
// impl. // impl.
fn pretend_fields_used(cont: &Container) -> Tokens { fn pretend_fields_used(cont: &Container) -> TokenStream {
let type_ident = cont.ident; let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl(); let (_, ty_generics, _) = cont.generics.split_for_impl();
let patterns = match cont.data { let patterns = match cont.data {
@@ -58,7 +57,7 @@ fn pretend_fields_used(cont: &Container) -> Tokens {
.iter() .iter()
.filter_map(|variant| match variant.style { .filter_map(|variant| match variant.style {
Style::Struct => { Style::Struct => {
let variant_ident = variant.ident; let variant_ident = &variant.ident;
let pat = struct_pattern(&variant.fields); let pat = struct_pattern(&variant.fields);
Some(quote!(#type_ident::#variant_ident #pat)) Some(quote!(#type_ident::#variant_ident #pat))
} }
@@ -93,7 +92,7 @@ fn pretend_fields_used(cont: &Container) -> Tokens {
// _ => {} // _ => {}
// } // }
// //
fn pretend_variants_used(cont: &Container) -> Tokens { fn pretend_variants_used(cont: &Container) -> TokenStream {
let variants = match cont.data { let variants = match cont.data {
Data::Enum(ref variants) => variants, Data::Enum(ref variants) => variants,
Data::Struct(_, _) => { Data::Struct(_, _) => {
@@ -101,20 +100,20 @@ fn pretend_variants_used(cont: &Container) -> Tokens {
} }
}; };
let type_ident = cont.ident; let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl(); let (_, ty_generics, _) = cont.generics.split_for_impl();
let turbofish = ty_generics.as_turbofish(); let turbofish = ty_generics.as_turbofish();
let cases = variants.iter().map(|variant| { let cases = variants.iter().map(|variant| {
let variant_ident = variant.ident; let variant_ident = &variant.ident;
let placeholders = &(0..variant.fields.len()) let placeholders = &(0..variant.fields.len())
.map(|i| Ident::new(&format!("__v{}", i), Span::call_site())) .map(|i| Ident::new(&format!("__v{}", i), Span::call_site()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let pat = match variant.style { let pat = match variant.style {
Style::Struct => { Style::Struct => {
let names = variant.fields.iter().map(|field| field.ident); let members = variant.fields.iter().map(|field| &field.member);
quote!({ #(#names: #placeholders),* }) quote!({ #(#members: #placeholders),* })
} }
Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )), Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )),
Style::Unit => quote!(), Style::Unit => quote!(),
@@ -133,9 +132,9 @@ fn pretend_variants_used(cont: &Container) -> Tokens {
quote!(#(#cases)*) quote!(#(#cases)*)
} }
fn struct_pattern(fields: &[Field]) -> Tokens { fn struct_pattern(fields: &[Field]) -> TokenStream {
let names = fields.iter().map(|field| field.ident); let members = fields.iter().map(|field| &field.member);
let placeholders = let placeholders =
(0..fields.len()).map(|i| Ident::new(&format!("__v{}", i), Span::call_site())); (0..fields.len()).map(|i| Ident::new(&format!("__v{}", i), Span::call_site()));
quote!({ #(#names: ref #placeholders),* }) quote!({ #(#members: ref #placeholders),* })
} }
+73 -64
View File
@@ -6,25 +6,24 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use proc_macro2::Span; use proc_macro2::{Span, TokenStream};
use quote::Tokens;
use syn::spanned::Spanned; use syn::spanned::Spanned;
use syn::{self, Ident, Index, Member}; use syn::{self, Ident, Index, Member};
use bound; use bound;
use fragment::{Fragment, Match, Stmts}; use fragment::{Fragment, Match, Stmts};
use internals::ast::{Container, Data, Field, Style, Variant}; use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{attr, Ctxt}; use internals::{attr, Ctxt, Derive};
use pretend; use pretend;
use try; use try;
pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<Tokens, String> { pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<TokenStream, String> {
let ctxt = Ctxt::new(); let ctxt = Ctxt::new();
let cont = Container::from_ast(&ctxt, input); let cont = Container::from_ast(&ctxt, input, Derive::Serialize);
precondition(&ctxt, &cont); precondition(&ctxt, &cont);
try!(ctxt.check()); try!(ctxt.check());
let ident = cont.ident; let ident = &cont.ident;
let params = Parameters::new(&cont); let params = Parameters::new(&cont);
let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl(); let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl();
let dummy_const = Ident::new(&format!("_IMPL_SERIALIZE_FOR_{}", ident), Span::call_site()); let dummy_const = Ident::new(&format!("_IMPL_SERIALIZE_FOR_{}", ident), Span::call_site());
@@ -110,7 +109,7 @@ impl Parameters {
let this = match cont.attrs.remote() { let this = match cont.attrs.remote() {
Some(remote) => remote.clone(), Some(remote) => remote.clone(),
None => cont.ident.into(), None => cont.ident.clone().into(),
}; };
let generics = build_generics(cont); let generics = build_generics(cont);
@@ -125,8 +124,8 @@ impl Parameters {
/// Type name to use in error messages and `&'static str` arguments to /// Type name to use in error messages and `&'static str` arguments to
/// various Serializer methods. /// various Serializer methods.
fn type_name(&self) -> &str { fn type_name(&self) -> String {
self.this.segments.last().unwrap().value().ident.as_ref() self.this.segments.last().unwrap().value().ident.to_string()
} }
} }
@@ -160,27 +159,24 @@ fn build_generics(cont: &Container) -> syn::Generics {
fn needs_serialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool { fn needs_serialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool {
!field.skip_serializing() && field.serialize_with().is_none() && field.ser_bound().is_none() !field.skip_serializing() && field.serialize_with().is_none() && field.ser_bound().is_none()
&& variant.map_or(true, |variant| { && variant.map_or(true, |variant| {
!variant.skip_serializing() && variant.serialize_with().is_none() !variant.skip_serializing()
&& variant.serialize_with().is_none()
&& variant.ser_bound().is_none() && variant.ser_bound().is_none()
}) })
} }
fn serialize_body(cont: &Container, params: &Parameters) -> Fragment { fn serialize_body(cont: &Container, params: &Parameters) -> Fragment {
if let Some(type_into) = cont.attrs.type_into() { if cont.attrs.transparent() {
serialize_transparent(cont, params)
} else if let Some(type_into) = cont.attrs.type_into() {
serialize_into(params, type_into) serialize_into(params, type_into)
} else { } else {
match cont.data { match cont.data {
Data::Enum(ref variants) => serialize_enum(params, variants, &cont.attrs), Data::Enum(ref variants) => serialize_enum(params, variants, &cont.attrs),
Data::Struct(Style::Struct, ref fields) => { Data::Struct(Style::Struct, ref fields) => {
if fields.iter().any(|field| field.ident.is_none()) {
panic!("struct has unnamed fields");
}
serialize_struct(params, fields, &cont.attrs) serialize_struct(params, fields, &cont.attrs)
} }
Data::Struct(Style::Tuple, ref fields) => { Data::Struct(Style::Tuple, ref fields) => {
if fields.iter().any(|field| field.ident.is_some()) {
panic!("tuple struct has named fields");
}
serialize_tuple_struct(params, fields, &cont.attrs) serialize_tuple_struct(params, fields, &cont.attrs)
} }
Data::Struct(Style::Newtype, ref fields) => { Data::Struct(Style::Newtype, ref fields) => {
@@ -191,8 +187,28 @@ fn serialize_body(cont: &Container, params: &Parameters) -> Fragment {
} }
} }
fn serialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
let fields = match cont.data {
Data::Struct(_, ref fields) => fields,
Data::Enum(_) => unreachable!(),
};
let self_var = &params.self_var;
let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap();
let member = &transparent_field.member;
let path = match transparent_field.attrs.serialize_with() {
Some(path) => quote!(#path),
None => quote!(_serde::Serialize::serialize),
};
quote_block! {
#path(&#self_var.#member, __serializer)
}
}
fn serialize_into(params: &Parameters, type_into: &syn::Type) -> Fragment { fn serialize_into(params: &Parameters, type_into: &syn::Type) -> Fragment {
let self_var = params.self_var; let self_var = &params.self_var;
quote_block! { quote_block! {
_serde::Serialize::serialize( _serde::Serialize::serialize(
&_serde::export::Into::<#type_into>::into(_serde::export::Clone::clone(#self_var)), &_serde::export::Into::<#type_into>::into(_serde::export::Clone::clone(#self_var)),
@@ -304,8 +320,7 @@ fn serialize_struct_as_struct(
.map(|field| match field.attrs.skip_serializing_if() { .map(|field| match field.attrs.skip_serializing_if() {
None => quote!(1), None => quote!(1),
Some(path) => { Some(path) => {
let ident = field.ident.expect("struct has unnamed fields"); let field_expr = get_member(params, field, &field.member);
let field_expr = get_member(params, field, &Member::Named(ident));
quote!(if #path(#field_expr) { 0 } else { 1 }) quote!(if #path(#field_expr) { 0 } else { 1 })
} }
}) })
@@ -340,8 +355,7 @@ fn serialize_struct_as_map(
.map(|field| match field.attrs.skip_serializing_if() { .map(|field| match field.attrs.skip_serializing_if() {
None => quote!(1), None => quote!(1),
Some(path) => { Some(path) => {
let ident = field.ident.expect("struct has unnamed fields"); let field_expr = get_member(params, field, &field.member);
let field_expr = get_member(params, field, &Member::Named(ident));
quote!(if #path(#field_expr) { 0 } else { 1 }) quote!(if #path(#field_expr) { 0 } else { 1 })
} }
}) })
@@ -359,7 +373,7 @@ fn serialize_struct_as_map(
fn serialize_enum(params: &Parameters, variants: &[Variant], cattrs: &attr::Container) -> Fragment { fn serialize_enum(params: &Parameters, variants: &[Variant], cattrs: &attr::Container) -> Fragment {
assert!(variants.len() as u64 <= u64::from(u32::max_value())); assert!(variants.len() as u64 <= u64::from(u32::max_value()));
let self_var = params.self_var; let self_var = &params.self_var;
let arms: Vec<_> = variants let arms: Vec<_> = variants
.iter() .iter()
@@ -381,9 +395,9 @@ fn serialize_variant(
variant: &Variant, variant: &Variant,
variant_index: u32, variant_index: u32,
cattrs: &attr::Container, cattrs: &attr::Container,
) -> Tokens { ) -> TokenStream {
let this = &params.this; let this = &params.this;
let variant_ident = variant.ident; let variant_ident = &variant.ident;
if variant.attrs.skip_serializing() { if variant.attrs.skip_serializing() {
let skipped_msg = format!( let skipped_msg = format!(
@@ -423,12 +437,9 @@ fn serialize_variant(
} }
} }
Style::Struct => { Style::Struct => {
let fields = variant let members = variant.fields.iter().map(|f| &f.member);
.fields
.iter()
.map(|f| f.ident.expect("struct variant has unnamed fields"));
quote! { quote! {
#this::#variant_ident { #(ref #fields),* } #this::#variant_ident { #(ref #members),* }
} }
} }
}; };
@@ -534,7 +545,7 @@ fn serialize_internally_tagged_variant(
let variant_name = variant.attrs.name().serialize_name(); let variant_name = variant.attrs.name().serialize_name();
let enum_ident_str = params.type_name(); let enum_ident_str = params.type_name();
let variant_ident_str = variant.ident.as_ref(); let variant_ident_str = variant.ident.to_string();
if let Some(path) = variant.attrs.serialize_with() { if let Some(path) = variant.attrs.serialize_with() {
let ser = wrap_serialize_variant_with(params, path, variant); let ser = wrap_serialize_variant_with(params, path, variant);
@@ -656,15 +667,11 @@ fn serialize_adjacently_tagged_variant(
unreachable!() unreachable!()
} }
} }
Style::Newtype => vec![Ident::new("__field0", Span::call_site())], Style::Newtype => vec![Member::Named(Ident::new("__field0", Span::call_site()))],
Style::Tuple => (0..variant.fields.len()) Style::Tuple => (0..variant.fields.len())
.map(|i| Ident::new(&format!("__field{}", i), Span::call_site())) .map(|i| Member::Named(Ident::new(&format!("__field{}", i), Span::call_site())))
.collect(),
Style::Struct => variant
.fields
.iter()
.map(|f| f.ident.expect("struct variant has unnamed fields"))
.collect(), .collect(),
Style::Struct => variant.fields.iter().map(|f| f.member.clone()).collect(),
}; };
let (_, ty_generics, where_clause) = params.generics.split_for_impl(); let (_, ty_generics, where_clause) = params.generics.split_for_impl();
@@ -850,10 +857,10 @@ fn serialize_struct_variant<'a>(
let len = serialized_fields let len = serialized_fields
.map(|field| { .map(|field| {
let ident = field.ident.expect("struct has unnamed fields"); let member = &field.member;
match field.attrs.skip_serializing_if() { match field.attrs.skip_serializing_if() {
Some(path) => quote!(if #path(#ident) { 0 } else { 1 }), Some(path) => quote!(if #path(#member) { 0 } else { 1 }),
None => quote!(1), None => quote!(1),
} }
}) })
@@ -929,7 +936,7 @@ fn serialize_struct_variant_with_flatten<'a>(
} => { } => {
let this = &params.this; let this = &params.this;
let fields_ty = fields.iter().map(|f| &f.ty); let fields_ty = fields.iter().map(|f| &f.ty);
let fields_ident = &fields.iter().map(|f| f.ident).collect::<Vec<_>>(); let members = &fields.iter().map(|f| &f.member).collect::<Vec<_>>();
let (_, ty_generics, where_clause) = params.generics.split_for_impl(); let (_, ty_generics, where_clause) = params.generics.split_for_impl();
let wrapper_generics = bound::with_lifetime_bound(&params.generics, "'__a"); let wrapper_generics = bound::with_lifetime_bound(&params.generics, "'__a");
@@ -946,7 +953,7 @@ fn serialize_struct_variant_with_flatten<'a>(
where where
__S: _serde::Serializer, __S: _serde::Serializer,
{ {
let (#(#fields_ident,)*) = self.data; let (#(#members,)*) = self.data;
let #let_mut __serde_state = try!(_serde::Serializer::serialize_map( let #let_mut __serde_state = try!(_serde::Serializer::serialize_map(
__serializer, __serializer,
_serde::export::None)); _serde::export::None));
@@ -961,7 +968,7 @@ fn serialize_struct_variant_with_flatten<'a>(
#variant_index, #variant_index,
#variant_name, #variant_name,
&__EnumFlatten { &__EnumFlatten {
data: (#(#fields_ident,)*), data: (#(#members,)*),
phantom: _serde::export::PhantomData::<#this #ty_generics>, phantom: _serde::export::PhantomData::<#this #ty_generics>,
}) })
} }
@@ -997,7 +1004,7 @@ fn serialize_tuple_struct_visitor(
params: &Parameters, params: &Parameters,
is_enum: bool, is_enum: bool,
tuple_trait: &TupleTrait, tuple_trait: &TupleTrait,
) -> Vec<Tokens> { ) -> Vec<TokenStream> {
fields fields
.iter() .iter()
.enumerate() .enumerate()
@@ -1045,17 +1052,17 @@ fn serialize_struct_visitor(
params: &Parameters, params: &Parameters,
is_enum: bool, is_enum: bool,
struct_trait: &StructTrait, struct_trait: &StructTrait,
) -> Vec<Tokens> { ) -> Vec<TokenStream> {
fields fields
.iter() .iter()
.filter(|&field| !field.attrs.skip_serializing()) .filter(|&field| !field.attrs.skip_serializing())
.map(|field| { .map(|field| {
let field_ident = field.ident.expect("struct has unnamed field"); let member = &field.member;
let mut field_expr = if is_enum { let mut field_expr = if is_enum {
quote!(#field_ident) quote!(#member)
} else { } else {
get_member(params, field, &Member::Named(field_ident)) get_member(params, field, &member)
}; };
let key_expr = field.attrs.name().serialize_name(); let key_expr = field.attrs.name().serialize_name();
@@ -1109,8 +1116,8 @@ fn wrap_serialize_field_with(
params: &Parameters, params: &Parameters,
field_ty: &syn::Type, field_ty: &syn::Type,
serialize_with: &syn::ExprPath, serialize_with: &syn::ExprPath,
field_expr: &Tokens, field_expr: &TokenStream,
) -> Tokens { ) -> TokenStream {
wrap_serialize_with(params, serialize_with, &[field_ty], &[quote!(#field_expr)]) wrap_serialize_with(params, serialize_with, &[field_ty], &[quote!(#field_expr)])
} }
@@ -1118,16 +1125,18 @@ fn wrap_serialize_variant_with(
params: &Parameters, params: &Parameters,
serialize_with: &syn::ExprPath, serialize_with: &syn::ExprPath,
variant: &Variant, variant: &Variant,
) -> Tokens { ) -> TokenStream {
let field_tys: Vec<_> = variant.fields.iter().map(|field| field.ty).collect(); let field_tys: Vec<_> = variant.fields.iter().map(|field| field.ty).collect();
let field_exprs: Vec<_> = variant let field_exprs: Vec<_> = variant
.fields .fields
.iter() .iter()
.enumerate() .map(|field| {
.map(|(i, field)| { let id = match field.member {
let id = field Member::Named(ref ident) => ident.clone(),
.ident Member::Unnamed(ref member) => {
.unwrap_or_else(|| Ident::new(&format!("__field{}", i), Span::call_site())); Ident::new(&format!("__field{}", member.index), Span::call_site())
}
};
quote!(#id) quote!(#id)
}) })
.collect(); .collect();
@@ -1143,8 +1152,8 @@ fn wrap_serialize_with(
params: &Parameters, params: &Parameters,
serialize_with: &syn::ExprPath, serialize_with: &syn::ExprPath,
field_tys: &[&syn::Type], field_tys: &[&syn::Type],
field_exprs: &[Tokens], field_exprs: &[TokenStream],
) -> Tokens { ) -> TokenStream {
let this = &params.this; let this = &params.this;
let (_, ty_generics, where_clause) = params.generics.split_for_impl(); let (_, ty_generics, where_clause) = params.generics.split_for_impl();
@@ -1190,7 +1199,7 @@ fn wrap_serialize_with(
// _serde::ser::SerializeStruct::end(__serde_state) // _serde::ser::SerializeStruct::end(__serde_state)
// //
// where we want to omit the `mut` to avoid a warning. // where we want to omit the `mut` to avoid a warning.
fn mut_if(is_mut: bool) -> Option<Tokens> { fn mut_if(is_mut: bool) -> Option<TokenStream> {
if is_mut { if is_mut {
Some(quote!(mut)) Some(quote!(mut))
} else { } else {
@@ -1198,8 +1207,8 @@ fn mut_if(is_mut: bool) -> Option<Tokens> {
} }
} }
fn get_member(params: &Parameters, field: &Field, member: &Member) -> Tokens { fn get_member(params: &Parameters, field: &Field, member: &Member) -> TokenStream {
let self_var = params.self_var; let self_var = &params.self_var;
match (params.is_remote, field.attrs.getter()) { match (params.is_remote, field.attrs.getter()) {
(false, None) => quote!(&#self_var.#member), (false, None) => quote!(&#self_var.#member),
(true, None) => { (true, None) => {
@@ -1224,7 +1233,7 @@ enum StructTrait {
} }
impl StructTrait { impl StructTrait {
fn serialize_field(&self, span: Span) -> Tokens { fn serialize_field(&self, span: Span) -> TokenStream {
match *self { match *self {
StructTrait::SerializeMap => { StructTrait::SerializeMap => {
quote_spanned!(span=> _serde::ser::SerializeMap::serialize_entry) quote_spanned!(span=> _serde::ser::SerializeMap::serialize_entry)
@@ -1238,7 +1247,7 @@ impl StructTrait {
} }
} }
fn skip_field(&self, span: Span) -> Option<Tokens> { fn skip_field(&self, span: Span) -> Option<TokenStream> {
match *self { match *self {
StructTrait::SerializeMap => None, StructTrait::SerializeMap => None,
StructTrait::SerializeStruct => { StructTrait::SerializeStruct => {
@@ -1258,7 +1267,7 @@ enum TupleTrait {
} }
impl TupleTrait { impl TupleTrait {
fn serialize_element(&self, span: Span) -> Tokens { fn serialize_element(&self, span: Span) -> TokenStream {
match *self { match *self {
TupleTrait::SerializeTuple => { TupleTrait::SerializeTuple => {
quote_spanned!(span=> _serde::ser::SerializeTuple::serialize_element) quote_spanned!(span=> _serde::ser::SerializeTuple::serialize_element)
+3 -4
View File
@@ -1,13 +1,12 @@
use proc_macro2::{Op, Spacing}; use proc_macro2::{Punct, Spacing, TokenStream};
use quote::Tokens;
// None of our generated code requires the `From::from` error conversion // None of our generated code requires the `From::from` error conversion
// performed by the standard library's `try!` macro. With this simplified macro // performed by the standard library's `try!` macro. With this simplified macro
// we see a significant improvement in type checking and borrow checking time of // we see a significant improvement in type checking and borrow checking time of
// the generated code and a slight improvement in binary size. // the generated code and a slight improvement in binary size.
pub fn replacement() -> Tokens { pub fn replacement() -> TokenStream {
// Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it. // Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it.
let dollar = Op::new('$', Spacing::Alone); let dollar = Punct::new('$', Spacing::Alone);
quote! { quote! {
#[allow(unused_macros)] #[allow(unused_macros)]
+2 -2
View File
@@ -15,8 +15,8 @@ include = ["Cargo.toml", "lib.rs", "src/**/*.rs", "README.md", "LICENSE-APACHE",
path = "lib.rs" path = "lib.rs"
[dependencies] [dependencies]
proc-macro2 = "0.3" proc-macro2 = "0.4"
syn = { version = "0.13", default-features = false, features = ["derive", "parsing", "clone-impls"] } syn = { version = "0.14", default-features = false, features = ["derive", "parsing", "clone-impls"] }
[badges] [badges]
travis-ci = { repository = "serde-rs/serde" } travis-ci = { repository = "serde-rs/serde" }
+1 -1
View File
@@ -1,6 +1,6 @@
[package] [package]
name = "serde_test" name = "serde_test"
version = "1.0.54" # remember to update html_root_url version = "1.0.59" # remember to update html_root_url
authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"] authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
description = "Token De/Serializer for testing De/Serialize implementations" description = "Token De/Serializer for testing De/Serialize implementations"
+2 -1
View File
@@ -599,7 +599,8 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
{ {
match self.variant.take() { match self.variant.take() {
Some(Token::Str(variant)) => seed.deserialize(variant.into_deserializer()).map(Some), Some(Token::Str(variant)) => seed.deserialize(variant.into_deserializer()).map(Some),
Some(Token::Bytes(variant)) => seed.deserialize(BytesDeserializer { value: variant }) Some(Token::Bytes(variant)) => seed
.deserialize(BytesDeserializer { value: variant })
.map(Some), .map(Some),
Some(Token::U32(variant)) => seed.deserialize(variant.into_deserializer()).map(Some), Some(Token::U32(variant)) => seed.deserialize(variant.into_deserializer()).map(Some),
Some(other) => unexpected!(other), Some(other) => unexpected!(other),
+1 -1
View File
@@ -161,7 +161,7 @@
//! # } //! # }
//! ``` //! ```
#![doc(html_root_url = "https://docs.rs/serde_test/1.0.54")] #![doc(html_root_url = "https://docs.rs/serde_test/1.0.59")]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))] #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Whitelisted clippy lints // Whitelisted clippy lints
#![cfg_attr(feature = "cargo-clippy", allow(float_cmp))] #![cfg_attr(feature = "cargo-clippy", allow(float_cmp))]
@@ -0,0 +1,20 @@
// Copyright 2018 Serde Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate serde_derive;
#[derive(Serialize)] //~ ERROR: proc-macro derive panicked
#[serde(transparent)]
struct S {
//~^^^ HELP: #[serde(transparent)] requires struct to have at most one transparent field
a: u8,
b: u8,
}
fn main() {}
@@ -0,0 +1,22 @@
// Copyright 2018 Serde Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate serde_derive;
#[derive(Deserialize)] //~ ERROR: proc-macro derive panicked
#[serde(transparent)]
struct S {
//~^^^ HELP: #[serde(transparent)] requires at least one field that is neither skipped nor has a default
#[serde(skip)]
a: u8,
#[serde(default)]
b: u8,
}
fn main() {}
@@ -0,0 +1,20 @@
// Copyright 2018 Serde Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate serde_derive;
#[derive(Serialize)] //~ ERROR: proc-macro derive panicked
#[serde(transparent)]
struct S {
//~^^^ HELP: #[serde(transparent)] requires at least one field that is not skipped
#[serde(skip)]
a: u8,
}
fn main() {}
+46 -22
View File
@@ -15,6 +15,7 @@ extern crate serde;
use self::serde::de::{self, Unexpected}; use self::serde::de::{self, Unexpected};
use self::serde::{Deserialize, Deserializer, Serialize, Serializer}; use self::serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::collections::HashMap; use std::collections::HashMap;
use std::marker::PhantomData;
extern crate serde_test; extern crate serde_test;
use self::serde_test::{ use self::serde_test::{
@@ -2074,15 +2075,11 @@ fn test_flatten_untagged_enum() {
#[derive(Serialize, Deserialize, PartialEq, Debug)] #[derive(Serialize, Deserialize, PartialEq, Debug)]
#[serde(untagged)] #[serde(untagged)]
enum Inner { enum Inner {
Variant { Variant { a: i32 },
a: i32,
},
} }
let data = Outer { let data = Outer {
inner: Inner::Variant { inner: Inner::Variant { a: 0 },
a: 0,
}
}; };
assert_tokens( assert_tokens(
@@ -2118,12 +2115,8 @@ fn test_flatten_option() {
assert_tokens( assert_tokens(
&Outer { &Outer {
inner1: Some(Inner1 { inner1: Some(Inner1 { inner1: 1 }),
inner1: 1, inner2: Some(Inner2 { inner2: 2 }),
}),
inner2: Some(Inner2 {
inner2: 2,
}),
}, },
&[ &[
Token::Map { len: None }, Token::Map { len: None },
@@ -2137,9 +2130,7 @@ fn test_flatten_option() {
assert_tokens( assert_tokens(
&Outer { &Outer {
inner1: Some(Inner1 { inner1: Some(Inner1 { inner1: 1 }),
inner1: 1,
}),
inner2: None, inner2: None,
}, },
&[ &[
@@ -2153,9 +2144,7 @@ fn test_flatten_option() {
assert_tokens( assert_tokens(
&Outer { &Outer {
inner1: None, inner1: None,
inner2: Some(Inner2 { inner2: Some(Inner2 { inner2: 2 }),
inner2: 2,
}),
}, },
&[ &[
Token::Map { len: None }, Token::Map { len: None },
@@ -2170,9 +2159,44 @@ fn test_flatten_option() {
inner1: None, inner1: None,
inner2: None, inner2: None,
}, },
&[ &[Token::Map { len: None }, Token::MapEnd],
Token::Map { len: None },
Token::MapEnd,
],
); );
} }
#[test]
fn test_transparent_struct() {
#[derive(Serialize, Deserialize, PartialEq, Debug)]
#[serde(transparent)]
struct Transparent {
#[serde(skip)]
a: bool,
b: u32,
#[serde(skip)]
c: bool,
d: PhantomData<()>,
}
assert_tokens(
&Transparent {
a: false,
b: 1,
c: false,
d: PhantomData,
},
&[Token::U32(1)],
);
}
#[test]
fn test_transparent_tuple_struct() {
#[derive(Serialize, Deserialize, PartialEq, Debug)]
#[serde(transparent)]
struct Transparent(
#[serde(skip)] bool,
u32,
#[serde(skip)] bool,
PhantomData<()>,
);
assert_tokens(&Transparent(false, 1, false, PhantomData), &[Token::U32(1)]);
}
+18
View File
@@ -643,6 +643,24 @@ fn test_gen() {
struct ImpliciltyBorrowedOption<'a> { struct ImpliciltyBorrowedOption<'a> {
option: std::option::Option<&'a str>, option: std::option::Option<&'a str>,
} }
#[derive(Serialize, Deserialize)]
#[serde(untagged)]
enum UntaggedNewtypeVariantWith {
Newtype(
#[serde(serialize_with = "ser_x")]
#[serde(deserialize_with = "de_x")]
X,
),
}
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct TransparentWith {
#[serde(serialize_with = "ser_x")]
#[serde(deserialize_with = "de_x")]
x: X,
}
} }
////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////
+2 -1
View File
@@ -55,7 +55,8 @@ else
channel build channel build
cd "$DIR/test_suite" cd "$DIR/test_suite"
channel test --features unstable channel test --features unstable
channel build --tests --features proc-macro2/nightly # Broken while syn and quote update to the new proc-macro API
#channel build --tests --features proc-macro2/nightly
if [ -z "${APPVEYOR}" ]; then if [ -z "${APPVEYOR}" ]; then
cd "$DIR/test_suite/no_std" cd "$DIR/test_suite/no_std"
channel build channel build