Mirror of https://github.com/kristoferssolo/filecaster.git (synced 2025-10-21 19:00:34 +00:00)

refactor: restructure code

parent f8527d60e7
commit f18bbc7fdd

@@ -15,7 +15,7 @@ pub struct Field {
     pub attrs: Vec<Attribute>,
     pub vis: TokenStream,
     pub name: Ident,
-    pub ty: Ident,
+    pub ty: TokenStream,
 }

 #[derive(Debug)]

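Aside (my own illustration, not part of the commit): an Ident can only hold a single identifier such as u32, while a TokenStream can carry a full compound type path, so storing the field type as a TokenStream lets the derive accept fields like Option<String> or Vec<u8>. A minimal sketch, assuming the proc-macro2 and quote crates the surrounding code already uses:

use proc_macro2::TokenStream;
use quote::quote;

fn main() {
    // A single identifier fits in an Ident...
    let simple: TokenStream = quote! { u32 };
    // ...but a compound type is several tokens and needs a TokenStream.
    let compound: TokenStream = quote! { Option<String> };
    println!("{simple} / {compound}");
}
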
@@ -53,7 +53,7 @@ impl From<grammar::Field> for Field {
                 .collect(),
             vis: value.vis.to_token_stream(),
             name: value.name,
-            ty: value.ty,
+            ty: value.ty.to_token_stream(),
         }
     }
 }

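For reference, to_token_stream comes from the quote::ToTokens trait; a standalone sketch of the call (the Ident used here is my own example value):

use proc_macro2::{Ident, Span};
use quote::ToTokens;

fn main() {
    // Any node implementing ToTokens can be lowered back into a TokenStream,
    // which is what the Field struct now stores for `vis` and `ty`.
    let ty = Ident::new("u32", Span::call_site());
    let stream = ty.to_token_stream();
    println!("{stream}");
}
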
@@ -6,29 +6,29 @@ pub fn generate_impl(info: &Struct) -> Result<TokenStream> {
     let name = &info.name;
     let vis = &info.vis;
     let generics = &info.generics;
-    let file_ident = format_ident!("{name}File");
+    let file_ident = format_ident!("{}File", name.to_string());

     let mut file_fields = Vec::new();
     let mut assignments = Vec::new();

     for field in &info.fields {
-        let name = &field.name;
-        let ty = &field.ty;
-        let vis = &field.vis;
+        let fname = &field.name;
+        let fty = &field.ty;
+        let fvis = &field.vis;
         let default_override = parse_from_file_default_attr(&field.attrs)?;

-        let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
-        file_fields.push(quote! { #vis #name: Option<#shadow_ty> });
+        let shadow_ty = quote! { <#fty as ::filecaster::FromFile>::Shadow };
+        file_fields.push(quote! { #fvis #fname: Option<#shadow_ty> });

         if let Some(expr) = default_override {
             assignments.push(quote! {
-                #name: file.#name
-                    .map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner)))
-                    .unwrap_or(#expr.into())
+                #fname: file.#fname
+                    .map(|inner| <#fty as ::filecaster::FromFile>::from_file(Some(inner)))
+                    .unwrap_or_else(|| (#expr).into())
             });
         } else {
             assignments.push(quote! {
-                #name: <#ty as filecaster::FromFile>::from_file(file.#name)
+                #fname: <#fty as ::filecaster::FromFile>::from_file(file.#fname)
             });
         }
     }

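As an aside, here is a hand-written sketch of roughly what this loop assembles for a hypothetical input struct. The FromFile trait below is a minimal stand-in I wrote for illustration (the real trait lives in the filecaster crate and may differ), the Config/ConfigFile names and the unwrap_or_default step are my assumptions; only the shadow-field and assignment shapes are taken from the diff above.

// Minimal stand-in for the real trait; the actual definition in filecaster may differ.
trait FromFile: Sized {
    type Shadow;
    fn from_file(file: Option<Self::Shadow>) -> Self;
}

// Assumption: plain values shadow themselves and fall back to Default when absent.
impl FromFile for String {
    type Shadow = String;
    fn from_file(file: Option<String>) -> String {
        file.unwrap_or_default()
    }
}

// Hypothetical user struct; `host` carries #[from_file(default = "localhost")].
struct Config {
    host: String,
}

// Shape of the generated shadow struct: every field becomes an Option of the shadow type.
#[derive(Default)]
struct ConfigFile {
    host: Option<<String as FromFile>::Shadow>,
}

impl FromFile for Config {
    type Shadow = ConfigFile;

    fn from_file(file: Option<Self::Shadow>) -> Self {
        let file = file.unwrap_or_default(); // assumed; the real body is outside this hunk
        Config {
            // Mirrors the "default override" branch assembled above.
            host: file
                .host
                .map(|inner| <String as FromFile>::from_file(Some(inner)))
                .unwrap_or_else(|| ("localhost").into()),
        }
    }
}

fn main() {
    let fallback = Config::from_file(None);
    assert_eq!(fallback.host, "localhost");

    let loaded = Config::from_file(Some(ConfigFile { host: Some("example.org".into()) }));
    assert_eq!(loaded.host, "example.org");
}
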
@@ -41,7 +41,7 @@ pub fn generate_impl(info: &Struct) -> Result<TokenStream> {
             #(#file_fields),*
         }

-        impl #generics filecaster::FromFile for #name #generics {
+        impl #generics ::filecaster::FromFile for #name #generics {
            type Shadow = #file_ident #generics;

            fn from_file(file: Option<Self::Shadow>) -> Self {

@@ -54,32 +54,24 @@ pub fn generate_impl(info: &Struct) -> Result<TokenStream> {

         impl #generics From<Option<#file_ident #generics>> for #name #generics {
             fn from(value: Option<#file_ident #generics>) -> Self {
-                <Self as filecaster::FromFile>::from_file(value)
+                <Self as ::filecaster::FromFile>::from_file(value)
             }
         }

         impl #generics From<#file_ident #generics> for #name #generics {
             fn from(value: #file_ident #generics) -> Self {
-                <Self as filecaster::FromFile>::from_file(Some(value))
+                <Self as ::filecaster::FromFile>::from_file(Some(value))
             }
         }
     })
 }

 fn build_derive_clause() -> TokenStream {
-    let mut traits = vec![quote! { Debug }, quote! { Clone }, quote! { Default }];
-    #[cfg(feature = "serde")]
-    {
-        traits.push(quote! { serde::Deserialize });
-        traits.push(quote! { serde::Serialize });
-    }
-
-    #[cfg(feature = "merge")]
-    {
-        traits.push(quote! { merge::Merge });
-    }
-
-    quote! { #[derive( #(#traits),* )] }
+    quote! {
+        #[derive(Debug, Clone, Default)]
+        #[cfg_attr(feature = "serde", derive(::serde::Deserialize, ::serde::Serialize))]
+        #[cfg_attr(feature = "merge", derive(::merge::Merge))]
+    }
 }

 #[cfg(test)]

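Worth noting (my gloss, not stated in the commit): a `#[cfg(feature = ...)]` block inside the proc-macro function is evaluated against the features of the derive crate itself when it is compiled, whereas emitting `#[cfg_attr(feature = ..., derive(...))]` defers the decision to the crate where the generated code lands. A minimal sketch of the new shape, runnable on its own with proc-macro2 and quote:

use proc_macro2::TokenStream;
use quote::quote;

// Same tokens the diff's build_derive_clause now emits.
fn build_derive_clause() -> TokenStream {
    quote! {
        #[derive(Debug, Clone, Default)]
        #[cfg_attr(feature = "serde", derive(::serde::Deserialize, ::serde::Serialize))]
        #[cfg_attr(feature = "merge", derive(::merge::Merge))]
    }
}

fn main() {
    // The attributes are emitted unconditionally; the feature checks run where
    // the generated code is compiled, not where the derive macro is compiled.
    println!("{}", build_derive_clause());
}
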
@@ -12,7 +12,6 @@ pub struct Foo {
 }
 */
 unsynn! {
-
     pub struct Attribute {
         pub path: Ident, // attr
         pub tokens: ParenthesisGroupContaining<TokenStream> // "value"

@@ -4,7 +4,7 @@ use unsynn::*;

 pub fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<TokenStream>> {
     for attr in attrs {
-        if attr.path == "from_file" {
+        if attr.path.tokens_to_string().trim() == "from_file" {
             let tokens = attr.tokens.clone();
             let iter = tokens.clone().into_token_iter();

@@ -21,16 +21,22 @@ pub fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<TokenS

 fn extract_default_token(token: TokenStream) -> Option<TokenStream> {
     let mut iter = token.into_token_iter().peekable();
-    while let Some(TokenTree::Ident(id)) = iter.next() {
-        if id != "default" {
-            continue;
-        }
-        match iter.next() {
-            Some(TokenTree::Punct(eq)) if eq.as_char() == '=' => {
-                return Some(collect_until_commas(&mut iter));
-            }
-            _ => return None,
-        }
+    while let Some(tt) = iter.next() {
+        match &tt {
+            TokenTree::Ident(id) if id == "default" => {
+                // accept optional whitespace/punct and then '='
+                // next non-whitespace token should be '='
+                if let Some(next) = iter.peek()
+                    && let TokenTree::Punct(p) = next
+                    && p.as_char() == '='
+                {
+                    iter.next();
+                    return Some(collect_until_commas(&mut iter));
+                }
+                // if we see "default" without '=', treat as parse failure
+                return None;
+            }
+            _ => continue,
+        }
     }
 }

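For orientation, a standalone sketch of the same scanning idea, written against proc_macro2 directly rather than unsynn's into_token_iter/collect_until_commas helpers; the extract_default function and the test input below are mine, not the crate's:

use proc_macro2::{TokenStream, TokenTree};
use quote::quote;

// Hand-rolled stand-in for extract_default_token + collect_until_commas.
fn extract_default(tokens: TokenStream) -> Option<TokenStream> {
    let mut iter = tokens.into_iter().peekable();
    while let Some(tt) = iter.next() {
        if let TokenTree::Ident(id) = &tt {
            if id == "default" {
                // Require '=' immediately after `default`, as in the new logic.
                match iter.peek() {
                    Some(TokenTree::Punct(p)) if p.as_char() == '=' => {
                        iter.next(); // consume '='
                        // Collect everything up to the next ',' as the expression.
                        let mut expr = TokenStream::new();
                        while let Some(next) = iter.peek() {
                            if matches!(next, TokenTree::Punct(p) if p.as_char() == ',') {
                                break;
                            }
                            expr.extend(std::iter::once(iter.next().unwrap()));
                        }
                        return Some(expr);
                    }
                    _ => return None,
                }
            }
        }
    }
    None
}

fn main() {
    let attr: TokenStream = quote! { default = 3 + 4, rename = "x" };
    let expr = extract_default(attr).expect("attribute carries a default expression");
    println!("default expression: {expr}");
}
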
@@ -48,7 +54,8 @@ where
             iter.next();
             break;
         }
-        expr.extend(once(iter.next().unwrap()));
+        // peek returned Some, so unwrap is safe
+        expr.extend(once(iter.next().expect("this should be impossible to see")));
     }
     expr
 }