refactor: migrate to unsynn

This commit is contained in:
Kristofers Solo 2025-08-12 17:34:19 +03:00
parent 98bcfb5006
commit aaf5a081c1
Signed by: kristoferssolo
GPG Key ID: 8687F2D3EEE6F0ED
11 changed files with 349 additions and 267 deletions

62
Cargo.lock generated
View File

@ -14,6 +14,12 @@ version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "cfg-if"
version = "1.0.1"
@ -73,7 +79,17 @@ dependencies = [
"quote",
"serde",
"serde_json",
"syn",
"thiserror",
"unsynn",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
@ -156,6 +172,12 @@ dependencies = [
"syn",
]
[[package]]
name = "mutants"
version = "0.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc0287524726960e07b119cebd01678f852f147742ae0d925e6a520dca956126"
[[package]]
name = "num-traits"
version = "0.2.19"
@ -277,6 +299,12 @@ dependencies = [
"serde",
]
[[package]]
name = "shadow_counted"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65da48d447333cebe1aadbdd3662f3ba56e76e67f53bc46f3dd5f67c74629d6b"
[[package]]
name = "syn"
version = "2.0.104"
@ -316,6 +344,26 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "2.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b0949c3a6c842cbde3f1686d6eea5a010516deb7085f79db747562d4102f41e"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc5b44b4ab9c2fdd0e0512e6bece8388e214c0749f5862b114cc5b7a25daf227"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "toml"
version = "0.9.2"
@ -376,6 +424,18 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unsynn"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7940603a9e25cf11211cc43b81f4fcad2b8ab4df291ca855f32c40e1ac22d5bc"
dependencies = [
"fxhash",
"mutants",
"proc-macro2",
"shadow_counted",
]
[[package]]
name = "wasi"
version = "0.14.2+wasi-0.2.4"

View File

@ -6,6 +6,7 @@ members = ["filecaster", "filecaster-derive"]
filecaster-derive = { version = "0.2", path = "filecaster-derive" }
serde = { version = "1.0", features = ["derive"], default-features = false }
merge = "0.2"
thiserror = "2"
# dev-dependencies
filecaster = { path = "filecaster" }
claims = "0.8"

View File

@ -22,15 +22,15 @@ serde = ["dep:serde"]
merge = ["dep:merge"]
[dependencies]
merge = { workspace = true, optional = true }
proc-macro-error2 = "2.0"
proc-macro2 = "1.0"
quote = "1.0"
proc-macro-error2 = "2.0"
syn = { version = "2.0", features = ["parsing"] }
serde = { workspace = true, optional = true }
merge = { workspace = true, optional = true }
thiserror.workspace = true
unsynn = "0.1"
[dev-dependencies]
claims.workspace = true
serde_json.workspace = true
filecaster.workspace = true
syn = { version = "2.0", features = ["extra-traits", "parsing"] }

View File

@ -1,258 +0,0 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{
Attribute, Data, DeriveInput, Error, Expr, Field, Fields, FieldsNamed, GenericParam, Generics,
Ident, Lit, Meta, MetaList, Result, Type, parse_quote,
};
// Feature flags resolved when the derive crate itself is compiled; they
// control which extra traits the generated shadow struct derives.
const WITH_MERGE: bool = cfg!(feature = "merge");
const WITH_SERDE: bool = cfg!(feature = "serde");
/// Entry point: generate the shadow struct + `FromFile` impls.
///
/// For `struct Foo { .. }` this emits a `FooFile` shadow struct whose fields
/// are all `Option`-wrapped, plus a `filecaster::FromFile` impl and `From`
/// conversions that fill in defaults for missing values.
///
/// # Errors
/// Returns a `syn::Error` when the input is not a struct with named fields
/// or a `#[from_file]` attribute is malformed.
pub fn impl_from_file(input: &DeriveInput) -> Result<TokenStream> {
    let name = &input.ident;
    let vis = &input.vis;
    // Every type parameter additionally gets a `Default` bound so the shadow
    // struct can derive `Default`.
    let generics = add_trait_bounds(input.generics.clone());
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // The shadow struct is named `<Name>File`.
    let file_ident = format_ident!("{name}File");
    let fields = extract_named_fields(input)?;
    let (field_assignments, file_fields) = process_fields(fields)?;
    let derive_clause = build_derive_clause();
    Ok(quote! {
        #derive_clause
        #vis struct #file_ident #ty_generics #where_clause {
            #(#file_fields),*
        }
        impl #impl_generics filecaster::FromFile for #name #ty_generics #where_clause {
            type Shadow = #file_ident #ty_generics;
            fn from_file(file: Option<Self::Shadow>) -> Self {
                // A missing shadow falls back to `Default`, so every field
                // goes through its own default path below.
                let file = file.unwrap_or_default();
                Self {
                    #(#field_assignments),*
                }
            }
        }
        // Convenience conversions so callers can simply use `.into()`.
        impl #impl_generics From<Option<#file_ident #ty_generics>> for #name #ty_generics #where_clause {
            fn from(value: Option<#file_ident #ty_generics>) -> Self {
                <Self as filecaster::FromFile>::from_file(value)
            }
        }
        impl #impl_generics From<#file_ident #ty_generics> for #name #ty_generics #where_clause {
            fn from(value: #file_ident #ty_generics) -> Self {
                <Self as filecaster::FromFile>::from_file(Some(value))
            }
        }
    })
}
/// Ensure we only work on named-field structs
fn extract_named_fields(input: &DeriveInput) -> Result<&FieldsNamed> {
match &input.data {
Data::Struct(ds) => match &ds.fields {
Fields::Named(fields) => Ok(fields),
_ => Err(Error::new_spanned(
&input.ident,
r#"FromFile only works on structs with *named* fields.
Tuple structs and unit structs are not supported."#,
)),
},
_ => Err(Error::new_spanned(
&input.ident,
r#"FromFile only works on structs.
Enums are not supported."#,
)),
}
}
/// Build the shadow field + assignment for one original field.
///
/// Returns `(field_decl, assign)`: the `pub name: Option<Shadow>` declaration
/// for the shadow struct, and the initializer expression used inside
/// `from_file`.
fn build_file_field(field: &Field) -> Result<(TokenStream, TokenStream)> {
    let ident = field
        .ident
        .as_ref()
        .ok_or_else(|| Error::new_spanned(field, "Expected named fields"))?;
    let ty = &field.ty;
    // Optional `#[from_file(default = ...)]` override for this field.
    let default_override = parse_from_file_default_attr(&field.attrs)?;
    // With the `merge` feature, `None` shadow values are overwritten on merge.
    let field_attrs = if WITH_MERGE {
        quote! { #[merge(strategy = merge::option::overwrite_none)] }
    } else {
        quote! {}
    };
    // Nested struct -> delegate to its own `FromFile` impl
    let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
    let field_decl = quote! {
        #field_attrs
        pub #ident: Option<#shadow_ty>
    };
    let assign = build_file_assing(ident, ty, default_override);
    Ok((field_decl, assign))
}
/// Build the initializer expression for one field of the real struct.
///
/// NOTE(review): "assing" looks like a typo for "assign"; renaming would also
/// require updating the call site in `build_file_field`.
fn build_file_assing(ident: &Ident, ty: &Type, default_override: Option<Expr>) -> TokenStream {
    // A `#[from_file(default = ...)]` override wins when the shadow field is `None`.
    if let Some(expr) = default_override {
        return quote! {
            #ident: file.#ident.map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner))).unwrap_or(#expr)
        };
    }
    // Otherwise delegate entirely to the field type's own `FromFile` impl.
    quote! {
        #ident: <#ty as filecaster::FromFile>::from_file(file.#ident)
    }
}
/// Process all fields, producing `(assignments, shadow-field declarations)`
/// in field order. Stops at the first field that fails to build.
fn process_fields(fields: &FieldsNamed) -> Result<(Vec<TokenStream>, Vec<TokenStream>)> {
    let mut assignments = Vec::with_capacity(fields.named.len());
    let mut file_fields = Vec::with_capacity(fields.named.len());
    for field in &fields.named {
        let (decl, assign) = build_file_field(field)?;
        file_fields.push(decl);
        assignments.push(assign);
    }
    Ok((assignments, file_fields))
}
/// Derive clause for the shadow struct: always `Debug + Clone + Default`,
/// plus serde and merge derives when the matching features are enabled.
fn build_derive_clause() -> TokenStream {
    let mut traits = vec![quote!(Debug), quote!(Clone), quote!(Default)];
    if WITH_SERDE {
        traits.push(quote!(serde::Deserialize));
        traits.push(quote!(serde::Serialize));
    }
    if WITH_MERGE {
        traits.push(quote!(merge::Merge));
    }
    quote! { #[derive( #(#traits),* )] }
}
/// Add a `Default` bound to every generic *type* parameter (lifetimes and
/// const parameters are left untouched) so the shadow struct can derive
/// `Default`.
fn add_trait_bounds(mut generics: Generics) -> Generics {
    generics
        .params
        .iter_mut()
        .filter_map(|param| match param {
            GenericParam::Type(ty) => Some(ty),
            _ => None,
        })
        .for_each(|ty| ty.bounds.push(parse_quote!(Default)));
    generics
}
/// Attribute parsing: `#[from_file(default = ...)]`
///
/// Returns the override expression from the first `#[from_file]` attribute
/// found on the field, or `Ok(None)` when the field carries none.
fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<Expr>> {
    for attr in attrs {
        if !attr.path().is_ident("from_file") {
            continue; // Not a #[from_file] attribute, skip it
        }
        // Parse the content inside the parentheses of #[from_file(...)]
        return match &attr.meta {
            Meta::List(meta_list) => parse_default(meta_list),
            // Bare `#[from_file]` or `#[from_file = ...]` forms are rejected.
            _ => Err(Error::new_spanned(
                attr,
                "Expected #[from_file(default = \"literal\")] or similar",
            )),
        };
    }
    Ok(None)
}
/// Extract the `default = <expr>` value from a `#[from_file(...)]` list.
///
/// A string literal is special-cased into `"lit".to_string()` so string
/// fields receive an owned `String` default; any other expression is kept
/// verbatim. Returns `Ok(None)` when no `default` key is present.
fn parse_default(list: &MetaList) -> Result<Option<Expr>> {
    let mut default_expr = None;
    list.parse_nested_meta(|meta| {
        if meta.path.is_ident("default") {
            let value = meta.value()?;
            let expr = value.parse::<Expr>()?;
            // String literals become `"...".to_string()`.
            if let Expr::Lit(expr_lit) = &expr {
                if let Lit::Str(lit_str) = &expr_lit.lit {
                    default_expr = Some(parse_quote! {
                        #lit_str.to_string()
                    });
                    return Ok(());
                }
            }
            default_expr = Some(expr);
        }
        Ok(())
    })?;
    Ok(default_expr)
}
#[cfg(test)]
mod tests {
    use claims::{assert_err, assert_none};
    use quote::ToTokens;

    use super::*;

    // Named-field structs are accepted and fields come back in order.
    #[test]
    fn extract_named_fields_success() {
        let input: DeriveInput = parse_quote! {
            struct S { x: i32, y: String }
        };
        let fields = extract_named_fields(&input).unwrap();
        let names = fields
            .named
            .iter()
            .map(|f| f.ident.as_ref().unwrap().to_string())
            .collect::<Vec<_>>();
        assert_eq!(names, vec!["x", "y"]);
    }

    // Enums are rejected.
    #[test]
    fn extract_named_fields_err_on_enum() {
        let input: DeriveInput = parse_quote! {
            enum E { A, B }
        };
        assert_err!(extract_named_fields(&input));
    }

    // Tuple structs are rejected.
    #[test]
    fn extract_named_fields_err_on_tuple_struct() {
        let input: DeriveInput = parse_quote! {
            struct T(i32, String);
        };
        assert_err!(extract_named_fields(&input));
    }

    // Unrelated attributes yield no default override.
    #[test]
    fn parse_default_attrs_none() {
        let attrs: Vec<Attribute> = vec![parse_quote!(#[foo])];
        assert_none!(parse_from_file_default_attr(&attrs).unwrap());
    }

    // Fields with and without #[from_file] both produce one decl + one assign.
    #[test]
    fn process_fields_mixed() {
        let fields: FieldsNamed = parse_quote! {
            {
                #[from_file(default = 1)]
                a: u32,
                b: String,
            }
        };
        let (assign, file_fields) = process_fields(&fields).unwrap();
        // two fields
        assert_eq!(assign.len(), 2);
        assert_eq!(file_fields.len(), 2);
    }

    // Every type parameter gains a `Default` bound.
    #[test]
    fn add_trait_bouds_appends_default() {
        let gens: Generics = parse_quote!(<T, U>);
        let new = add_trait_bounds(gens);
        let s = new.to_token_stream().to_string();
        assert!(s.contains("T : Default"));
        assert!(s.contains("U : Default"));
    }
}

View File

@ -0,0 +1,22 @@
use proc_macro2::{Ident, TokenStream};
/// Codegen-facing view of the parsed struct, decoupled from the
/// `unsynn` grammar types in `grammar.rs`.
#[derive(Debug)]
pub struct StructInfo {
    /// Struct name.
    pub ident: Ident,
    /// Visibility kept as raw tokens (e.g. `pub`, `pub(crate)`, or empty).
    pub vis: TokenStream,
    /// Generic parameter list kept as raw tokens.
    pub generics: TokenStream,
    /// Named fields in declaration order.
    pub fields: Vec<FieldInfo>,
}

/// One named field of the input struct.
#[derive(Debug)]
pub struct FieldInfo {
    /// Field name.
    pub ident: Ident,
    /// Field type kept as raw tokens rather than a parsed type.
    pub ty: TokenStream,
    /// Attributes attached to this field.
    pub attrs: Vec<AttributeInfo>,
}

/// One field attribute, split into its leading path identifier
/// (e.g. `from_file`) and the full original token stream.
#[derive(Debug)]
pub struct AttributeInfo {
    pub path: Ident,
    pub tokens: TokenStream,
}

View File

@ -0,0 +1,84 @@
use crate::from_file::{
ast::StructInfo, error::FromFileError, parser::parse_from_file_default_attr,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
/// Generate the shadow struct plus the `FromFile` / `From` impls for `info`.
///
/// For `struct Foo { .. }` this emits a `FooFile` shadow struct whose fields
/// are all `Option`-wrapped, and impls that fill in defaults for missing
/// values.
///
/// # Errors
/// Propagates [`FromFileError::InvalidAttribute`] from attribute parsing.
pub fn generate_impl(info: &StructInfo) -> Result<TokenStream, FromFileError> {
    let name = &info.ident;
    let vis = &info.vis;
    // NOTE(review): the same `generics` tokens are reused for both the impl
    // and type position; confirm this holds for bounded generics like `<T: X>`.
    let generics = &info.generics;
    // The shadow struct is named `<Name>File`.
    let file_ident = format_ident!("{name}File");

    let mut file_fields = Vec::new();
    let mut assignments = Vec::new();
    for field in &info.fields {
        let ident = &field.ident;
        let ty = &field.ty;
        // Optional `#[from_file(default = ...)]` override for this field.
        let default_override = parse_from_file_default_attr(&field.attrs)?;
        // BUG FIX: was `fielcaster::FromFile` — the emitted code referenced a
        // nonexistent crate and would not compile for users of the derive.
        let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
        file_fields.push(quote! { pub #ident: Option<#shadow_ty> });
        if let Some(expr) = default_override {
            // BUG FIX: was `fide.#ident` — typo for the local `file` binding.
            assignments.push(quote! {
                #ident: file.#ident
                    .map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner)))
                    .unwrap_or(#expr)
            });
        } else {
            // No override: delegate entirely to the field type's `FromFile` impl.
            assignments.push(quote! {
                #ident: <#ty as filecaster::FromFile>::from_file(file.#ident)
            });
        }
    }

    let derive_clause = build_derive_clause();
    Ok(quote! {
        #derive_clause
        #vis struct #file_ident #generics {
            #(#file_fields),*
        }
        impl #generics filecaster::FromFile for #name #generics {
            type Shadow = #file_ident #generics;
            fn from_file(file: Option<Self::Shadow>) -> Self {
                // A missing shadow falls back to `Default`, so every field
                // goes through its own default path below.
                let file = file.unwrap_or_default();
                Self {
                    #(#assignments),*
                }
            }
        }
        // Convenience conversions so callers can simply use `.into()`.
        impl #generics From<Option<#file_ident #generics>> for #name #generics {
            fn from(value: Option<#file_ident #generics>) -> Self {
                <Self as filecaster::FromFile>::from_file(value)
            }
        }
        impl #generics From<#file_ident #generics> for #name #generics {
            fn from(value: #file_ident #generics) -> Self {
                <Self as filecaster::FromFile>::from_file(Some(value))
            }
        }
    })
}
/// `#[derive(...)]` clause for the generated shadow struct: always
/// `Debug + Clone + Default`, with serde / merge derives added when the
/// matching cargo features are enabled.
fn build_derive_clause() -> TokenStream {
    let mut traits = vec![quote!(Debug), quote!(Clone), quote!(Default)];
    #[cfg(feature = "serde")]
    traits.extend([quote!(serde::Deserialize), quote!(serde::Serialize)]);
    #[cfg(feature = "merge")]
    traits.push(quote!(merge::Merge));
    quote! { #[derive( #(#traits),* )] }
}

View File

@ -0,0 +1,22 @@
use proc_macro2::{Span, TokenStream};
use quote::quote_spanned;
use thiserror::Error;
/// Errors produced while parsing the derive input or its attributes.
/// Each variant carries the span where the error should be reported.
#[derive(Debug, Error)]
pub enum FromFileError {
    /// The derive target is not a struct with named fields.
    #[error("FromFile only works on structs with named fields")]
    NotNamedStruct { span: Span },
    /// A `#[from_file(...)]` attribute did not contain a usable `default = <expr>`.
    #[error("Invalid #[from_file] attribute format")]
    InvalidAttribute { span: Span },
}
impl FromFileError {
pub fn to_compile_error(&self) -> TokenStream {
let msg = self.to_string();
match self {
FromFileError::NotNamedStruct { span } | FromFileError::InvalidAttribute { span } => {
quote_spanned!(*span => compile_error!(#msg))
}
}
}
}

View File

@ -0,0 +1,20 @@
use unsynn::*;
// Token-level grammar for the derive input, declared through `unsynn`'s
// parser-generator macro: each struct parses its fields in declaration order.
unsynn! {
    /// One named field.
    /// NOTE(review): `attrs` as `Vec<TokenStream>` presumably consumes the
    /// leading `#[...]` groups — confirm against unsynn's parsing rules.
    pub struct Field {
        pub attrs: Vec<TokenStream>,
        pub name: Ident,
        pub colon: Colon,
        pub ty: TokenStream
    }
    /// Comma-separated field list inside the struct body.
    pub struct StructBody(pub CommaDelimitedVec<Field>);
    /// `vis struct Name<generics> { fields }` — visibility and generics are
    /// captured as raw token streams and passed through verbatim.
    pub struct StructDef {
        pub vis: TokenStream,
        pub kw_struct: Ident,
        pub name: Ident,
        pub generics: TokenStream,
        pub body: BraceGroupContaining<StructBody>
    }
}

View File

@ -0,0 +1,13 @@
mod ast;
mod codegen;
mod error;
mod grammar;
mod parser;
use crate::from_file::{codegen::generate_impl, error::FromFileError, parser::parse_scruct_info};
use proc_macro2::TokenStream;
/// Derive entry point: parse the raw derive-input tokens and emit the
/// shadow struct plus `FromFile` impls.
///
/// # Errors
/// Returns a [`FromFileError`] when the input is not a named-field struct or
/// carries a malformed `#[from_file]` attribute.
pub fn impl_from_file(input: TokenStream) -> Result<TokenStream, FromFileError> {
    parse_scruct_info(input).and_then(|info| generate_impl(&info))
}

View File

@ -0,0 +1,118 @@
use std::iter::{Peekable, once};
use crate::from_file::ast::AttributeInfo;
use crate::from_file::grammar::{Field, StructDef};
use crate::from_file::{
ast::{FieldInfo, StructInfo},
error::FromFileError,
};
use proc_macro2::{Ident, Span, TokenStream};
use unsynn::TokenTree;
use unsynn::{IParse, ToTokens};
/// Parse raw derive-input tokens into a [`StructInfo`].
///
/// NOTE(review): "scruct" looks like a typo for "struct"; renaming would also
/// require updating the caller in `mod.rs`.
///
/// # Errors
/// Any parse failure is reported as [`FromFileError::NotNamedStruct`] at the
/// call site — unsynn's own error is discarded, so no finer span is available.
pub fn parse_scruct_info(input: TokenStream) -> Result<StructInfo, FromFileError> {
    let mut iter = input.to_token_iter();
    let def = iter
        .parse::<StructDef>()
        .map_err(|_| FromFileError::NotNamedStruct {
            span: Span::call_site(),
        })?;
    Ok(def.into())
}
/// Look for the first `#[from_file(...)]` attribute among `attrs` and pull
/// out its `default` expression tokens.
///
/// # Errors
/// A `from_file` attribute that lacks a well-formed `default = <expr>` entry
/// yields [`FromFileError::InvalidAttribute`]; absence of the attribute is
/// simply `Ok(None)`.
pub fn parse_from_file_default_attr(
    attrs: &[AttributeInfo],
) -> Result<Option<TokenStream>, FromFileError> {
    match attrs.iter().find(|attr| attr.path == "from_file") {
        None => Ok(None),
        Some(attr) => extract_default_token(attr.tokens.clone())
            .map(Some)
            .ok_or_else(|| FromFileError::InvalidAttribute {
                span: attr.path.span(),
            }),
    }
}
/// Scan attribute tokens for `default = <expr>` and return the expression
/// tokens, or `None` when no well-formed `default = ...` entry exists.
fn extract_default_token(tokens: TokenStream) -> Option<TokenStream> {
    let mut iter = tokens.into_iter().peekable();
    // BUG FIX: the previous `while let Some(TokenTree::Ident(..)) = iter.next()`
    // loop terminated at the first non-identifier token, so an attribute such as
    // `#[from_file(other = 1, default = 2)]` never reached `default` (the `=`
    // after `other` ended the loop). Skip over non-identifier tokens instead.
    while let Some(tt) = iter.next() {
        let id = match tt {
            TokenTree::Ident(id) => id,
            _ => continue,
        };
        if id != "default" {
            continue;
        }
        // `default` must be immediately followed by `=`; anything else is
        // malformed and reported as such by the caller.
        return match iter.next() {
            Some(TokenTree::Punct(eq)) if eq.as_char() == '=' => {
                Some(collect_until_commas(&mut iter))
            }
            _ => None,
        };
    }
    None
}
/// Collect tokens up to the next top-level comma into a single stream.
/// The comma itself is consumed but not included, so the caller's iterator
/// resumes after the separator.
fn collect_until_commas<I>(iter: &mut Peekable<I>) -> TokenStream
where
    I: Iterator<Item = TokenTree>,
{
    let mut expr = TokenStream::new();
    loop {
        match iter.peek() {
            None => break,
            Some(TokenTree::Punct(p)) if p.as_char() == ',' => {
                // Swallow the separator and stop.
                iter.next();
                break;
            }
            Some(_) => expr.extend(iter.next()),
        }
    }
    expr
}
impl From<StructDef> for StructInfo {
fn from(value: StructDef) -> Self {
Self {
ident: value.name,
vis: value.vis,
generics: value.generics,
fields: value
.body
.content
.0
.into_iter()
.map(|d| d.value.into())
.collect(),
}
}
}
impl From<Field> for FieldInfo {
    /// Lower one parsed field, splitting each raw attribute token stream into
    /// its leading path identifier plus the full original tokens.
    fn from(field: Field) -> Self {
        let mut attrs = Vec::with_capacity(field.attrs.len());
        for tokens in field.attrs {
            let path = extract_attr_path(tokens.clone());
            attrs.push(AttributeInfo { path, tokens });
        }
        Self {
            ident: field.name,
            ty: field.ty,
            attrs,
        }
    }
}
/// Return the first identifier inside an attribute's tokens, used as the
/// attribute's "path" (e.g. `from_file`). Falls back to the placeholder
/// identifier `unknown` when the tokens contain no identifier at all.
fn extract_attr_path(attr_tokens: TokenStream) -> Ident {
    for tt in attr_tokens {
        if let TokenTree::Ident(id) = tt {
            return id;
        }
    }
    Ident::new("unknown", Span::call_site())
}

View File

@ -96,10 +96,10 @@
mod from_file;
pub(crate) use from_file::impl_from_file;
use proc_macro::TokenStream;
use proc_macro_error2::proc_macro_error;
use syn::{DeriveInput, parse_macro_input};
use crate::from_file::impl_from_file;
/// Implements the [`FromFile`] trait.
///
@ -108,8 +108,8 @@ use syn::{DeriveInput, parse_macro_input};
#[proc_macro_error]
#[proc_macro_derive(FromFile, attributes(from_file))]
pub fn derive_from_file(input: TokenStream) -> TokenStream {
let inp = parse_macro_input!(input as DeriveInput);
impl_from_file(&inp)
let ts = input.into();
impl_from_file(ts)
.unwrap_or_else(|e| e.to_compile_error())
.into()
}