feat: finish migration

This commit is contained in:
Kristofers Solo 2025-09-11 16:43:05 +03:00
parent 3d6d32af49
commit 0d4abce168
Signed by: kristoferssolo
GPG Key ID: 8687F2D3EEE6F0ED
9 changed files with 235 additions and 63 deletions

22
Cargo.lock generated
View File

@ -74,12 +74,10 @@ dependencies = [
"claims",
"filecaster",
"merge",
"proc-macro-error2",
"proc-macro2",
"quote",
"serde",
"serde_json",
"thiserror",
"unsynn",
]
@ -344,26 +342,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "2.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b0949c3a6c842cbde3f1686d6eea5a010516deb7085f79db747562d4102f41e"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc5b44b4ab9c2fdd0e0512e6bece8388e214c0749f5862b114cc5b7a25daf227"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "toml"
version = "0.9.2"

View File

@ -23,11 +23,9 @@ merge = ["dep:merge"]
[dependencies]
merge = { workspace = true, optional = true }
proc-macro-error2 = "2.0"
proc-macro2 = "1.0"
quote = "1.0"
serde = { workspace = true, optional = true }
thiserror.workspace = true
unsynn = "0.1"
[dev-dependencies]

View File

@ -1,22 +1,69 @@
use unsynn::{Ident, TokenStream};
use unsynn::{Ident, ToTokens, TokenStream};
use crate::from_file::grammar;
#[derive(Debug)]
pub struct StructInfo {
pub ident: Ident,
pub struct Struct {
pub vis: TokenStream,
pub name: Ident,
pub generics: TokenStream,
pub fields: Vec<FieldInfo>,
pub fields: Vec<Field>,
}
#[derive(Debug)]
pub struct FieldInfo {
pub ident: Ident,
pub ty: TokenStream,
pub attrs: Vec<AttributeInfo>,
pub struct Field {
pub attrs: Vec<Attribute>,
pub vis: TokenStream,
pub name: Ident,
pub ty: Ident,
}
#[derive(Debug)]
pub struct AttributeInfo {
pub struct Attribute {
pub path: Ident,
pub tokens: TokenStream,
}
impl From<grammar::StructDef> for Struct {
fn from(value: grammar::StructDef) -> Self {
Self {
vis: value.vis.to_token_stream(),
name: value.name,
generics: value.generics.to_token_stream(),
fields: value
.body
.content
.0
.0
.into_iter()
.map(|x| x.value.into())
.collect(),
}
}
}
impl From<grammar::Field> for Field {
    /// Lowers a parsed `grammar::Field`, flattening the optional attribute
    /// list into a plain (possibly empty) `Vec`.
    fn from(value: grammar::Field) -> Self {
        let attrs = match value.attrs {
            Some(groups) => groups.into_iter().map(Attribute::from).collect(),
            None => Vec::new(),
        };
        Self {
            attrs,
            vis: value.vis.to_token_stream(),
            name: value.name,
            ty: value.ty,
        }
    }
}
impl From<grammar::AttributeGroup> for Attribute {
    /// Strips the `#[...]` wrapper, keeping only the attribute path and the
    /// tokens inside its parentheses.
    fn from(value: grammar::AttributeGroup) -> Self {
        let inner = value.bracket_group.content;
        Self {
            path: inner.path,
            tokens: inner.tokens.content,
        }
    }
}

View File

@ -0,0 +1,110 @@
use crate::from_file::{ast::Struct, parser::parse_from_file_default_attr};
use quote::{format_ident, quote};
use unsynn::*;
/// Generates the companion "shadow" struct (`<Name>File`) plus the
/// `filecaster::FromFile` impl and the two `From` conversions for `info`.
///
/// For every field `f: T` the shadow struct gets an optional
/// `Option<<T as FromFile>::Shadow>` field, and the generated `from_file`
/// either applies a `#[from_file(default = ...)]` override or delegates to
/// `T::from_file`.
///
/// # Errors
/// Propagates failures from parsing the fields' `#[from_file(...)]`
/// attributes.
pub fn generate_impl(info: &Struct) -> Result<TokenStream> {
    let name = &info.name;
    let vis = &info.vis;
    // NOTE(review): `generics` is spliced verbatim into both impl-header
    // and type positions (`#name #generics`); bounds such as `<T: Clone>`
    // would be invalid in type position — confirm what the grammar actually
    // captures here.
    let generics = &info.generics;
    // Shadow struct name: `Foo` -> `FooFile`.
    let file_ident = format_ident!("{name}File");
    let mut file_fields = Vec::new(); // field declarations of the shadow struct
    let mut assignments = Vec::new(); // field initializers inside `from_file`
    for field in &info.fields {
        let name = &field.name;
        let ty = &field.ty;
        let vis = &field.vis;
        // `#[from_file(default = <expr>)]` override, if present on this field.
        let default_override = parse_from_file_default_attr(&field.attrs)?;
        // Every shadow field is wrapped in `Option` so partially-specified
        // input files still deserialize.
        let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
        file_fields.push(quote! { #vis #name: Option<#shadow_ty> });
        if let Some(expr) = default_override {
            // Value present in the file -> recurse via `from_file`;
            // absent -> fall back to the user-supplied default expression.
            assignments.push(quote! {
                #name: file.#name
                    .map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner)))
                    .unwrap_or(#expr.into())
            });
        } else {
            assignments.push(quote! {
                #name: <#ty as filecaster::FromFile>::from_file(file.#name)
            });
        }
    }
    let derive_clause = build_derive_clause();
    Ok(quote! {
        #derive_clause
        #vis struct #file_ident #generics {
            #(#file_fields),*
        }

        impl #generics filecaster::FromFile for #name #generics {
            type Shadow = #file_ident #generics;

            fn from_file(file: Option<Self::Shadow>) -> Self {
                let file = file.unwrap_or_default();
                Self {
                    #(#assignments),*
                }
            }
        }

        impl #generics From<Option<#file_ident #generics>> for #name #generics {
            fn from(value: Option<#file_ident #generics>) -> Self {
                <Self as filecaster::FromFile>::from_file(value)
            }
        }

        impl #generics From<#file_ident #generics> for #name #generics {
            fn from(value: #file_ident #generics) -> Self {
                <Self as filecaster::FromFile>::from_file(Some(value))
            }
        }
    })
}
/// Builds the `#[derive(...)]` attribute attached to the generated shadow
/// struct. Serde/merge derives are included only when the corresponding
/// features are enabled when this macro crate is compiled.
fn build_derive_clause() -> TokenStream {
    let mut derives = vec![quote! { Debug }, quote! { Clone }, quote! { Default }];
    #[cfg(feature = "serde")]
    derives.extend([quote! { serde::Deserialize }, quote! { serde::Serialize }]);
    #[cfg(feature = "merge")]
    derives.push(quote! { merge::Merge });
    quote! { #[derive( #(#derives),* )] }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::from_file::grammar::StructDef;

    const SAMPLE: &str = r#"
        pub struct Foo {
            #[attr("value")]
            pub bar: String,
            #[attr("number")]
            pub baz: i32
        }
    "#;

    /// End-to-end smoke test: parse SAMPLE and generate the impl.
    ///
    /// Previously this test only `dbg!`-printed the output, asserting
    /// nothing; it now verifies the shadow struct and the trait impl are
    /// actually present in the generated tokens.
    #[test]
    fn implementation() {
        let sdef = SAMPLE
            .to_token_iter()
            .parse::<StructDef>()
            .expect("failed to parse StructDef");
        let generated = generate_impl(&sdef.into())
            .expect("failed to generate implementation")
            .tokens_to_string();
        // Compare with whitespace stripped so token-spacing differences in
        // the stringified stream cannot cause false failures.
        let compact: String = generated.chars().filter(|c| !c.is_whitespace()).collect();
        assert!(
            compact.contains("structFooFile"),
            "generated output is missing the shadow struct: {generated}"
        );
        assert!(
            compact.contains("implfilecaster::FromFileforFoo"),
            "generated output is missing the FromFile impl: {generated}"
        );
    }
}

View File

@ -1,11 +0,0 @@
use thiserror::Error;
use unsynn::TokenStream;
#[derive(Debug, Error)]
pub enum FromFileError {}
impl FromFileError {
pub fn to_compile_error(&self) -> TokenStream {
todo!()
}
}

View File

@ -1,8 +1,8 @@
use unsynn::*;
keyword! {
KwStruct = "struct";
KwPub = "pub";
pub KwStruct = "struct";
pub KwPub = "pub";
}
/*
@ -12,23 +12,29 @@ pub struct Foo {
}
*/
unsynn! {
pub struct Attribute {
pub path: Ident, // attr
pub tokens: ParenthesisGroupContaining<TokenStream> // "value"
}
pub struct AttributeGroup {
pub pound: Pound, // #
pub bracket_group: BracketGroupContaining<TokenStream> // [attr("value")]
pub bracket_group: BracketGroupContaining<Attribute> // [attr("value")]
}
pub struct Field {
pub attrs: Option<Vec<Attribute>>, // #[attr("value")]
pub attrs: Option<Vec<AttributeGroup>>, // #[attr("value")]
pub vis: Optional<KwPub>, // pub
pub name: Ident, // bar
pub colon: Colon, // :
pub ty: Ident // String
pub ty: Ident// String
}
pub struct StructBody(pub CommaDelimitedVec<Field>); // all fields
pub struct StructDef {
pub vis: Optional<KwPub>, // pub
pub vis: Option<KwPub>, // pub
pub kw_struct: KwStruct, // "struct" keyword
pub name: Ident, // Foo
pub generics: Optional<BracketGroupContaining<TokenStream>>,
@ -54,7 +60,7 @@ mod tests {
fn parse_attribute_roundup() {
let mut iter = r#"#[attr("value")]"#.to_token_iter();
let attr = iter
.parse::<Attribute>()
.parse::<AttributeGroup>()
.expect("failed to parse Attribute");
assert_eq!(attr.pound.tokens_to_string(), "#".tokens_to_string());
@ -88,11 +94,10 @@ mod tests {
#[test]
fn parse_struct_def_and_inspect_body() {
let mut iter = SAMPLE.to_token_iter();
dbg!(&iter);
let sdef = iter
.parse::<StructDef>()
.expect("faield to parse StructDef");
.expect("failed to parse StructDef");
assert_eq!(
sdef.kw_struct.tokens_to_string(),

View File

@ -1,11 +1,12 @@
mod ast;
mod error;
mod codegen;
mod grammar;
mod parser;
use crate::from_file::error::FromFileError;
use unsynn::TokenStream;
use crate::from_file::{codegen::generate_impl, grammar::StructDef};
use unsynn::*;
pub fn impl_from_file(input: TokenStream) -> Result<TokenStream, FromFileError> {
todo!()
/// Entry point for the derive expansion: parses `input` as a [`StructDef`]
/// and hands the lowered AST to codegen.
///
/// # Errors
/// Returns the unsynn parse error when `input` does not match the struct
/// grammar, or any error produced while generating the implementation.
pub fn impl_from_file(input: TokenStream) -> Result<TokenStream> {
    let parsed = input.to_token_iter().parse::<StructDef>()?;
    generate_impl(&parsed.into())
}

View File

@ -0,0 +1,48 @@
use crate::from_file::ast::Attribute;
use std::iter::{Peekable, once};
use unsynn::*;
pub fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<TokenStream>> {
for attr in attrs {
if attr.path == "from_file" {
return extract_default_token(attr.tokens.clone())
.map(Some)
.ok_or_else(|| Error::no_error()); // TODO: different error
}
}
Ok(None)
}
/// Scans `token` (the contents of a `from_file(...)` attribute) for a
/// `default = <expr>` entry and returns the expression tokens, or `None`
/// when no well-formed `default = ...` is present.
fn extract_default_token(token: TokenStream) -> Option<TokenStream> {
    let mut iter = token.into_token_iter().peekable();
    while let Some(tt) = iter.next() {
        // Skip anything that is not an identifier (commas, literals, other
        // arguments' values). The previous `while let Some(TokenTree::Ident(..))`
        // loop header exited on the first non-ident token, so `default` was
        // unreachable after another argument, e.g. `from_file(other, default = 1)`.
        let id = match tt {
            TokenTree::Ident(id) => id,
            _ => continue,
        };
        if id != "default" {
            continue;
        }
        return match iter.next() {
            // `default = <expr>`: collect the expression up to the next comma.
            Some(TokenTree::Punct(eq)) if eq.as_char() == '=' => {
                Some(collect_until_commas(&mut iter))
            }
            // A bare `default` without `=` is malformed; give up (matches
            // the original behavior).
            _ => None,
        };
    }
    None
}
/// Drains tokens from `iter` into a fresh stream, stopping at (and
/// consuming) the first comma, or at end of input.
fn collect_until_commas<I>(iter: &mut Peekable<I>) -> TokenStream
where
    I: Iterator<Item = TokenTree>,
{
    let mut collected = TokenStream::new();
    while let Some(tt) = iter.peek() {
        if matches!(tt, TokenTree::Punct(p) if p.as_char() == ',') {
            // Swallow the delimiter itself; it is not part of the expression.
            iter.next();
            break;
        }
        collected.extend(once(iter.next().expect("peeked, so next must exist")));
    }
    collected
}

View File

@ -98,17 +98,13 @@ mod from_file;
use crate::from_file::impl_from_file;
use proc_macro::TokenStream;
use proc_macro_error2::proc_macro_error;
/// Implements the [`FromFile`] trait.
///
/// This macro processes the `#[from_file]` attribute on structs to generate
/// code for loading data from files.
#[proc_macro_error]
#[proc_macro_derive(FromFile, attributes(from_file))]
pub fn derive_from_file(input: TokenStream) -> TokenStream {
let ts = input.into();
impl_from_file(ts)
.unwrap_or_else(|e| e.to_compile_error())
.into()
impl_from_file(ts).unwrap().into()
}