mirror of https://github.com/kristoferssolo/filecaster.git
synced 2025-10-21 19:00:34 +00:00

feat: finish migration

parent 3d6d32af49
commit 0d4abce168

Cargo.lock (generated, 22 changes)

@@ -74,12 +74,10 @@ dependencies = [
  "claims",
  "filecaster",
  "merge",
- "proc-macro-error2",
  "proc-macro2",
  "quote",
  "serde",
  "serde_json",
- "thiserror",
  "unsynn",
 ]
 
@@ -344,26 +342,6 @@ dependencies = [
  "winapi-util",
 ]
 
-[[package]]
-name = "thiserror"
-version = "2.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b0949c3a6c842cbde3f1686d6eea5a010516deb7085f79db747562d4102f41e"
-dependencies = [
- "thiserror-impl",
-]
-
-[[package]]
-name = "thiserror-impl"
-version = "2.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc5b44b4ab9c2fdd0e0512e6bece8388e214c0749f5862b114cc5b7a25daf227"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
 [[package]]
 name = "toml"
 version = "0.9.2"

@@ -23,11 +23,9 @@ merge = ["dep:merge"]
 
 [dependencies]
 merge = { workspace = true, optional = true }
-proc-macro-error2 = "2.0"
 proc-macro2 = "1.0"
 quote = "1.0"
 serde = { workspace = true, optional = true }
-thiserror.workspace = true
 unsynn = "0.1"
 
 [dev-dependencies]

@@ -1,22 +1,69 @@
-use unsynn::{Ident, TokenStream};
+use unsynn::{Ident, ToTokens, TokenStream};
 
+use crate::from_file::grammar;
+
 #[derive(Debug)]
-pub struct StructInfo {
-    pub ident: Ident,
+pub struct Struct {
     pub vis: TokenStream,
+    pub name: Ident,
     pub generics: TokenStream,
-    pub fields: Vec<FieldInfo>,
+    pub fields: Vec<Field>,
 }
 
 #[derive(Debug)]
-pub struct FieldInfo {
-    pub ident: Ident,
-    pub ty: TokenStream,
-    pub attrs: Vec<AttributeInfo>,
+pub struct Field {
+    pub attrs: Vec<Attribute>,
+    pub vis: TokenStream,
+    pub name: Ident,
+    pub ty: Ident,
 }
 
 #[derive(Debug)]
-pub struct AttributeInfo {
+pub struct Attribute {
     pub path: Ident,
     pub tokens: TokenStream,
 }
+
+impl From<grammar::StructDef> for Struct {
+    fn from(value: grammar::StructDef) -> Self {
+        Self {
+            vis: value.vis.to_token_stream(),
+            name: value.name,
+            generics: value.generics.to_token_stream(),
+            fields: value
+                .body
+                .content
+                .0
+                .0
+                .into_iter()
+                .map(|x| x.value.into())
+                .collect(),
+        }
+    }
+}
+
+impl From<grammar::Field> for Field {
+    fn from(value: grammar::Field) -> Self {
+        Self {
+            attrs: value
+                .attrs
+                .unwrap_or_default()
+                .into_iter()
+                .map(Attribute::from)
+                .collect(),
+            vis: value.vis.to_token_stream(),
+            name: value.name,
+            ty: value.ty,
+        }
+    }
+}
+
+impl From<grammar::AttributeGroup> for Attribute {
+    fn from(value: grammar::AttributeGroup) -> Self {
+        let attr = value.bracket_group.content;
+        Self {
+            path: attr.path,
+            tokens: attr.tokens.content,
+        }
+    }
+}

filecaster-derive/src/from_file/codegen.rs (new file, 110 lines)

@@ -0,0 +1,110 @@
+use crate::from_file::{ast::Struct, parser::parse_from_file_default_attr};
+use quote::{format_ident, quote};
+use unsynn::*;
+
+pub fn generate_impl(info: &Struct) -> Result<TokenStream> {
+    let name = &info.name;
+    let vis = &info.vis;
+    let generics = &info.generics;
+    let file_ident = format_ident!("{name}File");
+
+    let mut file_fields = Vec::new();
+    let mut assignments = Vec::new();
+
+    for field in &info.fields {
+        let name = &field.name;
+        let ty = &field.ty;
+        let vis = &field.vis;
+        let default_override = parse_from_file_default_attr(&field.attrs)?;
+
+        let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
+        file_fields.push(quote! { #vis #name: Option<#shadow_ty> });
+
+        if let Some(expr) = default_override {
+            assignments.push(quote! {
+                #name: file.#name
+                    .map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner)))
+                    .unwrap_or(#expr.into())
+            });
+        } else {
+            assignments.push(quote! {
+                #name: <#ty as filecaster::FromFile>::from_file(file.#name)
+            });
+        }
+    }
+
+    let derive_clause = build_derive_clause();
+
+    Ok(quote! {
+        #derive_clause
+        #vis struct #file_ident #generics {
+            #(#file_fields),*
+        }
+
+        impl #generics filecaster::FromFile for #name #generics {
+            type Shadow = #file_ident #generics;
+
+            fn from_file(file: Option<Self::Shadow>) -> Self {
+                let file = file.unwrap_or_default();
+                Self {
+                    #(#assignments),*
+                }
+            }
+        }
+
+        impl #generics From<Option<#file_ident #generics>> for #name #generics {
+            fn from(value: Option<#file_ident #generics>) -> Self {
+                <Self as filecaster::FromFile>::from_file(value)
+            }
+        }
+
+        impl #generics From<#file_ident #generics> for #name #generics {
+            fn from(value: #file_ident #generics) -> Self {
+                <Self as filecaster::FromFile>::from_file(Some(value))
+            }
+        }
+    })
+}
+
+fn build_derive_clause() -> TokenStream {
+    let mut traits = vec![quote! { Debug }, quote! { Clone }, quote! { Default }];
+    #[cfg(feature = "serde")]
+    {
+        traits.push(quote! { serde::Deserialize });
+        traits.push(quote! { serde::Serialize });
+    }
+
+    #[cfg(feature = "merge")]
+    {
+        traits.push(quote! { merge::Merge });
+    }
+
+    quote! { #[derive( #(#traits),* )] }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::from_file::grammar::StructDef;
+
+    const SAMPLE: &str = r#"
+        pub struct Foo {
+            #[attr("value")]
+            pub bar: String,
+            #[attr("number")]
+            pub baz: i32
+        }
+    "#;
+
+    #[test]
+    fn implementation() {
+        let sdef = SAMPLE
+            .to_token_iter()
+            .parse::<StructDef>()
+            .expect("failed to parse StructDef");
+
+        let foo = generate_impl(&sdef.into()).expect("failed to generate implementation");
+
+        dbg!(foo.tokens_to_string());
+    }
+}
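
For orientation, here is a hand-expanded sketch of what the quote! template in generate_impl would produce for a small input. The struct Config, its fields, and the `default = "localhost"` override are hypothetical, and the sketch assumes the filecaster crate supplies the FromFile trait and Shadow associated type that the generated code refers to; the two generated From conversions are omitted.

// Hypothetical input (names, types, and default value are illustrative):
#[derive(FromFile)]
pub struct Config {
    #[from_file(default = "localhost")]
    pub host: String,
    pub port: u16,
}

// Roughly what the template above would emit (base derive clause only,
// without the optional serde/merge traits):
#[derive(Debug, Clone, Default)]
pub struct ConfigFile {
    pub host: Option<<String as filecaster::FromFile>::Shadow>,
    pub port: Option<<u16 as filecaster::FromFile>::Shadow>,
}

impl filecaster::FromFile for Config {
    type Shadow = ConfigFile;

    fn from_file(file: Option<Self::Shadow>) -> Self {
        let file = file.unwrap_or_default();
        Self {
            // Field with a `default =` override: fall back to that expression.
            host: file.host
                .map(|inner| <String as filecaster::FromFile>::from_file(Some(inner)))
                .unwrap_or("localhost".into()),
            // Field without an override: delegate entirely to FromFile.
            port: <u16 as filecaster::FromFile>::from_file(file.port),
        }
    }
}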

@@ -1,11 +0,0 @@
-use thiserror::Error;
-use unsynn::TokenStream;
-
-#[derive(Debug, Error)]
-pub enum FromFileError {}
-
-impl FromFileError {
-    pub fn to_compile_error(&self) -> TokenStream {
-        todo!()
-    }
-}

@@ -1,8 +1,8 @@
 use unsynn::*;
 
 keyword! {
-    KwStruct = "struct";
-    KwPub = "pub";
+    pub KwStruct = "struct";
+    pub KwPub = "pub";
 }
 
 /*
@@ -12,23 +12,29 @@ pub struct Foo {
 }
 */
 unsynn! {
+
     pub struct Attribute {
+        pub path: Ident, // attr
+        pub tokens: ParenthesisGroupContaining<TokenStream> // "value"
+    }
+
+    pub struct AttributeGroup {
         pub pound: Pound, // #
-        pub bracket_group: BracketGroupContaining<TokenStream> // [attr("value")]
+        pub bracket_group: BracketGroupContaining<Attribute> // [attr("value")]
     }
 
     pub struct Field {
-        pub attrs: Option<Vec<Attribute>>, // #[attr("value")]
+        pub attrs: Option<Vec<AttributeGroup>>, // #[attr("value")]
         pub vis: Optional<KwPub>, // pub
         pub name: Ident, // bar
        pub colon: Colon, // :
         pub ty: Ident // String
     }
 
     pub struct StructBody(pub CommaDelimitedVec<Field>); // all fields
 
     pub struct StructDef {
-        pub vis: Optional<KwPub>, // pub
+        pub vis: Option<KwPub>, // pub
         pub kw_struct: KwStruct, // "struct" keyword
         pub name: Ident, // Foo
         pub generics: Optional<BracketGroupContaining<TokenStream>>,
@@ -54,7 +60,7 @@ mod tests {
     fn parse_attribute_roundup() {
         let mut iter = r#"#[attr("value")]"#.to_token_iter();
         let attr = iter
-            .parse::<Attribute>()
+            .parse::<AttributeGroup>()
             .expect("failed to parse Attribute");
 
         assert_eq!(attr.pound.tokens_to_string(), "#".tokens_to_string());
@@ -88,11 +94,10 @@ mod tests {
     #[test]
     fn parse_struct_def_and_inspect_body() {
         let mut iter = SAMPLE.to_token_iter();
-        dbg!(&iter);
 
         let sdef = iter
             .parse::<StructDef>()
-            .expect("faield to parse StructDef");
+            .expect("failed to parse StructDef");
 
         assert_eq!(
             sdef.kw_struct.tokens_to_string(),

@@ -1,11 +1,12 @@
 mod ast;
-mod error;
+mod codegen;
 mod grammar;
 mod parser;
 
-use crate::from_file::error::FromFileError;
-use unsynn::TokenStream;
+use crate::from_file::{codegen::generate_impl, grammar::StructDef};
+use unsynn::*;
 
-pub fn impl_from_file(input: TokenStream) -> Result<TokenStream, FromFileError> {
-    todo!()
+pub fn impl_from_file(input: TokenStream) -> Result<TokenStream> {
+    let parsed = input.to_token_iter().parse::<StructDef>()?;
+    generate_impl(&parsed.into())
 }

@@ -0,0 +1,48 @@
+use crate::from_file::ast::Attribute;
+use std::iter::{Peekable, once};
+use unsynn::*;
+
+pub fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<TokenStream>> {
+    for attr in attrs {
+        if attr.path == "from_file" {
+            return extract_default_token(attr.tokens.clone())
+                .map(Some)
+                .ok_or_else(|| Error::no_error()); // TODO: different error
+        }
+    }
+    Ok(None)
+}
+
+fn extract_default_token(token: TokenStream) -> Option<TokenStream> {
+    let mut iter = token.into_token_iter().peekable();
+
+    while let Some(TokenTree::Ident(id)) = iter.next() {
+        if id != "default" {
+            continue;
+        }
+        match iter.next() {
+            Some(TokenTree::Punct(eq)) if eq.as_char() == '=' => {
+                return Some(collect_until_commas(&mut iter));
+            }
+            _ => return None,
+        }
+    }
+
+    None
+}
+
+fn collect_until_commas<I>(iter: &mut Peekable<I>) -> TokenStream
+where
+    I: Iterator<Item = TokenTree>,
+{
+    let mut expr = TokenStream::new();
+    while let Some(tt) = iter.peek() {
+        let is_comma = matches!(tt, TokenTree::Punct(p) if p.as_char() == ',');
+        if is_comma {
+            iter.next();
+            break;
+        }
+        expr.extend(once(iter.next().unwrap()));
+    }
+    expr
+}
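
In short, the walker above only reacts to field attributes whose path is `from_file`, and it captures just the tokens after `default =` up to the first comma; a `from_file` attribute without a usable `default` currently falls through to the placeholder `Error::no_error()`. Illustrative fields (names, types, and attribute contents are hypothetical):

#[from_file(default = "localhost")]
pub host: String,   // parse_from_file_default_attr -> Ok(Some(tokens `"localhost"`))

#[other("ignored")]
pub alias: String,  // no `from_file` attribute here -> Ok(None)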

@@ -98,17 +98,13 @@ mod from_file;
 
 use crate::from_file::impl_from_file;
 use proc_macro::TokenStream;
-use proc_macro_error2::proc_macro_error;
 
 /// Implements the [`FromFile`] trait.
 ///
 /// This macro processes the `#[from_file]` attribute on structs to generate
 /// code for loading data from files.
-#[proc_macro_error]
 #[proc_macro_derive(FromFile, attributes(from_file))]
 pub fn derive_from_file(input: TokenStream) -> TokenStream {
     let ts = input.into();
-    impl_from_file(ts)
-        .unwrap_or_else(|e| e.to_compile_error())
-        .into()
+    impl_from_file(ts).unwrap().into()
 }
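
A rough caller-side sketch of the derive as wired up here; it assumes the runtime filecaster crate re-exports this derive next to its FromFile trait (only the trait path is visible in this diff), and the struct and field names are illustrative.

use filecaster::FromFile;

#[derive(FromFile)]
pub struct Settings {
    pub path: String,
    pub retries: u32,
}

// SettingsFile is the generated shadow struct (format_ident!("{name}File")).
fn load(shadow: Option<SettingsFile>) -> Settings {
    // None, or a SettingsFile with unset fields, falls back to field defaults.
    Settings::from_file(shadow)
}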