Compare commits

...

18 Commits
v0.1.0 ... main

Author SHA1 Message Date
087fdc7ea5
ci: update publish workflow
Some checks failed
CI / build-and-test (push) Has been cancelled
2025-09-25 11:12:43 +03:00
523cf95b6b
fix: clippy errors 2025-09-25 11:01:22 +03:00
b900680235
ci: change rust toolchain 2025-09-25 10:53:26 +03:00
0b9994a689
fix: typo 2025-09-25 10:45:02 +03:00
6f60d8f5f1
ci: update CI workflows 2025-09-25 10:43:12 +03:00
e20000513a
refactor: add better messages
Some checks failed
CI / Tests (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Format (push) Has been cancelled
CI / Docs (push) Has been cancelled
2025-08-12 00:04:09 +03:00
c4cf6aa25b
docs(readme): add examples block in readme 2025-07-15 19:46:25 +03:00
b1b4a3daeb
feat(examples): add nested examples 2025-07-15 19:45:07 +03:00
f972876880
feat(examples): add simple example 2025-07-15 19:38:06 +03:00
89732ff8e2
fix: clippy needless doctest main warning 2025-07-15 19:16:34 +03:00
9d365a9593
docs: update libdocs 2025-07-15 19:00:50 +03:00
dabacf02df
fix: feature derive clause 2025-07-15 17:32:33 +03:00
32ad6514ab
fix: version numbering 2025-07-15 16:52:16 +03:00
545b1d385f
chore: package derive lib 2025-07-15 16:36:59 +03:00
60488d364e
fix: nested structure 2025-07-15 16:11:28 +03:00
db1dab2aa1
feat(serde): make serde optional default 2025-07-15 14:40:36 +03:00
337491b37f
refactor(workspace): make a workspace 2025-07-15 14:33:51 +03:00
6a973db003
feat(trait): add FromFile trait 2025-07-15 13:43:30 +03:00
26 changed files with 1042 additions and 591 deletions

View File

@ -9,112 +9,30 @@ env:
RUSTFLAGS: --deny warnings
RUSTDOCFLAGS: --deny warnings
jobs:
# Run tests
test:
name: Tests
build-and-test:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust toolchain
- name: Checkout code
uses: actions/checkout@v5
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install --no-install-recommends \
libasound2-dev libudev-dev libwayland-dev \
libxkbcommon-dev
- name: Populate target directory from cache
uses: Leafwing-Studios/cargo-cache@v2
with:
sweep-cache: true
toolchain: stable
components: clippy, rustfmt
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install cargo-nextest
run: cargo install cargo-nextest --locked
- name: Run tests with nextest
uses: taiki-e/install-action@cargo-nextest
- name: Run Clippy
run: cargo clippy --locked --workspace --all-targets --all-features -- -D warnings
- name: Run formatting
run: cargo fmt --all --check
- name: Run Tests
run: |
cargo nextest run \
--all-features \
--all-targets
# Workaround for https://github.com/rust-lang/cargo/issues/6669
cargo test \
--locked \
--workspace \
--all-features \
--doc
# Run clippy lints
clippy:
name: Clippy
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
components: clippy
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install --no-install-recommends \
libasound2-dev libudev-dev libwayland-dev \
libxkbcommon-dev
- name: Populate target directory from cache
uses: Leafwing-Studios/cargo-cache@v2
with:
sweep-cache: true
- name: Run clippy lints
run: |
cargo clippy \
--locked \
--workspace \
--all-features \
-- \
--deny warnings
# Check formatting
format:
name: Format
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
- name: Run cargo fmt
run: |
cargo fmt \
--all \
-- \
--check
# Check documentation
doc:
name: Docs
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install --no-install-recommends \
libasound2-dev libudev-dev libwayland-dev \
libxkbcommon-dev
- name: Populate target directory from cache
uses: Leafwing-Studios/cargo-cache@v2
with:
sweep-cache: true
- name: Check documentation
run: |
cargo doc \
--locked \
--workspace \
--all-features \
--document-private-items \
--no-deps
cargo nextest run --all-features --all-targets
cargo test --locked --workspace --all-features --doc
- name: Check Documentation
run: cargo doc --locked --workspace --all-features --document-private-items --no-deps

View File

@ -4,7 +4,7 @@ on:
push:
tags:
# Pattern syntax: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
- "v[0-9]+.[0-9]+.[0-9]+*"
- "v[0-9]*.[0-9]*.[0-9]*"
# Trigger this workflow manually via workflow dispatch.
workflow_dispatch:
inputs:
@ -15,10 +15,9 @@ on:
jobs:
audit:
name: Audit
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions-rust-lang/audit@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
@ -28,9 +27,17 @@ jobs:
- audit
runs-on: ubuntu-latest
timeout-minutes: 25
env:
SCCACHE_GHA_ENABLED: "true"
RUSTC_WRAPPER: "sccache"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
components: clippy, rustfmt
- name: Run sccache-cache
uses: mozilla-actions/sccache-action@v0.0.9
- name: cargo-release Cache
id: cargo_release_cache
uses: actions/cache@v4
@ -57,12 +64,5 @@ jobs:
# allow-branch HEAD is because GitHub actions switches
# to the tag while building, which is a detached head
run: |-
cargo release \
publish \
--workspace \
--all-features \
--allow-branch HEAD \
--no-confirm \
--no-verify \
--execute
run: |
cargo release publish --workspace --all-features --allow-branch HEAD --no-confirm --no-verify --execute

63
Cargo.lock generated
View File

@ -50,9 +50,23 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "filecaster"
version = "0.1.0"
version = "0.2.3"
dependencies = [
"filecaster-derive",
"merge",
"serde",
"serde_json",
"tempfile",
"toml",
"trybuild",
]
[[package]]
name = "filecaster-derive"
version = "0.2.3"
dependencies = [
"claims",
"filecaster",
"merge",
"proc-macro-error2",
"proc-macro2",
@ -60,8 +74,6 @@ dependencies = [
"serde",
"serde_json",
"syn",
"tempfile",
"toml",
]
[[package]]
@ -76,6 +88,12 @@ dependencies = [
"wasi",
]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "hashbrown"
version = "0.15.4"
@ -270,6 +288,12 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "target-triple"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790"
[[package]]
name = "tempfile"
version = "3.20.0"
@ -283,6 +307,15 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]]
name = "toml"
version = "0.9.2"
@ -322,6 +355,21 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
[[package]]
name = "trybuild"
version = "1.0.110"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e257d7246e7a9fd015fb0b28b330a8d4142151a33f03e6a497754f4b1f6a8e"
dependencies = [
"glob",
"serde",
"serde_derive",
"serde_json",
"target-triple",
"termcolor",
"toml",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
@ -337,6 +385,15 @@ dependencies = [
"wit-bindgen-rt",
]
[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "windows-sys"
version = "0.59.0"

View File

@ -1,40 +1,20 @@
[package]
name = "filecaster"
version = "0.1.0"
edition = "2024"
authors = ["Kristofers Solo <dev@kristofers.xyz>"]
description = "Procedural macro to derive configuration from files, with optional merging capabilities."
repository = "https://github.com/kristoferssolo/filecaster"
documentation = "https://docs.rs/filecaster"
homepage = "https://github.com/kristoferssolo/filecaster"
license = "MIT OR Apache-2.0"
keywords = ["proc-macro", "derive", "configuration", "file-parsing"]
categories = ["rust-patterns", "parsing", "config"]
exclude = ["/.github", "/.gitignore", "/tests", "*.png", "*.md"]
readme = "README.md"
[workspace]
resolver = "2"
members = ["filecaster", "filecaster-derive"]
[features]
default = []
merge = ["dep:merge"]
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
proc-macro-error2 = "2.0"
syn = { version = "2.0", features = ["extra-traits", "parsing"] }
serde = { version = "1.0", features = ["derive"] }
merge = { version = "0.2", optional = true }
[dev-dependencies]
[workspace.dependencies]
filecaster-derive = { version = "0.2", path = "filecaster-derive" }
serde = { version = "1.0", features = ["derive"], default-features = false }
merge = "0.2"
# dev-dependencies
filecaster = { path = "filecaster" }
claims = "0.8"
serde_json = "1.0"
tempfile = "3.20"
tempfile = "3.10"
toml = "0.9"
trybuild = "1.0"
[lib]
proc-macro = true
[lints.clippy]
[workspace.lints.clippy]
pedantic = "warn"
nursery = "warn"
unwrap_used = "warn"

View File

@ -5,45 +5,71 @@ Procedural macro to derive configuration from files, with optional merging capab
## Features
- **Derive Configuration:** Easily load configuration from files into your Rust structs.
- **Default Values:** Specify default values for struct fields using the `#[default = "..."]` attribute.
- **Default Values:** Specify default values for struct fields using the `#[from_file(default = "...")]` attribute.
- **Optional Merging:** When the `merge` feature is enabled, allows merging multiple configuration sources.
## Usage
```toml
[dependencies]
filecaster = "0.1"
filecaster = "0.2"
```
```rust
use filecaster::FromFile;
#[derive(Debug, Clone, FromFile)]
pub struct MyConfig {
#[from_file(default = "localhost")]
pub host: String,
#[derive(Debug, Clone, PartialEq, FromFile)]
struct AppConfig {
/// If the user does not specify a host, use `"127.0.0.1"`.
#[from_file(default = "127.0.0.1")]
host: String,
/// Port number; defaults to `8080`.
#[from_file(default = 8080)]
pub port: u16,
#[from_file(default = false)]
pub enabled: bool,
port: u16,
/// If not set, use `false`. Requires `bool: Default`.
auto_reload: bool,
}
fn main() {
// Simulate loading from a file (e.g., JSON, YAML, TOML)
let file_content = r#"
{
"host": "localhost"
}
"#;
// Simulate file content (e.g., from a JSON file)
let file_content = r#"{ "host": "localhost", "port": 3000 }"#;
let config_from_file: MyConfig = serde_json::from_str(file_content).unwrap();
let config = MyConfig::from_file(Some(config_from_file));
// The `AppConfigFile` struct is automatically generated by `#[derive(FromFile)]`.
// It has all fields as `Option<T>`.
let partial_config: AppConfigFile = serde_json::from_str(file_content).unwrap();
let partial_config2 = partial_config.clone();
println!("Config: {:?}", config);
// Expected output: Config { host: "localhost", port: 8080, enabled: false }
// Use the generated `from_file` method to get the final config.
// Default values are applied for missing fields.
let config = AppConfig::from_file(Some(partial_config));
// or
let config: AppConfig = partial_config2.into();
assert_eq!(config.host, "localhost");
assert_eq!(config.port, 3000);
assert_eq!(config.auto_reload, false); // `bool::default()` is `false`
println!("Final Config: {:#?}", config);
// Example with no file content (all defaults)
let default_config = AppConfig::from_file(None);
assert_eq!(default_config.host, "127.0.0.1");
assert_eq!(default_config.port, 8080);
assert_eq!(default_config.auto_reload, false);
}
```
## Examples
Use `cargo run --example <example_name>` to execute a specific example. For example:
```bash
cargo run --example simple
cargo run --example nested
```
## Documentation
Full documentation is available at [docs.rs](https://docs.rs/filecaster).

View File

@ -0,0 +1,35 @@
[package]
name = "filecaster-derive"
version = "0.2.3"
edition = "2024"
authors = ["Kristofers Solo <dev@kristofers.xyz>"]
description = "Procedural derive macro for `filecaster`: automatically implement `FromFile` for your structs."
license = "MIT OR Apache-2.0"
repository = "https://github.com/kristoferssolo/filecaster"
homepage = "https://github.com/kristoferssolo/filecaster"
documentation = "https://docs.rs/filecaster-derive"
readme = "../README.md"
keywords = ["proc-macro", "derive", "configuration", "file-parsing"]
categories = ["rust-patterns", "parsing", "config"]
exclude = ["/.github", "/.gitignore", "/tests", "*.png", "*.md"]
[lib]
proc-macro = true
[features]
default = ["serde"]
serde = ["dep:serde"]
merge = ["dep:merge"]
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
proc-macro-error2 = "2.0"
syn = { version = "2.0", features = ["extra-traits", "parsing"] }
serde = { workspace = true, optional = true }
merge = { workspace = true, optional = true }
[dev-dependencies]
claims.workspace = true
serde_json.workspace = true
filecaster.workspace = true

View File

@ -0,0 +1,258 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{
Attribute, Data, DeriveInput, Error, Expr, Field, Fields, FieldsNamed, GenericParam, Generics,
Ident, Lit, Meta, MetaList, Result, Type, parse_quote,
};
const WITH_MERGE: bool = cfg!(feature = "merge");
const WITH_SERDE: bool = cfg!(feature = "serde");
/// Entry point: generate the shadow struct + `FromFile` impls.
///
/// For a struct `Name`, emits:
/// - a shadow struct `NameFile` whose fields are all `Option<Shadow>`;
/// - `impl filecaster::FromFile for Name` with `type Shadow = NameFile`;
/// - `From<Option<NameFile>>` and `From<NameFile>` conversions for `Name`.
///
/// Returns a spanned error for enums, tuple structs, and unit structs
/// (see [`extract_named_fields`]).
pub fn impl_from_file(input: &DeriveInput) -> Result<TokenStream> {
    let name = &input.ident;
    // The shadow struct reuses the visibility of the original struct.
    let vis = &input.vis;
    // Every generic type parameter gets an added `Default` bound so the
    // shadow struct can derive `Default`.
    let generics = add_trait_bounds(input.generics.clone());
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // The shadow struct is named `<OriginalName>File`.
    let file_ident = format_ident!("{name}File");
    let fields = extract_named_fields(input)?;
    let (field_assignments, file_fields) = process_fields(fields)?;
    let derive_clause = build_derive_clause();
    Ok(quote! {
        #derive_clause
        #vis struct #file_ident #ty_generics #where_clause {
            #(#file_fields),*
        }
        impl #impl_generics filecaster::FromFile for #name #ty_generics #where_clause {
            type Shadow = #file_ident #ty_generics;
            fn from_file(file: Option<Self::Shadow>) -> Self {
                // `None` collapses to an all-`None` shadow, so every field
                // falls back to its configured or type-level default.
                let file = file.unwrap_or_default();
                Self {
                    #(#field_assignments),*
                }
            }
        }
        impl #impl_generics From<Option<#file_ident #ty_generics>> for #name #ty_generics #where_clause {
            fn from(value: Option<#file_ident #ty_generics>) -> Self {
                <Self as filecaster::FromFile>::from_file(value)
            }
        }
        impl #impl_generics From<#file_ident #ty_generics> for #name #ty_generics #where_clause {
            fn from(value: #file_ident #ty_generics) -> Self {
                <Self as filecaster::FromFile>::from_file(Some(value))
            }
        }
    })
}
/// Ensure we only work on named-field structs
///
/// Returns the named fields of `input`, or a spanned error (pointing at the
/// type's identifier) when the input is anything else.
fn extract_named_fields(input: &DeriveInput) -> Result<&FieldsNamed> {
    match &input.data {
        Data::Struct(ds) => match &ds.fields {
            Fields::Named(fields) => Ok(fields),
            // Tuple and unit structs have no field names to mirror into the
            // shadow struct.
            _ => Err(Error::new_spanned(
                &input.ident,
                r#"FromFile only works on structs with *named* fields.
Tuple structs and unit structs are not supported."#,
            )),
        },
        // Enums (and unions) are rejected outright.
        _ => Err(Error::new_spanned(
            &input.ident,
            r#"FromFile only works on structs.
Enums are not supported."#,
        )),
    }
}
/// Build the shadow field + assignment for one original field.
///
/// Returns `(field_decl, assignment)`:
/// - `field_decl` — the `pub <name>: Option<Shadow>` declaration emitted into
///   the generated `*File` struct;
/// - `assignment` — the `<name>: ...` initializer used inside `from_file`.
///
/// Errors if the field has no identifier (cannot happen for inputs that
/// passed `extract_named_fields`, but kept as a defensive check).
fn build_file_field(field: &Field) -> Result<(TokenStream, TokenStream)> {
    let ident = field
        .ident
        .as_ref()
        .ok_or_else(|| Error::new_spanned(field, "Expected named fields"))?;
    let ty = &field.ty;
    // Optional `#[from_file(default = ...)]` override for this field.
    let default_override = parse_from_file_default_attr(&field.attrs)?;
    // With the `merge` feature, later layers only fill fields that are still
    // `None` in earlier layers.
    let field_attrs = if WITH_MERGE {
        quote! { #[merge(strategy = merge::option::overwrite_none)] }
    } else {
        quote! {}
    };
    // Nested struct -> delegate to its own `FromFile` impl
    let shadow_ty = quote! { <#ty as filecaster::FromFile>::Shadow };
    let field_decl = quote! {
        #field_attrs
        pub #ident: Option<#shadow_ty>
    };
    let assign = build_file_assignment(ident, ty, default_override);
    Ok((field_decl, assign))
}

/// Build the constructor assignment for a single field.
///
/// With a `default_override`, a present shadow value is converted via
/// `FromFile` and an absent one is replaced by the override expression.
/// Without an override, conversion falls through to
/// `<T as FromFile>::from_file`, which applies the type-level default for a
/// missing value.
fn build_file_assignment(ident: &Ident, ty: &Type, default_override: Option<Expr>) -> TokenStream {
    if let Some(expr) = default_override {
        return quote! {
            #ident: file.#ident.map(|inner| <#ty as filecaster::FromFile>::from_file(Some(inner))).unwrap_or(#expr)
        };
    }
    quote! {
        #ident: <#ty as filecaster::FromFile>::from_file(file.#ident)
    }
}
/// Process all fields, producing the `from_file` assignments and the shadow
/// struct's field declarations, in field order.
fn process_fields(fields: &FieldsNamed) -> Result<(Vec<TokenStream>, Vec<TokenStream>)> {
    let mut assignments = Vec::with_capacity(fields.named.len());
    let mut file_fields = Vec::with_capacity(fields.named.len());
    for field in &fields.named {
        let (file_field, assignment) = build_file_field(field)?;
        file_fields.push(file_field);
        assignments.push(assignment);
    }
    Ok((assignments, file_fields))
}
/// Derive clause attached to the generated shadow struct.
///
/// Always derives `Debug`, `Clone`, and `Default`; adds
/// `serde::Deserialize`/`serde::Serialize` under the `serde` feature and
/// `merge::Merge` under the `merge` feature.
fn build_derive_clause() -> TokenStream {
    let mut traits: Vec<TokenStream> =
        [quote! { Debug }, quote! { Clone }, quote! { Default }].into();
    if WITH_SERDE {
        traits.push(quote! { serde::Deserialize });
        traits.push(quote! { serde::Serialize });
    }
    if WITH_MERGE {
        traits.push(quote! { merge::Merge });
    }
    quote! { #[derive( #(#traits),* )] }
}
/// Add a `Default` bound to every generic *type* parameter (lifetimes and
/// const parameters are left untouched).
fn add_trait_bounds(mut generics: Generics) -> Generics {
    generics.params.iter_mut().for_each(|param| {
        if let GenericParam::Type(type_param) = param {
            type_param.bounds.push(parse_quote!(Default));
        }
    });
    generics
}
/// Attribute parsing: `#[from_file(default = ...)]`
///
/// Scans `attrs` for the first `#[from_file(...)]` attribute and extracts its
/// `default` expression, if any. Returns `Ok(None)` when no `#[from_file]`
/// attribute is present.
///
/// NOTE(review): only the first `#[from_file]` attribute is inspected; any
/// further `#[from_file]` attributes on the same field are silently ignored —
/// confirm this is intended.
fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<Expr>> {
    for attr in attrs {
        if !attr.path().is_ident("from_file") {
            continue; // Not a #[from_file] attribute, skip it
        }
        // Parse the content inside the parentheses of #[from_file(...)]
        return match &attr.meta {
            Meta::List(meta_list) => parse_default(meta_list),
            // Bare `#[from_file]` or `#[from_file = ...]` forms are rejected.
            _ => Err(Error::new_spanned(
                attr,
                "Expected #[from_file(default = \"literal\")] or similar",
            )),
        };
    }
    Ok(None)
}
/// Extract the `default = <expr>` value from a parsed `#[from_file(...)]`
/// list.
///
/// String literals are special-cased: `default = "foo"` expands to
/// `"foo".to_string()` so the default can populate a `String` field
/// directly. Any other expression is passed through verbatim.
///
/// NOTE(review): nested keys other than `default` are silently accepted and
/// ignored rather than rejected — confirm this is intended.
fn parse_default(list: &MetaList) -> Result<Option<Expr>> {
    let mut default_expr = None;
    list.parse_nested_meta(|meta| {
        if meta.path.is_ident("default") {
            let value = meta.value()?;
            let expr = value.parse::<Expr>()?;
            // `default = "literal"` -> `"literal".to_string()`
            if let Expr::Lit(expr_lit) = &expr
                && let Lit::Str(lit_str) = &expr_lit.lit
            {
                default_expr = Some(parse_quote! {
                    #lit_str.to_string()
                });
                return Ok(());
            }
            default_expr = Some(expr);
        }
        Ok(())
    })?;
    Ok(default_expr)
}
#[cfg(test)]
mod tests {
    use claims::{assert_err, assert_none};
    use quote::ToTokens;

    use super::*;

    /// Named-field structs are accepted and field names are preserved.
    #[test]
    fn extract_named_fields_success() {
        let input: DeriveInput = parse_quote! {
            struct S { x: i32, y: String }
        };
        let fields = extract_named_fields(&input).unwrap();
        let names = fields
            .named
            .iter()
            .map(|f| f.ident.as_ref().unwrap().to_string())
            .collect::<Vec<_>>();
        assert_eq!(names, vec!["x", "y"]);
    }

    /// Enums are rejected with an error.
    #[test]
    fn extract_named_fields_err_on_enum() {
        let input: DeriveInput = parse_quote! {
            enum E { A, B }
        };
        assert_err!(extract_named_fields(&input));
    }

    /// Tuple structs (unnamed fields) are rejected with an error.
    #[test]
    fn extract_named_fields_err_on_tuple_struct() {
        let input: DeriveInput = parse_quote! {
            struct T(i32, String);
        };
        assert_err!(extract_named_fields(&input));
    }

    /// Unrelated attributes yield no default override.
    #[test]
    fn parse_default_attrs_none() {
        let attrs: Vec<Attribute> = vec![parse_quote!(#[foo])];
        assert_none!(parse_from_file_default_attr(&attrs).unwrap());
    }

    /// Fields with and without `#[from_file(default = ...)]` both produce a
    /// declaration/assignment pair.
    #[test]
    fn process_fields_mixed() {
        let fields: FieldsNamed = parse_quote! {
            {
                #[from_file(default = 1)]
                a: u32,
                b: String,
            }
        };
        let (assign, file_fields) = process_fields(&fields).unwrap();
        // two fields
        assert_eq!(assign.len(), 2);
        assert_eq!(file_fields.len(), 2);
    }

    /// Every generic type parameter receives a `Default` bound.
    /// (Renamed from `add_trait_bouds_appends_default` — typo fix.)
    #[test]
    fn add_trait_bounds_appends_default() {
        let gens: Generics = parse_quote!(<T, U>);
        let new = add_trait_bounds(gens);
        let s = new.to_token_stream().to_string();
        assert!(s.contains("T : Default"));
        assert!(s.contains("U : Default"));
    }
}

View File

@ -0,0 +1,115 @@
//! # filecaster-derive
//!
//! `filecaster-derive` is the procedural macro crate for `filecaster`. It provides the
//! `#[derive(FromFile)]` macro, which automates the process of loading partial
//! configurations from files, merging them with default values, and constructing
//! fully-populated Rust structs.
//!
//! This crate significantly simplifies configuration management by generating
//! the necessary boilerplate code for the `FromFile` trait (defined in the
//! `filecaster` crate).
//!
//! ## What it does
//!
//! For any struct with named fields, `#[derive(FromFile)]` generates:
//!
//! 1. A companion "shadow" struct (e.g., `YourStructFile` for `YourStruct`)
//! where each field is wrapped in `Option<T>`. This shadow struct is
//! designed for deserialization from configuration files (e.g., JSON, TOML, YAML).
//! 2. An implementation of the `FromFile` trait for your original struct. This
//! includes the `from_file` method, which takes an `Option<YourStructFile>`
//! and constructs your final `YourStruct`. It intelligently fills in `None`
//! fields with either:
//! - An expression you supply via `#[from_file(default = ...)]`.
//! - `Default::default()` (if no `default` attribute is provided, requiring `T: Default`).
//!
//! ## Optional per-field defaults
//!
//! Use a `#[from_file(default = <expr>)]` attribute on any field to override
//! the fallback value. You may supply any expression valid in that struct's
//! context. If you omit it, the macro will require the field's type to implement
//! `Default` and will call `Default::default()`.
//!
//! ## Example
//!
//! ```rust
//! use filecaster::FromFile;
//!
//! #[derive(Debug, Clone, PartialEq, FromFile)]
//! struct AppConfig {
//! /// If the user does not specify a host, use `"127.0.0.1"`.
//! #[from_file(default = "127.0.0.1")]
//! host: String,
//!
//! /// Port number; defaults to `8080`.
//! #[from_file(default = 8080)]
//! port: u16,
//!
//! /// If not set, use `false`. Requires `bool: Default`.
//! auto_reload: bool,
//! }
//!
//! fn example() {
//! // Simulate file content (e.g., from a JSON file)
//! let file_content = r#"{ "host": "localhost", "port": 3000 }"#;
//!
//! // The `AppConfigFile` struct is automatically generated by `#[derive(FromFile)]`.
//! // It has all fields as `Option<T>`.
//! let partial_config: AppConfigFile = serde_json::from_str(file_content).unwrap();
//! let partial_config2 = partial_config.clone();
//!
//! // Use the generated `from_file` method to get the final config.
//! // Default values are applied for missing fields.
//! let config = AppConfig::from_file(Some(partial_config));
//! // or
//! let config: AppConfig = partial_config2.into();
//!
//! assert_eq!(config.host, "localhost");
//! assert_eq!(config.port, 3000);
//! assert_eq!(config.auto_reload, false); // `Default::default()` for bool is `false`
//!
//! println!("Final Config: {:#?}", config);
//!
//! // Example with no file content (all defaults)
//! let default_config = AppConfig::from_file(None);
//! assert_eq!(default_config.host, "127.0.0.1");
//! assert_eq!(default_config.port, 8080);
//! assert_eq!(default_config.auto_reload, false);
//! }
//! ```
//!
//! ## Feature flags
//!
//! - `serde`: Enables `serde` serialization/deserialization support for the
//! generated shadow structs. This is typically required to deserialize
//! your configuration from file formats like JSON, TOML, or YAML.
//! - `merge`: If enabled, the generated shadow struct will also derive
//! `merge::Merge`. This allows you to layer multiple partial configuration
//! files together before calling `.from_file(...)`. Any field-level
//! `#[merge(...)]` attributes will be respected.
//!
//! ## Limitations
//!
//! - Only works on structs with _named_ fields (no tuple structs or enums).
//! - All fields without a `#[from_file(default = ...)]` attribute must
//! implement the `Default` trait.
mod from_file;
pub(crate) use from_file::impl_from_file;
use proc_macro::TokenStream;
use proc_macro_error2::proc_macro_error;
use syn::{DeriveInput, parse_macro_input};
/// Implements the `FromFile` trait (defined in the `filecaster` crate).
///
/// This macro processes the `#[from_file]` attribute on structs to generate
/// code for loading data from files: a shadow `<Name>File` struct with all
/// fields wrapped in `Option`, plus the `FromFile` and `From` impls.
///
/// Expansion errors are reported as compile errors at the offending span
/// instead of panicking.
#[proc_macro_error]
#[proc_macro_derive(FromFile, attributes(from_file))]
pub fn derive_from_file(input: TokenStream) -> TokenStream {
    let inp = parse_macro_input!(input as DeriveInput);
    impl_from_file(&inp)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}

31
filecaster/Cargo.toml Normal file
View File

@ -0,0 +1,31 @@
[package]
name = "filecaster"
version = "0.2.3"
edition = "2024"
authors = ["Kristofers Solo <dev@kristofers.xyz>"]
description = "Procedural macro to derive configuration from files, with optional merging capabilities."
license = "MIT OR Apache-2.0"
repository = "https://github.com/kristoferssolo/filecaster"
homepage = "https://github.com/kristoferssolo/filecaster"
documentation = "https://docs.rs/filecaster"
readme = "../README.md"
keywords = ["configuration", "file-parsing"]
categories = ["rust-patterns", "parsing", "config"]
exclude = ["/.github", "/.gitignore", "/tests", "*.png", "*.md"]
[features]
default = ["serde", "derive"]
derive = ["dep:filecaster-derive"]
serde = ["dep:serde", "filecaster-derive/serde"]
merge = ["dep:merge", "filecaster-derive/merge"]
[dependencies]
filecaster-derive = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
merge = { workspace = true, optional = true }
[dev-dependencies]
serde_json.workspace = true
tempfile.workspace = true
toml.workspace = true
trybuild.workspace = true

View File

@ -0,0 +1,7 @@
{
"key": "json key",
"number": 123,
"nested": {
"inner_number": 42
}
}

View File

@ -0,0 +1,6 @@
key = "toml key"
number = 456
[nested]
inner_key = "inner toml key"
inner_number = 99

View File

@ -0,0 +1,4 @@
{
"key": "json key",
"number": 123
}

View File

@ -0,0 +1,2 @@
number = 456
exists = true

View File

@ -0,0 +1,59 @@
use filecaster::FromFile;
use std::fs;
/// Nested configuration section; shows `FromFile` delegating to an inner
/// struct's own derived impl.
#[derive(Debug, FromFile)]
pub struct InnerData {
    /// Used when the file omits `inner_key`.
    #[from_file(default = "inner default")]
    pub inner_key: String,
    /// Used when the file omits `inner_number`.
    #[from_file(default = 42)]
    pub inner_number: i32,
}
/// Top-level configuration with a nested section.
#[derive(Debug, FromFile)]
pub struct MyData {
    /// Used when the file omits `key`.
    #[from_file(default = "default key")]
    pub key: String,
    /// Used when the file omits `number`.
    #[from_file(default = 0)]
    pub number: i32,
    // No default attribute: missing values fall through to
    // `InnerData`'s own `FromFile` defaults.
    pub nested: InnerData,
}
/// Loads `nested.json` and `nested.toml` into `MyData` via the generated
/// `MyDataFile` shadow struct, then checks defaults were applied.
///
/// NOTE(review): the data path assumes the example is run from the workspace
/// root (`cargo run --example nested`) — confirm for other working dirs.
fn main() {
    // Get the absolute current directory
    let current_dir = std::env::current_dir().expect("Failed to get current directory");
    // Path to the data directory
    let data_dir = current_dir.join("filecaster/examples/data");
    // Paths to JSON and TOML files
    let json_path = data_dir.join("nested.json");
    let toml_path = data_dir.join("nested.toml");
    // Read and parse JSON file
    let json_content = fs::read_to_string(&json_path)
        .unwrap_or_else(|e| panic!("Failed to read JSON file at {:?}: {}", json_path, e));
    // Deserialize into the generated shadow struct, then convert with `into()`.
    let json_data: MyData = serde_json::from_str::<MyDataFile>(&json_content)
        .unwrap_or_else(|e| panic!("Failed to parse JSON in {:?}: {}", json_path, e))
        .into();
    // Read and parse TOML file
    let toml_content = fs::read_to_string(&toml_path)
        .unwrap_or_else(|e| panic!("Failed to read TOML file at {:?}: {}", toml_path, e));
    let toml_data: MyData = toml::from_str::<MyDataFile>(&toml_content)
        .unwrap_or_else(|e| panic!("Failed to parse TOML in {:?}: {}", toml_path, e))
        .into();
    // Output the parsed data
    dbg!(&json_data);
    dbg!(&toml_data);
    // Example assertions (adjust based on your actual file contents)
    assert_eq!(json_data.key, "json key");
    assert_eq!(json_data.number, 123);
    // JSON file omits `inner_key`, so the field-level default applies.
    assert_eq!(json_data.nested.inner_key, "inner default");
    assert_eq!(json_data.nested.inner_number, 42);
    assert_eq!(toml_data.key, "toml key");
    assert_eq!(toml_data.number, 456);
    assert_eq!(toml_data.nested.inner_key, "inner toml key");
    assert_eq!(toml_data.nested.inner_number, 99);
}

View File

@ -0,0 +1,48 @@
use filecaster::FromFile;
use std::fs;
/// Flat configuration; fields without `#[from_file(default = ...)]` fall back
/// to their type's `Default` when missing from the file.
#[derive(Debug, FromFile)]
pub struct MyData {
    /// Used when the file omits `key`.
    #[from_file(default = "default key")]
    pub key: String,
    // Missing `number` falls back to `i32::default()` (0).
    pub number: i32,
    // Missing `exists` falls back to `bool::default()` (false).
    pub exists: bool,
}
/// Loads `simple.json` and `simple.toml` into `MyData` via the generated
/// `MyDataFile` shadow struct, then checks defaults were applied.
///
/// NOTE(review): the data path assumes the example is run from the workspace
/// root (`cargo run --example simple`) — confirm for other working dirs.
fn main() {
    // Get the absolute current directory
    let current_dir = std::env::current_dir().expect("Failed to get current directory");
    // Path to the data directory
    let data_dir = current_dir.join("filecaster/examples/data");
    // Paths to JSON and TOML files
    let json_path = data_dir.join("simple.json");
    let toml_path = data_dir.join("simple.toml");
    // Read and parse JSON file
    let json_content = fs::read_to_string(&json_path)
        .unwrap_or_else(|e| panic!("Failed to read JSON file at {:?}: {}", json_path, e));
    // Deserialize into the generated shadow struct, then convert with `into()`.
    let json_data: MyData = serde_json::from_str::<MyDataFile>(&json_content)
        .unwrap_or_else(|e| panic!("Failed to parse JSON in {:?}: {}", json_path, e))
        .into();
    // Read and parse TOML file
    let toml_content = fs::read_to_string(&toml_path)
        .unwrap_or_else(|e| panic!("Failed to read TOML file at {:?}: {}", toml_path, e));
    let toml_data: MyData = toml::from_str::<MyDataFile>(&toml_content)
        .unwrap_or_else(|e| panic!("Failed to parse TOML in {:?}: {}", toml_path, e))
        .into();
    // Output the parsed data
    dbg!(&json_data);
    dbg!(&toml_data);
    // Example assertions (adjust based on your actual file contents)
    assert_eq!(json_data.key, "json key".to_string());
    assert_eq!(json_data.number, 123);
    assert!(!json_data.exists); // `bool::default()` is `false`
    // TOML file omits `key`, so the field-level default applies.
    assert_eq!(toml_data.key, "default key".to_string());
    assert_eq!(toml_data.number, 456);
    assert!(toml_data.exists);
}

151
filecaster/src/lib.rs Normal file
View File

@ -0,0 +1,151 @@
//! # filecaster
//!
//! `filecaster` provides the core `FromFile` trait, which is used in conjunction with the
//! `filecaster-derive` crate to enable automatic deserialization and merging of
//! configuration from various file formats into Rust structs.
//!
//! This crate defines the fundamental interface for types that can be constructed
//! from an optional "shadow" representation, typically deserialized from a file.
//! The `filecaster-derive` crate provides a procedural macro to automatically
//! implement this trait for your structs, handling default values and merging logic.
//!
//! ## How it works
//!
//! The `FromFile` trait defines how a final configuration struct (`Self`) can be
//! constructed from an optional intermediate "shadow" struct (`Self::Shadow`).
//! The `filecaster-derive` macro generates this `Shadow` struct and the
//! `from_file` implementation for your configuration types.
//!
//! When you derive `FromFile` for a struct, `filecaster-derive` creates a
//! corresponding `YourStructFile` (the `Shadow` type) where all fields are
//! wrapped in `Option<T>`. This `YourStructFile` can then be deserialized
//! from a file (e.g., JSON, TOML, YAML) using `serde`.
//!
//! The `from_file` method then takes this `Option<YourStructFile>` and
//! constructs your final `YourStruct`, applying default values for any fields
//! that were `None` in the `YourStructFile`.
//!
//! ## Example
//!
//! While the `FromFile` trait is implemented via the `filecaster-derive` macro,
//! here's a conceptual example of how it's used:
//!
//! ```rust,ignore
//! use filecaster::FromFile;
//! use serde::{Deserialize, Serialize};
//!
//! // This struct would typically have `#[derive(FromFile)]`
//! // from the `filecaster-derive` crate.
//! #[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
//! struct AppConfig {
//! host: String,
//! port: u16,
//! auto_reload: bool,
//! }
//!
//! // The `Shadow` type is automatically generated by `filecaster-derive`
//! // and would look something like this:
//! #[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
//! struct AppConfigFile {
//! host: Option<String>,
//! port: Option<u16>,
//! auto_reload: Option<bool>,
//! }
//!
//! // The `FromFile` implementation is also automatically generated.
//! // For demonstration, here's a simplified manual implementation:
//! impl FromFile for AppConfig {
//! type Shadow = AppConfigFile;
//!
//! fn from_file(file: Option<Self::Shadow>) -> Self {
//! let file = file.unwrap_or_default();
//! AppConfig {
//! host: file.host.unwrap_or_else(|| "127.0.0.1".to_string()),
//! port: file.port.unwrap_or(8080),
//! auto_reload: file.auto_reload.unwrap_or(true),
//! }
//! }
//! }
//!
//! fn example() {
//! // Simulate deserializing from a file
//! let file_content = r#"{ "host": "localhost", "port": 3000 }"#;
//! let partial_config: AppConfigFile = serde_json::from_str(file_content).unwrap();
//!
//! // Construct the final config using the FromFile trait
//! let config = AppConfig::from_file(Some(partial_config));
//!
//! assert_eq!(config.host, "localhost");
//! assert_eq!(config.port, 3000);
//!     assert_eq!(config.auto_reload, true); // the example impl falls back to `unwrap_or(true)`
//!
//! println!("Final Config: {:#?}", config);
//!
//! // Example with no file content (all defaults)
//! let default_config = AppConfig::from_file(None);
//! assert_eq!(default_config.host, "127.0.0.1");
//! assert_eq!(default_config.port, 8080);
//!     assert_eq!(default_config.auto_reload, true); // `unwrap_or(true)` applies here too
//! }
//! ```
//!
//! ## Feature flags
//!
//! - `derive`: Enables the `filecaster-derive` crate, allowing you to use `#[derive(FromFile)]`.
//! - `serde`: Enables `serde` serialization/deserialization support for the `FromFile` trait.
//! - `merge`: Enables `merge` crate support, allowing for merging multiple partial configurations.
pub use filecaster_derive::FromFile;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// Marker for types that can be built from an [`Option<Shadow>`] produced by the macro.
///
/// The `FromFile` trait is the core interface for `filecaster`. It defines how a
/// final configuration struct (`Self`) can be constructed from an optional
/// intermediate "shadow" struct (`Self::Shadow`).
///
/// The `Self::Shadow` associated type represents the intermediate structure
/// that is typically deserialized from a configuration file. All fields in
/// `Self::Shadow` are usually `Option<T>`, allowing for partial configurations.
///
/// The `from_file` method takes an `Option<Self::Shadow>` and is responsible
/// for producing a fully-populated `Self` instance. This involves applying
/// default values for any fields that were `None` in the `Shadow` instance.
///
/// This trait is primarily designed to be implemented automatically via the
/// `#[derive(FromFile)]` procedural macro provided by the `filecaster-derive` crate.
///
/// A blanket implementation also provides `FromFile` for any `T: Default`
/// (with additional serde bounds when the `serde` feature is enabled), using
/// `T` itself as the `Shadow` type.
pub trait FromFile: Sized {
    /// The intermediate "shadow" type that is typically deserialized from a file.
    ///
    /// This type usually mirrors the main struct but with all fields wrapped in `Option<T>`.
    type Shadow: Default;

    /// Constructs the final struct from an optional shadow representation.
    ///
    /// If `file` is `None`, a default `Shadow` instance should be used.
    /// The implementation should then populate `Self` by taking values from
    /// `file` where present, and applying defaults otherwise.
    fn from_file(file: Option<Self::Shadow>) -> Self;
}
// Blanket implementation (without the `serde` feature): every `Default` type
// acts as its own shadow, and `from_file` falls back to `T::default()` when
// no value was supplied.
#[cfg(not(feature = "serde"))]
impl<T> FromFile for T
where
    T: Default,
{
    type Shadow = T;

    fn from_file(file: Option<Self>) -> Self {
        file.unwrap_or_default()
    }
}
// Blanket implementation (with the `serde` feature): identical behavior to the
// non-serde version, but additionally requires `Serialize`/`Deserialize` so
// the shadow can round-trip through a file format.
#[cfg(feature = "serde")]
impl<T> FromFile for T
where
    T: Default + Serialize + for<'de> Deserialize<'de>,
{
    type Shadow = T;

    fn from_file(file: Option<Self>) -> Self {
        file.unwrap_or_default()
    }
}

View File

@ -0,0 +1,141 @@
use filecaster::FromFile;
// Leaf struct for the nested-derive tests; `FromFile` generates a
// `CoordinatesFile` shadow whose fields are `Option<i32>`.
#[derive(Debug, Clone, PartialEq, FromFile)]
pub struct Coordinates {
    x: i32,
    y: i32,
}
impl Coordinates {
    // Convenience constructor for expected values in assertions.
    fn new(x: i32, y: i32) -> Self {
        Self { x, y }
    }
}
impl CoordinatesFile {
    // Builds a fully-populated shadow (both fields `Some`).
    fn new(x: i32, y: i32) -> Self {
        Self {
            x: Some(x),
            y: Some(y),
        }
    }
}
// One level of nesting: the derived `WrapperFile` wraps `parent` in an
// `Option<ParentFile>` (see the shadow literals in the tests below).
#[derive(Debug, Clone, PartialEq, FromFile)]
struct Wrapper {
    parent: Parent,
}
// And one more level: two levels of nesting, to exercise defaults applied
// recursively through the generated shadows.
#[derive(Debug, Clone, PartialEq, FromFile)]
struct DoubleWrapper {
    wrapper: Wrapper,
}
#[derive(Debug, Clone, PartialEq, FromFile)]
pub struct Parent {
    // Explicit per-field default applied when the shadow's `name` is `None`.
    #[from_file(default = "Foo")]
    name: String,
    // No attribute: falls back to the nested default (all-zero per the tests).
    coordinates: Coordinates,
}
#[test]
fn parent_all_defaults() {
    // No shadow at all: every field takes its default.
    let p = Parent::from_file(None);
    assert_eq!(p.name, "Foo".to_string());
    assert_eq!(p.coordinates, Coordinates::new(0, 0));
}
#[test]
fn parent_partial_shadow_merges_defaults() {
    // `name` absent → attribute default applies; coordinates come from the shadow.
    let shadow = ParentFile {
        name: None,
        coordinates: Some(CoordinatesFile::new(1, 2)),
    };
    let p = Parent::from_file(Some(shadow));
    assert_eq!(p.name, "Foo".to_string());
    assert_eq!(p.coordinates, Coordinates::new(1, 2));
}
#[test]
fn parent_full_shadow_overrides_everything() {
    // Every field present in the shadow overrides its default.
    let shadow = ParentFile {
        name: Some("Bar".into()),
        coordinates: Some(CoordinatesFile::new(42, 24)),
    };
    let p = Parent::from_file(Some(shadow));
    assert_eq!(p.name, "Bar".to_string());
    assert_eq!(p.coordinates, Coordinates::new(42, 24));
}
#[test]
fn wrapper_all_defaults() {
    // None → WrapperFile::default() → parent = Parent::from_file(None),
    // so the nested defaults propagate one level down.
    let w = Wrapper::from_file(None);
    assert_eq!(w.parent.name, "Foo".to_string());
    assert_eq!(w.parent.coordinates, Coordinates::new(0, 0));
}
#[test]
fn wrapper_partial_parent() {
    // We supply only coordinates; `name` still gets its attribute default.
    let shadow = WrapperFile {
        parent: Some(ParentFile {
            name: None,
            coordinates: Some(CoordinatesFile::new(5, -2)),
        }),
    };
    let w = Wrapper::from_file(Some(shadow));
    assert_eq!(w.parent.name, "Foo".to_string());
    assert_eq!(w.parent.coordinates, Coordinates::new(5, -2));
}
#[test]
fn wrapper_full_parent_override() {
    // Fully-populated nested shadow overrides all defaults.
    let shadow = WrapperFile {
        parent: Some(ParentFile {
            name: Some("Baz".into()),
            coordinates: Some(CoordinatesFile::new(1, 1)),
        }),
    };
    let w = Wrapper::from_file(Some(shadow));
    assert_eq!(w.parent.name, "Baz".to_string());
    assert_eq!(w.parent.coordinates, Coordinates::new(1, 1));
}
#[test]
fn double_wrapper_all_defaults() {
    // Defaults propagate through two levels of generated shadows.
    let dw = DoubleWrapper::from_file(None);
    assert_eq!(dw.wrapper.parent.name, "Foo".to_string());
    assert_eq!(dw.wrapper.parent.coordinates, Coordinates::new(0, 0));
}
#[test]
fn double_wrapper_partial_deep() {
    // A partially-filled shadow two levels deep still merges with defaults.
    let shadow = DoubleWrapperFile {
        wrapper: Some(WrapperFile {
            parent: Some(ParentFile {
                name: None,
                coordinates: Some(CoordinatesFile::new(10, 20)),
            }),
        }),
    };
    let dw = DoubleWrapper::from_file(Some(shadow));
    assert_eq!(dw.wrapper.parent.name, "Foo".to_string());
    assert_eq!(dw.wrapper.parent.coordinates, Coordinates::new(10, 20));
}
#[test]
fn double_wrapper_full_override_deep() {
    // A fully-populated deep shadow overrides everything.
    let shadow = DoubleWrapperFile {
        wrapper: Some(WrapperFile {
            parent: Some(ParentFile {
                name: Some("Deep".into()),
                coordinates: Some(CoordinatesFile::new(3, 4)),
            }),
        }),
    };
    let dw = DoubleWrapper::from_file(Some(shadow));
    assert_eq!(dw.wrapper.parent.name, "Deep".to_string());
    assert_eq!(dw.wrapper.parent.coordinates, Coordinates::new(3, 4));
}

7
filecaster/tests/ui.rs Normal file
View File

@ -0,0 +1,7 @@
use trybuild::TestCases;
#[test]
fn ui() {
    // Compile-fail suite: trybuild checks that each file under tests/ui/
    // fails to compile with the diagnostics recorded in its *.stderr file.
    let t = TestCases::new();
    t.compile_fail("tests/ui/*.rs");
}

View File

@ -0,0 +1,9 @@
use filecaster::FromFile;
#[derive(FromFile)] // compile-fail fixture: the derive rejects enums (inline comments only, so .stderr line numbers stay valid)
enum MyEnum {
    A,
    B,
}
fn main() {} // required by trybuild

View File

@ -0,0 +1,6 @@
error: FromFile only works on structs.
Enums are not supported.
--> tests/ui/enum_not_supported.rs:4:6
|
4 | enum MyEnum {
| ^^^^^^

View File

@ -0,0 +1,6 @@
use filecaster::FromFile;
#[derive(FromFile)] // compile-fail fixture: tuple structs have no named fields (inline comments only, so .stderr line numbers stay valid)
struct MyTuple(i32, String);
fn main() {} // required by trybuild

View File

@ -0,0 +1,6 @@
error: FromFile only works on structs with *named* fields.
Tuple structs and unit structs are not supported.
--> tests/ui/tuple_struct_not_supported.rs:4:8
|
4 | struct MyTuple(i32, String);
| ^^^^^^^

View File

@ -1,324 +0,0 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{
Attribute, Data, DeriveInput, Error, Expr, Fields, FieldsNamed, GenericParam, Generics, Lit,
Meta, MetaList, Result, WhereClause, WherePredicate, parse_quote, parse2,
};
/// Compile-time flag: when the `merge` feature is enabled, the generated
/// shadow struct additionally derives `merge::Merge`.
const WITH_MERGE: bool = cfg!(feature = "merge");

/// Generates, for a struct with named fields:
/// 1. a shadow struct `<Name>File` with every field wrapped in `Option<T>`,
/// 2. an inherent `from_file(Option<<Name>File>) -> Self` constructor,
/// 3. a `From<Option<<Name>File>>` impl delegating to `from_file`.
///
/// Returns an error when the input is not a struct with named fields, or when
/// a generated `where` predicate fails to parse.
pub fn impl_from_file(input: &DeriveInput) -> Result<TokenStream> {
    let name = &input.ident;
    let vis = &input.vis;
    // NOTE(review): the fn name carries a typo ("bouds"); renaming would also
    // touch its definition below, so it is left as-is here.
    let generics = add_trait_bouds(input.generics.clone());
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // Shadow struct is named after the target, e.g. `Config` -> `ConfigFile`.
    let file_ident = format_ident!("{name}File");
    let fields = extract_named_fields(input)?;
    let (field_assignments, file_fields, default_bounds) = process_fields(fields)?;
    let where_clause = build_where_clause(where_clause.cloned(), default_bounds)?;
    let derive_clause = build_derive_clause();
    // NOTE(review): the shadow struct below is emitted without the input's
    // generic parameter list, while `from_file` references
    // `#file_ident #ty_generics` — generic inputs likely fail to compile.
    // TODO confirm and, if so, emit `#vis struct #file_ident #impl_generics`.
    Ok(quote! {
        #derive_clause
        #vis struct #file_ident #where_clause {
            #(#file_fields),*
        }
        impl #impl_generics #name #ty_generics #where_clause {
            pub fn from_file(file: Option<#file_ident #ty_generics>) -> Self {
                let file = file.unwrap_or_default();
                Self {
                    #(#field_assignments),*
                }
            }
        }
        impl #impl_generics From<Option<#file_ident #ty_generics>> for #name #ty_generics #where_clause {
            fn from(value: Option<#file_ident #ty_generics>) -> Self {
                Self::from_file(value)
            }
        }
    })
}
/// Returns the named fields of the input struct, or an error spanned on the
/// type name when the input is an enum/union or a tuple/unit struct.
fn extract_named_fields(input: &DeriveInput) -> Result<&FieldsNamed> {
    match &input.data {
        Data::Struct(ds) => match &ds.fields {
            Fields::Named(fields) => Ok(fields),
            _ => Err(Error::new_spanned(
                &input.ident,
                "FromFile can only be derived for structs with named fields",
            )),
        },
        _ => Err(Error::new_spanned(
            &input.ident,
            "FromFile can only be derived for structs",
        )),
    }
}
/// Walks every named field and produces, in field order:
///
/// 1. the assignment used inside `from_file` (`ident: file.ident.unwrap_or...`),
/// 2. the `pub ident: Option<Ty>` declaration for the shadow struct,
/// 3. a `Ty: Default` bound for each field that has no explicit default.
fn process_fields(
    fields: &FieldsNamed,
) -> Result<(Vec<TokenStream>, Vec<TokenStream>, Vec<TokenStream>)> {
    let mut field_assignments = Vec::new();
    let mut file_fields = Vec::new();
    let mut default_bounds = Vec::new();
    for field in &fields.named {
        let ident = field
            .ident
            .as_ref()
            .ok_or_else(|| Error::new_spanned(field, "Expected named fields"))?;
        let ty = &field.ty;
        let default_expr = parse_from_file_default_attr(&field.attrs)?;
        // With the `merge` feature, `None` fields are overwritten when
        // layering several partial shadows together.
        let field_attrs = if WITH_MERGE {
            quote! {
                #[merge(strategy = merge::option::overwrite_none)]
            }
        } else {
            quote! {}
        };
        file_fields.push(quote! {
            #field_attrs
            pub #ident: Option<#ty>
        });
        if let Some(expr) = default_expr {
            field_assignments.push(quote! {
                #ident: file.#ident.unwrap_or_else(|| #expr)
            });
        } else {
            // No explicit default: fall back to `Default::default()`, which
            // requires a `Ty: Default` bound on the generated code.
            default_bounds.push(quote! { #ty: Default });
            field_assignments.push(quote! {
                #ident: file.#ident.unwrap_or_default()
            });
        }
    }
    Ok((field_assignments, file_fields, default_bounds))
}
/// Appends the `Ty: Default` predicates to an existing `where` clause, or
/// builds a fresh clause when none exists. With no bounds to add, the
/// original clause is returned unchanged.
fn build_where_clause(
    where_clause: Option<WhereClause>,
    default_bounds: Vec<TokenStream>,
) -> Result<Option<WhereClause>> {
    if default_bounds.is_empty() {
        return Ok(where_clause);
    }
    let mut where_clause = where_clause;
    if let Some(wc) = &mut where_clause {
        for bound in default_bounds {
            // Each bound token stream must parse as a single predicate.
            let predicate = parse2::<WherePredicate>(bound.clone())
                .map_err(|_| Error::new_spanned(&bound, "Failed to parse where predicate"))?;
            wc.predicates.push(predicate);
        }
    } else {
        where_clause = Some(parse_quote!(where #(#default_bounds),*));
    }
    Ok(where_clause)
}
/// Builds the `#[derive(...)]` attribute placed on the generated shadow
/// struct; `merge::Merge` is included only when the `merge` feature is on.
fn build_derive_clause() -> TokenStream {
    if WITH_MERGE {
        quote! {
            #[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize, merge::Merge)]
        }
    } else {
        quote! {
            #[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize)]
        }
    }
}
/// Adds a `Default` bound to every type parameter of the input generics.
/// (The name carries a historical typo — "bouds" — kept to avoid touching
/// call sites.)
fn add_trait_bouds(mut generics: Generics) -> Generics {
    generics.params.iter_mut().for_each(|param| {
        if let GenericParam::Type(type_param) = param {
            type_param.bounds.push(parse_quote!(Default));
        }
    });
    generics
}
/// Parses attributes for `#[from_file(default = ...)]`.
///
/// Only the first `#[from_file]` attribute is inspected; it must use the
/// list form (`#[from_file(...)]`), otherwise an error is returned.
fn parse_from_file_default_attr(attrs: &[Attribute]) -> Result<Option<Expr>> {
    match attrs.iter().find(|a| a.path().is_ident("from_file")) {
        // No #[from_file] attribute at all: nothing to parse.
        None => Ok(None),
        Some(attr) => match &attr.meta {
            Meta::List(meta_list) => parse_default(meta_list),
            _ => Err(Error::new_spanned(
                attr,
                "Expected #[from_file(default = \"literal\")] or similar",
            )),
        },
    }
}
/// Extracts the expression from `default = <expr>` inside a
/// `#[from_file(...)]` list.
///
/// String literals are rewritten to `<lit>.to_string()` so they can populate
/// `String` fields directly. Non-`default` keys are silently ignored; if
/// `default` appears more than once, the last occurrence wins.
fn parse_default(list: &MetaList) -> Result<Option<Expr>> {
    let mut default_expr = None;
    list.parse_nested_meta(|meta| {
        if meta.path.is_ident("default") {
            let value = meta.value()?;
            let expr = value.parse::<Expr>()?;
            // Special-case string literals: wrap in `.to_string()`.
            if let Expr::Lit(expr_lit) = &expr {
                if let Lit::Str(lit_str) = &expr_lit.lit {
                    default_expr = Some(parse_quote! {
                        #lit_str.to_string()
                    });
                    return Ok(());
                }
            }
            default_expr = Some(expr);
        }
        Ok(())
    })?;
    Ok(default_expr)
}
#[cfg(test)]
mod tests {
    //! Unit tests for the individual code-generation helpers.
    use claims::{assert_err, assert_none};
    use quote::ToTokens;
    use super::*;

    #[test]
    fn extract_named_fields_success() {
        let input: DeriveInput = parse_quote! {
            struct S { x: i32, y: String }
        };
        let fields = extract_named_fields(&input).unwrap();
        let names = fields
            .named
            .iter()
            .map(|f| f.ident.as_ref().unwrap().to_string())
            .collect::<Vec<_>>();
        assert_eq!(names, vec!["x", "y"]);
    }

    #[test]
    fn extract_named_fields_err_on_enum() {
        let input: DeriveInput = parse_quote! {
            enum E { A, B }
        };
        assert_err!(extract_named_fields(&input));
    }

    #[test]
    fn extract_named_fields_err_on_tuple_struct() {
        let input: DeriveInput = parse_quote! {
            struct T(i32, String);
        };
        assert_err!(extract_named_fields(&input));
    }

    #[test]
    fn parse_default_attrs_none() {
        // Attributes other than `#[from_file]` are skipped entirely.
        let attrs: Vec<Attribute> = vec![parse_quote!(#[foo])];
        assert_none!(parse_from_file_default_attr(&attrs).unwrap());
    }

    #[test]
    fn process_fields_mixed() {
        // One field with an explicit default, one relying on `Default`.
        let fields: FieldsNamed = parse_quote! {
            {
                #[from_file(default = 1)]
                a: u32,
                b: String,
            }
        };
        let (assign, file_fields, bounds) = process_fields(&fields).unwrap();
        // two fields
        assert_eq!(assign.len(), 2);
        assert_eq!(file_fields.len(), 2);
        // a uses unwrap_or_else
        assert!(
            assign[0]
                .to_string()
                .contains("a : file . a . unwrap_or_else")
        );
        // b uses unwrap_or_default
        assert!(
            assign[1]
                .to_string()
                .contains("b : file . b . unwrap_or_default")
        );
        // default-bound should only mention String
        assert_eq!(bounds.len(), 1);
        assert!(bounds[0].to_string().contains("String : Default"));
    }

    #[test]
    fn build_where_clause_to_new() {
        let bounds = vec![quote! { A: Default }, quote! { B: Default }];
        let wc = build_where_clause(None, bounds).unwrap().unwrap();
        let s = wc.to_token_stream().to_string();
        assert!(s.contains("where A : Default , B : Default"));
    }

    #[test]
    fn build_where_clause_append_existing() {
        let orig: WhereClause = parse_quote!(where X: Clone);
        let bounds = vec![quote! { Y: Default }];
        let wc = build_where_clause(Some(orig.clone()), bounds)
            .unwrap()
            .unwrap();
        let preds: Vec<_> = wc
            .predicates
            .iter()
            .map(|p| p.to_token_stream().to_string())
            .collect();
        assert!(preds.contains(&"X : Clone".to_string()));
        assert!(preds.contains(&"Y : Default".to_string()));
    }

    #[test]
    fn build_where_clause_no_bounds_keeps_original() {
        let orig: WhereClause = parse_quote!(where Z: Eq);
        let wc = build_where_clause(Some(orig.clone()), vec![])
            .unwrap()
            .unwrap();
        let preds: Vec<_> = wc
            .predicates
            .iter()
            .map(|p| p.to_token_stream().to_string())
            .collect();
        assert_eq!(preds, vec!["Z : Eq".to_string()]);
    }

    #[test]
    fn build_derive_clause_defaults() {
        // Expected derive list depends on whether the `merge` feature is on.
        let derive_ts = build_derive_clause();
        let s = derive_ts.to_string();
        if WITH_MERGE {
            assert!(s.contains(
                "derive (Debug , Clone , Default , serde :: Deserialize , serde :: Serialize , merge :: Merge)"
            ));
        } else {
            assert!(s.contains(
                "derive (Debug , Clone , Default , serde :: Deserialize , serde :: Serialize)"
            ));
        }
    }

    #[test]
    fn add_trait_bouds_appends_default() {
        let gens: Generics = parse_quote!(<T, U>);
        let new = add_trait_bouds(gens);
        let s = new.to_token_stream().to_string();
        assert!(s.contains("T : Default"));
        assert!(s.contains("U : Default"));
    }
}

View File

@ -1,97 +0,0 @@
//! # filecaster
//!
//! `filecaster` is a small `proc-macro` crate that provides a derive macro
//! `#[derive(FromFile)]` to make it trivial to load partial configurations
//! from files, merge them with defaults, and get a fully-populated struct.
//!
//! ## What it does
//!
//! For any struct with named fields, `#[derive(FromFile)]` generates:
//!
//! 1. A companion `<YourStruct>File` struct in which each field is wrapped
//! in `Option<...>`.
//! 2. A constructor `YourStruct::from_file(file: Option<YourStructFile>) -> YourStruct`
//! that takes your partially-filled file struct, fills in `None` fields
//! with either:
//! - an expression you supply via `#[from_file(default = ...)]`, or
//! - `Default::default()` (requires `T: Default`)
//! 3. An implementation of `From<Option<YourStructFile>> for YourStruct`.
//!
//! Because each field in the file struct is optional, you can deserialize
//! e.g. JSON, YAML or TOML into it via Serde, then call `.from_file(...)`
//! to get your final struct.
//!
//! ## Optional per-field defaults
//!
//! Use a `#[from_file(default = <expr>)]` attribute on any field to override
//! the fallback value. You may supply any expression valid in that struct's
//! context. If you omit it, the macro will require `T: Default` and call
//! `unwrap_or_default()`.
//!
//! Example:
//!
//! ```rust
//! use filecaster::FromFile;
//!
//! #[derive(Debug, Clone, FromFile)]
//! struct AppConfig {
//! /// If the user does not specify a host, use `"127.0.0.1"`.
//! #[from_file(default = "127.0.0.1")]
//! host: String,
//!
//! /// Number of worker threads; defaults to `4`.
//! #[from_file(default = 4)]
//! workers: usize,
//!
//! /// If not set, use `false`.
//! auto_reload: bool, // requires `bool: Default`
//! }
//!
//! let file_content = r#"
//! {
//! "host": "localhost"
//! }
//! "#;
//!
//! let config_from_file = serde_json::from_str::<AppConfigFile>(file_content).unwrap();
//! // After deserializing the partial config from disk (e.g. with Serde):
//! let cfg = AppConfig::from_file(Some(config_from_file));
//! println!("{cfg:#?}");
//! ```
//!
//! ## Feature flags
//!
//! - `merge`
//! If you enable the `merge` feature, the generated `<Name>File` struct will
//! also derive `merge::Merge`, and you can layer multiple partial files
//! together before calling `.from_file(...)`. Any field-level merge strategy
//! annotations (`#[merge(...)]`) are applied automatically.
//!
//! ## Limitations
//!
//! - Only works on structs with _named_ fields (no tuple structs or enums).
//! - All fields without a `#[from_file(default = ...)]` must implement `Default`.
//!
//! ## License
//!
//! MIT OR Apache-2.0
mod from_file;
pub(crate) use from_file::impl_from_file;
use proc_macro::TokenStream;
use proc_macro_error2::proc_macro_error;
use syn::{DeriveInput, parse_macro_input};
/// Implements the `FromFile` derive macro.
///
/// This macro processes the `#[from_file]` attribute on structs to generate
/// code for loading data from files.
///
/// Generation errors are surfaced via `to_compile_error`, so invalid inputs
/// produce a spanned compiler diagnostic instead of a panic.
#[proc_macro_error]
#[proc_macro_derive(FromFile, attributes(from_file))]
pub fn derive_from_file(input: TokenStream) -> TokenStream {
    let inp = parse_macro_input!(input as DeriveInput);
    impl_from_file(&inp)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}