feat: implement project discovery with fd and ripgrep

2025-03-20 13:23:33 +02:00
parent 39ccbd6e58
commit 73e1b44312
8 changed files with 1309 additions and 2 deletions

src/commands.rs (new file, 104 lines)

@@ -0,0 +1,104 @@
use crate::errors::{ProjectFinderError, Result};
use std::{
path::{Path, PathBuf},
process::Stdio,
};
use tokio::process::Command;
use tracing::{debug, warn};
use crate::dependencies::Dependencies;
/// Run fd to find files matching `pattern` under `dir` (files only, hidden included)
pub async fn find_files(
deps: &Dependencies,
dir: &Path,
pattern: &str,
max_depth: usize,
) -> Result<Vec<PathBuf>> {
let mut cmd = Command::new(&deps.fd_path);
cmd.arg("--hidden")
.arg("--no-ignore-vcs")
.arg("--type")
.arg("f")
.arg("--max-depth")
.arg(max_depth.to_string())
.arg(pattern)
.arg(dir)
.stdout(Stdio::piped());
debug!("Running: fd {} in {}", pattern, dir.display());
let output = cmd.output().await.map_err(|e| {
ProjectFinderError::CommandExecutionFailed(format!("Failed to execute fd: {e}"))
})?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
warn!("fd command failed: {stderr}");
return Ok(Vec::new());
}
let stdout = String::from_utf8(output.stdout).map_err(ProjectFinderError::Utf8Error)?;
let paths = stdout.lines().map(PathBuf::from).collect();
Ok(paths)
}
/// Find Git repositories
pub async fn find_git_repos(
deps: &Dependencies,
dir: &Path,
max_depth: usize,
) -> Result<Vec<PathBuf>> {
let mut cmd = Command::new(&deps.fd_path);
cmd.arg("--hidden")
.arg("--type")
.arg("d")
.arg("--max-depth")
.arg(max_depth.to_string())
.arg("^.git$")
.arg(dir)
.stdout(Stdio::piped());
debug!("Finding git repos in {}", dir.display());
let output = cmd.output().await.map_err(|e| {
ProjectFinderError::CommandExecutionFailed(format!("Failed to find git repositories: {e}"))
})?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
warn!("fd command failed: {}", stderr);
return Ok(Vec::new());
}
let stdout = String::from_utf8(output.stdout).map_err(ProjectFinderError::Utf8Error)?;
let paths = stdout
.lines()
// Convert .git directories to their parent (the actual repo root)
.filter_map(|line| {
let path = PathBuf::from(line);
path.parent().map(std::path::Path::to_path_buf)
})
.collect();
Ok(paths)
}
/// Run ripgrep on a file to check whether it contains `pattern`
pub async fn grep_file(deps: &Dependencies, file: &Path, pattern: &str) -> Result<bool> {
let mut cmd = Command::new(&deps.rg_path);
cmd.arg("-q") // quiet mode, just return exit code
.arg(pattern)
.arg(file);
let status = cmd.status().await.map_err(|e| {
ProjectFinderError::CommandExecutionFailed(format!("Failed to execute ripgrep: {e}"))
})?;
Ok(status.success())
}
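A minimal usage sketch of how these three wrappers compose (illustrative only: the root path, depth, and helper name are placeholders, and `Dependencies` comes from src/dependencies.rs below):

// Hypothetical sketch; relies on the crate's `Result` alias above.
async fn list_rust_crates(deps: &Dependencies) -> Result<()> {
    let root = std::path::Path::new(".");
    // Find Cargo.toml files, then keep only those declaring a [package] section.
    let manifests = find_files(deps, root, "Cargo.toml", 5).await?;
    for manifest in &manifests {
        if grep_file(deps, manifest, r"^\[package\]").await? {
            println!("crate: {}", manifest.display());
        }
    }
    let repos = find_git_repos(deps, root, 5).await?;
    println!("{} git repositories found", repos.len());
    Ok(())
}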

src/config.rs (new file, 21 lines)

@@ -0,0 +1,21 @@
use clap::Parser;
#[derive(Debug, Parser, Clone)]
#[clap(
author,
version,
about = "Find coding projects in specified directories"
)]
pub struct Config {
/// Directories to search for projects
#[clap(default_value = ".")]
pub paths: Vec<String>,
/// Maximum search depth
#[clap(short, long, default_value = "5")]
pub depth: usize,
/// Show verbose output
#[clap(short, long)]
pub verbose: bool,
}
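A hedged sketch of how these flags parse via clap's `parse_from`; the binary name `project-finder` is assumed for illustration and is not taken from this commit:

#[cfg(test)]
mod tests {
    use super::Config;
    use clap::Parser;

    #[test]
    fn parses_defaults_and_flags() {
        // No arguments: search "." at depth 5, non-verbose.
        let cfg = Config::parse_from(["project-finder"]);
        assert_eq!(cfg.paths, vec!["."]);
        assert_eq!(cfg.depth, 5);
        assert!(!cfg.verbose);

        // Explicit paths plus -d/-v.
        let cfg = Config::parse_from(["project-finder", "-d", "3", "-v", "~/code", "~/work"]);
        assert_eq!(cfg.depth, 3);
        assert!(cfg.verbose);
        assert_eq!(cfg.paths, vec!["~/code", "~/work"]);
    }
}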

src/dependencies.rs (new file, 41 lines)

@@ -0,0 +1,41 @@
use crate::errors::{ProjectFinderError, Result};
use tracing::info;
use which::which;
#[derive(Debug, Clone)]
pub struct Dependencies {
pub fd_path: String,
pub rg_path: String,
}
impl Dependencies {
pub fn new(fd_path: impl Into<String>, rg_path: impl Into<String>) -> Self {
Self {
fd_path: fd_path.into(),
rg_path: rg_path.into(),
}
}
pub fn check() -> Result<Self> {
info!("Checking dependencies...");
let fd_path = which("fd").map_err(|_| {
ProjectFinderError::DependencyNotFound(
"fd - install from https://github.com/sharkdp/fd".into(),
)
})?;
let rg_path = which("rg").map_err(|_| {
ProjectFinderError::DependencyNotFound(
"ripgrep (rg) - install from https://github.com/BurntSushi/ripgrep".into(),
)
})?;
info!("Found fd at: {}", fd_path.display());
info!("Found rupgrep at: {}", rg_path.display());
Ok(Self::new(
fd_path.to_string_lossy(),
rg_path.to_string_lossy(),
))
}
}
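The `new`/`check` split lets callers and tests inject explicit binary paths instead of probing PATH; a hedged sketch (the paths are hypothetical, e.g. Debian ships fd as `fdfind`):

#[cfg(test)]
mod tests {
    use super::Dependencies;

    #[test]
    fn constructs_from_explicit_paths() {
        // Hypothetical paths; avoids the `which` lookup entirely.
        let deps = Dependencies::new("/usr/bin/fdfind", "/usr/bin/rg");
        assert_eq!(deps.fd_path, "/usr/bin/fdfind");
        assert_eq!(deps.rg_path, "/usr/bin/rg");
    }
}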

src/errors.rs (new file, 22 lines)

@@ -0,0 +1,22 @@
use std::{path::PathBuf, string::FromUtf8Error};
use thiserror::Error;
#[derive(Debug, Error)]
pub enum ProjectFinderError {
#[error("Dependency not found: {0}. Please install it and try again.")]
DependencyNotFound(String),
#[error("Failed to execute command: {0}")]
CommandExecutionFailed(String),
#[error("Path not found: {0}")]
PathNotFound(PathBuf),
#[error("IO error: {0}")]
IoError(#[from] std::io::Error),
#[error("Invalid UTF-8: {0}")]
Utf8Error(#[from] FromUtf8Error),
}
pub type Result<T> = std::result::Result<T, ProjectFinderError>;
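The `#[from]` conversions let `?` bubble `std::io::Error` and `FromUtf8Error` straight into `ProjectFinderError`; a hedged sketch (the helper is illustrative, not part of this crate):

// Hypothetical helper demonstrating the automatic conversions.
fn read_marker(path: &std::path::Path) -> Result<String> {
    let bytes = std::fs::read(path)?;     // io::Error -> ProjectFinderError::IoError
    let text = String::from_utf8(bytes)?; // FromUtf8Error -> ProjectFinderError::Utf8Error
    Ok(text)
}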

src/finder.rs (new file, 334 lines)

@@ -0,0 +1,334 @@
use crate::{
commands::{find_files, find_git_repos, grep_file},
config::Config,
dependencies::Dependencies,
errors::{ProjectFinderError, Result},
};
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
sync::Arc,
};
use tokio::sync::Mutex;
use tracing::{debug, info};
type ProjectSet = Arc<Mutex<HashSet<PathBuf>>>;
type WorkspaceCache = Arc<Mutex<HashMap<PathBuf, bool>>>;
type RootCache = Arc<Mutex<HashMap<(PathBuf, String), PathBuf>>>;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MarkerType {
PackageJson,
CargoToml,
DenoJson,
BuildFile(String),
OtherConfig(String),
}
#[derive(Debug, Clone)]
pub struct ProjectFinder {
config: Config,
deps: Dependencies,
discovered_projects: ProjectSet,
workspace_cache: WorkspaceCache,
root_cache: RootCache,
}
impl ProjectFinder {
pub fn new(config: Config, deps: Dependencies) -> Self {
Self {
config,
deps,
discovered_projects: Arc::new(Mutex::new(HashSet::new())),
workspace_cache: Arc::new(Mutex::new(HashMap::new())),
root_cache: Arc::new(Mutex::new(HashMap::new())),
}
}
pub async fn find_projects(&self) -> Result<Vec<PathBuf>> {
// Process each search directory
let mut handles = vec![];
for path in &self.config.paths {
let path_buf = PathBuf::from(path);
if !path_buf.is_dir() {
return Err(ProjectFinderError::PathNotFound(path_buf));
}
if self.config.verbose {
info!("Searching in: {}", path);
}
let finder_clone = self.clone();
let path_clone = path_buf.clone();
// Spawn a task for each directory
let handle =
tokio::spawn(async move { finder_clone.process_directory(&path_clone).await });
handles.push(handle);
}
// Wait for all tasks to complete
for handle in handles {
match handle.await {
Ok(result) => {
// Log internal task errors; a failure in one directory is not fatal to the others
if let Err(e) = result {
debug!("Task failed: {}", e);
}
}
Err(e) => {
debug!("Task join error: {}", e);
}
}
}
// Return sorted results
let mut projects: Vec<PathBuf> = {
let projects_guard = self.discovered_projects.lock().await;
projects_guard.iter().cloned().collect()
};
projects.sort();
Ok(projects)
}
async fn process_directory(&self, dir: &Path) -> Result<()> {
// First find all git repositories (usually the most reliable project indicators)
let git_repos = find_git_repos(&self.deps, dir, self.config.depth).await?;
{
let mut projects = self.discovered_projects.lock().await;
projects.extend(git_repos);
}
// Find relevant marker files
let marker_patterns = [
"package.json",
"pnpm-workspace.yaml",
"lerna.json",
"Cargo.toml",
"go.mod",
"pyproject.toml",
"CMakeLists.txt",
"Makefile",
"justfile",
"Justfile",
"deno.json",
"deno.jsonc",
"bunfig.toml",
];
for pattern in &marker_patterns {
let paths = find_files(&self.deps, dir, pattern, self.config.depth).await?;
for path in paths {
if let Some(parent_dir) = path.parent() {
self.process_marker(parent_dir, pattern).await?;
}
}
}
Ok(())
}
async fn process_marker(&self, dir: &Path, marker_name: &str) -> Result<()> {
// Determine marker type
let marker_type = match marker_name {
"package.json" => MarkerType::PackageJson,
"Cargo.toml" => MarkerType::CargoToml,
"deno.json" | "deno.jsonc" => MarkerType::DenoJson,
"Makefile" | "CMakeLists.txt" | "justfile" | "Justfile" => {
MarkerType::BuildFile(marker_name.to_string())
}
_ => MarkerType::OtherConfig(marker_name.to_string()),
};
// Find project root
let project_root = self.find_project_root(dir, &marker_type).await?;
// Check if it's a subdirectory of an already discovered project
let mut should_add = true;
{
let projects = self.discovered_projects.lock().await;
for known_project in projects.iter() {
if project_root.starts_with(known_project) && project_root != *known_project {
should_add = false;
break;
}
}
}
if should_add {
let mut projects = self.discovered_projects.lock().await;
projects.insert(project_root);
}
Ok(())
}
async fn find_project_root(&self, dir: &Path, marker_type: &MarkerType) -> Result<PathBuf> {
// Check cache
let cache_key = (dir.to_path_buf(), format!("{marker_type:?}"));
{
let cache = self.root_cache.lock().await;
if let Some(root) = cache.get(&cache_key) {
return Ok(root.clone());
}
}
let mut result = dir.to_path_buf();
match marker_type {
MarkerType::PackageJson | MarkerType::DenoJson => {
// Check for workspace roots
let mut current = dir.to_path_buf();
while let Some(parent) = current.parent() {
if parent.as_os_str().is_empty() {
break;
}
if self.is_workspace_root(parent).await? {
result = parent.to_path_buf();
break;
}
if parent.join(".git").is_dir() {
result = parent.to_path_buf();
break;
}
current = parent.to_path_buf();
}
}
MarkerType::CargoToml => {
// Check for Cargo workspace
let mut current = dir.to_path_buf();
while let Some(parent) = current.parent() {
if parent.as_os_str().is_empty() {
break;
}
let cargo_toml = parent.join("Cargo.toml");
if cargo_toml.exists()
&& grep_file(&self.deps, &cargo_toml, r"^\[workspace\]").await?
{
result = parent.to_path_buf();
break;
}
if parent.join(".git").is_dir() {
result = parent.to_path_buf();
break;
}
current = parent.to_path_buf();
}
}
MarkerType::BuildFile(name) => {
// For build system files, find the highest one that's still in the same git repo
let mut highest_dir = dir.to_path_buf();
let mut current = dir.to_path_buf();
while let Some(parent) = current.parent() {
if parent.as_os_str().is_empty() {
break;
}
if parent.join(name).exists() {
highest_dir = parent.to_path_buf();
}
if parent.join(".git").is_dir() {
result = parent.to_path_buf();
break;
}
current = parent.to_path_buf();
}
if result == dir.to_path_buf() {
result = highest_dir;
}
}
MarkerType::OtherConfig(_) => {
// For other file types, just look for git repos
let mut current = dir.to_path_buf();
while let Some(parent) = current.parent() {
if parent.as_os_str().is_empty() {
break;
}
if parent.join(".git").is_dir() {
result = parent.to_path_buf();
break;
}
current = parent.to_path_buf();
}
}
}
// Cache the result
self.root_cache
.lock()
.await
.insert(cache_key, result.clone());
Ok(result)
}
async fn is_workspace_root(&self, dir: &Path) -> Result<bool> {
// Check cache
{
let cache = self.workspace_cache.lock().await;
if let Some(&result) = cache.get(dir) {
return Ok(result);
}
}
// Define workspace patterns to check
let workspace_patterns = [
(dir.join("package.json"), r#""workspaces""#),
(dir.join("deno.json"), r#""workspaces""#),
(dir.join("deno.jsonc"), r#""workspaces""#),
(dir.join("bunfig.toml"), r"workspaces"),
];
// Files that indicate workspaces just by existing
let workspace_files = [dir.join("pnpm-workspace.yaml"), dir.join("lerna.json")];
// Check for workspace by pattern matching
for (file, pattern) in &workspace_patterns {
if file.exists() && grep_file(&self.deps, file, pattern).await? {
self.workspace_cache
.lock()
.await
.insert(dir.to_path_buf(), true);
return Ok(true);
}
}
// Check for workspace by file existence
for file in &workspace_files {
if file.exists() {
self.workspace_cache
.lock()
.await
.insert(dir.to_path_buf(), true);
return Ok(true);
}
}
// No workspace found
self.workspace_cache
.lock()
.await
.insert(dir.to_path_buf(), false);
Ok(false)
}
}
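A hedged integration-test sketch of the root-resolution behaviour: a member crate inside a Cargo workspace should collapse to the workspace root. It assumes `tempfile` as a dev-dependency, fd and rg on PATH, a tokio test runtime, an illustrative binary name, and that the temporary directory is not itself inside a git repository:

#[cfg(test)]
mod tests {
    use super::*;
    use clap::Parser;

    #[tokio::test]
    async fn nested_crate_collapses_to_workspace_root() -> Result<()> {
        // Layout: <tmp>/Cargo.toml ([workspace]) and <tmp>/crates/app/Cargo.toml.
        let tmp = tempfile::tempdir()?;
        let root = tmp.path();
        std::fs::create_dir_all(root.join("crates/app"))?;
        std::fs::write(
            root.join("Cargo.toml"),
            "[workspace]\nmembers = [\"crates/app\"]\n",
        )?;
        std::fs::write(
            root.join("crates/app/Cargo.toml"),
            "[package]\nname = \"app\"\n",
        )?;

        let config = Config::parse_from(["project-finder", root.to_str().unwrap()]);
        let finder = ProjectFinder::new(config, Dependencies::check()?);

        // Both Cargo.toml markers resolve to the same workspace root,
        // so exactly one project should be reported.
        let projects = finder.find_projects().await?;
        assert_eq!(projects, vec![root.to_path_buf()]);
        Ok(())
    }
}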

src/main.rs

@@ -1,3 +1,66 @@
fn main() {
println!("Hello, world!");
mod commands;
mod config;
mod dependencies;
mod errors;
mod finder;
use clap::Parser;
use config::Config;
use dependencies::Dependencies;
use finder::ProjectFinder;
use std::process::exit;
use tracing::{Level, error};
use tracing_subscriber::FmtSubscriber;
#[tokio::main]
async fn main() {
// Parse CLI arguments
let config = Config::parse();
// Setup logging
let log_level = if config.verbose {
Level::INFO
} else {
Level::ERROR
};
let subscriber = FmtSubscriber::builder().with_max_level(log_level).finish();
if let Err(e) = tracing::subscriber::set_global_default(subscriber) {
eprintln!("Failed to set up logging: {e}");
exit(1);
}
// Check for required dependencies
let deps = match Dependencies::check() {
Ok(deps) => deps,
Err(e) => {
error!("{e}");
eprintln!("Error: {e}");
eprintln!(
"This tool requires both 'fd' and 'ripgrep' (rg) to be installed and available in your PATH."
);
eprintln!("Please install the missing dependencies and try again.");
eprintln!("\nInstallation instructions:");
eprintln!(" fd: https://github.com/sharkdp/fd#installation");
eprintln!(" ripgrep: https://github.com/BurntSushi/ripgrep#installation");
exit(1);
}
};
// Create finder and search for projects
let finder = ProjectFinder::new(config, deps);
match finder.find_projects().await {
Ok(projects) => {
// Output results
for project in projects {
println!("{}", project.display());
}
}
Err(e) => {
error!("Failed to find projects: {e}");
eprintln!("Error: {e}");
exit(1);
}
}
}