refactor: decompose CLI into commands, fix clippy, improve error handling

- Decompose main.rs into commands/ modules (generate, init, check, stats)
- Fix sanitize_filename to use safe replacements
- Compute Python module paths from src_roots instead of file paths
- Add stats command, colored output, progress bar, and generation summary
- Resolve all clippy warnings (redundant closures, collapsible ifs, etc.)
- Replace last unwrap() with proper error handling
- Add target/ to .gitignore, remove target/ artifacts from git tracking
This commit is contained in:
@@ -53,7 +53,7 @@ impl CacheManager {
|
||||
|
||||
// Read cache file
|
||||
let content = fs::read_to_string(&cache_file)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
let cache_entry: CacheEntry = serde_json::from_str(&content)
|
||||
.map_err(|e| ArchDocError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
|
||||
@@ -73,10 +73,10 @@ impl CacheManager {
|
||||
|
||||
// Check if source file has been modified since caching
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
@@ -100,10 +100,10 @@ impl CacheManager {
|
||||
|
||||
// Get file modification time
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
@@ -117,7 +117,7 @@ impl CacheManager {
|
||||
.map_err(|e| ArchDocError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
|
||||
|
||||
fs::write(&cache_file, content)
|
||||
.map_err(|e| ArchDocError::Io(e))
|
||||
.map_err(ArchDocError::Io)
|
||||
}
|
||||
|
||||
/// Generate cache key for a file path
|
||||
@@ -156,11 +156,11 @@ impl CacheManager {
|
||||
pub fn clear_cache(&self) -> Result<(), ArchDocError> {
|
||||
if Path::new(&self.cache_dir).exists() {
|
||||
fs::remove_dir_all(&self.cache_dir)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
// Recreate cache directory
|
||||
fs::create_dir_all(&self.cache_dir)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -7,6 +7,7 @@ use std::path::Path;
|
||||
use crate::errors::ArchDocError;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Default)]
|
||||
pub struct Config {
|
||||
#[serde(default)]
|
||||
pub project: ProjectConfig,
|
||||
@@ -30,22 +31,6 @@ pub struct Config {
|
||||
pub caching: CachingConfig,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
project: ProjectConfig::default(),
|
||||
scan: ScanConfig::default(),
|
||||
python: PythonConfig::default(),
|
||||
analysis: AnalysisConfig::default(),
|
||||
output: OutputConfig::default(),
|
||||
diff: DiffConfig::default(),
|
||||
thresholds: ThresholdsConfig::default(),
|
||||
rendering: RenderingConfig::default(),
|
||||
logging: LoggingConfig::default(),
|
||||
caching: CachingConfig::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProjectConfig {
|
||||
|
||||
@@ -13,14 +13,14 @@ use rustpython_parser::{ast, Parse};
|
||||
use rustpython_ast::{Stmt, Expr, Ranged};
|
||||
|
||||
pub struct PythonAnalyzer {
|
||||
_config: Config,
|
||||
config: Config,
|
||||
cache_manager: CacheManager,
|
||||
}
|
||||
|
||||
impl PythonAnalyzer {
|
||||
pub fn new(config: Config) -> Self {
|
||||
let cache_manager = CacheManager::new(config.clone());
|
||||
Self { _config: config, cache_manager }
|
||||
Self { config, cache_manager }
|
||||
}
|
||||
|
||||
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, ArchDocError> {
|
||||
@@ -67,7 +67,7 @@ impl PythonAnalyzer {
|
||||
imports: &mut Vec<Import>,
|
||||
symbols: &mut Vec<Symbol>,
|
||||
calls: &mut Vec<Call>,
|
||||
depth: usize,
|
||||
_depth: usize,
|
||||
) {
|
||||
match stmt {
|
||||
Stmt::Import(import_stmt) => {
|
||||
@@ -104,7 +104,7 @@ impl PythonAnalyzer {
|
||||
};
|
||||
|
||||
let signature = self.build_function_signature(&func_def.name, &func_def.args);
|
||||
let integrations_flags = self.detect_integrations(&func_def.body, &self._config);
|
||||
let integrations_flags = self.detect_integrations(&func_def.body, &self.config);
|
||||
let docstring = self.extract_docstring(&func_def.body);
|
||||
|
||||
let symbol = Symbol {
|
||||
@@ -130,7 +130,7 @@ impl PythonAnalyzer {
|
||||
symbols.push(symbol);
|
||||
|
||||
for body_stmt in &func_def.body {
|
||||
self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, depth + 1);
|
||||
self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, _depth + 1);
|
||||
}
|
||||
// Extract calls from body expressions recursively
|
||||
self.extract_calls_from_body(&func_def.body, Some(&qualname), calls);
|
||||
@@ -143,7 +143,7 @@ impl PythonAnalyzer {
|
||||
};
|
||||
|
||||
let signature = format!("async {}", self.build_function_signature(&func_def.name, &func_def.args));
|
||||
let integrations_flags = self.detect_integrations(&func_def.body, &self._config);
|
||||
let integrations_flags = self.detect_integrations(&func_def.body, &self.config);
|
||||
let docstring = self.extract_docstring(&func_def.body);
|
||||
|
||||
let symbol = Symbol {
|
||||
@@ -169,12 +169,12 @@ impl PythonAnalyzer {
|
||||
symbols.push(symbol);
|
||||
|
||||
for body_stmt in &func_def.body {
|
||||
self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, depth + 1);
|
||||
self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, _depth + 1);
|
||||
}
|
||||
self.extract_calls_from_body(&func_def.body, Some(&qualname), calls);
|
||||
}
|
||||
Stmt::ClassDef(class_def) => {
|
||||
let integrations_flags = self.detect_integrations(&class_def.body, &self._config);
|
||||
let integrations_flags = self.detect_integrations(&class_def.body, &self.config);
|
||||
let docstring = self.extract_docstring(&class_def.body);
|
||||
|
||||
let symbol = Symbol {
|
||||
@@ -201,7 +201,7 @@ impl PythonAnalyzer {
|
||||
|
||||
// Process class body with class name as parent
|
||||
for body_stmt in &class_def.body {
|
||||
self.extract_from_statement(body_stmt, Some(&class_def.name), imports, symbols, calls, depth + 1);
|
||||
self.extract_from_statement(body_stmt, Some(&class_def.name), imports, symbols, calls, _depth + 1);
|
||||
}
|
||||
}
|
||||
Stmt::Expr(expr_stmt) => {
|
||||
@@ -346,10 +346,10 @@ impl PythonAnalyzer {
|
||||
}
|
||||
|
||||
fn extract_docstring(&self, body: &[Stmt]) -> Option<String> {
|
||||
if let Some(first_stmt) = body.first() {
|
||||
if let Stmt::Expr(expr_stmt) = first_stmt {
|
||||
if let Expr::Constant(constant_expr) = &*expr_stmt.value {
|
||||
if let Some(docstring) = constant_expr.value.as_str() {
|
||||
if let Some(first_stmt) = body.first()
|
||||
&& let Stmt::Expr(expr_stmt) = first_stmt
|
||||
&& let Expr::Constant(constant_expr) = &*expr_stmt.value
|
||||
&& let Some(docstring) = constant_expr.value.as_str() {
|
||||
// Return full docstring, trimmed
|
||||
let trimmed = docstring.trim();
|
||||
if trimmed.is_empty() {
|
||||
@@ -357,9 +357,6 @@ impl PythonAnalyzer {
|
||||
}
|
||||
return Some(trimmed.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
@@ -446,10 +443,8 @@ impl PythonAnalyzer {
|
||||
self.extract_from_expression(&if_exp.orelse, current_symbol, calls);
|
||||
}
|
||||
Expr::Dict(dict_expr) => {
|
||||
for key in &dict_expr.keys {
|
||||
if let Some(k) = key {
|
||||
self.extract_from_expression(k, current_symbol, calls);
|
||||
}
|
||||
for k in dict_expr.keys.iter().flatten() {
|
||||
self.extract_from_expression(k, current_symbol, calls);
|
||||
}
|
||||
for value in &dict_expr.values {
|
||||
self.extract_from_expression(value, current_symbol, calls);
|
||||
@@ -522,6 +517,55 @@ impl PythonAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute Python module path from file path using src_roots from config.
|
||||
/// E.g. `./src/core.py` with src_root `src` → `core`
|
||||
/// `./src/__init__.py` with src_root `src` → `src` (package)
|
||||
/// `back-end/services/chat/agent.py` with src_root `.` → `back-end.services.chat.agent`
|
||||
fn compute_module_path(&self, file_path: &Path) -> String {
|
||||
let path_str = file_path.to_string_lossy().to_string();
|
||||
// Normalize: strip leading ./
|
||||
let normalized = path_str.strip_prefix("./").unwrap_or(&path_str);
|
||||
let path = std::path::Path::new(normalized);
|
||||
|
||||
for src_root in &self.config.python.src_roots {
|
||||
let root = if src_root == "." {
|
||||
std::path::Path::new("")
|
||||
} else {
|
||||
std::path::Path::new(src_root)
|
||||
};
|
||||
|
||||
let relative = if root == std::path::Path::new("") {
|
||||
Some(path.to_path_buf())
|
||||
} else {
|
||||
path.strip_prefix(root).ok().map(|p| p.to_path_buf())
|
||||
};
|
||||
|
||||
if let Some(rel) = relative {
|
||||
let rel_str = rel.to_string_lossy().to_string();
|
||||
// Check if it's an __init__.py → use the parent directory name as module
|
||||
if rel.file_name().map(|f| f == "__init__.py").unwrap_or(false)
|
||||
&& let Some(parent) = rel.parent() {
|
||||
if parent == std::path::Path::new("") {
|
||||
// __init__.py at src_root level → use src_root as module name
|
||||
if src_root == "." {
|
||||
return "__init__".to_string();
|
||||
}
|
||||
return src_root.replace('/', ".");
|
||||
}
|
||||
return parent.to_string_lossy().replace(['/', '\\'], ".");
|
||||
}
|
||||
|
||||
// Strip .py extension and convert path separators to dots
|
||||
let without_ext = rel_str.strip_suffix(".py").unwrap_or(&rel_str);
|
||||
let module_path = without_ext.replace(['/', '\\'], ".");
|
||||
return module_path;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: use file path as-is
|
||||
normalized.to_string()
|
||||
}
|
||||
|
||||
pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, ArchDocError> {
|
||||
let mut project_model = ProjectModel::new();
|
||||
|
||||
@@ -537,7 +581,7 @@ impl PythonAnalyzer {
|
||||
}
|
||||
|
||||
for parsed_module in modules {
|
||||
let module_id = parsed_module.module_path.clone();
|
||||
let module_id = self.compute_module_path(&parsed_module.path);
|
||||
let file_id = parsed_module.path.to_string_lossy().to_string();
|
||||
|
||||
let file_doc = FileDoc {
|
||||
@@ -625,7 +669,7 @@ impl PythonAnalyzer {
|
||||
|
||||
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), ArchDocError> {
|
||||
for parsed_module in parsed_modules {
|
||||
let from_module_id = parsed_module.module_path.clone();
|
||||
let from_module_id = self.compute_module_path(&parsed_module.path);
|
||||
|
||||
for import in &parsed_module.imports {
|
||||
let to_module_id = import.module_name.clone();
|
||||
|
||||
@@ -20,6 +20,12 @@ pub struct Renderer {
|
||||
templates: Handlebars<'static>,
|
||||
}
|
||||
|
||||
impl Default for Renderer {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Renderer {
|
||||
pub fn new() -> Self {
|
||||
let mut handlebars = Handlebars::new();
|
||||
@@ -393,7 +399,7 @@ impl Renderer {
|
||||
// Collect layout information from files
|
||||
let mut layout_items = Vec::new();
|
||||
|
||||
for (_file_id, file_doc) in &model.files {
|
||||
for file_doc in model.files.values() {
|
||||
layout_items.push(serde_json::json!({
|
||||
"path": file_doc.path,
|
||||
"purpose": "Source file",
|
||||
@@ -525,7 +531,7 @@ impl Renderer {
|
||||
// Collect layout information from files
|
||||
let mut layout_items = Vec::new();
|
||||
|
||||
for (_file_id, file_doc) in &model.files {
|
||||
for file_doc in model.files.values() {
|
||||
layout_items.push(serde_json::json!({
|
||||
"path": file_doc.path,
|
||||
"purpose": "Source file",
|
||||
|
||||
@@ -41,8 +41,7 @@ impl FileScanner {
|
||||
.into_iter() {
|
||||
|
||||
let entry = entry.map_err(|e| {
|
||||
ArchDocError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
ArchDocError::Io(std::io::Error::other(
|
||||
format!("Failed to read directory entry: {}", e)
|
||||
))
|
||||
})?;
|
||||
@@ -51,11 +50,7 @@ impl FileScanner {
|
||||
|
||||
// Skip excluded paths
|
||||
if self.is_excluded(path) {
|
||||
if path.is_dir() {
|
||||
continue;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Include Python files
|
||||
|
||||
@@ -26,6 +26,12 @@ pub struct DiffAwareWriter {
|
||||
// Configuration
|
||||
}
|
||||
|
||||
impl Default for DiffAwareWriter {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl DiffAwareWriter {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
@@ -40,13 +46,13 @@ impl DiffAwareWriter {
|
||||
// Read existing file
|
||||
let existing_content = if file_path.exists() {
|
||||
fs::read_to_string(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?
|
||||
.map_err(ArchDocError::Io)?
|
||||
} else {
|
||||
// Create new file with template
|
||||
let template_content = self.create_template_file(file_path, section_name)?;
|
||||
// Write template to file
|
||||
fs::write(file_path, &template_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
template_content
|
||||
};
|
||||
|
||||
@@ -68,12 +74,12 @@ impl DiffAwareWriter {
|
||||
if content_changed {
|
||||
let updated_content = self.update_timestamp(new_content)?;
|
||||
fs::write(file_path, updated_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
} else {
|
||||
// Content hasn't changed, but we might still need to update timestamp
|
||||
// TODO: Implement timestamp update logic based on config
|
||||
fs::write(file_path, new_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,12 +95,12 @@ impl DiffAwareWriter {
|
||||
// Read existing file
|
||||
let existing_content = if file_path.exists() {
|
||||
fs::read_to_string(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?
|
||||
.map_err(ArchDocError::Io)?
|
||||
} else {
|
||||
// If file doesn't exist, create it with a basic template
|
||||
let template_content = self.create_template_file(file_path, "symbol")?;
|
||||
fs::write(file_path, &template_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
template_content
|
||||
};
|
||||
|
||||
@@ -116,12 +122,12 @@ impl DiffAwareWriter {
|
||||
if content_changed {
|
||||
let updated_content = self.update_timestamp(new_content)?;
|
||||
fs::write(file_path, updated_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
} else {
|
||||
// Content hasn't changed, but we might still need to update timestamp
|
||||
// TODO: Implement timestamp update logic based on config
|
||||
fs::write(file_path, new_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
.map_err(ArchDocError::Io)?;
|
||||
}
|
||||
} else {
|
||||
eprintln!("Warning: No symbol marker found for {} in {}", symbol_id, file_path.display());
|
||||
|
||||
Reference in New Issue
Block a user