Add initial project structure and core functionality for ArchDoc
- Created `.gitignore` files for various directories to exclude unnecessary files.
- Added `PLAN.md` to outline the project goals and architecture documentation generation.
- Implemented the `archdoc-cli` with a command-line interface for initializing and generating documentation.
- Developed the `archdoc-core` library for analyzing Python projects and generating architecture documentation.
- Included caching mechanisms to optimize repeated analysis.
- Established a comprehensive test suite to ensure functionality and error handling.
- Updated `README.md` to provide an overview and installation instructions for ArchDoc.
This commit is contained in:
12
archdoc-core/.gitignore
vendored
Normal file
12
archdoc-core/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# Compiled files
|
||||
target/
|
||||
|
||||
# IDE files
|
||||
*.swp
|
||||
.DS_Store
|
||||
|
||||
# Backup files
|
||||
*.rs.bk
|
||||
|
||||
# Documentation files
|
||||
doc/
|
||||
1320
archdoc-core/Cargo.lock
generated
Normal file
1320
archdoc-core/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
18
archdoc-core/Cargo.toml
Normal file
18
archdoc-core/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "archdoc-core"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
toml = "0.9.11+spec-1.1.0"
|
||||
tracing = "0.1"
|
||||
anyhow = "1.0"
|
||||
thiserror = "2.0.18"
|
||||
walkdir = "2.3"
|
||||
handlebars = "6.4.0"
|
||||
rustpython-parser = "0.4"
|
||||
rustpython-ast = "0.4"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
tempfile = "3.10"
|
||||
168
archdoc-core/src/cache.rs
Normal file
168
archdoc-core/src/cache.rs
Normal file
@@ -0,0 +1,168 @@
|
||||
//! Caching module for ArchDoc
|
||||
//!
|
||||
//! This module provides caching functionality to speed up repeated analysis
|
||||
//! by storing parsed ASTs and analysis results.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::errors::ArchDocError;
|
||||
use crate::model::ParsedModule;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// A single on-disk cache record: one parsed Python module plus the two
/// timestamps needed to decide whether the record is still usable.
/// Serialized as JSON into the configured cache directory.
#[derive(Debug, Serialize, Deserialize)]
struct CacheEntry {
    /// Timestamp when the cache entry was created
    created_at: DateTime<Utc>,
    /// Timestamp when the source file was last modified (at store time)
    file_modified_at: DateTime<Utc>,
    /// The parsed module data
    parsed_module: ParsedModule,
}
|
||||
|
||||
/// Manages the on-disk cache of parsed modules.
///
/// Behavior is driven by the `caching` section of the configuration:
/// `enabled`, `cache_dir`, and `max_cache_age`.
pub struct CacheManager {
    // Full configuration; only `config.caching` is consulted here.
    config: Config,
    // Copy of `config.caching.cache_dir`, kept for convenient path joins.
    cache_dir: String,
}
|
||||
|
||||
impl CacheManager {
|
||||
pub fn new(config: Config) -> Self {
|
||||
let cache_dir = config.caching.cache_dir.clone();
|
||||
|
||||
// Create cache directory if it doesn't exist
|
||||
if config.caching.enabled && !Path::new(&cache_dir).exists() {
|
||||
let _ = fs::create_dir_all(&cache_dir);
|
||||
}
|
||||
|
||||
Self { config, cache_dir }
|
||||
}
|
||||
|
||||
/// Get cached parsed module if available and not expired
|
||||
pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, ArchDocError> {
|
||||
if !self.config.caching.enabled {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let cache_key = self.get_cache_key(file_path);
|
||||
let cache_file = Path::new(&self.cache_dir).join(&cache_key);
|
||||
|
||||
if !cache_file.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Read cache file
|
||||
let content = fs::read_to_string(&cache_file)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
let cache_entry: CacheEntry = serde_json::from_str(&content)
|
||||
.map_err(|e| ArchDocError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
|
||||
|
||||
// Check if cache is expired
|
||||
let now = Utc::now();
|
||||
let cache_age = now.signed_duration_since(cache_entry.created_at);
|
||||
|
||||
// Parse max_cache_age (simple format: "24h", "7d", etc.)
|
||||
let max_age_seconds = self.parse_duration(&self.config.caching.max_cache_age)?;
|
||||
|
||||
if cache_age.num_seconds() > max_age_seconds as i64 {
|
||||
// Cache expired, remove it
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Check if source file has been modified since caching
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
if modified_time > cache_entry.file_modified_at {
|
||||
// Source file is newer than cache, invalidate cache
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
Ok(Some(cache_entry.parsed_module))
|
||||
}
|
||||
|
||||
/// Store parsed module in cache
|
||||
pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), ArchDocError> {
|
||||
if !self.config.caching.enabled {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let cache_key = self.get_cache_key(file_path);
|
||||
let cache_file = Path::new(&self.cache_dir).join(&cache_key);
|
||||
|
||||
// Get file modification time
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
let cache_entry = CacheEntry {
|
||||
created_at: Utc::now(),
|
||||
file_modified_at: modified_time,
|
||||
parsed_module,
|
||||
};
|
||||
|
||||
let content = serde_json::to_string(&cache_entry)
|
||||
.map_err(|e| ArchDocError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
|
||||
|
||||
fs::write(&cache_file, content)
|
||||
.map_err(|e| ArchDocError::Io(e))
|
||||
}
|
||||
|
||||
/// Generate cache key for a file path
|
||||
fn get_cache_key(&self, file_path: &Path) -> String {
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
file_path.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
format!("{:x}.json", hash)
|
||||
}
|
||||
|
||||
/// Parse duration string like "24h" or "7d" into seconds
|
||||
fn parse_duration(&self, duration_str: &str) -> Result<u64, ArchDocError> {
|
||||
if duration_str.is_empty() {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let chars: Vec<char> = duration_str.chars().collect();
|
||||
let (number_str, unit) = chars.split_at(chars.len() - 1);
|
||||
let number: u64 = number_str.iter().collect::<String>().parse()
|
||||
.map_err(|_| ArchDocError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?;
|
||||
|
||||
match unit[0] {
|
||||
's' => Ok(number), // seconds
|
||||
'm' => Ok(number * 60), // minutes
|
||||
'h' => Ok(number * 3600), // hours
|
||||
'd' => Ok(number * 86400), // days
|
||||
_ => Err(ArchDocError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear all cache entries
|
||||
pub fn clear_cache(&self) -> Result<(), ArchDocError> {
|
||||
if Path::new(&self.cache_dir).exists() {
|
||||
fs::remove_dir_all(&self.cache_dir)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
|
||||
// Recreate cache directory
|
||||
fs::create_dir_all(&self.cache_dir)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
458
archdoc-core/src/config.rs
Normal file
458
archdoc-core/src/config.rs
Normal file
@@ -0,0 +1,458 @@
|
||||
//! Configuration management for ArchDoc
|
||||
//!
|
||||
//! This module handles loading and validating the archdoc.toml configuration file.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::Path;
|
||||
use crate::errors::ArchDocError;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Config {
|
||||
#[serde(default)]
|
||||
pub project: ProjectConfig,
|
||||
#[serde(default)]
|
||||
pub scan: ScanConfig,
|
||||
#[serde(default)]
|
||||
pub python: PythonConfig,
|
||||
#[serde(default)]
|
||||
pub analysis: AnalysisConfig,
|
||||
#[serde(default)]
|
||||
pub output: OutputConfig,
|
||||
#[serde(default)]
|
||||
pub diff: DiffConfig,
|
||||
#[serde(default)]
|
||||
pub thresholds: ThresholdsConfig,
|
||||
#[serde(default)]
|
||||
pub rendering: RenderingConfig,
|
||||
#[serde(default)]
|
||||
pub logging: LoggingConfig,
|
||||
#[serde(default)]
|
||||
pub caching: CachingConfig,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
project: ProjectConfig::default(),
|
||||
scan: ScanConfig::default(),
|
||||
python: PythonConfig::default(),
|
||||
analysis: AnalysisConfig::default(),
|
||||
output: OutputConfig::default(),
|
||||
diff: DiffConfig::default(),
|
||||
thresholds: ThresholdsConfig::default(),
|
||||
rendering: RenderingConfig::default(),
|
||||
logging: LoggingConfig::default(),
|
||||
caching: CachingConfig::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `[project]` section: what to scan and where to write output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectConfig {
    /// Project root to analyze (default ".").
    #[serde(default = "default_root")]
    pub root: String,
    /// Output directory for generated docs (default "docs/architecture").
    #[serde(default = "default_out_dir")]
    pub out_dir: String,
    /// Name of the top-level architecture document (default "ARCHITECTURE.md").
    #[serde(default = "default_entry_file")]
    pub entry_file: String,
    /// Source language (default "python").
    #[serde(default = "default_language")]
    pub language: String,
    /// Human-readable project name; empty string when unset.
    #[serde(default)]
    pub name: String,
}

impl Default for ProjectConfig {
    fn default() -> Self {
        Self {
            root: default_root(),
            out_dir: default_out_dir(),
            entry_file: default_entry_file(),
            language: default_language(),
            name: String::new(),
        }
    }
}

// serde's `default = "..."` attribute requires free functions, hence these
// small helpers instead of constants.

fn default_root() -> String {
    ".".to_string()
}

fn default_out_dir() -> String {
    "docs/architecture".to_string()
}

fn default_entry_file() -> String {
    "ARCHITECTURE.md".to_string()
}

fn default_language() -> String {
    "python".to_string()
}
|
||||
|
||||
/// `[scan]` section: which paths enter the file walk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScanConfig {
    /// Directories to scan, relative to the project root.
    #[serde(default = "default_include")]
    pub include: Vec<String>,
    /// Directory names/patterns to skip (virtualenvs, caches, VCS, builds).
    #[serde(default = "default_exclude")]
    pub exclude: Vec<String>,
    /// Whether the walker follows symlinks (default false).
    #[serde(default)]
    pub follow_symlinks: bool,
    /// Maximum file size to analyze, human-readable (default "10MB").
    #[serde(default = "default_max_file_size")]
    pub max_file_size: String,
}

impl Default for ScanConfig {
    fn default() -> Self {
        Self {
            include: default_include(),
            exclude: default_exclude(),
            follow_symlinks: false,
            max_file_size: default_max_file_size(),
        }
    }
}

fn default_include() -> Vec<String> {
    vec!["src".to_string(), "app".to_string(), "tests".to_string()]
}

fn default_exclude() -> Vec<String> {
    vec![
        ".venv".to_string(),
        "venv".to_string(),
        "__pycache__".to_string(),
        ".git".to_string(),
        "dist".to_string(),
        "build".to_string(),
        ".mypy_cache".to_string(),
        ".ruff_cache".to_string(),
        ".pytest_cache".to_string(),
        "*.egg-info".to_string(),
    ]
}

fn default_max_file_size() -> String {
    "10MB".to_string()
}
|
||||
|
||||
/// `[python]` section: Python-specific parsing knobs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PythonConfig {
    /// Roots used to resolve module paths (default ["src", "."]).
    #[serde(default = "default_src_roots")]
    pub src_roots: Vec<String>,
    /// Whether test files are analyzed too (default true).
    #[serde(default = "default_include_tests")]
    pub include_tests: bool,
    /// Whether docstrings are extracted (default true).
    #[serde(default = "default_parse_docstrings")]
    pub parse_docstrings: bool,
    /// Maximum number of tolerated parse errors (default 10).
    /// NOTE(review): enforcement happens in the analyzer — confirm there.
    #[serde(default = "default_max_parse_errors")]
    pub max_parse_errors: usize,
}

impl Default for PythonConfig {
    fn default() -> Self {
        Self {
            src_roots: default_src_roots(),
            include_tests: default_include_tests(),
            parse_docstrings: default_parse_docstrings(),
            max_parse_errors: default_max_parse_errors(),
        }
    }
}

fn default_src_roots() -> Vec<String> {
    vec!["src".to_string(), ".".to_string()]
}

fn default_include_tests() -> bool {
    true
}

fn default_parse_docstrings() -> bool {
    true
}

fn default_max_parse_errors() -> usize {
    10
}
|
||||
|
||||
/// `[analysis]` section: which relationships get resolved.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisConfig {
    /// Resolve call edges between symbols (default true).
    #[serde(default = "default_resolve_calls")]
    pub resolve_calls: bool,
    /// Resolve class inheritance edges (default false).
    #[serde(default)]
    pub resolve_inheritance: bool,
    /// Detect integrations with external systems (default true).
    #[serde(default = "default_detect_integrations")]
    pub detect_integrations: bool,
    /// Library-name patterns that classify an import as an integration.
    #[serde(default = "default_integration_patterns")]
    pub integration_patterns: Vec<IntegrationPattern>,
}

impl Default for AnalysisConfig {
    fn default() -> Self {
        Self {
            resolve_calls: default_resolve_calls(),
            resolve_inheritance: false,
            detect_integrations: default_detect_integrations(),
            integration_patterns: default_integration_patterns(),
        }
    }
}

fn default_resolve_calls() -> bool {
    true
}

fn default_detect_integrations() -> bool {
    true
}

/// Built-in integration patterns: well-known Python HTTP clients, database
/// drivers/ORMs, and message-queue libraries.
fn default_integration_patterns() -> Vec<IntegrationPattern> {
    vec![
        IntegrationPattern {
            type_: "http".to_string(),
            patterns: vec!["requests".to_string(), "httpx".to_string(), "aiohttp".to_string()],
        },
        IntegrationPattern {
            type_: "db".to_string(),
            patterns: vec![
                "sqlalchemy".to_string(),
                "psycopg".to_string(),
                "mysql".to_string(),
                "sqlite3".to_string(),
            ],
        },
        IntegrationPattern {
            type_: "queue".to_string(),
            patterns: vec![
                "celery".to_string(),
                "kafka".to_string(),
                "pika".to_string(),
                "redis".to_string(),
            ],
        },
    ]
}
|
||||
|
||||
/// One integration category and the library names that signal it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrationPattern {
    /// Category label ("http", "db", "queue", ...). Serialized as "type";
    /// the trailing underscore avoids the Rust keyword.
    #[serde(rename = "type")]
    pub type_: String,
    /// Library/module names whose import marks a symbol with this category.
    pub patterns: Vec<String>,
}
|
||||
|
||||
/// `[output]` section: shape of the generated documentation tree.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputConfig {
    /// Emit one combined document instead of a tree (default false).
    #[serde(default)]
    pub single_file: bool,
    /// Emit a doc page per source file (default true).
    #[serde(default = "default_per_file_docs")]
    pub per_file_docs: bool,
    /// Create missing output directories (default true).
    #[serde(default = "default_create_directories")]
    pub create_directories: bool,
    /// Allow regeneration to clobber hand-edited sections (default false).
    #[serde(default)]
    pub overwrite_manual_sections: bool,
}

impl Default for OutputConfig {
    fn default() -> Self {
        Self {
            single_file: false,
            per_file_docs: default_per_file_docs(),
            create_directories: default_create_directories(),
            overwrite_manual_sections: false,
        }
    }
}

fn default_per_file_docs() -> bool {
    true
}

fn default_create_directories() -> bool {
    true
}
|
||||
|
||||
/// `[diff]` section: how regenerated output is compared with existing files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiffConfig {
    /// Only bump timestamps when content actually changed (default true).
    #[serde(default = "default_update_timestamp_on_change_only")]
    pub update_timestamp_on_change_only: bool,
    /// Hash algorithm used for change detection (default "sha256").
    #[serde(default = "default_hash_algorithm")]
    pub hash_algorithm: String,
    /// Keep manually written content across regenerations (default true).
    #[serde(default = "default_preserve_manual_content")]
    pub preserve_manual_content: bool,
}

impl Default for DiffConfig {
    fn default() -> Self {
        Self {
            update_timestamp_on_change_only: default_update_timestamp_on_change_only(),
            hash_algorithm: default_hash_algorithm(),
            preserve_manual_content: default_preserve_manual_content(),
        }
    }
}

fn default_update_timestamp_on_change_only() -> bool {
    true
}

fn default_hash_algorithm() -> String {
    "sha256".to_string()
}

fn default_preserve_manual_content() -> bool {
    true
}
|
||||
|
||||
/// `[thresholds]` section: limits above which a symbol is flagged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThresholdsConfig {
    /// Fan-in at or above which a symbol counts as critical (default 20).
    #[serde(default = "default_critical_fan_in")]
    pub critical_fan_in: usize,
    /// Fan-out at or above which a symbol counts as critical (default 20).
    #[serde(default = "default_critical_fan_out")]
    pub critical_fan_out: usize,
    /// Complexity value considered "high" (default 50).
    #[serde(default = "default_high_complexity")]
    pub high_complexity: usize,
}

impl Default for ThresholdsConfig {
    fn default() -> Self {
        Self {
            critical_fan_in: default_critical_fan_in(),
            critical_fan_out: default_critical_fan_out(),
            high_complexity: default_high_complexity(),
        }
    }
}

fn default_critical_fan_in() -> usize {
    20
}

fn default_critical_fan_out() -> usize {
    20
}

fn default_high_complexity() -> usize {
    50
}
|
||||
|
||||
/// `[rendering]` section: Markdown/template output tuning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RenderingConfig {
    /// Template engine name (default "handlebars").
    #[serde(default = "default_template_engine")]
    pub template_engine: String,
    /// Row cap for generated tables (default 100).
    #[serde(default = "default_max_table_rows")]
    pub max_table_rows: usize,
    /// Whether long descriptions are truncated (default true).
    #[serde(default = "default_truncate_long_descriptions")]
    pub truncate_long_descriptions: bool,
    /// Character limit applied when truncating (default 200).
    #[serde(default = "default_description_max_length")]
    pub description_max_length: usize,
}

impl Default for RenderingConfig {
    fn default() -> Self {
        Self {
            template_engine: default_template_engine(),
            max_table_rows: default_max_table_rows(),
            truncate_long_descriptions: default_truncate_long_descriptions(),
            description_max_length: default_description_max_length(),
        }
    }
}

fn default_template_engine() -> String {
    "handlebars".to_string()
}

fn default_max_table_rows() -> usize {
    100
}

fn default_truncate_long_descriptions() -> bool {
    true
}

fn default_description_max_length() -> usize {
    200
}
|
||||
|
||||
/// `[logging]` section: log destination and verbosity.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoggingConfig {
    /// Log level string (default "info").
    #[serde(default = "default_log_level")]
    pub level: String,
    /// Log file path (default "archdoc.log").
    #[serde(default = "default_log_file")]
    pub file: String,
    /// Log output format (default "compact").
    #[serde(default = "default_log_format")]
    pub format: String,
}

impl Default for LoggingConfig {
    fn default() -> Self {
        Self {
            level: default_log_level(),
            file: default_log_file(),
            format: default_log_format(),
        }
    }
}

fn default_log_level() -> String {
    "info".to_string()
}

fn default_log_file() -> String {
    "archdoc.log".to_string()
}

fn default_log_format() -> String {
    "compact".to_string()
}
|
||||
|
||||
/// `[caching]` section: on-disk parse-result cache (see `cache.rs`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CachingConfig {
    /// Master switch for caching (default true).
    #[serde(default = "default_caching_enabled")]
    pub enabled: bool,
    /// Directory holding cache entries (default ".archdoc/cache").
    #[serde(default = "default_cache_dir")]
    pub cache_dir: String,
    /// Maximum entry age as a duration string like "24h" or "7d"
    /// (default "24h").
    #[serde(default = "default_max_cache_age")]
    pub max_cache_age: String,
}

impl Default for CachingConfig {
    fn default() -> Self {
        Self {
            enabled: default_caching_enabled(),
            cache_dir: default_cache_dir(),
            max_cache_age: default_max_cache_age(),
        }
    }
}

fn default_caching_enabled() -> bool {
    true
}

fn default_cache_dir() -> String {
    ".archdoc/cache".to_string()
}

fn default_max_cache_age() -> String {
    "24h".to_string()
}
|
||||
|
||||
impl Config {
    /// Load configuration from a TOML file.
    ///
    /// # Errors
    /// Returns `ArchDocError::ConfigError` when the file cannot be read or
    /// does not parse as valid TOML for this schema.
    pub fn load_from_file(path: &Path) -> Result<Self, ArchDocError> {
        let content = std::fs::read_to_string(path)
            .map_err(|e| ArchDocError::ConfigError(format!("Failed to read config file: {}", e)))?;

        toml::from_str(&content)
            .map_err(|e| ArchDocError::ConfigError(format!("Failed to parse config file: {}", e)))
    }

    /// Save configuration to a TOML file (pretty-printed).
    ///
    /// # Errors
    /// Returns `ArchDocError::ConfigError` when serialization fails or the
    /// file cannot be written.
    pub fn save_to_file(&self, path: &Path) -> Result<(), ArchDocError> {
        let content = toml::to_string_pretty(self)
            .map_err(|e| ArchDocError::ConfigError(format!("Failed to serialize config: {}", e)))?;

        std::fs::write(path, content)
            .map_err(|e| ArchDocError::ConfigError(format!("Failed to write config file: {}", e)))
    }
}
|
||||
26
archdoc-core/src/errors.rs
Normal file
26
archdoc-core/src/errors.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Unified error type for all ArchDoc operations.
#[derive(Error, Debug)]
pub enum ArchDocError {
    /// Wrapped I/O failure (file reads/writes, directory creation, ...).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    /// A source file failed to parse.
    #[error("Parse error in {file}:{line}: {message}")]
    ParseError {
        // Path of the file that failed to parse.
        file: String,
        // Line at which parsing failed (0 when unknown).
        line: usize,
        // Parser's error message.
        message: String,
    },

    /// Problems reading, parsing, serializing, or writing configuration.
    #[error("Configuration error: {0}")]
    ConfigError(String),

    /// Failures during project analysis, including cache (de)serialization.
    #[error("Analysis error: {0}")]
    AnalysisError(String),

    /// Template/document rendering failures.
    #[error("Rendering error: {0}")]
    RenderingError(String),

    /// A generated file failed a consistency check.
    #[error("File consistency check failed: {0}")]
    ConsistencyError(String),
}
|
||||
31
archdoc-core/src/lib.rs
Normal file
31
archdoc-core/src/lib.rs
Normal file
@@ -0,0 +1,31 @@
|
||||
//! ArchDoc Core Library
|
||||
//!
|
||||
//! This crate provides the core functionality for analyzing Python projects
|
||||
//! and generating architecture documentation.
|
||||
|
||||
// Public modules
|
||||
pub mod errors;
|
||||
pub mod config;
|
||||
pub mod model;
|
||||
pub mod scanner;
|
||||
pub mod python_analyzer;
|
||||
pub mod renderer;
|
||||
pub mod writer;
|
||||
pub mod cache;
|
||||
|
||||
// Re-export commonly used types
|
||||
pub use errors::ArchDocError;
|
||||
pub use config::Config;
|
||||
pub use model::ProjectModel;
|
||||
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: confirms the crate's test harness compiles and runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
|
||||
168
archdoc-core/src/model.rs
Normal file
168
archdoc-core/src/model.rs
Normal file
@@ -0,0 +1,168 @@
|
||||
//! Intermediate Representation (IR) for ArchDoc
|
||||
//!
|
||||
//! This module defines the data structures that represent the analyzed Python project
|
||||
//! and are used for generating documentation.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Root of the intermediate representation: everything ArchDoc knows about
/// the analyzed project, keyed by stable string ids.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectModel {
    /// Module id -> module.
    pub modules: HashMap<String, Module>,
    /// File id -> per-file documentation record.
    pub files: HashMap<String, FileDoc>,
    /// Symbol id -> symbol (functions, classes, methods).
    pub symbols: HashMap<String, Symbol>,
    /// Import and call edges between modules, files, and symbols.
    pub edges: Edges,
}

impl ProjectModel {
    /// Create an empty model with no modules, files, symbols, or edges.
    pub fn new() -> Self {
        Self {
            modules: HashMap::new(),
            files: HashMap::new(),
            symbols: HashMap::new(),
            edges: Edges::new(),
        }
    }
}

impl Default for ProjectModel {
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// A module: a group of source files sharing a package/directory path.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Module {
    /// Stable module id.
    pub id: String,
    /// Filesystem path of the module.
    pub path: String,
    /// Ids of files belonging to this module.
    pub files: Vec<String>,
    /// One-line documentation summary, when available.
    pub doc_summary: Option<String>,
    /// Ids of modules this module depends on.
    pub outbound_modules: Vec<String>,
    /// Ids of modules that depend on this module.
    pub inbound_modules: Vec<String>,
    /// Ids of symbols defined in this module.
    pub symbols: Vec<String>,
}
|
||||
|
||||
/// Per-file record used to generate that file's documentation page.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileDoc {
    /// Stable file id.
    pub id: String,
    /// Filesystem path of the file.
    pub path: String,
    /// Id of the module that owns this file.
    pub module_id: String,
    /// Normalized import strings found in this file.
    pub imports: Vec<String>,
    /// Ids of modules this file depends on.
    pub outbound_modules: Vec<String>,
    /// Ids of files that import this file.
    pub inbound_files: Vec<String>,
    /// Ids of symbols defined in this file.
    pub symbols: Vec<String>,
}
|
||||
|
||||
/// A documented code symbol: function, async function, class, or method.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Symbol {
    /// Stable symbol id.
    pub id: String,
    /// What kind of definition this is.
    pub kind: SymbolKind,
    /// Id of the owning module.
    pub module_id: String,
    /// Id of the defining file.
    pub file_id: String,
    /// Fully qualified name.
    pub qualname: String,
    /// Rendered signature, e.g. `def foo(...)`.
    pub signature: String,
    /// Optional extra annotations (name -> value).
    pub annotations: Option<HashMap<String, String>>,
    /// First line of the docstring, when one exists.
    pub docstring_first_line: Option<String>,
    /// Short purpose description: docstring or heuristic.
    pub purpose: String,
    /// Ids of symbols this one calls.
    pub outbound_calls: Vec<String>,
    /// Ids of symbols that call this one.
    pub inbound_calls: Vec<String>,
    /// Which external integration kinds this symbol touches.
    pub integrations_flags: IntegrationFlags,
    /// Fan-in/fan-out and criticality metrics.
    pub metrics: SymbolMetrics,
}
|
||||
|
||||
/// Kind of a [`Symbol`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum SymbolKind {
    /// `def` at module scope.
    Function,
    /// `async def`.
    AsyncFunction,
    /// `class` definition.
    Class,
    /// `def` inside a class body.
    Method,
}
|
||||
|
||||
/// Flags marking which kinds of external systems a symbol touches
/// (derived from the configured integration patterns).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrationFlags {
    /// Uses an HTTP client library.
    pub http: bool,
    /// Uses a database library.
    pub db: bool,
    /// Uses a message-queue library.
    pub queue: bool,
}

/// Per-symbol dependency metrics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolMetrics {
    /// Number of inbound callers.
    pub fan_in: usize,
    /// Number of outbound callees.
    pub fan_out: usize,
    /// True when the symbol crosses the configured critical thresholds.
    pub is_critical: bool,
    /// True when the symbol takes part in a dependency cycle.
    pub cycle_participant: bool,
}
|
||||
|
||||
/// All edge lists of the project graph, grouped by granularity.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Edges {
    /// Module-level import edges.
    pub module_import_edges: Vec<Edge>,
    /// File-level import edges.
    pub file_import_edges: Vec<Edge>,
    /// Symbol-level call edges.
    pub symbol_call_edges: Vec<Edge>,
}

impl Edges {
    /// Create empty edge lists.
    pub fn new() -> Self {
        Self {
            module_import_edges: Vec::new(),
            file_import_edges: Vec::new(),
            symbol_call_edges: Vec::new(),
        }
    }
}

impl Default for Edges {
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// A directed edge in the project graph.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Edge {
    /// Source node id.
    pub from_id: String,
    /// Target node id.
    pub to_id: String,
    /// Relationship the edge represents.
    pub edge_type: EdgeType,
    /// Optional free-form metadata.
    pub meta: Option<HashMap<String, String>>,
}

/// Relationship kinds an [`Edge`] can represent.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum EdgeType {
    /// Module imports module.
    ModuleImport,
    /// File imports file.
    FileImport,
    /// Symbol calls a symbol resolved within the project.
    SymbolCall,
    /// Call into an external library.
    ExternalCall,
    /// Call whose target could not be resolved.
    UnresolvedCall,
}
|
||||
|
||||
// Additional structures for Python analysis
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct ParsedModule {
|
||||
pub path: std::path::PathBuf,
|
||||
pub module_path: String,
|
||||
pub imports: Vec<Import>,
|
||||
pub symbols: Vec<Symbol>,
|
||||
pub calls: Vec<Call>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Import {
|
||||
pub module_name: String,
|
||||
pub alias: Option<String>,
|
||||
pub line_number: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Call {
|
||||
pub caller_symbol: String,
|
||||
pub callee_expr: String,
|
||||
pub line_number: usize,
|
||||
pub call_type: CallType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub enum CallType {
|
||||
Local,
|
||||
Imported,
|
||||
External,
|
||||
Unresolved,
|
||||
}
|
||||
386
archdoc-core/src/python_analyzer.rs
Normal file
386
archdoc-core/src/python_analyzer.rs
Normal file
@@ -0,0 +1,386 @@
|
||||
//! Python AST analyzer for ArchDoc
|
||||
//!
|
||||
//! This module handles parsing Python files using AST and extracting
|
||||
//! imports, definitions, and calls.
|
||||
|
||||
use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc};
|
||||
use crate::config::Config;
|
||||
use crate::errors::ArchDocError;
|
||||
use crate::cache::CacheManager;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use rustpython_parser::{ast, Parse};
|
||||
use rustpython_ast::{Stmt, StmtClassDef, StmtFunctionDef, Expr, Ranged};
|
||||
|
||||
/// Parses Python source files into [`ParsedModule`]s, consulting the
/// on-disk cache before re-parsing.
pub struct PythonAnalyzer {
    // NOTE(review): the underscore prefix suggests "unused", but the impl
    // does read this field (e.g. for integration detection) — consider
    // renaming to `config`.
    _config: Config,
    // Cache layer built from the same `Config`.
    cache_manager: CacheManager,
}
|
||||
|
||||
impl PythonAnalyzer {
|
||||
pub fn new(config: Config) -> Self {
|
||||
let cache_manager = CacheManager::new(config.clone());
|
||||
Self { _config: config, cache_manager }
|
||||
}
|
||||
|
||||
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, ArchDocError> {
|
||||
// Try to get from cache first
|
||||
if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? {
|
||||
return Ok(cached_module);
|
||||
}
|
||||
|
||||
// Read the Python file
|
||||
let code = fs::read_to_string(file_path)
|
||||
.map_err(ArchDocError::Io)?;
|
||||
|
||||
// Parse the Python code into an AST
|
||||
let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>"))
|
||||
.map_err(|e| ArchDocError::ParseError {
|
||||
file: file_path.to_string_lossy().to_string(),
|
||||
line: 0, // We don't have line info from the error
|
||||
message: format!("Failed to parse: {}", e),
|
||||
})?;
|
||||
|
||||
// Extract imports, definitions, and calls
|
||||
let mut imports = Vec::new();
|
||||
let mut symbols = Vec::new();
|
||||
let mut calls = Vec::new();
|
||||
|
||||
for stmt in ast {
|
||||
self.extract_from_statement(&stmt, None, &mut imports, &mut symbols, &mut calls, 0);
|
||||
}
|
||||
|
||||
let parsed_module = ParsedModule {
|
||||
path: file_path.to_path_buf(),
|
||||
module_path: file_path.to_string_lossy().to_string(),
|
||||
imports,
|
||||
symbols,
|
||||
calls,
|
||||
};
|
||||
|
||||
// Store in cache
|
||||
self.cache_manager.store_module(file_path, parsed_module.clone())?;
|
||||
|
||||
Ok(parsed_module)
|
||||
}
|
||||
|
||||
    /// Recursively walks a single statement, harvesting imports, symbol
    /// definitions and call sites into the provided accumulators.
    ///
    /// * `current_symbol` — name of the enclosing function/class, used as the
    ///   caller id for any calls found inside this statement.
    /// * `depth` — current recursion depth (passed down, otherwise unused here).
    ///
    /// NOTE(review): the Function and Class arms build near-identical `Symbol`
    /// records; `extract_function_def`/`extract_class_def` exist as stubs that
    /// could absorb that duplication later.
    fn extract_from_statement(&self, stmt: &Stmt, current_symbol: Option<&str>, imports: &mut Vec<Import>, symbols: &mut Vec<Symbol>, calls: &mut Vec<Call>, depth: usize) {
        match stmt {
            Stmt::Import(import_stmt) => {
                // `import a, b as c` — one Import record per alias.
                for alias in &import_stmt.names {
                    imports.push(Import {
                        module_name: alias.name.to_string(),
                        alias: alias.asname.as_ref().map(|n| n.to_string()),
                        line_number: alias.range().start().into(),
                    });
                }
            }
            Stmt::ImportFrom(import_from_stmt) => {
                // `from x import y` — record the fully qualified `x.y`.
                // NOTE(review): a relative import (`from . import y`) has no
                // module part, so only the bare alias name is recorded.
                let module_name = import_from_stmt.module.as_ref()
                    .map(|m| m.to_string())
                    .unwrap_or_default();
                for alias in &import_from_stmt.names {
                    let full_name = if module_name.is_empty() {
                        alias.name.to_string()
                    } else {
                        format!("{}.{}", module_name, alias.name)
                    };
                    imports.push(Import {
                        module_name: full_name,
                        alias: alias.asname.as_ref().map(|n| n.to_string()),
                        line_number: alias.range().start().into(),
                    });
                }
            }
            Stmt::FunctionDef(func_def) => {
                // Record the function as a Symbol; owning module/file ids are
                // back-filled later by `resolve_symbols`.
                let integrations_flags = self.detect_integrations(&func_def.body, &self._config);
                let symbol = Symbol {
                    id: func_def.name.to_string(),
                    kind: crate::model::SymbolKind::Function,
                    module_id: "".to_string(), // Will be filled later
                    file_id: "".to_string(), // Will be filled later
                    qualname: func_def.name.to_string(),
                    signature: format!("def {}(...)", func_def.name), // placeholder; params not extracted yet
                    annotations: None,
                    docstring_first_line: self.extract_docstring(&func_def.body),
                    purpose: "extracted from AST".to_string(),
                    outbound_calls: Vec::new(),
                    inbound_calls: Vec::new(),
                    integrations_flags,
                    metrics: crate::model::SymbolMetrics {
                        fan_in: 0,
                        fan_out: 0,
                        is_critical: false,
                        cycle_participant: false,
                    },
                };
                symbols.push(symbol);

                // Recurse into the body so nested defs and calls are recorded
                // with this function as the enclosing symbol.
                for body_stmt in &func_def.body {
                    self.extract_from_statement(body_stmt, Some(&func_def.name), imports, symbols, calls, depth + 1);
                }
            }
            Stmt::ClassDef(class_def) => {
                // Same shape as the function arm, but kind=Class.
                let integrations_flags = self.detect_integrations(&class_def.body, &self._config);
                let symbol = Symbol {
                    id: class_def.name.to_string(),
                    kind: crate::model::SymbolKind::Class,
                    module_id: "".to_string(), // Will be filled later
                    file_id: "".to_string(), // Will be filled later
                    qualname: class_def.name.to_string(),
                    signature: format!("class {}", class_def.name),
                    annotations: None,
                    docstring_first_line: self.extract_docstring(&class_def.body),
                    purpose: "extracted from AST".to_string(),
                    outbound_calls: Vec::new(),
                    inbound_calls: Vec::new(),
                    integrations_flags,
                    metrics: crate::model::SymbolMetrics {
                        fan_in: 0,
                        fan_out: 0,
                        is_critical: false,
                        cycle_participant: false,
                    },
                };
                symbols.push(symbol);

                // Recurse so methods become symbols of their own.
                for body_stmt in &class_def.body {
                    self.extract_from_statement(body_stmt, Some(&class_def.name), imports, symbols, calls, depth + 1);
                }
            }
            Stmt::Expr(expr_stmt) => {
                // Bare expression statement — may contain call sites.
                self.extract_from_expression(&expr_stmt.value, current_symbol, calls);
            }
            _ => {
                // Other statement kinds (assignments, loops, conditionals, …)
                // are not traversed yet, so calls nested in them are missed.
            }
        }
    }
|
||||
|
||||
fn extract_docstring(&self, body: &[Stmt]) -> Option<String> {
|
||||
// For now, just return None until we figure out the correct way to extract docstrings
|
||||
// TODO: Implement proper docstring extraction
|
||||
None
|
||||
}
|
||||
|
||||
fn detect_integrations(&self, body: &[Stmt], config: &Config) -> crate::model::IntegrationFlags {
|
||||
let mut flags = crate::model::IntegrationFlags {
|
||||
http: false,
|
||||
db: false,
|
||||
queue: false,
|
||||
};
|
||||
|
||||
if !config.analysis.detect_integrations {
|
||||
return flags;
|
||||
}
|
||||
|
||||
// Convert body to string for pattern matching
|
||||
let body_str = format!("{:?}", body);
|
||||
|
||||
// Check for HTTP integrations
|
||||
for pattern in &config.analysis.integration_patterns {
|
||||
if pattern.type_ == "http" {
|
||||
for lib in &pattern.patterns {
|
||||
if body_str.contains(lib) {
|
||||
flags.http = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if pattern.type_ == "db" {
|
||||
for lib in &pattern.patterns {
|
||||
if body_str.contains(lib) {
|
||||
flags.db = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if pattern.type_ == "queue" {
|
||||
for lib in &pattern.patterns {
|
||||
if body_str.contains(lib) {
|
||||
flags.queue = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
flags
|
||||
}
|
||||
|
||||
    /// Placeholder for detailed function extraction (parameters, decorators,
    /// return annotation).
    ///
    /// Currently a no-op: `extract_from_statement` builds the function
    /// `Symbol` inline instead of delegating here.
    fn extract_function_def(&self, _func_def: &StmtFunctionDef, _symbols: &mut Vec<Symbol>, _calls: &mut Vec<Call>, _depth: usize) {
        // Extract function information
        // This is a simplified implementation - a full implementation would extract more details
    }
|
||||
|
||||
    /// Placeholder for detailed class extraction (bases, decorators, methods).
    ///
    /// Currently a no-op: `extract_from_statement` builds the class `Symbol`
    /// inline instead of delegating here.
    fn extract_class_def(&self, _class_def: &StmtClassDef, _symbols: &mut Vec<Symbol>, _depth: usize) {
        // Extract class information
        // This is a simplified implementation - a full implementation would extract more details
    }
|
||||
|
||||
    /// Recursively walks an expression, recording every call site into
    /// `calls`. Only `Call` and `Attribute` nodes are descended into; all
    /// other expression kinds are currently ignored.
    fn extract_from_expression(&self, expr: &Expr, current_symbol: Option<&str>, calls: &mut Vec<Call>) {
        match expr {
            Expr::Call(call_expr) => {
                // Record the call with a textual callee; resolution to a real
                // symbol happens later (hence CallType::Unresolved).
                let callee_expr = self.expr_to_string(&call_expr.func);
                calls.push(Call {
                    // Enclosing def's name, or "unknown" at module level.
                    caller_symbol: current_symbol.unwrap_or("unknown").to_string(),
                    callee_expr,
                    line_number: call_expr.range().start().into(),
                    call_type: CallType::Unresolved,
                });

                // Positional and keyword arguments may themselves contain calls.
                for arg in &call_expr.args {
                    self.extract_from_expression(arg, current_symbol, calls);
                }
                for keyword in &call_expr.keywords {
                    self.extract_from_expression(&keyword.value, current_symbol, calls);
                }
            }
            Expr::Attribute(attr_expr) => {
                // e.g. `a.b.c(...)` — walk the base expression.
                self.extract_from_expression(&attr_expr.value, current_symbol, calls);
            }
            _ => {
                // Other expression kinds (operators, comprehensions, …) are
                // not traversed yet, so calls nested inside them are missed.
            }
        }
    }
|
||||
|
||||
fn expr_to_string(&self, expr: &Expr) -> String {
|
||||
match expr {
|
||||
Expr::Name(name_expr) => name_expr.id.to_string(),
|
||||
Expr::Attribute(attr_expr) => {
|
||||
format!("{}.{}", self.expr_to_string(&attr_expr.value), attr_expr.attr)
|
||||
}
|
||||
_ => "<complex_expression>".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
    /// Builds a `ProjectModel` from the parsed modules: registers files,
    /// symbols and modules, then derives dependency edges and metrics.
    ///
    /// Symbol resolution is currently shallow — symbol ids are used as-is,
    /// and cross-module references are not resolved to their definitions.
    pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, ArchDocError> {
        let mut project_model = ProjectModel::new();

        for parsed_module in modules {
            // Module id is the dotted module path; file id is the file path.
            let module_id = parsed_module.module_path.clone();
            let file_id = parsed_module.path.to_string_lossy().to_string();

            // Per-file record: raw import names plus the symbols defined here.
            let file_doc = FileDoc {
                id: file_id.clone(),
                path: parsed_module.path.to_string_lossy().to_string(),
                module_id: module_id.clone(),
                imports: parsed_module.imports.iter().map(|i| i.module_name.clone()).collect(),
                outbound_modules: Vec::new(), // TODO: Resolve outbound modules
                inbound_files: Vec::new(),
                symbols: parsed_module.symbols.iter().map(|s| s.id.clone()).collect(),
            };
            project_model.files.insert(file_id.clone(), file_doc);

            // Back-fill the owning module/file on each symbol (left blank by
            // the extraction pass) before indexing it by id.
            for mut symbol in parsed_module.symbols.clone() {
                symbol.module_id = module_id.clone();
                symbol.file_id = file_id.clone();
                project_model.symbols.insert(symbol.id.clone(), symbol);
            }

            // One module per parsed file for now (no package aggregation).
            let module = Module {
                id: module_id.clone(),
                path: parsed_module.path.to_string_lossy().to_string(),
                files: vec![file_id.clone()],
                doc_summary: None,
                outbound_modules: Vec::new(), // TODO: Resolve outbound modules
                inbound_modules: Vec::new(),
                symbols: parsed_module.symbols.iter().map(|s| s.id.clone()).collect(),
            };
            project_model.modules.insert(module_id, module);
        }

        // Derive the edge lists, then fan-in/fan-out metrics from them.
        self.build_dependency_graphs(&mut project_model, modules)?;
        self.compute_metrics(&mut project_model)?;

        Ok(project_model)
    }
|
||||
|
||||
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), ArchDocError> {
|
||||
// Build module import edges
|
||||
for parsed_module in parsed_modules {
|
||||
let from_module_id = parsed_module.module_path.clone();
|
||||
|
||||
for import in &parsed_module.imports {
|
||||
// Try to resolve the imported module
|
||||
let to_module_id = import.module_name.clone();
|
||||
|
||||
// Create module import edge
|
||||
let edge = crate::model::Edge {
|
||||
from_id: from_module_id.clone(),
|
||||
to_id: to_module_id,
|
||||
edge_type: crate::model::EdgeType::ModuleImport,
|
||||
meta: None,
|
||||
};
|
||||
project_model.edges.module_import_edges.push(edge);
|
||||
}
|
||||
}
|
||||
|
||||
// Build symbol call edges
|
||||
for parsed_module in parsed_modules {
|
||||
let _module_id = parsed_module.module_path.clone();
|
||||
|
||||
for call in &parsed_module.calls {
|
||||
// Try to resolve the called symbol
|
||||
let callee_expr = call.callee_expr.clone();
|
||||
|
||||
// Create symbol call edge
|
||||
let edge = crate::model::Edge {
|
||||
from_id: call.caller_symbol.clone(),
|
||||
to_id: callee_expr,
|
||||
edge_type: crate::model::EdgeType::SymbolCall, // TODO: Map CallType to EdgeType properly
|
||||
meta: None,
|
||||
};
|
||||
project_model.edges.symbol_call_edges.push(edge);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), ArchDocError> {
|
||||
// Compute fan-in and fan-out metrics for symbols
|
||||
for symbol in project_model.symbols.values_mut() {
|
||||
// Fan-out: count of outgoing calls
|
||||
let fan_out = project_model.edges.symbol_call_edges
|
||||
.iter()
|
||||
.filter(|edge| edge.from_id == symbol.id)
|
||||
.count();
|
||||
|
||||
// Fan-in: count of incoming calls
|
||||
let fan_in = project_model.edges.symbol_call_edges
|
||||
.iter()
|
||||
.filter(|edge| edge.to_id == symbol.id)
|
||||
.count();
|
||||
|
||||
symbol.metrics.fan_in = fan_in;
|
||||
symbol.metrics.fan_out = fan_out;
|
||||
symbol.metrics.is_critical = fan_in > 10 || fan_out > 10; // Simple threshold
|
||||
symbol.metrics.cycle_participant = false; // TODO: Detect cycles
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
369
archdoc-core/src/renderer.rs
Normal file
369
archdoc-core/src/renderer.rs
Normal file
@@ -0,0 +1,369 @@
|
||||
//! Markdown renderer for ArchDoc
|
||||
//!
|
||||
//! This module handles generating Markdown documentation from the project model
|
||||
//! using templates.
|
||||
|
||||
use crate::model::ProjectModel;
|
||||
use handlebars::Handlebars;
|
||||
|
||||
/// Renders the project model into Markdown documents via Handlebars.
pub struct Renderer {
    // Pre-registered Handlebars templates (currently only `architecture_md`).
    templates: Handlebars<'static>,
}
|
||||
|
||||
impl Renderer {
|
||||
    /// Builds a renderer with the built-in templates pre-registered.
    ///
    /// # Panics
    /// Panics if the embedded `architecture_md` template fails to compile —
    /// that would indicate a bug in the template source itself.
    pub fn new() -> Self {
        let mut handlebars = Handlebars::new();

        // Register templates
        handlebars.register_template_string("architecture_md", Self::architecture_md_template())
            .expect("Failed to register architecture_md template");

        // TODO: Register other templates

        Self {
            templates: handlebars,
        }
    }
|
||||
|
||||
    /// The built-in Handlebars source for the top-level ARCHITECTURE.md.
    ///
    /// Layout convention: content between `<!-- MANUAL:BEGIN/END -->` is
    /// owned by humans; content between `<!-- ARCHDOC:BEGIN/END -->` markers
    /// is regenerated by the tool (see `DiffAwareWriter`).
    ///
    /// NOTE(review): the cycles loop reads `{{{cycle_path}}}` here, while
    /// `render_critical_points_section` supplies plain strings rendered via
    /// `{{{this}}}` — confirm which shape cycle data will take.
    fn architecture_md_template() -> &'static str {
        r#"# ARCHITECTURE — {{{project_name}}}

<!-- MANUAL:BEGIN -->
## Project summary
**Name:** {{{project_name}}}
**Description:** {{{project_description}}}

## Key decisions (manual)
{{#each key_decisions}}
- {{{this}}}
{{/each}}

## Non-goals (manual)
{{#each non_goals}}
- {{{this}}}
{{/each}}
<!-- MANUAL:END -->

---

## Document metadata
- **Created:** {{{created_date}}}
- **Updated:** {{{updated_date}}}
- **Generated by:** archdoc (cli) v0.1

---

## Integrations
<!-- ARCHDOC:BEGIN section=integrations -->
> Generated. Do not edit inside this block.

### Database Integrations
{{#each db_integrations}}
- {{{this}}}
{{/each}}

### HTTP/API Integrations
{{#each http_integrations}}
- {{{this}}}
{{/each}}

### Queue Integrations
{{#each queue_integrations}}
- {{{this}}}
{{/each}}
<!-- ARCHDOC:END section=integrations -->

---

## Rails / Tooling
<!-- ARCHDOC:BEGIN section=rails -->
> Generated. Do not edit inside this block.
{{{rails_summary}}}
<!-- ARCHDOC:END section=rails -->

---

## Repository layout (top-level)
<!-- ARCHDOC:BEGIN section=layout -->
> Generated. Do not edit inside this block.
| Path | Purpose | Link |
|------|---------|------|
{{#each layout_items}}
| {{{path}}} | {{{purpose}}} | [details]({{{link}}}) |
{{/each}}
<!-- ARCHDOC:END section=layout -->

---

## Modules index
<!-- ARCHDOC:BEGIN section=modules_index -->
> Generated. Do not edit inside this block.
| Module | Symbols | Inbound | Outbound | Link |
|--------|---------|---------|----------|------|
{{#each modules}}
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
{{/each}}
<!-- ARCHDOC:END section=modules_index -->

---

## Critical dependency points
<!-- ARCHDOC:BEGIN section=critical_points -->
> Generated. Do not edit inside this block.
### High Fan-in (Most Called)
| Symbol | Fan-in | Critical |
|--------|--------|----------|
{{#each high_fan_in}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### High Fan-out (Calls Many)
| Symbol | Fan-out | Critical |
|--------|---------|----------|
{{#each high_fan_out}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### Module Cycles
{{#each cycles}}
- {{{cycle_path}}}
{{/each}}
<!-- ARCHDOC:END section=critical_points -->

---

<!-- MANUAL:BEGIN -->
## Change notes (manual)
{{#each change_notes}}
- {{{this}}}
{{/each}}
<!-- MANUAL:END -->
"#
    }
|
||||
|
||||
    /// Renders the full ARCHITECTURE.md document from the project model.
    ///
    /// Only the integration lists are populated from `model`; the project
    /// name/description, dates and manual-section placeholders are currently
    /// hard-coded (e.g. "2026-01-25") — NOTE(review): these should come from
    /// configuration / the current date once available.
    pub fn render_architecture_md(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
        // Bucket every flagged symbol into its integration category; a symbol
        // can appear in more than one list.
        let mut db_integrations = Vec::new();
        let mut http_integrations = Vec::new();
        let mut queue_integrations = Vec::new();

        for (symbol_id, symbol) in &model.symbols {
            if symbol.integrations_flags.db {
                db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
            }
            if symbol.integrations_flags.http {
                http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
            }
            if symbol.integrations_flags.queue {
                queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
            }
        }

        // Template context — placeholders for the manual fields.
        let data = serde_json::json!({
            "project_name": "New Project",
            "project_description": "<FILL_MANUALLY: what this project does in 3–7 lines>",
            "created_date": "2026-01-25",
            "updated_date": "2026-01-25",
            "key_decisions": ["<FILL_MANUALLY>"],
            "non_goals": ["<FILL_MANUALLY>"],
            "change_notes": ["<FILL_MANUALLY>"],
            "db_integrations": db_integrations,
            "http_integrations": http_integrations,
            "queue_integrations": queue_integrations,
            // TODO: Fill with more actual data from model
        });

        self.templates.render("architecture_md", &data)
            .map_err(|e| anyhow::anyhow!("Failed to render architecture.md: {}", e))
    }
|
||||
|
||||
pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect integration information
|
||||
let mut db_integrations = Vec::new();
|
||||
let mut http_integrations = Vec::new();
|
||||
let mut queue_integrations = Vec::new();
|
||||
|
||||
for (symbol_id, symbol) in &model.symbols {
|
||||
if symbol.integrations_flags.db {
|
||||
db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.http {
|
||||
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.queue {
|
||||
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare data for integrations section
|
||||
let data = serde_json::json!({
|
||||
"db_integrations": db_integrations,
|
||||
"http_integrations": http_integrations,
|
||||
"queue_integrations": queue_integrations,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the integrations section
|
||||
let integrations_template = r#"
|
||||
|
||||
### Database Integrations
|
||||
{{#each db_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### HTTP/API Integrations
|
||||
{{#each http_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### Queue Integrations
|
||||
{{#each queue_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("integrations", integrations_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register integrations template: {}", e))?;
|
||||
|
||||
handlebars.render("integrations", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render integrations section: {}", e))
|
||||
}
|
||||
|
||||
    /// Renders the "Rails / Tooling" block.
    ///
    /// Placeholder implementation — tooling detection is not implemented yet,
    /// so this always returns a static "no information" message.
    pub fn render_rails_section(&self, _model: &ProjectModel) -> Result<String, anyhow::Error> {
        // For now, return a simple placeholder
        Ok("\n\nNo tooling information available.\n".to_string())
    }
|
||||
|
||||
pub fn render_layout_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect layout information from files
|
||||
let mut layout_items = Vec::new();
|
||||
|
||||
for (file_id, file_doc) in &model.files {
|
||||
layout_items.push(serde_json::json!({
|
||||
"path": file_doc.path,
|
||||
"purpose": "Source file",
|
||||
"link": format!("docs/architecture/files/{}.md", file_id)
|
||||
}));
|
||||
}
|
||||
|
||||
// Prepare data for layout section
|
||||
let data = serde_json::json!({
|
||||
"layout_items": layout_items,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the layout section
|
||||
let layout_template = r#"
|
||||
|
||||
| Path | Purpose | Link |
|
||||
|------|---------|------|
|
||||
{{#each layout_items}}
|
||||
| {{{path}}} | {{{purpose}}} | [details]({{{link}}}) |
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("layout", layout_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register layout template: {}", e))?;
|
||||
|
||||
handlebars.render("layout", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render layout section: {}", e))
|
||||
}
|
||||
|
||||
pub fn render_modules_index_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect module information
|
||||
let mut modules = Vec::new();
|
||||
|
||||
for (module_id, module) in &model.modules {
|
||||
modules.push(serde_json::json!({
|
||||
"name": module_id,
|
||||
"symbol_count": module.symbols.len(),
|
||||
"inbound_count": module.inbound_modules.len(),
|
||||
"outbound_count": module.outbound_modules.len(),
|
||||
"link": format!("docs/architecture/modules/{}.md", module_id)
|
||||
}));
|
||||
}
|
||||
|
||||
// Prepare data for modules index section
|
||||
let data = serde_json::json!({
|
||||
"modules": modules,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the modules index section
|
||||
let modules_template = r#"
|
||||
|
||||
| Module | Symbols | Inbound | Outbound | Link |
|
||||
|--------|---------|---------|----------|------|
|
||||
{{#each modules}}
|
||||
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("modules_index", modules_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register modules_index template: {}", e))?;
|
||||
|
||||
handlebars.render("modules_index", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render modules index section: {}", e))
|
||||
}
|
||||
|
||||
    /// Renders the "Critical dependency points" block: symbols whose fan-in
    /// or fan-out exceeds a fixed threshold (5), plus a (currently always
    /// empty) module-cycle list.
    pub fn render_critical_points_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
        // Partition symbols by which threshold they cross; a symbol can
        // appear in both tables.
        let mut high_fan_in = Vec::new();
        let mut high_fan_out = Vec::new();

        for (symbol_id, symbol) in &model.symbols {
            if symbol.metrics.fan_in > 5 { // Threshold for high fan-in
                high_fan_in.push(serde_json::json!({
                    "symbol": symbol_id,
                    "count": symbol.metrics.fan_in,
                    "critical": symbol.metrics.is_critical,
                }));
            }
            if symbol.metrics.fan_out > 5 { // Threshold for high fan-out
                high_fan_out.push(serde_json::json!({
                    "symbol": symbol_id,
                    "count": symbol.metrics.fan_out,
                    "critical": symbol.metrics.is_critical,
                }));
            }
        }

        // Prepare data for critical points section
        let data = serde_json::json!({
            "high_fan_in": high_fan_in,
            "high_fan_out": high_fan_out,
            "cycles": Vec::<String>::new(), // TODO: Implement cycle detection
        });

        // Create a smaller template just for the critical points section
        let critical_points_template = r#"

### High Fan-in (Most Called)
| Symbol | Fan-in | Critical |
|--------|--------|----------|
{{#each high_fan_in}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### High Fan-out (Calls Many)
| Symbol | Fan-out | Critical |
|--------|---------|----------|
{{#each high_fan_out}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### Module Cycles
{{#each cycles}}
- {{{this}}}
{{/each}}
"#;

        let mut handlebars = Handlebars::new();
        handlebars.register_template_string("critical_points", critical_points_template)
            .map_err(|e| anyhow::anyhow!("Failed to register critical_points template: {}", e))?;

        handlebars.render("critical_points", &data)
            .map_err(|e| anyhow::anyhow!("Failed to render critical points section: {}", e))
    }
|
||||
}
|
||||
86
archdoc-core/src/scanner.rs
Normal file
86
archdoc-core/src/scanner.rs
Normal file
@@ -0,0 +1,86 @@
|
||||
//! File scanner for ArchDoc
|
||||
//!
|
||||
//! This module handles scanning the file system for Python files according to
|
||||
//! the configuration settings.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::errors::ArchDocError;
|
||||
use std::path::{Path, PathBuf};
|
||||
use walkdir::WalkDir;
|
||||
|
||||
/// Walks a project tree and collects Python source files, honouring the
/// scan settings (exclude patterns, symlink policy) from the configuration.
pub struct FileScanner {
    // Scan configuration (include/exclude patterns, follow_symlinks, …).
    config: Config,
}
|
||||
|
||||
impl FileScanner {
|
||||
    /// Creates a scanner that applies the given configuration's scan settings.
    pub fn new(config: Config) -> Self {
        Self { config }
    }
|
||||
|
||||
pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, ArchDocError> {
|
||||
// Check if root directory exists
|
||||
if !root.exists() {
|
||||
return Err(ArchDocError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("Root directory does not exist: {}", root.display())
|
||||
)));
|
||||
}
|
||||
|
||||
if !root.is_dir() {
|
||||
return Err(ArchDocError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!("Root path is not a directory: {}", root.display())
|
||||
)));
|
||||
}
|
||||
|
||||
let mut python_files = Vec::new();
|
||||
|
||||
// Walk directory tree respecting include/exclude patterns
|
||||
for entry in WalkDir::new(root)
|
||||
.follow_links(self.config.scan.follow_symlinks)
|
||||
.into_iter() {
|
||||
|
||||
let entry = entry.map_err(|e| {
|
||||
ArchDocError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("Failed to read directory entry: {}", e)
|
||||
))
|
||||
})?;
|
||||
|
||||
let path = entry.path();
|
||||
|
||||
// Skip excluded paths
|
||||
if self.is_excluded(path) {
|
||||
if path.is_dir() {
|
||||
continue;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Include Python files
|
||||
if path.extension().and_then(|s| s.to_str()) == Some("py") {
|
||||
python_files.push(path.to_path_buf());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(python_files)
|
||||
}
|
||||
|
||||
fn is_excluded(&self, path: &Path) -> bool {
|
||||
// Convert path to string for pattern matching
|
||||
let path_str = match path.to_str() {
|
||||
Some(s) => s,
|
||||
None => return false, // If we can't convert to string, don't exclude
|
||||
};
|
||||
|
||||
// Check if path matches any exclude patterns
|
||||
for pattern in &self.config.scan.exclude {
|
||||
if path_str.contains(pattern) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
237
archdoc-core/src/writer.rs
Normal file
237
archdoc-core/src/writer.rs
Normal file
@@ -0,0 +1,237 @@
|
||||
//! Diff-aware file writer for ArchDoc
|
||||
//!
|
||||
//! This module handles writing generated documentation to files while preserving
|
||||
//! manual content and only updating generated sections.
|
||||
|
||||
use crate::errors::ArchDocError;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use chrono::Utc;
|
||||
|
||||
/// Byte range of one `<!-- ARCHDOC:BEGIN/END section=… -->` pair within a
/// document, including the marker comments themselves.
#[derive(Debug)]
pub struct SectionMarker {
    // Section name as it appears in the marker comment.
    pub name: String,
    // Byte offset of the start of the BEGIN marker.
    pub start_pos: usize,
    // Byte offset just past the end of the END marker.
    pub end_pos: usize,
}
|
||||
|
||||
/// Byte range of a per-symbol generated block within a document.
///
/// Not used yet — `update_symbol_section` is still a stub.
#[derive(Debug)]
pub struct SymbolMarker {
    // Identifier of the symbol the block documents.
    pub symbol_id: String,
    // Byte offset of the start of the block.
    pub start_pos: usize,
    // Byte offset just past the end of the block.
    pub end_pos: usize,
}
|
||||
|
||||
/// Writes generated documentation into files while preserving everything
/// outside the ARCHDOC marker blocks (manual content stays untouched).
pub struct DiffAwareWriter {
    // Configuration
}
|
||||
|
||||
impl DiffAwareWriter {
|
||||
    /// Creates a writer. Stateless for now; configuration may be added later.
    pub fn new() -> Self {
        Self {}
    }
|
||||
|
||||
pub fn update_file_with_markers(
|
||||
&self,
|
||||
file_path: &Path,
|
||||
generated_content: &str,
|
||||
section_name: &str,
|
||||
) -> Result<(), ArchDocError> {
|
||||
// Read existing file
|
||||
let existing_content = if file_path.exists() {
|
||||
fs::read_to_string(file_path)
|
||||
.map_err(|e| ArchDocError::Io(e))?
|
||||
} else {
|
||||
// Create new file with template
|
||||
let template_content = self.create_template_file(file_path, section_name)?;
|
||||
// Write template to file
|
||||
fs::write(file_path, &template_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
template_content
|
||||
};
|
||||
|
||||
// Find section markers
|
||||
let markers = self.find_section_markers(&existing_content, section_name)?;
|
||||
|
||||
if let Some(marker) = markers.first() {
|
||||
// Replace content between markers
|
||||
let new_content = self.replace_section_content(
|
||||
&existing_content,
|
||||
marker,
|
||||
generated_content,
|
||||
)?;
|
||||
|
||||
// Check if content has changed
|
||||
let content_changed = existing_content != new_content;
|
||||
|
||||
// Write updated content
|
||||
if content_changed {
|
||||
let updated_content = self.update_timestamp(new_content)?;
|
||||
fs::write(file_path, updated_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
} else {
|
||||
// Content hasn't changed, but we might still need to update timestamp
|
||||
// TODO: Implement timestamp update logic based on config
|
||||
fs::write(file_path, new_content)
|
||||
.map_err(|e| ArchDocError::Io(e))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Replaces a per-symbol generated block inside `file_path`.
    ///
    /// Not yet implemented — intended to mirror `update_file_with_markers`
    /// but keyed on symbol-level markers (`SymbolMarker`) instead of section
    /// markers.
    ///
    /// # Panics
    /// Always panics via `todo!` until implemented.
    pub fn update_symbol_section(
        &self,
        _file_path: &Path,
        _symbol_id: &str,
        _generated_content: &str,
    ) -> Result<(), ArchDocError> {
        // Similar to section update but for symbol-specific markers
        todo!("Implement symbol section update")
    }
|
||||
|
||||
    /// Finds every `<!-- ARCHDOC:BEGIN/END section=<name> -->` pair in
    /// `content`, returning byte ranges that span both marker comments.
    ///
    /// A BEGIN marker with no matching END after it terminates the scan and
    /// is not reported.
    fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, ArchDocError> {
        let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name);
        let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name);

        let mut markers = Vec::new();
        // `pos` always points just past the last END marker we matched.
        let mut pos = 0;

        while let Some(begin_pos) = content[pos..].find(&begin_marker) {
            let absolute_begin = pos + begin_pos;
            // Only look for the END marker after the BEGIN marker text.
            let search_start = absolute_begin + begin_marker.len();

            if let Some(end_pos) = content[search_start..].find(&end_marker) {
                // The recorded range includes both marker comments.
                let absolute_end = search_start + end_pos + end_marker.len();
                markers.push(SectionMarker {
                    name: section_name.to_string(),
                    start_pos: absolute_begin,
                    end_pos: absolute_end,
                });
                pos = absolute_end;
            } else {
                // Unterminated BEGIN: stop rather than loop forever.
                break;
            }
        }

        Ok(markers)
    }
|
||||
|
||||
fn replace_section_content(
|
||||
&self,
|
||||
content: &str,
|
||||
marker: &SectionMarker,
|
||||
new_content: &str,
|
||||
) -> Result<String, ArchDocError> {
|
||||
let before = &content[..marker.start_pos];
|
||||
let after = &content[marker.end_pos..];
|
||||
|
||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", marker.name);
|
||||
let end_marker = format!("<!-- ARCHDOC:END section={} -->", marker.name);
|
||||
|
||||
Ok(format!(
|
||||
"{}{}{}{}{}",
|
||||
before, begin_marker, new_content, end_marker, after
|
||||
))
|
||||
}
|
||||
|
||||
fn update_timestamp(&self, content: String) -> Result<String, ArchDocError> {
|
||||
// Update the "Updated" field in the document metadata section
|
||||
// Find the metadata section and update the timestamp
|
||||
let today = Utc::now().format("%Y-%m-%d").to_string();
|
||||
|
||||
// Look for the "Updated:" line and replace it
|
||||
let lines: Vec<&str> = content.lines().collect();
|
||||
let mut updated_lines = Vec::new();
|
||||
|
||||
for line in lines {
|
||||
if line.trim_start().starts_with("- **Updated:**") {
|
||||
updated_lines.push(format!("- **Updated:** {}", today));
|
||||
} else {
|
||||
updated_lines.push(line.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(updated_lines.join("\n"))
|
||||
}
|
||||
|
||||
    /// Returns the initial file content for a freshly created document of the
    /// given `template_type`; unknown types yield an empty file.
    ///
    /// The "architecture" template contains empty ARCHDOC marker blocks for
    /// every generated section so later updates have anchors to splice into.
    ///
    /// NOTE(review): only `"architecture"` is recognised, but callers pass the
    /// *section* name here (e.g. "integrations"), which falls through to the
    /// empty template — confirm this mapping is intended.
    fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, ArchDocError> {
        // Create file with appropriate template based on type
        match template_type {
            "architecture" => {
                let template = r#"# ARCHITECTURE — New Project

<!-- MANUAL:BEGIN -->
## Project summary
**Name:** New Project
**Description:** <FILL_MANUALLY: what this project does in 3–7 lines>

## Key decisions (manual)
- <FILL_MANUALLY>

## Non-goals (manual)
- <FILL_MANUALLY>
<!-- MANUAL:END -->

---

## Document metadata
- **Created:** 2026-01-25
- **Updated:** 2026-01-25
- **Generated by:** archdoc (cli) v0.1

---

## Rails / Tooling
<!-- ARCHDOC:BEGIN section=rails -->
> Generated. Do not edit inside this block.

<!-- ARCHDOC:END section=rails -->

---

## Repository layout (top-level)
<!-- ARCHDOC:BEGIN section=layout -->
> Generated. Do not edit inside this block.

<!-- ARCHDOC:END section=layout -->

---

## Modules index
<!-- ARCHDOC:BEGIN section=modules_index -->
> Generated. Do not edit inside this block.

<!-- ARCHDOC:END section=modules_index -->

---

## Integrations
<!-- ARCHDOC:BEGIN section=integrations -->
> Generated. Do not edit inside this block.

<!-- ARCHDOC:END section=integrations -->

---

## Critical dependency points
<!-- ARCHDOC:BEGIN section=critical_points -->
> Generated. Do not edit inside this block.

<!-- ARCHDOC:END section=critical_points -->

---

<!-- MANUAL:BEGIN -->
## Change notes (manual)
- <FILL_MANUALLY>
<!-- MANUAL:END -->
"#;
                Ok(template.to_string())
            }
            _ => {
                Ok("".to_string())
            }
        }
    }
|
||||
}
|
||||
100
archdoc-core/tests/caching.rs
Normal file
100
archdoc-core/tests/caching.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
//! Caching tests for ArchDoc
|
||||
//!
|
||||
//! These tests verify that the caching functionality works correctly.
|
||||
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
|
||||
fn test_cache_store_and_retrieve() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
|
||||
class Calculator:
|
||||
def add(self, a, b):
|
||||
return a + b
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module for the first time
|
||||
let parsed_module1 = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module first time");
|
||||
|
||||
// Parse the module again - should come from cache
|
||||
let parsed_module2 = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module second time");
|
||||
|
||||
// Both parses should return the same data
|
||||
assert_eq!(parsed_module1.path, parsed_module2.path);
|
||||
assert_eq!(parsed_module1.module_path, parsed_module2.module_path);
|
||||
assert_eq!(parsed_module1.imports.len(), parsed_module2.imports.len());
|
||||
assert_eq!(parsed_module1.symbols.len(), parsed_module2.symbols.len());
|
||||
assert_eq!(parsed_module1.calls.len(), parsed_module2.calls.len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_invalidation_on_file_change() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code1 = r#"
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
"#;
|
||||
fs::write(&temp_file, python_code1).expect("Failed to write test file");
|
||||
|
||||
// Parse the module for the first time
|
||||
let parsed_module1 = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module first time");
|
||||
|
||||
// Modify the file
|
||||
let python_code2 = r#"
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
|
||||
def goodbye():
|
||||
return "Goodbye, World!"
|
||||
"#;
|
||||
fs::write(&temp_file, python_code2).expect("Failed to write test file");
|
||||
|
||||
// Parse the module again - should NOT come from cache due to file change
|
||||
let parsed_module2 = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module second time");
|
||||
|
||||
// The second parse should have more symbols
|
||||
assert!(parsed_module2.symbols.len() >= parsed_module1.symbols.len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_disabled() {
|
||||
let mut config = Config::default();
|
||||
config.caching.enabled = false;
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module - should work even with caching disabled
|
||||
let parsed_module = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module with caching disabled");
|
||||
|
||||
assert_eq!(parsed_module.symbols.len(), 1);
|
||||
}
|
||||
131
archdoc-core/tests/enhanced_analysis.rs
Normal file
131
archdoc-core/tests/enhanced_analysis.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
//! Enhanced analysis tests for ArchDoc
|
||||
//!
|
||||
//! These tests verify that the enhanced analysis functionality works correctly
|
||||
//! with complex code that includes integrations, calls, and docstrings.
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
|
||||
fn test_enhanced_analysis_with_integrations() {
|
||||
// Print current directory for debugging
|
||||
let current_dir = std::env::current_dir().unwrap();
|
||||
println!("Current directory: {:?}", current_dir);
|
||||
|
||||
// Try different paths for the config file
|
||||
let possible_paths = [
|
||||
"tests/golden/test_project/archdoc.toml",
|
||||
"../tests/golden/test_project/archdoc.toml",
|
||||
];
|
||||
|
||||
let config_path = possible_paths.iter().find(|&path| {
|
||||
Path::new(path).exists()
|
||||
}).expect("Could not find config file in any expected location");
|
||||
|
||||
println!("Using config path: {:?}", config_path);
|
||||
|
||||
let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");
|
||||
|
||||
// Initialize scanner with the correct root path
|
||||
let project_root = Path::new("tests/golden/test_project");
|
||||
let scanner = FileScanner::new(config.clone());
|
||||
|
||||
// Scan for Python files
|
||||
let python_files = scanner.scan_python_files(project_root)
|
||||
.expect("Failed to scan Python files");
|
||||
|
||||
println!("Found Python files: {:?}", python_files);
|
||||
|
||||
// Should find both example.py and advanced_example.py
|
||||
assert_eq!(python_files.len(), 2);
|
||||
|
||||
// Initialize Python analyzer
|
||||
let analyzer = PythonAnalyzer::new(config.clone());
|
||||
|
||||
// Parse each Python file
|
||||
let mut parsed_modules = Vec::new();
|
||||
for file_path in python_files {
|
||||
println!("Parsing file: {:?}", file_path);
|
||||
match analyzer.parse_module(&file_path) {
|
||||
Ok(module) => {
|
||||
println!("Successfully parsed module: {:?}", module.module_path);
|
||||
println!("Imports: {:?}", module.imports);
|
||||
println!("Symbols: {:?}", module.symbols.len());
|
||||
println!("Calls: {:?}", module.calls.len());
|
||||
parsed_modules.push(module);
|
||||
},
|
||||
Err(e) => {
|
||||
panic!("Failed to parse {}: {}", file_path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("Parsed {} modules", parsed_modules.len());
|
||||
|
||||
// Resolve symbols and build project model
|
||||
let project_model = analyzer.resolve_symbols(&parsed_modules)
|
||||
.expect("Failed to resolve symbols");
|
||||
|
||||
println!("Project model modules: {}", project_model.modules.len());
|
||||
println!("Project model files: {}", project_model.files.len());
|
||||
println!("Project model symbols: {}", project_model.symbols.len());
|
||||
|
||||
// Add assertions to verify the project model
|
||||
assert!(!project_model.modules.is_empty());
|
||||
assert!(!project_model.files.is_empty());
|
||||
assert!(!project_model.symbols.is_empty());
|
||||
|
||||
// Check that we have the right number of modules (2 files = 2 modules)
|
||||
assert_eq!(project_model.modules.len(), 2);
|
||||
|
||||
// Check that we have the right number of files
|
||||
assert_eq!(project_model.files.len(), 2);
|
||||
|
||||
// Check that we have the right number of symbols
|
||||
// The actual number might be less due to deduplication or other factors
|
||||
// but should be at least the sum of symbols from both files minus duplicates
|
||||
assert!(project_model.symbols.len() >= 10);
|
||||
|
||||
// Check specific details about the advanced example module
|
||||
let mut found_advanced_module = false;
|
||||
for (_, module) in project_model.modules.iter() {
|
||||
if module.path.contains("advanced_example.py") {
|
||||
found_advanced_module = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert!(found_advanced_module);
|
||||
|
||||
// Check that we found the UserService class with DB integration
|
||||
let user_service_symbol = project_model.symbols.values().find(|s| s.id == "UserService");
|
||||
assert!(user_service_symbol.is_some());
|
||||
assert_eq!(user_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
|
||||
|
||||
// Check that we found the NotificationService class with queue integration
|
||||
let notification_service_symbol = project_model.symbols.values().find(|s| s.id == "NotificationService");
|
||||
assert!(notification_service_symbol.is_some());
|
||||
assert_eq!(notification_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
|
||||
|
||||
// Check that we found the fetch_external_user_data function with HTTP integration
|
||||
let fetch_external_user_data_symbol = project_model.symbols.values().find(|s| s.id == "fetch_external_user_data");
|
||||
assert!(fetch_external_user_data_symbol.is_some());
|
||||
assert_eq!(fetch_external_user_data_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
|
||||
|
||||
// Check file imports
|
||||
let mut found_advanced_file = false;
|
||||
for (_, file_doc) in project_model.files.iter() {
|
||||
if file_doc.path.contains("advanced_example.py") {
|
||||
found_advanced_file = true;
|
||||
assert!(!file_doc.imports.is_empty());
|
||||
// Should have imports for requests, sqlite3, redis, typing
|
||||
let import_names: Vec<&String> = file_doc.imports.iter().collect();
|
||||
assert!(import_names.contains(&&"requests".to_string()));
|
||||
assert!(import_names.contains(&&"sqlite3".to_string()));
|
||||
assert!(import_names.contains(&&"redis".to_string()));
|
||||
assert!(import_names.contains(&&"typing.List".to_string()) || import_names.contains(&&"typing".to_string()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert!(found_advanced_file);
|
||||
}
|
||||
83
archdoc-core/tests/error_handling.rs
Normal file
83
archdoc-core/tests/error_handling.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
//! Error handling tests for ArchDoc
|
||||
//!
|
||||
//! These tests verify that ArchDoc properly handles various error conditions
|
||||
//! and edge cases.
|
||||
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
|
||||
fn test_scanner_nonexistent_directory() {
|
||||
let config = Config::default();
|
||||
let scanner = FileScanner::new(config);
|
||||
|
||||
// Try to scan a nonexistent directory
|
||||
let result = scanner.scan_python_files(Path::new("/nonexistent/directory"));
|
||||
assert!(result.is_err());
|
||||
|
||||
// Check that we get an IO error
|
||||
match result.unwrap_err() {
|
||||
archdoc_core::errors::ArchDocError::Io(_) => {},
|
||||
_ => panic!("Expected IO error"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scanner_file_instead_of_directory() {
|
||||
let config = Config::default();
|
||||
let scanner = FileScanner::new(config);
|
||||
|
||||
// Create a temporary file
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.txt");
|
||||
fs::write(&temp_file, "test content").expect("Failed to write test file");
|
||||
|
||||
// Try to scan a file instead of a directory
|
||||
let result = scanner.scan_python_files(&temp_file);
|
||||
assert!(result.is_err());
|
||||
|
||||
// Check that we get an IO error
|
||||
match result.unwrap_err() {
|
||||
archdoc_core::errors::ArchDocError::Io(_) => {},
|
||||
_ => panic!("Expected IO error"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_analyzer_nonexistent_file() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Try to parse a nonexistent file
|
||||
let result = analyzer.parse_module(Path::new("/nonexistent/file.py"));
|
||||
assert!(result.is_err());
|
||||
|
||||
// Check that we get an IO error
|
||||
match result.unwrap_err() {
|
||||
archdoc_core::errors::ArchDocError::Io(_) => {},
|
||||
_ => panic!("Expected IO error"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_analyzer_invalid_python_syntax() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary file with invalid Python syntax
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("invalid.py");
|
||||
fs::write(&temp_file, "invalid python syntax @@#$%").expect("Failed to write test file");
|
||||
|
||||
// Try to parse the file
|
||||
let result = analyzer.parse_module(&temp_file);
|
||||
assert!(result.is_err());
|
||||
|
||||
// Check that we get a parse error
|
||||
match result.unwrap_err() {
|
||||
archdoc_core::errors::ArchDocError::ParseError { .. } => {},
|
||||
_ => panic!("Expected parse error"),
|
||||
}
|
||||
}
|
||||
60
archdoc-core/tests/golden/files/example_architecture.md
Normal file
60
archdoc-core/tests/golden/files/example_architecture.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# Architecture Documentation
|
||||
|
||||
Generated at: 1970-01-01 00:00:00 UTC
|
||||
|
||||
## Overview
|
||||
|
||||
This document provides an overview of the architecture for the project.
|
||||
|
||||
## Modules
|
||||
|
||||
### example.py
|
||||
|
||||
File: `example.py`
|
||||
|
||||
#### Imports
|
||||
|
||||
- `os`
|
||||
- `typing.List`
|
||||
|
||||
#### Symbols
|
||||
|
||||
##### Calculator
|
||||
|
||||
- Type: Class
|
||||
- Signature: `class Calculator`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.__init__
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def __init__(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.add
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def add(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.multiply
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def multiply(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### process_numbers
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def process_numbers(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
## Metrics
|
||||
|
||||
### Critical Components
|
||||
|
||||
No critical components identified.
|
||||
|
||||
### Component Dependencies
|
||||
|
||||
Dependency analysis not yet implemented.
|
||||
107
archdoc-core/tests/golden/mod.rs
Normal file
107
archdoc-core/tests/golden/mod.rs
Normal file
@@ -0,0 +1,107 @@
|
||||
//! Golden tests for ArchDoc
|
||||
//!
|
||||
//! These tests generate documentation for test projects and compare the output
|
||||
//! with expected "golden" files to ensure consistency.
|
||||
|
||||
mod test_utils;
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
|
||||
fn test_simple_project_generation() {
|
||||
// Print current directory for debugging
|
||||
let current_dir = std::env::current_dir().unwrap();
|
||||
println!("Current directory: {:?}", current_dir);
|
||||
|
||||
// Try different paths for the config file
|
||||
let possible_paths = [
|
||||
"tests/golden/test_project/archdoc.toml",
|
||||
"../tests/golden/test_project/archdoc.toml",
|
||||
];
|
||||
|
||||
let config_path = possible_paths.iter().find(|&path| {
|
||||
Path::new(path).exists()
|
||||
}).expect("Could not find config file in any expected location");
|
||||
|
||||
println!("Using config path: {:?}", config_path);
|
||||
|
||||
let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");
|
||||
|
||||
// Initialize scanner with the correct root path
|
||||
let project_root = Path::new("tests/golden/test_project");
|
||||
let scanner = FileScanner::new(config.clone());
|
||||
|
||||
// Scan for Python files
|
||||
let python_files = scanner.scan_python_files(project_root)
|
||||
.expect("Failed to scan Python files");
|
||||
|
||||
println!("Found Python files: {:?}", python_files);
|
||||
|
||||
// Initialize Python analyzer
|
||||
let analyzer = PythonAnalyzer::new(config.clone());
|
||||
|
||||
// Parse each Python file
|
||||
let mut parsed_modules = Vec::new();
|
||||
for file_path in python_files {
|
||||
println!("Parsing file: {:?}", file_path);
|
||||
match analyzer.parse_module(&file_path) {
|
||||
Ok(module) => {
|
||||
println!("Successfully parsed module: {:?}", module.module_path);
|
||||
println!("Imports: {:?}", module.imports);
|
||||
println!("Symbols: {:?}", module.symbols.len());
|
||||
println!("Calls: {:?}", module.calls.len());
|
||||
parsed_modules.push(module);
|
||||
},
|
||||
Err(e) => {
|
||||
panic!("Failed to parse {}: {}", file_path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("Parsed {} modules", parsed_modules.len());
|
||||
|
||||
// Resolve symbols and build project model
|
||||
let project_model = analyzer.resolve_symbols(&parsed_modules)
|
||||
.expect("Failed to resolve symbols");
|
||||
|
||||
println!("Project model modules: {}", project_model.modules.len());
|
||||
println!("Project model files: {}", project_model.files.len());
|
||||
println!("Project model symbols: {}", project_model.symbols.len());
|
||||
|
||||
// Add assertions to verify the project model
|
||||
assert!(!project_model.modules.is_empty());
|
||||
assert!(!project_model.files.is_empty());
|
||||
assert!(!project_model.symbols.is_empty());
|
||||
|
||||
// Check specific details about the parsed modules
|
||||
// Now we have 2 modules (example.py and advanced_example.py)
|
||||
assert_eq!(project_model.modules.len(), 2);
|
||||
|
||||
// Find the example.py module
|
||||
let mut found_example_module = false;
|
||||
for (_, module) in project_model.modules.iter() {
|
||||
if module.path.contains("example.py") {
|
||||
found_example_module = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert!(found_example_module);
|
||||
|
||||
// Check that we found the Calculator class
|
||||
let calculator_symbol = project_model.symbols.values().find(|s| s.id == "Calculator");
|
||||
assert!(calculator_symbol.is_some());
|
||||
assert_eq!(calculator_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
|
||||
|
||||
// Check that we found the process_numbers function
|
||||
let process_numbers_symbol = project_model.symbols.values().find(|s| s.id == "process_numbers");
|
||||
assert!(process_numbers_symbol.is_some());
|
||||
assert_eq!(process_numbers_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
|
||||
|
||||
// Check file imports
|
||||
assert!(!project_model.files.is_empty());
|
||||
let file_entry = project_model.files.iter().next().unwrap();
|
||||
let file_doc = file_entry.1;
|
||||
assert!(!file_doc.imports.is_empty());
|
||||
}
|
||||
107
archdoc-core/tests/golden/test_project/src/advanced_example.py
Normal file
107
archdoc-core/tests/golden/test_project/src/advanced_example.py
Normal file
@@ -0,0 +1,107 @@
|
||||
"""Advanced example module for testing with integrations."""

# Fixture for the analyzer's integration-detection tests: the imports below
# (requests, sqlite3, redis) are what the HTTP/DB/queue flags are derived from.
import requests
import sqlite3
import redis
from typing import List, Dict


class UserService:
    """A service for managing users with database integration."""

    def __init__(self, db_path: str = "users.db"):
        """Initialize the user service with database path."""
        self.db_path = db_path  # path to the SQLite database file
        self._init_db()

    def _init_db(self):
        """Initialize the database."""
        # IF NOT EXISTS makes repeated initialization safe.
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS users (
                id INTEGER PRIMARY KEY,
                name TEXT NOT NULL,
                email TEXT UNIQUE NOT NULL
            )
        """)
        conn.commit()
        conn.close()

    def create_user(self, name: str, email: str) -> Dict:
        """Create a new user in the database."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        # Parameterized query — values are bound, not interpolated.
        cursor.execute(
            "INSERT INTO users (name, email) VALUES (?, ?)",
            (name, email)
        )
        user_id = cursor.lastrowid
        conn.commit()
        conn.close()

        return {"id": user_id, "name": name, "email": email}

    def get_user(self, user_id: int) -> Dict:
        """Get a user by ID from the database."""
        # NOTE(review): returns None when no row matches, so the annotation is
        # effectively Optional[Dict]; left as-is since this is a test fixture.
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
        row = cursor.fetchone()
        conn.close()

        if row:
            return {"id": row[0], "name": row[1], "email": row[2]}
        return None


class NotificationService:
    """A service for sending notifications with queue integration."""

    def __init__(self, redis_url: str = "redis://localhost:6379"):
        """Initialize the notification service with Redis URL."""
        self.redis_client = redis.Redis.from_url(redis_url)

    def send_email_notification(self, user_id: int, message: str) -> bool:
        """Send an email notification by queuing it."""
        notification = {
            "user_id": user_id,
            "message": message,
            "type": "email"
        }

        # Push to Redis queue
        self.redis_client.lpush("notifications", str(notification))
        return True


def fetch_external_user_data(user_id: int) -> Dict:
    """Fetch user data from an external API."""
    # Empty dict doubles as the "not found / request failed" result.
    response = requests.get(f"https://api.example.com/users/{user_id}")
    if response.status_code == 200:
        return response.json()
    return {}


def process_users(user_ids: List[int]) -> List[Dict]:
    """Process a list of users with various integrations."""
    # Database integration
    user_service = UserService()

    # Queue integration
    notification_service = NotificationService()

    results = []
    for user_id in user_ids:
        # Database operation
        user = user_service.get_user(user_id)
        if user:
            # External API integration
            external_data = fetch_external_user_data(user_id)
            user.update(external_data)

            # Queue operation
            notification_service.send_email_notification(
                user_id,
                f"Processing user {user['name']}"
            )

            results.append(user)

    return results
|
||||
29
archdoc-core/tests/golden/test_project/src/example.py
Normal file
29
archdoc-core/tests/golden/test_project/src/example.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Example module for testing."""

# NOTE(review): `os` appears unused, but presumably is kept deliberately —
# the golden architecture file lists `os` among the extracted imports.
import os
from typing import List


class Calculator:
    """A simple calculator class."""

    def __init__(self):
        """Initialize the calculator."""
        pass

    def add(self, a: int, b: int) -> int:
        """Add two numbers."""
        return a + b

    def multiply(self, a: int, b: int) -> int:
        """Multiply two numbers."""
        return a * b


def process_numbers(numbers: List[int]) -> List[int]:
    """Process a list of numbers."""
    # Adds 1 to each input through Calculator.add — gives the analyzer a
    # cross-symbol call to extract.
    calc = Calculator()
    return [calc.add(n, 1) for n in numbers]


if __name__ == "__main__":
    numbers = [1, 2, 3, 4, 5]
    result = process_numbers(numbers)
    print(f"Processed numbers: {result}")
|
||||
21
archdoc-core/tests/golden/test_utils.rs
Normal file
21
archdoc-core/tests/golden/test_utils.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! Test utilities for golden tests

use std::fs;

/// Read a file and return its contents.
///
/// Panics with the offending path and the underlying IO error if the file
/// cannot be read.
pub fn read_test_file(path: &str) -> String {
    // unwrap_or_else defers building the panic message until it is actually
    // needed (clippy: expect_fun_call — `expect(&format!(..))` allocates on
    // every call, including the success path).
    fs::read_to_string(path)
        .unwrap_or_else(|e| panic!("Failed to read test file {}: {}", path, e))
}

/// Write content to a file for testing.
///
/// Panics with the offending path and the underlying IO error on failure.
pub fn write_test_file(path: &str, content: &str) {
    fs::write(path, content)
        .unwrap_or_else(|e| panic!("Failed to write test file {}: {}", path, e))
}

/// Compare two strings and panic with both values if they don't match.
pub fn assert_strings_equal(actual: &str, expected: &str, message: &str) {
    if actual != expected {
        panic!("{}: Strings do not match\nActual:\n{}\nExpected:\n{}", message, actual, expected);
    }
}
|
||||
134
archdoc-core/tests/integration_detection.rs
Normal file
134
archdoc-core/tests/integration_detection.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
//! Integration detection tests for ArchDoc
|
||||
//!
|
||||
//! These tests verify that the integration detection functionality works correctly.
|
||||
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
|
||||
fn test_http_integration_detection() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file with HTTP integration
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
import requests
|
||||
|
||||
def fetch_data():
|
||||
response = requests.get("https://api.example.com/data")
|
||||
return response.json()
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module
|
||||
let parsed_module = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module");
|
||||
|
||||
// Check that we found the function
|
||||
assert_eq!(parsed_module.symbols.len(), 1);
|
||||
let symbol = &parsed_module.symbols[0];
|
||||
assert_eq!(symbol.id, "fetch_data");
|
||||
|
||||
// Check that HTTP integration is detected
|
||||
assert!(symbol.integrations_flags.http);
|
||||
assert!(!symbol.integrations_flags.db);
|
||||
assert!(!symbol.integrations_flags.queue);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_db_integration_detection() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file with DB integration
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
import sqlite3
|
||||
|
||||
def get_user(user_id):
|
||||
conn = sqlite3.connect("database.db")
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
|
||||
return cursor.fetchone()
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module
|
||||
let parsed_module = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module");
|
||||
|
||||
// Check that we found the function
|
||||
assert_eq!(parsed_module.symbols.len(), 1);
|
||||
let symbol = &parsed_module.symbols[0];
|
||||
assert_eq!(symbol.id, "get_user");
|
||||
|
||||
// Check that DB integration is detected
|
||||
assert!(!symbol.integrations_flags.http);
|
||||
assert!(symbol.integrations_flags.db);
|
||||
assert!(!symbol.integrations_flags.queue);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_queue_integration_detection() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file with queue integration
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
import redis
|
||||
|
||||
def process_job(job_data):
|
||||
client = redis.Redis()
|
||||
client.lpush("job_queue", job_data)
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module
|
||||
let parsed_module = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module");
|
||||
|
||||
// Check that we found the function
|
||||
assert_eq!(parsed_module.symbols.len(), 1);
|
||||
let symbol = &parsed_module.symbols[0];
|
||||
assert_eq!(symbol.id, "process_job");
|
||||
|
||||
// Check that queue integration is detected
|
||||
assert!(!symbol.integrations_flags.http);
|
||||
assert!(!symbol.integrations_flags.db);
|
||||
assert!(symbol.integrations_flags.queue);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_no_integration_detection() {
|
||||
let config = Config::default();
|
||||
let analyzer = PythonAnalyzer::new(config);
|
||||
|
||||
// Create a temporary Python file with no integrations
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let temp_file = temp_dir.path().join("test.py");
|
||||
let python_code = r#"
|
||||
def calculate_sum(a, b):
|
||||
return a + b
|
||||
"#;
|
||||
fs::write(&temp_file, python_code).expect("Failed to write test file");
|
||||
|
||||
// Parse the module
|
||||
let parsed_module = analyzer.parse_module(&temp_file)
|
||||
.expect("Failed to parse module");
|
||||
|
||||
// Check that we found the function
|
||||
assert_eq!(parsed_module.symbols.len(), 1);
|
||||
let symbol = &parsed_module.symbols[0];
|
||||
assert_eq!(symbol.id, "calculate_sum");
|
||||
|
||||
// Check that no integrations are detected
|
||||
assert!(!symbol.integrations_flags.http);
|
||||
assert!(!symbol.integrations_flags.db);
|
||||
assert!(!symbol.integrations_flags.queue);
|
||||
}
|
||||
13
archdoc-core/tests/integration_tests.rs
Normal file
13
archdoc-core/tests/integration_tests.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
//! Integration tests for ArchDoc
|
||||
|
||||
// Include golden tests
|
||||
mod golden;
|
||||
mod error_handling;
|
||||
mod caching;
|
||||
mod integration_detection;
|
||||
mod enhanced_analysis;
|
||||
|
||||
// Run all tests
|
||||
fn main() {
|
||||
// This is just a placeholder - tests are run by cargo test
|
||||
}
|
||||
93
archdoc-core/tests/project_analysis.rs
Normal file
93
archdoc-core/tests/project_analysis.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
//! Tests for analyzing the test project
|
||||
|
||||
use archdoc_core::{
|
||||
config::Config,
|
||||
python_analyzer::PythonAnalyzer,
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
/// Integration test: parse the bundled test project's modules and verify
/// that symbols, call edges, and DB/HTTP integration flags are detected.
#[test]
fn test_project_analysis() {
    // Load config from the fixture project; `expect` gives a clear failure
    // message when the fixture file is missing or malformed.
    let config = Config::load_from_file(Path::new("../test-project/archdoc.toml"))
        .expect("failed to load ../test-project/archdoc.toml");

    // Initialize analyzer
    let analyzer = PythonAnalyzer::new(config);

    // Parse the core module of the fixture project.
    let core_module = analyzer
        .parse_module(Path::new("../test-project/src/core.py"))
        .expect("failed to parse ../test-project/src/core.py");

    println!("Core module symbols: {}", core_module.symbols.len());
    for symbol in &core_module.symbols {
        println!(" Symbol: {} ({:?}), DB: {}, HTTP: {}", symbol.id, symbol.kind, symbol.integrations_flags.db, symbol.integrations_flags.http);
    }

    println!("Core module calls: {}", core_module.calls.len());
    for call in &core_module.calls {
        println!(" Call: {} -> {}", call.caller_symbol, call.callee_expr);
    }

    // The core module must expose at least one symbol and one call edge.
    assert!(!core_module.symbols.is_empty(), "expected symbols in core.py");
    assert!(!core_module.calls.is_empty(), "expected call edges in core.py");

    // Integration detection: the fixture exercises both DB and HTTP usage,
    // so at least one symbol must carry each flag.
    let db_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.db);
    let http_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.http);

    assert!(db_integration_found, "Database integration should be detected");
    assert!(http_integration_found, "HTTP integration should be detected");

    // Parse the utils module as well and verify it also yields symbols.
    let utils_module = analyzer
        .parse_module(Path::new("../test-project/src/utils.py"))
        .expect("failed to parse ../test-project/src/utils.py");

    println!("Utils module symbols: {}", utils_module.symbols.len());
    for symbol in &utils_module.symbols {
        println!(" Symbol: {} ({:?}), DB: {}, HTTP: {}", symbol.id, symbol.kind, symbol.integrations_flags.db, symbol.integrations_flags.http);
    }

    assert!(!utils_module.symbols.is_empty(), "expected symbols in utils.py");
}
|
||||
|
||||
/// Integration test: parse multiple modules, resolve them into a
/// whole-project model, and verify integration flags survive resolution.
#[test]
fn test_full_project_resolution() {
    // Load config from the fixture project with a descriptive panic message.
    let config = Config::load_from_file(Path::new("../test-project/archdoc.toml"))
        .expect("failed to load ../test-project/archdoc.toml");

    // Initialize analyzer
    let analyzer = PythonAnalyzer::new(config);

    // Parse every module of the fixture project.
    let core_module = analyzer
        .parse_module(Path::new("../test-project/src/core.py"))
        .expect("failed to parse ../test-project/src/core.py");
    let utils_module = analyzer
        .parse_module(Path::new("../test-project/src/utils.py"))
        .expect("failed to parse ../test-project/src/utils.py");

    let modules = vec![core_module, utils_module];

    // Cross-module symbol resolution produces the project-wide model.
    let project_model = analyzer
        .resolve_symbols(&modules)
        .expect("symbol resolution failed");

    // The resolved model must be populated across all three indexes.
    assert!(!project_model.modules.is_empty());
    assert!(!project_model.symbols.is_empty());
    assert!(!project_model.files.is_empty());

    // Integration flags detected at parse time must be preserved after
    // resolution into the project model.
    let db_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.db);
    let http_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.http);

    assert!(db_integration_found, "Database integration should be preserved in project model");
    assert!(http_integration_found, "HTTP integration should be preserved in project model");

    println!("Project modules: {:?}", project_model.modules.keys().collect::<Vec<_>>());
    println!("Project symbols: {}", project_model.symbols.len());

    // Log which symbols carry integration flags, to ease debugging failures.
    for (id, symbol) in &project_model.symbols {
        if symbol.integrations_flags.db || symbol.integrations_flags.http {
            println!("Symbol {} has DB: {}, HTTP: {}", id, symbol.integrations_flags.db, symbol.integrations_flags.http);
        }
    }
}
|
||||
85
archdoc-core/tests/renderer_tests.rs
Normal file
85
archdoc-core/tests/renderer_tests.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
//! Tests for the renderer functionality
|
||||
|
||||
use archdoc_core::{
|
||||
model::{ProjectModel, Symbol, SymbolKind, IntegrationFlags, SymbolMetrics},
|
||||
renderer::Renderer,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Unit test: render a minimal project model and verify the generated
/// markdown contains the expected integration sections and symbol entries.
#[test]
fn test_render_with_integrations() {
    /// Builds a minimal `Symbol` fixture. Only the fields that vary per
    /// test case (id, kind, signature, integration flags) are parameters;
    /// everything else gets a fixed placeholder value.
    fn make_symbol(id: &str, kind: SymbolKind, signature: &str, db: bool, http: bool) -> Symbol {
        Symbol {
            id: id.to_string(),
            kind,
            module_id: "test_module".to_string(),
            file_id: "test_file.py".to_string(),
            // In these fixtures the qualified name equals the plain id.
            qualname: id.to_string(),
            signature: signature.to_string(),
            annotations: None,
            docstring_first_line: None,
            purpose: "test".to_string(),
            outbound_calls: vec![],
            inbound_calls: vec![],
            integrations_flags: IntegrationFlags { db, http, queue: false },
            metrics: SymbolMetrics {
                fan_in: 0,
                fan_out: 0,
                is_critical: false,
                cycle_participant: false,
            },
        }
    }

    // Project model with one DB-flagged class and one HTTP-flagged function.
    let mut project_model = ProjectModel::new();
    project_model.symbols.insert(
        "DatabaseManager".to_string(),
        make_symbol("DatabaseManager", SymbolKind::Class, "class DatabaseManager", true, false),
    );
    project_model.symbols.insert(
        "fetch_data".to_string(),
        make_symbol("fetch_data", SymbolKind::Function, "def fetch_data()", false, true),
    );

    // Render the architecture document; a render error fails the test with
    // the underlying error in the panic message.
    let renderer = Renderer::new();
    let rendered_content = renderer
        .render_architecture_md(&project_model)
        .expect("rendering architecture markdown should succeed");
    println!("Rendered content:\n{}", rendered_content);

    // Both integration sections and both symbol entries must appear.
    assert!(rendered_content.contains("## Integrations"));
    assert!(rendered_content.contains("### Database Integrations"));
    assert!(rendered_content.contains("### HTTP/API Integrations"));
    assert!(rendered_content.contains("DatabaseManager in test_file.py"));
    assert!(rendered_content.contains("fetch_data in test_file.py"));
}
|
||||
Reference in New Issue
Block a user