Compare commits

1 commit

90 changed files with 420 additions and 1153 deletions

.gitignore vendored (1 line changed)
View File

@@ -10,4 +10,3 @@
.roo/ .roo/
PLANS/ PLANS/
target/ target/
.wtismycode/

View File

@@ -1,6 +1,6 @@
# Changelog # Changelog
All notable changes to WTIsMyCode are documented in this file. All notable changes to ArchDoc are documented in this file.
Format follows [Keep a Changelog](https://keepachangelog.com/). Format follows [Keep a Changelog](https://keepachangelog.com/).
@@ -12,8 +12,8 @@ Format follows [Keep a Changelog](https://keepachangelog.com/).
- **Dependency cycle detection** (`cycle_detector.rs`) — DFS-based algorithm to find circular module dependencies - **Dependency cycle detection** (`cycle_detector.rs`) — DFS-based algorithm to find circular module dependencies
- **Cycle detection in renderer** — Critical points section now shows detected dependency cycles - **Cycle detection in renderer** — Critical points section now shows detected dependency cycles
- **Full pipeline integration tests** — Tests for config validation, scanning, cycle detection, and rendering - **Full pipeline integration tests** — Tests for config validation, scanning, cycle detection, and rendering
- **Stats command** — `wtismycode stats` displays project-level statistics (files, modules, symbols, edges) - **Stats command** — `archdoc stats` displays project-level statistics (files, modules, symbols, edges)
- **Check command** — `wtismycode check` verifies documentation consistency with code - **Check command** — `archdoc check` verifies documentation consistency with code
- **Colored CLI output** — Progress bars and colored status messages - **Colored CLI output** — Progress bars and colored status messages
- **Comprehensive README** — Badges, configuration reference table, command documentation, architecture overview - **Comprehensive README** — Badges, configuration reference table, command documentation, architecture overview
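The DFS-based cycle detector credited in the changelog (`cycle_detector.rs`) is not itself touched by this diff. As a rough sketch only — the function name and graph representation below are assumptions for illustration, not the crate's actual API — DFS cycle detection over a module import graph looks roughly like this:

```rust
use std::collections::{HashMap, HashSet};

/// Returns true if the directed import graph contains a cycle.
/// `edges` maps a module id to the modules it imports (illustrative shape).
fn has_cycle(edges: &HashMap<String, Vec<String>>) -> bool {
    fn visit(
        node: &str,
        edges: &HashMap<String, Vec<String>>,
        visiting: &mut HashSet<String>, // nodes on the current DFS path
        done: &mut HashSet<String>,     // nodes fully explored
    ) -> bool {
        if done.contains(node) {
            return false;
        }
        if !visiting.insert(node.to_string()) {
            return true; // back edge to a node on the current path => cycle
        }
        for next in edges.get(node).map(|v| v.as_slice()).unwrap_or(&[]) {
            if visit(next, edges, visiting, done) {
                return true;
            }
        }
        visiting.remove(node);
        done.insert(node.to_string());
        false
    }
    let (mut visiting, mut done) = (HashSet::new(), HashSet::new());
    edges.keys().any(|n| visit(n, edges, &mut visiting, &mut done))
}
```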

Cargo.lock generated (263 lines changed)
View File

@@ -2,12 +2,6 @@
# It is not intended for manual editing. # It is not intended for manual editing.
version = 4 version = 4
[[package]]
name = "adler2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]] [[package]]
name = "ahash" name = "ahash"
version = "0.8.12" version = "0.8.12"
@@ -85,18 +79,48 @@ version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
[[package]]
name = "archdoc-cli"
version = "0.1.0"
dependencies = [
"anyhow",
"archdoc-core",
"clap",
"colored",
"indicatif",
"serde",
"serde_json",
"thiserror 1.0.69",
"tokio",
"toml 0.8.23",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "archdoc-core"
version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
"handlebars",
"rustpython-ast",
"rustpython-parser",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"toml 0.9.12+spec-1.1.0",
"tracing",
"walkdir",
]
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.5.0" version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "2.11.0" version = "2.11.0"
@@ -238,15 +262,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "crc32fast"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "crunchy" name = "crunchy"
version = "0.2.4" version = "0.2.4"
@@ -400,16 +415,6 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
[[package]]
name = "flate2"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c"
dependencies = [
"crc32fast",
"miniz_oxide",
]
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.7" version = "1.0.7"
@@ -511,22 +516,6 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "http"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
dependencies = [
"bytes",
"itoa",
]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]] [[package]]
name = "iana-time-zone" name = "iana-time-zone"
version = "0.1.65" version = "0.1.65"
@@ -746,16 +735,6 @@ version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
name = "miniz_oxide"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
"simd-adler32",
]
[[package]] [[package]]
name = "mio" name = "mio"
version = "1.1.1" version = "1.1.1"
@@ -856,12 +835,6 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "percent-encoding"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]] [[package]]
name = "pest" name = "pest"
version = "2.8.6" version = "2.8.6"
@@ -1037,20 +1010,6 @@ dependencies = [
"bitflags", "bitflags",
] ]
[[package]]
name = "ring"
version = "0.17.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
dependencies = [
"cc",
"cfg-if",
"getrandom 0.2.17",
"libc",
"untrusted",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
version = "1.1.0" version = "1.1.0"
@@ -1070,41 +1029,6 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "rustls"
version = "0.23.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b"
dependencies = [
"log",
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki",
"subtle",
"zeroize",
]
[[package]]
name = "rustls-pki-types"
version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
dependencies = [
"zeroize",
]
[[package]]
name = "rustls-webpki"
version = "0.103.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
dependencies = [
"ring",
"rustls-pki-types",
"untrusted",
]
[[package]] [[package]]
name = "rustpython-ast" name = "rustpython-ast"
version = "0.4.0" version = "0.4.0"
@@ -1292,12 +1216,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "simd-adler32"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
[[package]] [[package]]
name = "siphasher" name = "siphasher"
version = "1.0.2" version = "1.0.2"
@@ -1332,12 +1250,6 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.115" version = "2.0.115"
@@ -1689,47 +1601,6 @@ dependencies = [
"rand", "rand",
] ]
[[package]]
name = "untrusted"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "ureq"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdc97a28575b85cfedf2a7e7d3cc64b3e11bd8ac766666318003abbacc7a21fc"
dependencies = [
"base64",
"flate2",
"log",
"percent-encoding",
"rustls",
"rustls-pki-types",
"ureq-proto",
"utf-8",
"webpki-roots",
]
[[package]]
name = "ureq-proto"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64",
"http",
"httparse",
"log",
]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]] [[package]]
name = "utf8parse" name = "utf8parse"
version = "0.2.2" version = "0.2.2"
@@ -1871,15 +1742,6 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "webpki-roots"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed"
dependencies = [
"rustls-pki-types",
]
[[package]] [[package]]
name = "winapi-util" name = "winapi-util"
version = "0.1.11" version = "0.1.11"
@@ -1948,15 +1810,6 @@ dependencies = [
"windows-link", "windows-link",
] ]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.59.0" version = "0.59.0"
@@ -2210,44 +2063,6 @@ dependencies = [
"wasmparser", "wasmparser",
] ]
[[package]]
name = "wtismycode-cli"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"colored",
"indicatif",
"serde",
"serde_json",
"thiserror 1.0.69",
"tokio",
"toml 0.8.23",
"tracing",
"tracing-subscriber",
"wtismycode-core",
]
[[package]]
name = "wtismycode-core"
version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
"handlebars",
"lazy_static",
"rustpython-ast",
"rustpython-parser",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"toml 0.9.12+spec-1.1.0",
"tracing",
"ureq",
"walkdir",
]
[[package]] [[package]]
name = "zerocopy" name = "zerocopy"
version = "0.8.39" version = "0.8.39"
@@ -2268,12 +2083,6 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "zeroize"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
[[package]] [[package]]
name = "zmij" name = "zmij"
version = "1.0.21" version = "1.0.21"

View File

@@ -1,3 +1,3 @@
[workspace] [workspace]
members = ["wtismycode-cli", "wtismycode-core"] members = ["archdoc-cli", "archdoc-core"]
resolver = "3" resolver = "3"

View File

@@ -1,8 +1,8 @@
# PR: Major improvements to WTIsMyCode # PR: Major improvements to ArchDoc
## Summary ## Summary
Comprehensive refactoring and feature additions to WTIsMyCode — the Python architecture documentation generator. This PR improves code quality, adds new features, and significantly enhances the development experience. Comprehensive refactoring and feature additions to ArchDoc — the Python architecture documentation generator. This PR improves code quality, adds new features, and significantly enhances the development experience.
**Stats:** 24 files changed, ~3900 insertions, ~1400 deletions, 50 tests **Stats:** 24 files changed, ~3900 insertions, ~1400 deletions, 50 tests

View File

@@ -1,4 +1,4 @@
# WTIsMyCode # ArchDoc
![Rust](https://img.shields.io/badge/Rust-1.85%2B-orange?logo=rust) ![Rust](https://img.shields.io/badge/Rust-1.85%2B-orange?logo=rust)
![License](https://img.shields.io/badge/License-MIT-blue) ![License](https://img.shields.io/badge/License-MIT-blue)
@@ -6,7 +6,7 @@
**Automatic architecture documentation generator for Python projects.** **Automatic architecture documentation generator for Python projects.**
WTIsMyCode analyzes your Python codebase using AST parsing and generates comprehensive Markdown documentation covering module structure, dependencies, integration points, and critical hotspots. ArchDoc analyzes your Python codebase using AST parsing and generates comprehensive Markdown documentation covering module structure, dependencies, integration points, and critical hotspots.
## Features ## Features
@@ -15,7 +15,7 @@ WTIsMyCode analyzes your Python codebase using AST parsing and generates compreh
- **Integration Detection** — Automatically identifies HTTP, database, and message queue integrations - **Integration Detection** — Automatically identifies HTTP, database, and message queue integrations
- **Diff-Aware Updates** — Preserves manually written sections while regenerating docs - **Diff-Aware Updates** — Preserves manually written sections while regenerating docs
- **Caching** — Content-hash based caching for fast incremental regeneration - **Caching** — Content-hash based caching for fast incremental regeneration
- **Config Validation** — Comprehensive validation of `wtismycode.toml` with helpful error messages - **Config Validation** — Comprehensive validation of `archdoc.toml` with helpful error messages
- **Statistics** — Project-level stats: file counts, symbol counts, fan-in/fan-out metrics - **Statistics** — Project-level stats: file counts, symbol counts, fan-in/fan-out metrics
- **Consistency Checks** — Verify documentation stays in sync with code changes - **Consistency Checks** — Verify documentation stays in sync with code changes
@@ -24,33 +24,33 @@ WTIsMyCode analyzes your Python codebase using AST parsing and generates compreh
Requires Rust 1.85+: Requires Rust 1.85+:
```bash ```bash
cargo install --path wtismycode-cli cargo install --path archdoc-cli
``` ```
## Quick Start ## Quick Start
```bash ```bash
# Initialize config in your Python project # Initialize config in your Python project
wtismycode init archdoc init
# Generate architecture docs # Generate architecture docs
wtismycode generate archdoc generate
# View project statistics # View project statistics
wtismycode stats archdoc stats
# Check docs are up-to-date # Check docs are up-to-date
wtismycode check archdoc check
``` ```
## Commands ## Commands
### `wtismycode generate` ### `archdoc generate`
Scans the project, analyzes Python files, and generates documentation: Scans the project, analyzes Python files, and generates documentation:
``` ```
$ wtismycode generate $ archdoc generate
🔍 Scanning project... 🔍 Scanning project...
📂 Found 24 Python files in 6 modules 📂 Found 24 Python files in 6 modules
🔬 Analyzing dependencies... 🔬 Analyzing dependencies...
@@ -65,12 +65,12 @@ Output includes:
- **Integration map** — HTTP, database, and queue integration points - **Integration map** — HTTP, database, and queue integration points
- **Critical points** — High fan-in/fan-out symbols and dependency cycles - **Critical points** — High fan-in/fan-out symbols and dependency cycles
### `wtismycode stats` ### `archdoc stats`
Displays project statistics without generating docs: Displays project statistics without generating docs:
``` ```
$ wtismycode stats $ archdoc stats
📊 Project Statistics 📊 Project Statistics
Files: 24 Files: 24
Modules: 6 Modules: 6
@@ -80,29 +80,29 @@ $ wtismycode stats
Edges: 134 Edges: 134
``` ```
### `wtismycode check` ### `archdoc check`
Verifies documentation consistency with the current codebase: Verifies documentation consistency with the current codebase:
``` ```
$ wtismycode check $ archdoc check
✅ Documentation is up-to-date ✅ Documentation is up-to-date
``` ```
Returns non-zero exit code if docs are stale — useful in CI pipelines. Returns non-zero exit code if docs are stale — useful in CI pipelines.
### `wtismycode init` ### `archdoc init`
Creates a default `wtismycode.toml` configuration file: Creates a default `archdoc.toml` configuration file:
``` ```
$ wtismycode init $ archdoc init
✅ Created wtismycode.toml with default settings ✅ Created archdoc.toml with default settings
``` ```
## Configuration Reference ## Configuration Reference
WTIsMyCode is configured via `wtismycode.toml`: ArchDoc is configured via `archdoc.toml`:
| Section | Key | Default | Description | | Section | Key | Default | Description |
|---------|-----|---------|-------------| |---------|-----|---------|-------------|
@@ -125,7 +125,7 @@ WTIsMyCode is configured via `wtismycode.toml`:
| `thresholds` | `critical_fan_in` | `20` | Fan-in threshold for critical symbols | | `thresholds` | `critical_fan_in` | `20` | Fan-in threshold for critical symbols |
| `thresholds` | `critical_fan_out` | `20` | Fan-out threshold for critical symbols | | `thresholds` | `critical_fan_out` | `20` | Fan-out threshold for critical symbols |
| `caching` | `enabled` | `true` | Enable analysis caching | | `caching` | `enabled` | `true` | Enable analysis caching |
| `caching` | `cache_dir` | `".wtismycode/cache"` | Cache directory | | `caching` | `cache_dir` | `".archdoc/cache"` | Cache directory |
| `caching` | `max_cache_age` | `"24h"` | Cache TTL (supports s, m, h, d, w) | | `caching` | `max_cache_age` | `"24h"` | Cache TTL (supports s, m, h, d, w) |
### Example Configuration ### Example Configuration
@@ -172,12 +172,12 @@ max_cache_age = "24h"
## Architecture ## Architecture
``` ```
wtismycode/ archdoc/
├── wtismycode-cli/ # CLI binary (commands, output formatting) ├── archdoc-cli/ # CLI binary (commands, output formatting)
│ └── src/ │ └── src/
│ ├── main.rs │ ├── main.rs
│ └── commands/ # generate, check, stats, init │ └── commands/ # generate, check, stats, init
├── wtismycode-core/ # Core library ├── archdoc-core/ # Core library
│ └── src/ │ └── src/
│ ├── config.rs # Config loading & validation │ ├── config.rs # Config loading & validation
│ ├── scanner.rs # File discovery │ ├── scanner.rs # File discovery

View File

@@ -1,14 +1,10 @@
[package] [package]
name = "wtismycode-cli" name = "archdoc-cli"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
[[bin]]
name = "wtismycode"
path = "src/main.rs"
[dependencies] [dependencies]
wtismycode-core = { path = "../wtismycode-core" } archdoc-core = { path = "../archdoc-core" }
clap = { version = "4.0", features = ["derive"] } clap = { version = "4.0", features = ["derive"] }
tokio = { version = "1.0", features = ["full"] } tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

View File

@@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use wtismycode_core::Config; use archdoc_core::Config;
use colored::Colorize; use colored::Colorize;
use super::generate::analyze_project; use super::generate::analyze_project;
@@ -9,7 +9,7 @@ pub fn check_docs_consistency(root: &str, config: &Config) -> Result<()> {
let model = analyze_project(root, config)?; let model = analyze_project(root, config)?;
let renderer = wtismycode_core::renderer::Renderer::new(); let renderer = archdoc_core::renderer::Renderer::new();
let _generated = renderer.render_architecture_md(&model, None)?; let _generated = renderer.render_architecture_md(&model, None)?;
let architecture_md_path = std::path::Path::new(root).join(&config.project.entry_file); let architecture_md_path = std::path::Path::new(root).join(&config.project.entry_file);

View File

@@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use wtismycode_core::{Config, ProjectModel, scanner::FileScanner, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, ProjectModel, scanner::FileScanner, python_analyzer::PythonAnalyzer};
use colored::Colorize; use colored::Colorize;
use indicatif::{ProgressBar, ProgressStyle}; use indicatif::{ProgressBar, ProgressStyle};
use std::path::Path; use std::path::Path;
@@ -12,10 +12,6 @@ pub fn load_config(config_path: &str) -> Result<Config> {
} }
pub fn analyze_project(root: &str, config: &Config) -> Result<ProjectModel> { pub fn analyze_project(root: &str, config: &Config) -> Result<ProjectModel> {
analyze_project_with_options(root, config, false)
}
pub fn analyze_project_with_options(root: &str, config: &Config, offline: bool) -> Result<ProjectModel> {
println!("{}", "Scanning project...".cyan()); println!("{}", "Scanning project...".cyan());
let scanner = FileScanner::new(config.clone()); let scanner = FileScanner::new(config.clone());
@@ -23,7 +19,7 @@ pub fn analyze_project_with_options(root: &str, config: &Config, offline: bool)
println!(" Found {} Python files", python_files.len().to_string().yellow()); println!(" Found {} Python files", python_files.len().to_string().yellow());
let analyzer = PythonAnalyzer::new_with_options(config.clone(), offline); let analyzer = PythonAnalyzer::new(config.clone());
let pb = ProgressBar::new(python_files.len() as u64); let pb = ProgressBar::new(python_files.len() as u64);
pb.set_style(ProgressStyle::default_bar() pb.set_style(ProgressStyle::default_bar()
@@ -122,8 +118,8 @@ pub fn generate_docs(model: &ProjectModel, out: &str, verbose: bool, _config: &C
} }
} }
let renderer = wtismycode_core::renderer::Renderer::new(); let renderer = archdoc_core::renderer::Renderer::new();
let writer = wtismycode_core::writer::DiffAwareWriter::new(); let writer = archdoc_core::writer::DiffAwareWriter::new();
let output_path = std::path::Path::new(".").join("ARCHITECTURE.md"); let output_path = std::path::Path::new(".").join("ARCHITECTURE.md");

View File

@@ -1,47 +1,8 @@
use anyhow::Result; use anyhow::Result;
use colored::Colorize; use colored::Colorize;
/// Detect project name from pyproject.toml or directory basename.
fn detect_project_name(root: &str) -> String {
let root_path = std::path::Path::new(root);
// Try pyproject.toml
let pyproject_path = root_path.join("pyproject.toml");
if let Ok(content) = std::fs::read_to_string(&pyproject_path) {
let mut in_project = false;
for line in content.lines() {
let trimmed = line.trim();
if trimmed == "[project]" {
in_project = true;
continue;
}
if trimmed.starts_with('[') {
in_project = false;
continue;
}
if in_project && trimmed.starts_with("name") {
if let Some(val) = trimmed.split('=').nth(1) {
let name = val.trim().trim_matches('"').trim_matches('\'');
if !name.is_empty() {
return name.to_string();
}
}
}
}
}
// Fallback: directory basename
root_path
.canonicalize()
.ok()
.and_then(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
.unwrap_or_else(|| "Project".to_string())
}
pub fn init_project(root: &str, out: &str) -> Result<()> { pub fn init_project(root: &str, out: &str) -> Result<()> {
println!("{}", "Initializing wtismycode project...".cyan().bold()); println!("{}", "Initializing archdoc project...".cyan().bold());
let project_name = detect_project_name(root);
let out_path = std::path::Path::new(out); let out_path = std::path::Path::new(out);
std::fs::create_dir_all(out_path)?; std::fs::create_dir_all(out_path)?;
@@ -84,7 +45,7 @@ pub fn init_project(root: &str, out: &str) -> Result<()> {
## Document metadata ## Document metadata
- **Created:** <AUTO_ON_INIT: YYYY-MM-DD> - **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
- **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD> - **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
- **Generated by:** wtismycode (cli) v0.1 - **Generated by:** archdoc (cli) v0.1
--- ---
@@ -134,10 +95,8 @@ pub fn init_project(root: &str, out: &str) -> Result<()> {
<!-- MANUAL:END --> <!-- MANUAL:END -->
"#; "#;
let architecture_md_content = architecture_md_content.replace("<PROJECT_NAME>", &project_name);
let architecture_md_path = std::path::Path::new(root).join("ARCHITECTURE.md"); let architecture_md_path = std::path::Path::new(root).join("ARCHITECTURE.md");
std::fs::write(&architecture_md_path, &architecture_md_content)?; std::fs::write(&architecture_md_path, architecture_md_content)?;
let config_toml_content = r#"[project] let config_toml_content = r#"[project]
root = "." root = "."
@@ -194,16 +153,16 @@ description_max_length = 200
[logging] [logging]
level = "info" level = "info"
file = "wtismycode.log" file = "archdoc.log"
format = "compact" format = "compact"
[caching] [caching]
enabled = true enabled = true
cache_dir = ".wtismycode/cache" cache_dir = ".archdoc/cache"
max_cache_age = "24h" max_cache_age = "24h"
"#; "#;
let config_toml_path = std::path::Path::new(root).join("wtismycode.toml"); let config_toml_path = std::path::Path::new(root).join("archdoc.toml");
if !config_toml_path.exists() { if !config_toml_path.exists() {
std::fs::write(&config_toml_path, config_toml_content)?; std::fs::write(&config_toml_path, config_toml_content)?;
} }

View File

@@ -1,10 +1,10 @@
use wtismycode_core::ProjectModel; use archdoc_core::ProjectModel;
use colored::Colorize; use colored::Colorize;
pub fn print_stats(model: &ProjectModel) { pub fn print_stats(model: &ProjectModel) {
println!(); println!();
println!("{}", "╔══════════════════════════════════════╗".cyan()); println!("{}", "╔══════════════════════════════════════╗".cyan());
println!("{}", "wtismycode project statistics ║".cyan().bold()); println!("{}", "archdoc project statistics ║".cyan().bold());
println!("{}", "╚══════════════════════════════════════╝".cyan()); println!("{}", "╚══════════════════════════════════════╝".cyan());
println!(); println!();
@@ -24,10 +24,10 @@ pub fn print_stats(model: &ProjectModel) {
let mut async_functions = 0; let mut async_functions = 0;
for symbol in model.symbols.values() { for symbol in model.symbols.values() {
match symbol.kind { match symbol.kind {
wtismycode_core::model::SymbolKind::Function => functions += 1, archdoc_core::model::SymbolKind::Function => functions += 1,
wtismycode_core::model::SymbolKind::Method => methods += 1, archdoc_core::model::SymbolKind::Method => methods += 1,
wtismycode_core::model::SymbolKind::Class => classes += 1, archdoc_core::model::SymbolKind::Class => classes += 1,
wtismycode_core::model::SymbolKind::AsyncFunction => async_functions += 1, archdoc_core::model::SymbolKind::AsyncFunction => async_functions += 1,
} }
} }
println!("{}", "Symbol breakdown".bold().underline()); println!("{}", "Symbol breakdown".bold().underline());

View File

@@ -5,7 +5,7 @@ use clap::{Parser, Subcommand};
use anyhow::Result; use anyhow::Result;
#[derive(Parser)] #[derive(Parser)]
#[command(name = "wtismycode")] #[command(name = "archdoc")]
#[command(about = "Generate architecture documentation for Python projects")] #[command(about = "Generate architecture documentation for Python projects")]
#[command(version = "0.1.0")] #[command(version = "0.1.0")]
pub struct Cli { pub struct Cli {
@@ -19,7 +19,7 @@ pub struct Cli {
#[derive(Subcommand)] #[derive(Subcommand)]
enum Commands { enum Commands {
/// Initialize wtismycode in the project /// Initialize archdoc in the project
Init { Init {
#[arg(short, long, default_value = ".")] #[arg(short, long, default_value = ".")]
root: String, root: String,
@@ -32,27 +32,24 @@ enum Commands {
root: String, root: String,
#[arg(short, long, default_value = "docs/architecture")] #[arg(short, long, default_value = "docs/architecture")]
out: String, out: String,
#[arg(short, long, default_value = "wtismycode.toml")] #[arg(short, long, default_value = "archdoc.toml")]
config: String, config: String,
/// Show what would be generated without writing files /// Show what would be generated without writing files
#[arg(long)] #[arg(long)]
dry_run: bool, dry_run: bool,
/// Skip PyPI API lookups, use only built-in dictionary
#[arg(long)]
offline: bool,
}, },
/// Check if documentation is up to date /// Check if documentation is up to date
Check { Check {
#[arg(short, long, default_value = ".")] #[arg(short, long, default_value = ".")]
root: String, root: String,
#[arg(short, long, default_value = "wtismycode.toml")] #[arg(short, long, default_value = "archdoc.toml")]
config: String, config: String,
}, },
/// Show project statistics /// Show project statistics
Stats { Stats {
#[arg(short, long, default_value = ".")] #[arg(short, long, default_value = ".")]
root: String, root: String,
#[arg(short, long, default_value = "wtismycode.toml")] #[arg(short, long, default_value = "archdoc.toml")]
config: String, config: String,
}, },
} }
@@ -64,9 +61,9 @@ fn main() -> Result<()> {
Commands::Init { root, out } => { Commands::Init { root, out } => {
commands::init::init_project(root, out)?; commands::init::init_project(root, out)?;
} }
Commands::Generate { root, out, config, dry_run, offline } => { Commands::Generate { root, out, config, dry_run } => {
let config = commands::generate::load_config(config)?; let config = commands::generate::load_config(config)?;
let model = commands::generate::analyze_project_with_options(root, &config, *offline)?; let model = commands::generate::analyze_project(root, &config)?;
if *dry_run { if *dry_run {
commands::generate::dry_run_docs(&model, out, &config)?; commands::generate::dry_run_docs(&model, out, &config)?;
} else { } else {

View File

@@ -1,7 +1,7 @@
//! Colored output helpers and filename utilities for WTIsMyCode CLI //! Colored output helpers and filename utilities for ArchDoc CLI
use colored::Colorize; use colored::Colorize;
use wtismycode_core::ProjectModel; use archdoc_core::ProjectModel;
/// Sanitize a file path into a safe filename for docs. /// Sanitize a file path into a safe filename for docs.
/// Removes `./` prefix, replaces `/` with `__`. /// Removes `./` prefix, replaces `/` with `__`.
@@ -19,14 +19,17 @@ pub fn print_generate_summary(model: &ProjectModel) {
println!(" {} {}", "Edges:".bold(), println!(" {} {}", "Edges:".bold(),
model.edges.module_import_edges.len() + model.edges.symbol_call_edges.len()); model.edges.module_import_edges.len() + model.edges.symbol_call_edges.len());
if !model.classified_integrations.is_empty() { let integrations: Vec<&str> = {
let cats: Vec<String> = model.classified_integrations.iter() let mut v = Vec::new();
.filter(|(_, pkgs)| !pkgs.is_empty()) if model.symbols.values().any(|s| s.integrations_flags.http) { v.push("HTTP"); }
.map(|(cat, pkgs)| format!("{} ({})", cat, pkgs.join(", "))) if model.symbols.values().any(|s| s.integrations_flags.db) { v.push("DB"); }
.collect(); if model.symbols.values().any(|s| s.integrations_flags.queue) { v.push("Queue"); }
if !cats.is_empty() { if model.symbols.values().any(|s| s.integrations_flags.storage) { v.push("Storage"); }
println!(" {} {}", "Integrations:".bold(), cats.join(" | ").yellow()); if model.symbols.values().any(|s| s.integrations_flags.ai) { v.push("AI/ML"); }
} v
};
if !integrations.is_empty() {
println!(" {} {}", "Integrations:".bold(), integrations.join(", ").yellow());
} }
println!("{}", "─────────────────────────────────────".dimmed()); println!("{}", "─────────────────────────────────────".dimmed());
} }

View File

@@ -1,5 +1,5 @@
[package] [package]
name = "wtismycode-core" name = "archdoc-core"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
@@ -16,5 +16,3 @@ rustpython-parser = "0.4"
rustpython-ast = "0.4" rustpython-ast = "0.4"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
tempfile = "3.10" tempfile = "3.10"
ureq = "3"
lazy_static = "1.4"

View File

@@ -1,10 +1,10 @@
//! Caching module for WTIsMyCode //! Caching module for ArchDoc
//! //!
//! This module provides caching functionality to speed up repeated analysis //! This module provides caching functionality to speed up repeated analysis
//! by storing parsed ASTs and analysis results. //! by storing parsed ASTs and analysis results.
use crate::config::Config; use crate::config::Config;
use crate::errors::WTIsMyCodeError; use crate::errors::ArchDocError;
use crate::model::ParsedModule; use crate::model::ParsedModule;
use std::path::Path; use std::path::Path;
use std::fs; use std::fs;
@@ -39,7 +39,7 @@ impl CacheManager {
} }
/// Get cached parsed module if available and not expired /// Get cached parsed module if available and not expired
pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, WTIsMyCodeError> { pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, ArchDocError> {
if !self.config.caching.enabled { if !self.config.caching.enabled {
return Ok(None); return Ok(None);
} }
@@ -53,10 +53,10 @@ impl CacheManager {
// Read cache file // Read cache file
let content = fs::read_to_string(&cache_file) let content = fs::read_to_string(&cache_file)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let cache_entry: CacheEntry = serde_json::from_str(&content) let cache_entry: CacheEntry = serde_json::from_str(&content)
.map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?; .map_err(|e| ArchDocError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
// Check if cache is expired // Check if cache is expired
let now = Utc::now(); let now = Utc::now();
@@ -73,10 +73,10 @@ impl CacheManager {
// Check if source file has been modified since caching // Check if source file has been modified since caching
let metadata = fs::metadata(file_path) let metadata = fs::metadata(file_path)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let modified_time = metadata.modified() let modified_time = metadata.modified()
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let modified_time: DateTime<Utc> = modified_time.into(); let modified_time: DateTime<Utc> = modified_time.into();
@@ -90,7 +90,7 @@ impl CacheManager {
} }
/// Store parsed module in cache /// Store parsed module in cache
pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), WTIsMyCodeError> { pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), ArchDocError> {
if !self.config.caching.enabled { if !self.config.caching.enabled {
return Ok(()); return Ok(());
} }
@@ -100,10 +100,10 @@ impl CacheManager {
// Get file modification time // Get file modification time
let metadata = fs::metadata(file_path) let metadata = fs::metadata(file_path)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let modified_time = metadata.modified() let modified_time = metadata.modified()
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let modified_time: DateTime<Utc> = modified_time.into(); let modified_time: DateTime<Utc> = modified_time.into();
@@ -114,10 +114,10 @@ impl CacheManager {
}; };
let content = serde_json::to_string(&cache_entry) let content = serde_json::to_string(&cache_entry)
.map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?; .map_err(|e| ArchDocError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
fs::write(&cache_file, content) fs::write(&cache_file, content)
.map_err(WTIsMyCodeError::Io) .map_err(ArchDocError::Io)
} }
/// Generate cache key for a file path /// Generate cache key for a file path
@@ -133,7 +133,7 @@ impl CacheManager {
} }
/// Parse duration string like "24h" or "7d" into seconds /// Parse duration string like "24h" or "7d" into seconds
fn parse_duration(&self, duration_str: &str) -> Result<u64, WTIsMyCodeError> { fn parse_duration(&self, duration_str: &str) -> Result<u64, ArchDocError> {
if duration_str.is_empty() { if duration_str.is_empty() {
return Ok(0); return Ok(0);
} }
@@ -141,26 +141,26 @@ impl CacheManager {
let chars: Vec<char> = duration_str.chars().collect(); let chars: Vec<char> = duration_str.chars().collect();
let (number_str, unit) = chars.split_at(chars.len() - 1); let (number_str, unit) = chars.split_at(chars.len() - 1);
let number: u64 = number_str.iter().collect::<String>().parse() let number: u64 = number_str.iter().collect::<String>().parse()
.map_err(|_| WTIsMyCodeError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?; .map_err(|_| ArchDocError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?;
match unit[0] { match unit[0] {
's' => Ok(number), // seconds 's' => Ok(number), // seconds
'm' => Ok(number * 60), // minutes 'm' => Ok(number * 60), // minutes
'h' => Ok(number * 3600), // hours 'h' => Ok(number * 3600), // hours
'd' => Ok(number * 86400), // days 'd' => Ok(number * 86400), // days
_ => Err(WTIsMyCodeError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))), _ => Err(ArchDocError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))),
} }
} }
/// Clear all cache entries /// Clear all cache entries
pub fn clear_cache(&self) -> Result<(), WTIsMyCodeError> { pub fn clear_cache(&self) -> Result<(), ArchDocError> {
if Path::new(&self.cache_dir).exists() { if Path::new(&self.cache_dir).exists() {
fs::remove_dir_all(&self.cache_dir) fs::remove_dir_all(&self.cache_dir)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
// Recreate cache directory // Recreate cache directory
fs::create_dir_all(&self.cache_dir) fs::create_dir_all(&self.cache_dir)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
} }
Ok(()) Ok(())
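For reference, the `max_cache_age` strings handled by `parse_duration` above map to seconds as in the standalone restatement below; it is illustrative only, not the crate's API, and mirrors the units actually matched above (`s`, `m`, `h`, `d`):

```rust
/// Illustrative only: "30s" -> 30, "45m" -> 2700, "24h" -> 86400, "7d" -> 604800.
fn seconds(spec: &str) -> Option<u64> {
    if spec.len() < 2 {
        return None;
    }
    let (num, unit) = spec.split_at(spec.len() - 1);
    let n: u64 = num.parse().ok()?;
    Some(match unit {
        "s" => n,
        "m" => n * 60,
        "h" => n * 3_600,
        "d" => n * 86_400,
        _ => return None,
    })
}
```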

View File

@@ -1,10 +1,10 @@
//! Configuration management for WTIsMyCode //! Configuration management for ArchDoc
//! //!
//! This module handles loading and validating the wtismycode.toml configuration file. //! This module handles loading and validating the archdoc.toml configuration file.
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::Path; use std::path::Path;
use crate::errors::WTIsMyCodeError; use crate::errors::ArchDocError;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)] #[derive(Default)]
@@ -383,7 +383,7 @@ fn default_log_level() -> String {
} }
fn default_log_file() -> String { fn default_log_file() -> String {
"wtismycode.log".to_string() "archdoc.log".to_string()
} }
fn default_log_format() -> String { fn default_log_format() -> String {
@@ -415,7 +415,7 @@ fn default_caching_enabled() -> bool {
} }
fn default_cache_dir() -> String { fn default_cache_dir() -> String {
".wtismycode/cache".to_string() ".archdoc/cache".to_string()
} }
fn default_max_cache_age() -> String { fn default_max_cache_age() -> String {
@@ -426,17 +426,17 @@ impl Config {
/// Validate the configuration for correctness. /// Validate the configuration for correctness.
/// ///
/// Checks that paths exist, values are parseable, and settings are sensible. /// Checks that paths exist, values are parseable, and settings are sensible.
pub fn validate(&self) -> Result<(), WTIsMyCodeError> { pub fn validate(&self) -> Result<(), ArchDocError> {
// Check project.root exists and is a directory // Check project.root exists and is a directory
let root = Path::new(&self.project.root); let root = Path::new(&self.project.root);
if !root.exists() { if !root.exists() {
return Err(WTIsMyCodeError::ConfigError(format!( return Err(ArchDocError::ConfigError(format!(
"project.root '{}' does not exist", "project.root '{}' does not exist",
self.project.root self.project.root
))); )));
} }
if !root.is_dir() { if !root.is_dir() {
return Err(WTIsMyCodeError::ConfigError(format!( return Err(ArchDocError::ConfigError(format!(
"project.root '{}' is not a directory", "project.root '{}' is not a directory",
self.project.root self.project.root
))); )));
@@ -444,7 +444,7 @@ impl Config {
// Check language is python // Check language is python
if self.project.language != "python" { if self.project.language != "python" {
return Err(WTIsMyCodeError::ConfigError(format!( return Err(ArchDocError::ConfigError(format!(
"project.language '{}' is not supported. Only 'python' is currently supported", "project.language '{}' is not supported. Only 'python' is currently supported",
self.project.language self.project.language
))); )));
@@ -452,7 +452,7 @@ impl Config {
// Check scan.include is not empty // Check scan.include is not empty
if self.scan.include.is_empty() { if self.scan.include.is_empty() {
return Err(WTIsMyCodeError::ConfigError( return Err(ArchDocError::ConfigError(
"scan.include must not be empty — at least one directory must be specified".to_string(), "scan.include must not be empty — at least one directory must be specified".to_string(),
)); ));
} }
@@ -461,7 +461,7 @@ impl Config {
for src_root in &self.python.src_roots { for src_root in &self.python.src_roots {
let path = root.join(src_root); let path = root.join(src_root);
if !path.exists() { if !path.exists() {
return Err(WTIsMyCodeError::ConfigError(format!( return Err(ArchDocError::ConfigError(format!(
"python.src_roots entry '{}' does not exist (resolved to '{}')", "python.src_roots entry '{}' does not exist (resolved to '{}')",
src_root, src_root,
path.display() path.display()
@@ -471,7 +471,7 @@ impl Config {
// Parse max_cache_age // Parse max_cache_age
parse_duration(&self.caching.max_cache_age).map_err(|e| { parse_duration(&self.caching.max_cache_age).map_err(|e| {
WTIsMyCodeError::ConfigError(format!( ArchDocError::ConfigError(format!(
"caching.max_cache_age '{}' is not valid: {}. Use formats like '24h', '7d', '30m'", "caching.max_cache_age '{}' is not valid: {}. Use formats like '24h', '7d', '30m'",
self.caching.max_cache_age, e self.caching.max_cache_age, e
)) ))
@@ -479,7 +479,7 @@ impl Config {
// Parse max_file_size // Parse max_file_size
parse_file_size(&self.scan.max_file_size).map_err(|e| { parse_file_size(&self.scan.max_file_size).map_err(|e| {
WTIsMyCodeError::ConfigError(format!( ArchDocError::ConfigError(format!(
"scan.max_file_size '{}' is not valid: {}. Use formats like '10MB', '1GB', '500KB'", "scan.max_file_size '{}' is not valid: {}. Use formats like '10MB', '1GB', '500KB'",
self.scan.max_file_size, e self.scan.max_file_size, e
)) ))
@@ -489,21 +489,21 @@ impl Config {
} }
/// Load configuration from a TOML file /// Load configuration from a TOML file
pub fn load_from_file(path: &Path) -> Result<Self, WTIsMyCodeError> { pub fn load_from_file(path: &Path) -> Result<Self, ArchDocError> {
let content = std::fs::read_to_string(path) let content = std::fs::read_to_string(path)
.map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to read config file: {}", e)))?; .map_err(|e| ArchDocError::ConfigError(format!("Failed to read config file: {}", e)))?;
toml::from_str(&content) toml::from_str(&content)
.map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to parse config file: {}", e))) .map_err(|e| ArchDocError::ConfigError(format!("Failed to parse config file: {}", e)))
} }
/// Save configuration to a TOML file /// Save configuration to a TOML file
pub fn save_to_file(&self, path: &Path) -> Result<(), WTIsMyCodeError> { pub fn save_to_file(&self, path: &Path) -> Result<(), ArchDocError> {
let content = toml::to_string_pretty(self) let content = toml::to_string_pretty(self)
.map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to serialize config: {}", e)))?; .map_err(|e| ArchDocError::ConfigError(format!("Failed to serialize config: {}", e)))?;
std::fs::write(path, content) std::fs::write(path, content)
.map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to write config file: {}", e))) .map_err(|e| ArchDocError::ConfigError(format!("Failed to write config file: {}", e)))
} }
} }
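Based on the signatures shown above (`load_from_file` and `validate`, both returning `ArchDocError` on failure, re-exported from the core crate per `lib.rs` further down), a minimal usage sketch is:

```rust
use std::path::Path;
use archdoc_core::{ArchDocError, Config};

// Load archdoc.toml and fail fast on an invalid configuration.
fn load_validated(path: &str) -> Result<Config, ArchDocError> {
    let config = Config::load_from_file(Path::new(path))?;
    config.validate()?;
    Ok(config)
}
```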

View File

@@ -1,7 +1,7 @@
use thiserror::Error; use thiserror::Error;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum WTIsMyCodeError { pub enum ArchDocError {
#[error("IO error: {0}")] #[error("IO error: {0}")]
Io(#[from] std::io::Error), Io(#[from] std::io::Error),

View File

@@ -1,4 +1,4 @@
//! WTIsMyCode Core Library //! ArchDoc Core Library
//! //!
//! This crate provides the core functionality for analyzing Python projects //! This crate provides the core functionality for analyzing Python projects
//! and generating architecture documentation. //! and generating architecture documentation.
@@ -13,10 +13,9 @@ pub mod renderer;
pub mod writer; pub mod writer;
pub mod cache; pub mod cache;
pub mod cycle_detector; pub mod cycle_detector;
pub mod package_classifier;
// Re-export commonly used types // Re-export commonly used types
pub use errors::WTIsMyCodeError; pub use errors::ArchDocError;
pub use config::Config; pub use config::Config;
pub use model::ProjectModel; pub use model::ProjectModel;

View File

@@ -1,4 +1,4 @@
//! Intermediate Representation (IR) for WTIsMyCode //! Intermediate Representation (IR) for ArchDoc
//! //!
//! This module defines the data structures that represent the analyzed Python project //! This module defines the data structures that represent the analyzed Python project
//! and are used for generating documentation. //! and are used for generating documentation.
@@ -12,9 +12,6 @@ pub struct ProjectModel {
pub files: HashMap<String, FileDoc>, pub files: HashMap<String, FileDoc>,
pub symbols: HashMap<String, Symbol>, pub symbols: HashMap<String, Symbol>,
pub edges: Edges, pub edges: Edges,
/// Classified integrations by category (e.g. "HTTP" -> ["fastapi", "requests"])
#[serde(default)]
pub classified_integrations: HashMap<String, Vec<String>>,
} }
impl ProjectModel { impl ProjectModel {
@@ -24,7 +21,6 @@ impl ProjectModel {
files: HashMap::new(), files: HashMap::new(),
symbols: HashMap::new(), symbols: HashMap::new(),
edges: Edges::new(), edges: Edges::new(),
classified_integrations: HashMap::new(),
} }
} }
} }

View File

@@ -1,11 +1,11 @@
//! Python AST analyzer for WTIsMyCode //! Python AST analyzer for ArchDoc
//! //!
//! This module handles parsing Python files using AST and extracting //! This module handles parsing Python files using AST and extracting
//! imports, definitions, and calls. //! imports, definitions, and calls.
use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc}; use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc};
use crate::config::Config; use crate::config::Config;
use crate::errors::WTIsMyCodeError; use crate::errors::ArchDocError;
use crate::cache::CacheManager; use crate::cache::CacheManager;
use std::path::Path; use std::path::Path;
use std::fs; use std::fs;
@@ -15,31 +15,25 @@ use rustpython_ast::{Stmt, Expr, Ranged};
pub struct PythonAnalyzer { pub struct PythonAnalyzer {
config: Config, config: Config,
cache_manager: CacheManager, cache_manager: CacheManager,
offline: bool,
} }
impl PythonAnalyzer { impl PythonAnalyzer {
pub fn new(config: Config) -> Self { pub fn new(config: Config) -> Self {
let cache_manager = CacheManager::new(config.clone()); let cache_manager = CacheManager::new(config.clone());
Self { config, cache_manager, offline: false } Self { config, cache_manager }
} }
pub fn new_with_options(config: Config, offline: bool) -> Self { pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, ArchDocError> {
let cache_manager = CacheManager::new(config.clone());
Self { config, cache_manager, offline }
}
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, WTIsMyCodeError> {
// Try to get from cache first // Try to get from cache first
if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? { if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? {
return Ok(cached_module); return Ok(cached_module);
} }
let code = fs::read_to_string(file_path) let code = fs::read_to_string(file_path)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>")) let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>"))
.map_err(|e| WTIsMyCodeError::ParseError { .map_err(|e| ArchDocError::ParseError {
file: file_path.to_string_lossy().to_string(), file: file_path.to_string_lossy().to_string(),
line: 0, line: 0,
message: format!("Failed to parse: {}", e), message: format!("Failed to parse: {}", e),
@@ -595,7 +589,7 @@ impl PythonAnalyzer {
normalized.to_string() normalized.to_string()
} }
pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, WTIsMyCodeError> { pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, ArchDocError> {
let mut project_model = ProjectModel::new(); let mut project_model = ProjectModel::new();
// Build import alias map for call resolution // Build import alias map for call resolution
@@ -664,9 +658,9 @@ impl PythonAnalyzer {
let doc_summary = if is_init { let doc_summary = if is_init {
parsed_module.file_docstring.clone() parsed_module.file_docstring.clone()
} else { } else {
// For non-init files, use file docstring first, then check __init__.py // For non-init files, check if there's an __init__.py docstring for this module's parent
parsed_module.file_docstring.clone() init_docstrings.get(&module_id).cloned()
.or_else(|| init_docstrings.get(&module_id).cloned()) .or_else(|| parsed_module.file_docstring.clone())
}; };
let module = Module { let module = Module {
@@ -685,84 +679,6 @@ impl PythonAnalyzer {
self.resolve_call_types(&mut project_model, modules, &import_aliases); self.resolve_call_types(&mut project_model, modules, &import_aliases);
self.compute_metrics(&mut project_model)?; self.compute_metrics(&mut project_model)?;
// Classify all imports using PackageClassifier
// Collect all known project module names to filter from integrations
let project_modules: std::collections::HashSet<String> = modules.iter()
.map(|m| {
let mod_path = self.compute_module_path(&m.path);
mod_path.split('.').next().unwrap_or(&mod_path).to_lowercase()
})
.collect();
let all_imports: Vec<String> = modules.iter()
.flat_map(|m| m.imports.iter().map(|i| i.module_name.clone()))
.filter(|import| {
let top = import.split('.').next().unwrap_or(import).to_lowercase();
// Skip imports that are project's own modules
!project_modules.contains(&top)
})
.collect();
let cache_dir = if self.config.caching.enabled {
Some(self.config.caching.cache_dir.clone())
} else {
None
};
let mut classifier = crate::package_classifier::PackageClassifier::new(self.offline, cache_dir);
// Add user overrides from config integration_patterns
if !self.config.analysis.integration_patterns.is_empty() {
let overrides: Vec<(String, Vec<String>)> = self.config.analysis.integration_patterns.iter()
.map(|p| (p.type_.clone(), p.patterns.clone()))
.collect();
classifier.add_user_overrides(&overrides);
}
let classified = classifier.classify_all(&all_imports);
classifier.save_cache();
project_model.classified_integrations = classified.by_category;
// Also update per-symbol integration flags based on classification
for parsed_module in modules {
let module_id = self.compute_module_path(&parsed_module.path);
let import_names: Vec<String> = parsed_module.imports.iter()
.map(|i| i.module_name.clone())
.collect();
let mut flags = crate::model::IntegrationFlags {
http: false, db: false, queue: false, storage: false, ai: false,
};
for import in &import_names {
let top = import.split('.').next().unwrap_or(import).to_lowercase().replace('-', "_");
{
let cat = crate::package_classifier::PackageClassifier::new(true, None).classify(&top);
match cat {
crate::package_classifier::PackageCategory::Http => flags.http = true,
crate::package_classifier::PackageCategory::Database => flags.db = true,
crate::package_classifier::PackageCategory::Queue => flags.queue = true,
crate::package_classifier::PackageCategory::Storage => flags.storage = true,
crate::package_classifier::PackageCategory::AiMl => flags.ai = true,
_ => {}
}
}
}
// Apply to all symbols in this module
if let Some(module) = project_model.modules.get(&module_id) {
for sym_id in &module.symbols {
if let Some(sym) = project_model.symbols.get_mut(sym_id) {
sym.integrations_flags.http |= flags.http;
sym.integrations_flags.db |= flags.db;
sym.integrations_flags.queue |= flags.queue;
sym.integrations_flags.storage |= flags.storage;
sym.integrations_flags.ai |= flags.ai;
}
}
}
}
Ok(project_model) Ok(project_model)
} }
@@ -813,7 +729,7 @@ impl PythonAnalyzer {
} }
} }
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), WTIsMyCodeError> { fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), ArchDocError> {
// Collect known internal module IDs // Collect known internal module IDs
let known_modules: std::collections::HashSet<String> = project_model.modules.keys().cloned().collect(); let known_modules: std::collections::HashSet<String> = project_model.modules.keys().cloned().collect();
@@ -883,26 +799,7 @@ impl PythonAnalyzer {
Ok(()) Ok(())
} }
/// Check if a class symbol is a simple data container (dataclass-like). fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), ArchDocError> {
/// A class is considered a dataclass if it has ≤2 methods (typically __init__ and __repr__/__str__).
fn is_dataclass_like(symbol_id: &str, project_model: &ProjectModel) -> bool {
let symbol = match project_model.symbols.get(symbol_id) {
Some(s) => s,
None => return false,
};
if symbol.kind != crate::model::SymbolKind::Class {
return false;
}
// Count methods belonging to this class
let class_name = &symbol.qualname;
let method_prefix = format!("{}::{}.", symbol.module_id, class_name);
let method_count = project_model.symbols.values()
.filter(|s| s.kind == crate::model::SymbolKind::Method && s.id.starts_with(&method_prefix))
.count();
method_count <= 2
}
fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), WTIsMyCodeError> {
// Collect fan-in/fan-out first to avoid borrow issues // Collect fan-in/fan-out first to avoid borrow issues
let mut metrics: std::collections::HashMap<String, (usize, usize)> = std::collections::HashMap::new(); let mut metrics: std::collections::HashMap<String, (usize, usize)> = std::collections::HashMap::new();
@@ -918,20 +815,12 @@ impl PythonAnalyzer {
metrics.insert(symbol_id.clone(), (fan_in, fan_out)); metrics.insert(symbol_id.clone(), (fan_in, fan_out));
} }
// Pre-compute which symbols are dataclass-like (need immutable borrow)
let dataclass_ids: std::collections::HashSet<String> = metrics.keys()
.filter(|id| Self::is_dataclass_like(id, project_model))
.cloned()
.collect();
for (symbol_id, (fan_in, fan_out)) in &metrics { for (symbol_id, (fan_in, fan_out)) in &metrics {
if let Some(symbol) = project_model.symbols.get_mut(symbol_id) { if let Some(symbol) = project_model.symbols.get_mut(symbol_id) {
symbol.metrics.fan_in = *fan_in; symbol.metrics.fan_in = *fan_in;
symbol.metrics.fan_out = *fan_out; symbol.metrics.fan_out = *fan_out;
// Don't mark dataclass-like classes as critical — they're just data containers symbol.metrics.is_critical = *fan_in > self.config.thresholds.critical_fan_in
let exceeds_threshold = *fan_in > self.config.thresholds.critical_fan_in
|| *fan_out > self.config.thresholds.critical_fan_out; || *fan_out > self.config.thresholds.critical_fan_out;
symbol.metrics.is_critical = exceeds_threshold && !dataclass_ids.contains(symbol_id);
} }
} }
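After this change the criticality flag is a plain threshold test — the removed dataclass-like exemption no longer applies. With the default thresholds from the README table (`critical_fan_in = 20`, `critical_fan_out = 20`), the rule behaves as in this illustrative snippet:

```rust
fn main() {
    // Critical iff fan-in or fan-out exceeds its configured threshold (defaults: 20/20).
    let is_critical = |fan_in: usize, fan_out: usize| fan_in > 20 || fan_out > 20;
    assert!(is_critical(25, 3));  // heavily depended-upon symbol
    assert!(!is_critical(12, 8)); // ordinary symbol
}
```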

View File

@@ -1,4 +1,4 @@
//! Markdown renderer for WTIsMyCode //! Markdown renderer for ArchDoc
//! //!
//! This module handles generating Markdown documentation from the project model //! This module handles generating Markdown documentation from the project model
//! using templates. //! using templates.
@@ -65,7 +65,7 @@ impl Renderer {
## Document metadata ## Document metadata
- **Created:** {{{created_date}}} - **Created:** {{{created_date}}}
- **Updated:** {{{updated_date}}} - **Updated:** {{{updated_date}}}
- **Generated by:** wtismycode (cli) v0.1 - **Generated by:** archdoc (cli) v0.1
--- ---
@@ -73,12 +73,29 @@ impl Renderer {
<!-- ARCHDOC:BEGIN section=integrations --> <!-- ARCHDOC:BEGIN section=integrations -->
> Generated. Do not edit inside this block. > Generated. Do not edit inside this block.
{{#each integration_sections}} ### Database Integrations
### {{{category}}} {{#each db_integrations}}
{{#each packages}}
- {{{this}}} - {{{this}}}
{{/each}} {{/each}}
### HTTP/API Integrations
{{#each http_integrations}}
- {{{this}}}
{{/each}}
### Queue Integrations
{{#each queue_integrations}}
- {{{this}}}
{{/each}}
### Storage Integrations
{{#each storage_integrations}}
- {{{this}}}
{{/each}}
### AI/ML Integrations
{{#each ai_integrations}}
- {{{this}}}
{{/each}} {{/each}}
<!-- ARCHDOC:END section=integrations --> <!-- ARCHDOC:END section=integrations -->
@@ -241,17 +258,28 @@ impl Renderer {
} }
pub fn render_architecture_md(&self, model: &ProjectModel, config: Option<&Config>) -> Result<String, anyhow::Error> { pub fn render_architecture_md(&self, model: &ProjectModel, config: Option<&Config>) -> Result<String, anyhow::Error> {
// Build integration sections from classified_integrations // Collect integration information
let category_order = ["HTTP", "Database", "Queue", "Storage", "AI/ML", "Auth", "Testing", "Logging", "Internal", "Third-party"]; let mut db_integrations = Vec::new();
let mut integration_sections: Vec<serde_json::Value> = Vec::new(); let mut http_integrations = Vec::new();
for cat_name in &category_order { let mut queue_integrations = Vec::new();
if let Some(pkgs) = model.classified_integrations.get(*cat_name) { let mut storage_integrations = Vec::new();
if !pkgs.is_empty() { let mut ai_integrations = Vec::new();
integration_sections.push(serde_json::json!({
"category": cat_name, for (symbol_id, symbol) in &model.symbols {
"packages": pkgs, if symbol.integrations_flags.db {
})); db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
} }
if symbol.integrations_flags.http {
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.queue {
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.storage {
storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.ai {
ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
} }
} }
@@ -391,7 +419,11 @@ impl Renderer {
"key_decisions": ["<FILL_MANUALLY>"], "key_decisions": ["<FILL_MANUALLY>"],
"non_goals": ["<FILL_MANUALLY>"], "non_goals": ["<FILL_MANUALLY>"],
"change_notes": ["<FILL_MANUALLY>"], "change_notes": ["<FILL_MANUALLY>"],
"integration_sections": integration_sections, "db_integrations": db_integrations,
"http_integrations": http_integrations,
"queue_integrations": queue_integrations,
"storage_integrations": storage_integrations,
"ai_integrations": ai_integrations,
"rails_summary": "\n\nNo tooling information available.\n", "rails_summary": "\n\nNo tooling information available.\n",
"layout_items": layout_items, "layout_items": layout_items,
"modules": modules_list, "modules": modules_list,
@@ -547,31 +579,66 @@ impl Renderer {
} }
pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> { pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
let category_order = ["HTTP", "Database", "Queue", "Storage", "AI/ML", "Auth", "Testing", "Logging", "Internal", "Third-party"]; // Collect integration information
let mut integration_sections: Vec<serde_json::Value> = Vec::new(); let mut db_integrations = Vec::new();
for cat_name in &category_order { let mut http_integrations = Vec::new();
if let Some(pkgs) = model.classified_integrations.get(*cat_name) { let mut queue_integrations = Vec::new();
if !pkgs.is_empty() { let mut storage_integrations = Vec::new();
integration_sections.push(serde_json::json!({ let mut ai_integrations = Vec::new();
"category": cat_name,
"packages": pkgs, for (symbol_id, symbol) in &model.symbols {
})); if symbol.integrations_flags.db {
db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
} }
if symbol.integrations_flags.http {
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.queue {
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.storage {
storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
}
if symbol.integrations_flags.ai {
ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
} }
} }
// Prepare data for integrations section
let data = serde_json::json!({ let data = serde_json::json!({
"integration_sections": integration_sections, "db_integrations": db_integrations,
"http_integrations": http_integrations,
"queue_integrations": queue_integrations,
"storage_integrations": storage_integrations,
"ai_integrations": ai_integrations,
}); });
// Create a smaller template just for the integrations section
let integrations_template = r#" let integrations_template = r#"
{{#each integration_sections}} ### Database Integrations
### {{{category}}} {{#each db_integrations}}
{{#each packages}}
- {{{this}}} - {{{this}}}
{{/each}} {{/each}}
### HTTP/API Integrations
{{#each http_integrations}}
- {{{this}}}
{{/each}}
### Queue Integrations
{{#each queue_integrations}}
- {{{this}}}
{{/each}}
### Storage Integrations
{{#each storage_integrations}}
- {{{this}}}
{{/each}}
### AI/ML Integrations
{{#each ai_integrations}}
- {{{this}}}
{{/each}} {{/each}}
"#; "#;

View File

@@ -1,10 +1,10 @@
//! File scanner for WTIsMyCode //! File scanner for ArchDoc
//! //!
//! This module handles scanning the file system for Python files according to //! This module handles scanning the file system for Python files according to
//! the configuration settings. //! the configuration settings.
use crate::config::Config; use crate::config::Config;
use crate::errors::WTIsMyCodeError; use crate::errors::ArchDocError;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use walkdir::WalkDir; use walkdir::WalkDir;
@@ -17,17 +17,17 @@ impl FileScanner {
Self { config } Self { config }
} }
pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, WTIsMyCodeError> { pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, ArchDocError> {
// Check if root directory exists // Check if root directory exists
if !root.exists() { if !root.exists() {
return Err(WTIsMyCodeError::Io(std::io::Error::new( return Err(ArchDocError::Io(std::io::Error::new(
std::io::ErrorKind::NotFound, std::io::ErrorKind::NotFound,
format!("Root directory does not exist: {}", root.display()) format!("Root directory does not exist: {}", root.display())
))); )));
} }
if !root.is_dir() { if !root.is_dir() {
return Err(WTIsMyCodeError::Io(std::io::Error::new( return Err(ArchDocError::Io(std::io::Error::new(
std::io::ErrorKind::InvalidInput, std::io::ErrorKind::InvalidInput,
format!("Root path is not a directory: {}", root.display()) format!("Root path is not a directory: {}", root.display())
))); )));
@@ -41,7 +41,7 @@ impl FileScanner {
.into_iter() { .into_iter() {
let entry = entry.map_err(|e| { let entry = entry.map_err(|e| {
WTIsMyCodeError::Io(std::io::Error::other( ArchDocError::Io(std::io::Error::other(
format!("Failed to read directory entry: {}", e) format!("Failed to read directory entry: {}", e)
)) ))
})?; })?;
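The scanner hunk above only renames the error type; the walking logic itself is unchanged. A rough usage sketch of the same approach with the walkdir crate (already a dependency of archdoc-core), with the config-driven exclude rules and size limits left out for brevity:

```rust
use std::path::{Path, PathBuf};
use walkdir::WalkDir;

// Collect every *.py file under `root`, mirroring scan_python_files minus
// the exclude patterns handled by the real Config.
fn collect_python_files(root: &Path) -> std::io::Result<Vec<PathBuf>> {
    if !root.is_dir() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            format!("root is not a directory: {}", root.display()),
        ));
    }
    let mut files = Vec::new();
    for entry in WalkDir::new(root) {
        // Convert walkdir errors to io errors, as the scanner does.
        let entry = entry.map_err(|e| std::io::Error::other(e.to_string()))?;
        let path = entry.path();
        if path.extension().and_then(|e| e.to_str()) == Some("py") {
            files.push(path.to_path_buf());
        }
    }
    Ok(files)
}
```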

View File

@@ -1,9 +1,9 @@
//! Diff-aware file writer for WTIsMyCode //! Diff-aware file writer for ArchDoc
//! //!
//! This module handles writing generated documentation to files while preserving //! This module handles writing generated documentation to files while preserving
//! manual content and only updating generated sections. //! manual content and only updating generated sections.
use crate::errors::WTIsMyCodeError; use crate::errors::ArchDocError;
use std::path::Path; use std::path::Path;
use std::fs; use std::fs;
use chrono::Utc; use chrono::Utc;
@@ -42,17 +42,17 @@ impl DiffAwareWriter {
file_path: &Path, file_path: &Path,
generated_content: &str, generated_content: &str,
section_name: &str, section_name: &str,
) -> Result<(), WTIsMyCodeError> { ) -> Result<(), ArchDocError> {
// Read existing file // Read existing file
let existing_content = if file_path.exists() { let existing_content = if file_path.exists() {
fs::read_to_string(file_path) fs::read_to_string(file_path)
.map_err(WTIsMyCodeError::Io)? .map_err(ArchDocError::Io)?
} else { } else {
// Create new file with template // Create new file with template
let template_content = self.create_template_file(file_path, section_name)?; let template_content = self.create_template_file(file_path, section_name)?;
// Write template to file // Write template to file
fs::write(file_path, &template_content) fs::write(file_path, &template_content)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
template_content template_content
}; };
@@ -74,7 +74,7 @@ impl DiffAwareWriter {
if content_changed { if content_changed {
let updated_content = self.update_timestamp(new_content)?; let updated_content = self.update_timestamp(new_content)?;
fs::write(file_path, updated_content) fs::write(file_path, updated_content)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
} }
// If not changed, skip writing entirely // If not changed, skip writing entirely
} }
@@ -87,16 +87,16 @@ impl DiffAwareWriter {
file_path: &Path, file_path: &Path,
symbol_id: &str, symbol_id: &str,
generated_content: &str, generated_content: &str,
) -> Result<(), WTIsMyCodeError> { ) -> Result<(), ArchDocError> {
// Read existing file // Read existing file
let existing_content = if file_path.exists() { let existing_content = if file_path.exists() {
fs::read_to_string(file_path) fs::read_to_string(file_path)
.map_err(WTIsMyCodeError::Io)? .map_err(ArchDocError::Io)?
} else { } else {
// If file doesn't exist, create it with a basic template // If file doesn't exist, create it with a basic template
let template_content = self.create_template_file(file_path, "symbol")?; let template_content = self.create_template_file(file_path, "symbol")?;
fs::write(file_path, &template_content) fs::write(file_path, &template_content)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
template_content template_content
}; };
@@ -118,7 +118,7 @@ impl DiffAwareWriter {
if content_changed { if content_changed {
let updated_content = self.update_timestamp(new_content)?; let updated_content = self.update_timestamp(new_content)?;
fs::write(file_path, updated_content) fs::write(file_path, updated_content)
.map_err(WTIsMyCodeError::Io)?; .map_err(ArchDocError::Io)?;
} }
// If not changed, skip writing entirely // If not changed, skip writing entirely
} else { } else {
@@ -128,7 +128,7 @@ impl DiffAwareWriter {
Ok(()) Ok(())
} }
fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, WTIsMyCodeError> { fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, ArchDocError> {
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name); let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name);
let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name); let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name);
@@ -155,7 +155,7 @@ impl DiffAwareWriter {
Ok(markers) Ok(markers)
} }
fn find_symbol_markers(&self, content: &str, symbol_id: &str) -> Result<Vec<SymbolMarker>, WTIsMyCodeError> { fn find_symbol_markers(&self, content: &str, symbol_id: &str) -> Result<Vec<SymbolMarker>, ArchDocError> {
let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", symbol_id); let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", symbol_id);
let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", symbol_id); let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", symbol_id);
@@ -187,7 +187,7 @@ impl DiffAwareWriter {
content: &str, content: &str,
marker: &SectionMarker, marker: &SectionMarker,
new_content: &str, new_content: &str,
) -> Result<String, WTIsMyCodeError> { ) -> Result<String, ArchDocError> {
let before = &content[..marker.start_pos]; let before = &content[..marker.start_pos];
let after = &content[marker.end_pos..]; let after = &content[marker.end_pos..];
@@ -205,7 +205,7 @@ impl DiffAwareWriter {
content: &str, content: &str,
marker: &SymbolMarker, marker: &SymbolMarker,
new_content: &str, new_content: &str,
) -> Result<String, WTIsMyCodeError> { ) -> Result<String, ArchDocError> {
let before = &content[..marker.start_pos]; let before = &content[..marker.start_pos];
let after = &content[marker.end_pos..]; let after = &content[marker.end_pos..];
@@ -218,7 +218,7 @@ impl DiffAwareWriter {
)) ))
} }
fn update_timestamp(&self, content: String) -> Result<String, WTIsMyCodeError> { fn update_timestamp(&self, content: String) -> Result<String, ArchDocError> {
// Update the "Updated" field in the document metadata section // Update the "Updated" field in the document metadata section
// Find the metadata section and update the timestamp // Find the metadata section and update the timestamp
let today = Utc::now().format("%Y-%m-%d").to_string(); let today = Utc::now().format("%Y-%m-%d").to_string();
@@ -238,7 +238,7 @@ impl DiffAwareWriter {
Ok(updated_lines.join("\n")) Ok(updated_lines.join("\n"))
} }
fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, WTIsMyCodeError> { fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, ArchDocError> {
// Create file with appropriate template based on type // Create file with appropriate template based on type
match template_type { match template_type {
"architecture" => { "architecture" => {
@@ -261,7 +261,7 @@ impl DiffAwareWriter {
## Document metadata ## Document metadata
- **Created:** <AUTO_ON_INIT: YYYY-MM-DD> - **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
- **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD> - **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
- **Generated by:** wtismycode (cli) v0.1 - **Generated by:** archdoc (cli) v0.1
--- ---
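The writer hunks above again touch only the error type, but the marker-splice idea they operate on is easy to miss in the rename noise. A minimal std-only sketch, assuming a single well-formed BEGIN/END pair per section (the real find_section_markers collects and validates every pair):

```rust
// Replace the generated body between the ARCHDOC markers for `section`,
// leaving the markers and all manual content outside them untouched.
// Returns None if either marker is missing.
fn replace_section(content: &str, section: &str, new_body: &str) -> Option<String> {
    let begin = format!("<!-- ARCHDOC:BEGIN section={} -->", section);
    let end = format!("<!-- ARCHDOC:END section={} -->", section);
    let body_start = content.find(&begin)? + begin.len();
    let body_end = body_start + content[body_start..].find(&end)?;
    Some(format!(
        "{}\n{}\n{}",
        &content[..body_start],
        new_body.trim(),
        &content[body_end..]
    ))
}

fn main() {
    let doc = "intro\n<!-- ARCHDOC:BEGIN section=integrations -->\nold\n<!-- ARCHDOC:END section=integrations -->\noutro";
    let updated = replace_section(doc, "integrations", "- requests").unwrap();
    assert!(updated.contains("- requests"));
    assert!(!updated.contains("\nold\n"));
    assert!(updated.ends_with("outro"));
}
```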

View File

@@ -1,11 +1,11 @@
//! Caching tests for WTIsMyCode //! Caching tests for ArchDoc
//! //!
//! These tests verify that the caching functionality works correctly. //! These tests verify that the caching functionality works correctly.
use std::path::Path; use std::path::Path;
use std::fs; use std::fs;
use tempfile::TempDir; use tempfile::TempDir;
use wtismycode_core::{Config, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
#[test] #[test]
fn test_cache_store_and_retrieve() { fn test_cache_store_and_retrieve() {

View File

@@ -1,11 +1,11 @@
//! Enhanced analysis tests for WTIsMyCode //! Enhanced analysis tests for ArchDoc
//! //!
//! These tests verify that the enhanced analysis functionality works correctly //! These tests verify that the enhanced analysis functionality works correctly
//! with complex code that includes integrations, calls, and docstrings. //! with complex code that includes integrations, calls, and docstrings.
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
#[test] #[test]
fn test_enhanced_analysis_with_integrations() { fn test_enhanced_analysis_with_integrations() {
@@ -15,8 +15,8 @@ fn test_enhanced_analysis_with_integrations() {
// Try different paths for the config file // Try different paths for the config file
let possible_paths = [ let possible_paths = [
"tests/golden/test_project/wtismycode.toml", "tests/golden/test_project/archdoc.toml",
"../tests/golden/test_project/wtismycode.toml", "../tests/golden/test_project/archdoc.toml",
]; ];
let config_path = possible_paths.iter().find(|&path| { let config_path = possible_paths.iter().find(|&path| {
@@ -100,17 +100,17 @@ fn test_enhanced_analysis_with_integrations() {
// Check that we found the UserService class with DB integration // Check that we found the UserService class with DB integration
let user_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::UserService")); let user_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::UserService"));
assert!(user_service_symbol.is_some()); assert!(user_service_symbol.is_some());
assert_eq!(user_service_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class); assert_eq!(user_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
// Check that we found the NotificationService class with queue integration // Check that we found the NotificationService class with queue integration
let notification_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::NotificationService")); let notification_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::NotificationService"));
assert!(notification_service_symbol.is_some()); assert!(notification_service_symbol.is_some());
assert_eq!(notification_service_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class); assert_eq!(notification_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
// Check that we found the fetch_external_user_data function with HTTP integration // Check that we found the fetch_external_user_data function with HTTP integration
let fetch_external_user_data_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::fetch_external_user_data")); let fetch_external_user_data_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::fetch_external_user_data"));
assert!(fetch_external_user_data_symbol.is_some()); assert!(fetch_external_user_data_symbol.is_some());
assert_eq!(fetch_external_user_data_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Function); assert_eq!(fetch_external_user_data_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
// Check file imports // Check file imports
let mut found_advanced_file = false; let mut found_advanced_file = false;

View File

@@ -1,12 +1,12 @@
//! Error handling tests for WTIsMyCode //! Error handling tests for ArchDoc
//! //!
//! These tests verify that WTIsMyCode properly handles various error conditions //! These tests verify that ArchDoc properly handles various error conditions
//! and edge cases. //! and edge cases.
use std::path::Path; use std::path::Path;
use std::fs; use std::fs;
use tempfile::TempDir; use tempfile::TempDir;
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
#[test] #[test]
fn test_scanner_nonexistent_directory() { fn test_scanner_nonexistent_directory() {
@@ -19,7 +19,7 @@ fn test_scanner_nonexistent_directory() {
// Check that we get an IO error // Check that we get an IO error
match result.unwrap_err() { match result.unwrap_err() {
wtismycode_core::errors::WTIsMyCodeError::Io(_) => {}, archdoc_core::errors::ArchDocError::Io(_) => {},
_ => panic!("Expected IO error"), _ => panic!("Expected IO error"),
} }
} }
@@ -40,7 +40,7 @@ fn test_scanner_file_instead_of_directory() {
// Check that we get an IO error // Check that we get an IO error
match result.unwrap_err() { match result.unwrap_err() {
wtismycode_core::errors::WTIsMyCodeError::Io(_) => {}, archdoc_core::errors::ArchDocError::Io(_) => {},
_ => panic!("Expected IO error"), _ => panic!("Expected IO error"),
} }
} }
@@ -56,7 +56,7 @@ fn test_analyzer_nonexistent_file() {
// Check that we get an IO error // Check that we get an IO error
match result.unwrap_err() { match result.unwrap_err() {
wtismycode_core::errors::WTIsMyCodeError::Io(_) => {}, archdoc_core::errors::ArchDocError::Io(_) => {},
_ => panic!("Expected IO error"), _ => panic!("Expected IO error"),
} }
} }
@@ -77,7 +77,7 @@ fn test_analyzer_invalid_python_syntax() {
// Check that we get a parse error // Check that we get a parse error
match result.unwrap_err() { match result.unwrap_err() {
wtismycode_core::errors::WTIsMyCodeError::ParseError { .. } => {}, archdoc_core::errors::ArchDocError::ParseError { .. } => {},
_ => panic!("Expected parse error"), _ => panic!("Expected parse error"),
} }
} }

View File

@@ -1,12 +1,12 @@
//! Full pipeline integration tests for WTIsMyCode //! Full pipeline integration tests for ArchDoc
//! //!
//! Tests the complete scan → analyze → render pipeline using test-project/. //! Tests the complete scan → analyze → render pipeline using test-project/.
use wtismycode_core::config::Config; use archdoc_core::config::Config;
use wtismycode_core::cycle_detector; use archdoc_core::cycle_detector;
use wtismycode_core::model::{Module, ProjectModel}; use archdoc_core::model::{Module, ProjectModel};
use wtismycode_core::renderer::Renderer; use archdoc_core::renderer::Renderer;
use wtismycode_core::scanner::FileScanner; use archdoc_core::scanner::FileScanner;
use std::path::Path; use std::path::Path;
#[test] #[test]
@@ -14,7 +14,7 @@ fn test_config_load_and_validate() {
let config_path = Path::new(env!("CARGO_MANIFEST_DIR")) let config_path = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent() .parent()
.unwrap() .unwrap()
.join("test-project/wtismycode.toml"); .join("test-project/archdoc.toml");
let config = Config::load_from_file(&config_path).expect("Failed to load config"); let config = Config::load_from_file(&config_path).expect("Failed to load config");
assert_eq!(config.project.language, "python"); assert_eq!(config.project.language, "python");
@@ -26,7 +26,7 @@ fn test_config_validate_on_test_project() {
let config_path = Path::new(env!("CARGO_MANIFEST_DIR")) let config_path = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent() .parent()
.unwrap() .unwrap()
.join("test-project/wtismycode.toml"); .join("test-project/archdoc.toml");
let mut config = Config::load_from_file(&config_path).expect("Failed to load config"); let mut config = Config::load_from_file(&config_path).expect("Failed to load config");
// Set root to actual test-project path so validation passes // Set root to actual test-project path so validation passes
@@ -48,7 +48,7 @@ fn test_scan_test_project() {
.unwrap() .unwrap()
.join("test-project"); .join("test-project");
let config_path = test_project.join("wtismycode.toml"); let config_path = test_project.join("archdoc.toml");
let mut config = Config::load_from_file(&config_path).expect("Failed to load config"); let mut config = Config::load_from_file(&config_path).expect("Failed to load config");
config.project.root = test_project.to_string_lossy().to_string(); config.project.root = test_project.to_string_lossy().to_string();
@@ -148,7 +148,7 @@ fn test_renderer_produces_output() {
#[test] #[test]
fn test_parse_duration_values() { fn test_parse_duration_values() {
use wtismycode_core::config::{parse_duration, parse_file_size}; use archdoc_core::config::{parse_duration, parse_file_size};
assert_eq!(parse_duration("24h").unwrap(), 86400); assert_eq!(parse_duration("24h").unwrap(), 86400);
assert_eq!(parse_duration("7d").unwrap(), 604800); assert_eq!(parse_duration("7d").unwrap(), 604800);
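The assertions above pin down the expected unit handling of parse_duration. A hypothetical stand-in with the same observable behaviour is sketched below; the real function lives in archdoc_core::config and may return a Result rather than an Option.

```rust
// Illustrative only: "24h" -> 86_400 seconds, "7d" -> 604_800 seconds,
// matching the assertions in the pipeline test.
fn parse_duration(s: &str) -> Option<u64> {
    if s.len() < 2 || !s.is_ascii() {
        return None;
    }
    let (num, unit) = s.split_at(s.len() - 1);
    let n: u64 = num.parse().ok()?;
    match unit {
        "s" => Some(n),
        "m" => Some(n * 60),
        "h" => Some(n * 3_600),
        "d" => Some(n * 86_400),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_duration("24h"), Some(86_400));
    assert_eq!(parse_duration("7d"), Some(604_800));
}
```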

View File

@@ -1,4 +1,4 @@
//! Golden tests for WTIsMyCode //! Golden tests for ArchDoc
//! //!
//! These tests generate documentation for test projects and compare the output //! These tests generate documentation for test projects and compare the output
//! with expected "golden" files to ensure consistency. //! with expected "golden" files to ensure consistency.
@@ -7,7 +7,7 @@ mod test_utils;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
#[test] #[test]
fn test_simple_project_generation() { fn test_simple_project_generation() {
@@ -17,8 +17,8 @@ fn test_simple_project_generation() {
// Try different paths for the config file // Try different paths for the config file
let possible_paths = [ let possible_paths = [
"tests/golden/test_project/wtismycode.toml", "tests/golden/test_project/archdoc.toml",
"../tests/golden/test_project/wtismycode.toml", "../tests/golden/test_project/archdoc.toml",
]; ];
let config_path = possible_paths.iter().find(|&path| { let config_path = possible_paths.iter().find(|&path| {
@@ -92,12 +92,12 @@ fn test_simple_project_generation() {
// Check that we found the Calculator class // Check that we found the Calculator class
let calculator_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::Calculator")); let calculator_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::Calculator"));
assert!(calculator_symbol.is_some()); assert!(calculator_symbol.is_some());
assert_eq!(calculator_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class); assert_eq!(calculator_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
// Check that we found the process_numbers function // Check that we found the process_numbers function
let process_numbers_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::process_numbers")); let process_numbers_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::process_numbers"));
assert!(process_numbers_symbol.is_some()); assert!(process_numbers_symbol.is_some());
assert_eq!(process_numbers_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Function); assert_eq!(process_numbers_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
// Check file imports // Check file imports
assert!(!project_model.files.is_empty()); assert!(!project_model.files.is_empty());

View File

@@ -17,7 +17,7 @@
## Document metadata ## Document metadata
- **Created:** 2026-01-25 - **Created:** 2026-01-25
- **Updated:** 2026-01-25 - **Updated:** 2026-01-25
- **Generated by:** wtismycode (cli) v0.1 - **Generated by:** archdoc (cli) v0.1
--- ---

View File

@@ -53,10 +53,10 @@ description_max_length = 200
[logging] [logging]
level = "info" level = "info"
file = "wtismycode.log" file = "archdoc.log"
format = "compact" format = "compact"
[caching] [caching]
enabled = true enabled = true
cache_dir = ".wtismycode/cache" cache_dir = ".archdoc/cache"
max_cache_age = "24h" max_cache_age = "24h"

View File

@@ -1,4 +1,4 @@
//! Integration detection tests for WTIsMyCode //! Integration detection tests for ArchDoc
//! //!
//! These tests verify that the integration detection functionality works correctly. //! These tests verify that the integration detection functionality works correctly.
//! Integration detection now happens at module level during resolve_symbols, //! Integration detection now happens at module level during resolve_symbols,
@@ -6,7 +6,7 @@
use std::fs; use std::fs;
use tempfile::TempDir; use tempfile::TempDir;
use wtismycode_core::{Config, python_analyzer::PythonAnalyzer}; use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
#[test] #[test]
fn test_http_integration_detection() { fn test_http_integration_detection() {

View File

@@ -1,4 +1,4 @@
//! Integration tests for WTIsMyCode //! Integration tests for ArchDoc
// Include golden tests // Include golden tests
mod golden; mod golden;

View File

@@ -1,6 +1,6 @@
//! Tests for analyzing the test project //! Tests for analyzing the test project
use wtismycode_core::{ use archdoc_core::{
config::Config, config::Config,
python_analyzer::PythonAnalyzer, python_analyzer::PythonAnalyzer,
}; };
@@ -9,7 +9,7 @@ use std::path::Path;
#[test] #[test]
fn test_project_analysis() { fn test_project_analysis() {
// Load config from test project // Load config from test project
let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap(); let config = Config::load_from_file(Path::new("../test-project/archdoc.toml")).unwrap();
// Initialize analyzer // Initialize analyzer
let analyzer = PythonAnalyzer::new(config); let analyzer = PythonAnalyzer::new(config);
@@ -33,11 +33,9 @@ fn test_project_analysis() {
// Check that we found calls // Check that we found calls
assert!(!core_module.calls.is_empty()); assert!(!core_module.calls.is_empty());
// Integration flags are now set during resolve_symbols, not parse_module // Check that integrations are detected
// So we resolve and check there let db_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.db);
let project_model = analyzer.resolve_symbols(&[core_module.clone()]).unwrap(); let http_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.http);
let db_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.db);
let http_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.http);
assert!(db_integration_found, "Database integration should be detected"); assert!(db_integration_found, "Database integration should be detected");
assert!(http_integration_found, "HTTP integration should be detected"); assert!(http_integration_found, "HTTP integration should be detected");
@@ -57,7 +55,7 @@ fn test_project_analysis() {
#[test] #[test]
fn test_full_project_resolution() { fn test_full_project_resolution() {
// Load config from test project // Load config from test project
let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap(); let config = Config::load_from_file(Path::new("../test-project/archdoc.toml")).unwrap();
// Initialize analyzer // Initialize analyzer
let analyzer = PythonAnalyzer::new(config); let analyzer = PythonAnalyzer::new(config);

View File

@@ -0,0 +1,89 @@
//! Tests for the renderer functionality
use archdoc_core::{
model::{ProjectModel, Symbol, SymbolKind, IntegrationFlags, SymbolMetrics},
renderer::Renderer,
};
use std::collections::HashMap;
#[test]
fn test_render_with_integrations() {
// Create a mock project model with integration information
let mut project_model = ProjectModel::new();
// Add a symbol with database integration
let db_symbol = Symbol {
id: "DatabaseManager".to_string(),
kind: SymbolKind::Class,
module_id: "test_module".to_string(),
file_id: "test_file.py".to_string(),
qualname: "DatabaseManager".to_string(),
signature: "class DatabaseManager".to_string(),
annotations: None,
docstring_first_line: None,
purpose: "test".to_string(),
outbound_calls: vec![],
inbound_calls: vec![],
integrations_flags: IntegrationFlags {
db: true,
http: false,
queue: false,
storage: false,
ai: false,
},
metrics: SymbolMetrics {
fan_in: 0,
fan_out: 0,
is_critical: false,
cycle_participant: false,
},
};
// Add a symbol with HTTP integration
let http_symbol = Symbol {
id: "fetch_data".to_string(),
kind: SymbolKind::Function,
module_id: "test_module".to_string(),
file_id: "test_file.py".to_string(),
qualname: "fetch_data".to_string(),
signature: "def fetch_data()".to_string(),
annotations: None,
docstring_first_line: None,
purpose: "test".to_string(),
outbound_calls: vec![],
inbound_calls: vec![],
integrations_flags: IntegrationFlags {
db: false,
http: true,
queue: false,
storage: false,
ai: false,
},
metrics: SymbolMetrics {
fan_in: 0,
fan_out: 0,
is_critical: false,
cycle_participant: false,
},
};
project_model.symbols.insert("DatabaseManager".to_string(), db_symbol);
project_model.symbols.insert("fetch_data".to_string(), http_symbol);
// Initialize renderer
let renderer = Renderer::new();
// Render architecture documentation
let result = renderer.render_architecture_md(&project_model, None);
assert!(result.is_ok());
let rendered_content = result.unwrap();
println!("Rendered content:\n{}", rendered_content);
// Check that integration sections are present
assert!(rendered_content.contains("## Integrations"));
assert!(rendered_content.contains("### Database Integrations"));
assert!(rendered_content.contains("### HTTP/API Integrations"));
assert!(rendered_content.contains("DatabaseManager in test_file.py"));
assert!(rendered_content.contains("fetch_data in test_file.py"));
}

View File

@@ -17,7 +17,7 @@
## Document metadata ## Document metadata
- **Created:** 2026-01-25 - **Created:** 2026-01-25
- **Updated:** 2026-02-15 - **Updated:** 2026-02-15
- **Generated by:** wtismycode (cli) v0.1 - **Generated by:** archdoc (cli) v0.1
--- ---

View File

@@ -1,6 +1,6 @@
# Test Project # Test Project
A test project for WTIsMyCode development and testing. A test project for ArchDoc development and testing.
## Installation ## Installation

View File

@@ -53,10 +53,10 @@ description_max_length = 200
[logging] [logging]
level = "info" level = "info"
file = "wtismycode.log" file = "archdoc.log"
format = "compact" format = "compact"
[caching] [caching]
enabled = true enabled = true
cache_dir = ".wtismycode/cache" cache_dir = ".archdoc/cache"
max_cache_age = "24h" max_cache_age = "24h"

View File

@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "test-project" name = "test-project"
version = "0.1.0" version = "0.1.0"
description = "A test project for WTIsMyCode" description = "A test project for ArchDoc"
authors = [ authors = [
{name = "Test Author", email = "test@example.com"} {name = "Test Author", email = "test@example.com"}
] ]

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.939017204Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmpjrzBI1/test.py","module_path":"/tmp/.tmpjrzBI1/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
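The deleted files in this and the following blocks are on-disk analysis cache entries (the `.archdoc/cache` directory, `.wtismycode/cache` on the other side). Their rough shape, sketched with serde below and assuming serde's derive feature; the field meanings are inferred from the JSON, and `parsed_module` stands in for the project's own ParsedModule type via a raw serde_json::Value.

```rust
use serde::{Deserialize, Serialize};

// Approximate shape of one cached analysis result, inferred from the
// JSON blobs deleted in this commit.
#[derive(Serialize, Deserialize)]
struct CacheEntry {
    /// When this cache entry was written (RFC 3339 timestamp).
    created_at: String,
    /// Modification time of the analyzed source file at cache time.
    file_modified_at: String,
    /// The cached parse result; placeholder for archdoc_core's ParsedModule.
    parsed_module: serde_json::Value,
}
```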

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.929046662Z","file_modified_at":"2026-02-15T09:12:21.928241645Z","parsed_module":{"path":"/tmp/.tmpucjtMF/test.py","module_path":"/tmp/.tmpucjtMF/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.901000313Z","file_modified_at":"2026-02-15T09:12:21.900241847Z","parsed_module":{"path":"/tmp/.tmpQwpTTi/test.py","module_path":"/tmp/.tmpQwpTTi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.638281687Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmp5HECBh/test.py","module_path":"/tmp/.tmp5HECBh/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.938417589Z","file_modified_at":"2026-02-15T09:12:21.937241580Z","parsed_module":{"path":"/tmp/.tmpHn93FX/test.py","module_path":"/tmp/.tmpHn93FX/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.900267168Z","file_modified_at":"2026-02-15T09:12:21.899241854Z","parsed_module":{"path":"/tmp/.tmpVPUjB4/test.py","module_path":"/tmp/.tmpVPUjB4/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.939756459Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmp5yAI8O/test.py","module_path":"/tmp/.tmp5yAI8O/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.949122466Z","file_modified_at":"2026-02-15T00:22:51.124088300Z","parsed_module":{"path":"../test-project/src/utils.py","module_path":"../test-project/src/utils.py","imports":[{"module_name":"json","alias":null,"line_number":54},{"module_name":"os","alias":null,"line_number":66}],"symbols":[{"id":"load_config","kind":"Function","module_id":"","file_id":"","qualname":"load_config","signature":"def load_config(config_path: str)","annotations":null,"docstring_first_line":"Load configuration from a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"save_config","kind":"Function","module_id":"","file_id":"","qualname":"save_config","signature":"def save_config(config: dict, config_path: str)","annotations":null,"docstring_first_line":"Save configuration to a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"get_file_size","kind":"Function","module_id":"","file_id":"","qualname":"get_file_size","signature":"def get_file_size(filepath: str)","annotations":null,"docstring_first_line":"Get the size of a file in bytes.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"format_bytes","kind":"Function","module_id":"","file_id":"","qualname":"format_bytes","signature":"def format_bytes(size: int)","annotations":null,"docstring_first_line":"Format bytes into a human-readable string.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"},{"caller_symbol":"get_file_size","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"}],"file_docstring":"Utility functions for the test project."}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.932282950Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpMK4GyS/test.py","module_path":"/tmp/.tmpMK4GyS/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.646855488Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpXh0uQg/test.py","module_path":"/tmp/.tmpXh0uQg/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.932289740Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpn1WePQ/test.py","module_path":"/tmp/.tmpn1WePQ/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.646347331Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpFFmDl3/test.py","module_path":"/tmp/.tmpFFmDl3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.937802033Z","file_modified_at":"2026-02-15T09:12:21.936241587Z","parsed_module":{"path":"/tmp/.tmpU9hOcm/test.py","module_path":"/tmp/.tmpU9hOcm/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.646167123Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpj84SS2/test.py","module_path":"/tmp/.tmpj84SS2/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.647109436Z","file_modified_at":"2026-02-15T09:12:27.646200502Z","parsed_module":{"path":"/tmp/.tmpTS6Kf7/test.py","module_path":"/tmp/.tmpTS6Kf7/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.906280597Z","file_modified_at":"2026-02-15T00:21:25.872722975Z","parsed_module":{"path":"tests/golden/test_project/src/example.py","module_path":"tests/golden/test_project/src/example.py","imports":[{"module_name":"os","alias":null,"line_number":42},{"module_name":"typing.List","alias":null,"line_number":64}],"symbols":[{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":"A simple calculator class.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.__init__","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.__init__","signature":"def __init__(self)","annotations":null,"docstring_first_line":"Initialize the calculator.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a: int, b: int)","annotations":null,"docstring_first_line":"Add two numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.multiply","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.multiply","signature":"def multiply(self, a: int, b: int)","annotations":null,"docstring_first_line":"Multiply two numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"process_numbers","kind":"Function","module_id":"","file_id":"","qualname":"process_numbers","signature":"def process_numbers(numbers: List[int])","annotations":null,"docstring_first_line":"Process a list of numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"process_numbers","line_number":648,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"print","line_number":677,"call_type":"Unresolved"}],"file_docstring":"Example module for testing."}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.639487788Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7gcSsx/test.py","module_path":"/tmp/.tmp7gcSsx/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.623913794Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpY5jXEG/test.py","module_path":"/tmp/.tmpY5jXEG/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.623293468Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpbimwTO/test.py","module_path":"/tmp/.tmpbimwTO/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.638405646Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmpDqAWXp/test.py","module_path":"/tmp/.tmpDqAWXp/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.928408667Z","file_modified_at":"2026-02-15T09:12:21.927241652Z","parsed_module":{"path":"/tmp/.tmpkuoSO4/test.py","module_path":"/tmp/.tmpkuoSO4/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.642603187Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmplZ7Gfg/test.py","module_path":"/tmp/.tmplZ7Gfg/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.642573298Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmpiVOCMi/test.py","module_path":"/tmp/.tmpiVOCMi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.927910330Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmp1gFjk3/test.py","module_path":"/tmp/.tmp1gFjk3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:21.927753122Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmpp9A45l/test.py","module_path":"/tmp/.tmpp9A45l/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

View File

@@ -1 +0,0 @@
{"created_at":"2026-02-15T09:12:27.638896492Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7IEFw5/test.py","module_path":"/tmp/.tmp7IEFw5/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

File diff suppressed because one or more lines are too long

View File

@@ -1,464 +0,0 @@
//! Package classifier for Python imports
//!
//! Classifies Python packages into categories, consulting sources in this order:
//! 1. User overrides from config `integration_patterns`
//! 2. Built-in dictionary (~200 popular packages; checked before the stdlib list
//!    so that packages such as `sqlite3` get a specific category)
//! 3. Python stdlib list (hardcoded)
//! 4. PyPI API lookup (online mode only)
//! 5. Internal package detection (fallback)
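//!
//! A minimal usage sketch (illustrative only; `my_app_utils` is a hypothetical
//! internal module name), mirroring `test_classify_offline` at the bottom of this file:
//!
//! ```ignore
//! let mut classifier = PackageClassifier::new(true, None); // offline, no cache dir
//! assert_eq!(classifier.classify("json"), PackageCategory::Stdlib);
//! assert_eq!(classifier.classify("requests"), PackageCategory::Http);
//! assert_eq!(classifier.classify("my_app_utils"), PackageCategory::Internal);
//! ```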
use std::collections::HashMap;
use std::path::Path;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum PackageCategory {
Stdlib,
Http,
Database,
Queue,
Storage,
AiMl,
Testing,
Logging,
Auth,
Internal,
ThirdParty,
}
impl PackageCategory {
pub fn display_name(&self) -> &'static str {
match self {
Self::Stdlib => "Stdlib",
Self::Http => "HTTP",
Self::Database => "Database",
Self::Queue => "Queue",
Self::Storage => "Storage",
Self::AiMl => "AI/ML",
Self::Testing => "Testing",
Self::Logging => "Logging",
Self::Auth => "Auth",
Self::Internal => "Internal",
Self::ThirdParty => "Third-party",
}
}
}
/// Result of classifying all imports in a project
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct ClassifiedIntegrations {
/// category -> list of package names
pub by_category: HashMap<String, Vec<String>>,
}
pub struct PackageClassifier {
offline: bool,
cache_dir: Option<String>,
/// user overrides from config integration_patterns
user_overrides: HashMap<String, PackageCategory>,
/// PyPI cache: package_name -> Option<PackageCategory> (None = not found)
pypi_cache: HashMap<String, Option<PackageCategory>>,
}
impl PackageClassifier {
pub fn new(offline: bool, cache_dir: Option<String>) -> Self {
let mut classifier = Self {
offline,
cache_dir: cache_dir.clone(),
user_overrides: HashMap::new(),
pypi_cache: HashMap::new(),
};
// Load PyPI cache from disk
if let Some(ref dir) = cache_dir {
classifier.load_pypi_cache(dir);
}
classifier
}
/// Add user overrides from config integration_patterns
pub fn add_user_overrides(&mut self, patterns: &[(String, Vec<String>)]) {
for (type_name, pkgs) in patterns {
let cat = match type_name.as_str() {
"http" => PackageCategory::Http,
"db" => PackageCategory::Database,
"queue" => PackageCategory::Queue,
"storage" => PackageCategory::Storage,
"ai" => PackageCategory::AiMl,
"testing" => PackageCategory::Testing,
"logging" => PackageCategory::Logging,
"auth" => PackageCategory::Auth,
_ => PackageCategory::ThirdParty,
};
for pkg in pkgs {
self.user_overrides.insert(pkg.to_lowercase(), cat.clone());
}
}
}
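// Illustrative call (hypothetical package names), passing config
// `integration_patterns` entries straight through; unknown type names
// fall back to `ThirdParty`:
//
//     classifier.add_user_overrides(&[
//         ("db".to_string(), vec!["companydb".to_string()]),
//         ("queue".to_string(), vec!["eventbus".to_string()]),
//     ]);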
/// Classify a single package name (top-level import)
pub fn classify(&mut self, package_name: &str) -> PackageCategory {
let normalized = normalize_package_name(package_name);
// 1. User overrides take priority
if let Some(cat) = self.user_overrides.get(&normalized) {
return cat.clone();
}
// 2. Built-in dictionary (check BEFORE stdlib, so sqlite3 etc. are categorized properly)
if let Some(cat) = builtin_lookup(&normalized) {
return cat;
}
// 3. Stdlib
if is_stdlib(&normalized) {
return PackageCategory::Stdlib;
}
// 4. PyPI lookup (if online)
if !self.offline {
if let Some(cached) = self.pypi_cache.get(&normalized) {
return cached.clone().unwrap_or(PackageCategory::Internal);
}
match self.pypi_lookup(&normalized) {
Some(cat) => {
self.pypi_cache.insert(normalized, Some(cat.clone()));
return cat;
}
None => {
self.pypi_cache.insert(normalized, None);
return PackageCategory::Internal;
}
}
}
// 5. Offline fallback: if not in stdlib or dictionary, assume internal
PackageCategory::Internal
}
/// Classify all imports and return grouped integrations
pub fn classify_all(&mut self, import_names: &[String]) -> ClassifiedIntegrations {
let mut result = ClassifiedIntegrations::default();
let mut seen: HashMap<String, PackageCategory> = HashMap::new();
for import in import_names {
let top_level = top_level_package(import);
if seen.contains_key(&top_level) {
continue;
}
let cat = self.classify(&top_level);
seen.insert(top_level.clone(), cat.clone());
// Skip stdlib packages; everything else is kept under its category
if cat == PackageCategory::Stdlib {
continue;
}
let category_name = cat.display_name().to_string();
result.by_category
.entry(category_name)
.or_default()
.push(top_level);
}
// Deduplicate and sort each category
for pkgs in result.by_category.values_mut() {
pkgs.sort();
pkgs.dedup();
}
result
}
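// For example (illustrative input), classifying
//     ["requests", "sqlalchemy.orm", "os", "json"]
// produces a `by_category` map of
//     {"HTTP": ["requests"], "Database": ["sqlalchemy"]}
// since the stdlib imports ("os", "json") are skipped entirely.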
/// Save PyPI cache to disk
pub fn save_cache(&self) {
if let Some(ref dir) = self.cache_dir {
let cache_path = Path::new(dir).join("pypi.json");
if let Ok(json) = serde_json::to_string_pretty(&self.pypi_cache) {
let _ = std::fs::create_dir_all(dir);
let _ = std::fs::write(&cache_path, json);
}
}
}
fn load_pypi_cache(&mut self, dir: &str) {
let cache_path = Path::new(dir).join("pypi.json");
if let Ok(content) = std::fs::read_to_string(&cache_path) {
if let Ok(cache) = serde_json::from_str::<HashMap<String, Option<PackageCategory>>>(&content) {
self.pypi_cache = cache;
}
}
}
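// The on-disk cache (`<cache_dir>/pypi.json`) is a plain JSON map from package
// name to serialized category, with `null` for packages not found on PyPI.
// Illustrative contents (hypothetical package names):
//     { "some_http_sdk": "Http", "some_local_pkg": null }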
fn pypi_lookup(&self, package_name: &str) -> Option<PackageCategory> {
let url = format!("https://pypi.org/pypi/{}/json", package_name);
let agent = ureq::Agent::new_with_config(
ureq::config::Config::builder()
.timeout_global(Some(std::time::Duration::from_secs(3)))
.build()
);
let response = agent.get(&url).call().ok()?;
if response.status() != 200 {
return None;
}
let body_str = response.into_body().read_to_string().ok()?;
let body: serde_json::Value = serde_json::from_str(&body_str).ok()?;
let info = body.get("info")?;
// Check classifiers
if let Some(classifiers) = info.get("classifiers").and_then(|c: &serde_json::Value| c.as_array()) {
for classifier in classifiers {
if let Some(s) = classifier.as_str() {
if let Some(cat) = classify_from_pypi_classifier(s) {
return Some(cat);
}
}
}
}
// Check summary and keywords for hints
let summary = info.get("summary").and_then(|s: &serde_json::Value| s.as_str()).unwrap_or("");
let keywords = info.get("keywords").and_then(|s: &serde_json::Value| s.as_str()).unwrap_or("");
let combined = format!("{} {}", summary, keywords).to_lowercase();
if combined.contains("database") || combined.contains("sql") || combined.contains("orm") {
return Some(PackageCategory::Database);
}
if combined.contains("http") || combined.contains("web framework") || combined.contains("rest api") {
return Some(PackageCategory::Http);
}
if combined.contains("queue") || combined.contains("message broker") || combined.contains("amqp") || combined.contains("kafka") {
return Some(PackageCategory::Queue);
}
if combined.contains("storage") || combined.contains("s3") || combined.contains("blob") {
return Some(PackageCategory::Storage);
}
if combined.contains("machine learning") || combined.contains("deep learning") || combined.contains("neural") || combined.contains("artificial intelligence") {
return Some(PackageCategory::AiMl);
}
if combined.contains("testing") || combined.contains("test framework") {
return Some(PackageCategory::Testing);
}
if combined.contains("logging") || combined.contains("error tracking") {
return Some(PackageCategory::Logging);
}
if combined.contains("authentication") || combined.contains("jwt") || combined.contains("oauth") {
return Some(PackageCategory::Auth);
}
// Found on PyPI but no category detected
Some(PackageCategory::ThirdParty)
}
}
fn classify_from_pypi_classifier(classifier: &str) -> Option<PackageCategory> {
let c = classifier.to_lowercase();
if c.contains("framework :: django") || c.contains("framework :: flask") ||
c.contains("framework :: fastapi") || c.contains("framework :: tornado") ||
c.contains("framework :: aiohttp") || c.contains("topic :: internet :: www") {
return Some(PackageCategory::Http);
}
if c.contains("topic :: database") {
return Some(PackageCategory::Database);
}
if c.contains("topic :: scientific/engineering :: artificial intelligence") ||
c.contains("topic :: scientific/engineering :: machine learning") {
return Some(PackageCategory::AiMl);
}
if c.contains("topic :: software development :: testing") {
return Some(PackageCategory::Testing);
}
if c.contains("topic :: system :: logging") {
return Some(PackageCategory::Logging);
}
if c.contains("topic :: security") && (classifier.contains("auth") || classifier.contains("Auth")) {
return Some(PackageCategory::Auth);
}
None
}
/// Extract top-level package name from an import string
/// e.g. "sqlalchemy.orm.Session" -> "sqlalchemy"
fn top_level_package(import: &str) -> String {
import.split('.').next().unwrap_or(import).to_lowercase()
}
/// Normalize package name for lookup (lowercase, replace hyphens with underscores)
fn normalize_package_name(name: &str) -> String {
name.to_lowercase().replace('-', "_")
}
/// Check if a package is in the Python standard library
fn is_stdlib(name: &str) -> bool {
PYTHON_STDLIB.contains(&name)
}
/// Look up a package in the built-in dictionary
fn builtin_lookup(name: &str) -> Option<PackageCategory> {
for (cat, pkgs) in BUILTIN_PACKAGES.iter() {
if pkgs.contains(&name) {
return Some(cat.clone());
}
}
None
}
// Python 3.10+ standard library modules
const PYTHON_STDLIB: &[&str] = &[
"__future__", "_thread", "abc", "aifc", "argparse", "array", "ast",
"asynchat", "asyncio", "asyncore", "atexit", "audioop", "base64",
"bdb", "binascii", "binhex", "bisect", "builtins", "bz2",
"calendar", "cgi", "cgitb", "chunk", "cmath", "cmd", "code",
"codecs", "codeop", "collections", "colorsys", "compileall",
"concurrent", "configparser", "contextlib", "contextvars", "copy",
"copyreg", "cprofile", "crypt", "csv", "ctypes", "curses",
"dataclasses", "datetime", "dbm", "decimal", "difflib", "dis",
"distutils", "doctest", "email", "encodings", "enum", "errno",
"faulthandler", "fcntl", "filecmp", "fileinput", "fnmatch",
"formatter", "fractions", "ftplib", "functools", "gc", "getopt",
"getpass", "gettext", "glob", "grp", "gzip", "hashlib", "heapq",
"hmac", "html", "http", "idlelib", "imaplib", "imghdr", "imp",
"importlib", "inspect", "io", "ipaddress", "itertools", "json",
"keyword", "lib2to3", "linecache", "locale", "logging", "lzma",
"mailbox", "mailcap", "marshal", "math", "mimetypes", "mmap",
"modulefinder", "multiprocessing", "netrc", "nis", "nntplib",
"numbers", "operator", "optparse", "os", "ossaudiodev", "parser",
"pathlib", "pdb", "pickle", "pickletools", "pipes", "pkgutil",
"platform", "plistlib", "poplib", "posix", "posixpath", "pprint",
"profile", "pstats", "pty", "pwd", "py_compile", "pyclbr",
"pydoc", "queue", "quopri", "random", "re", "readline", "reprlib",
"resource", "rlcompleter", "runpy", "sched", "secrets", "select",
"selectors", "shelve", "shlex", "shutil", "signal", "site",
"smtpd", "smtplib", "sndhdr", "socket", "socketserver", "spwd",
"sqlite3", "ssl", "stat", "statistics", "string", "stringprep",
"struct", "subprocess", "sunau", "symtable", "sys", "sysconfig",
"syslog", "tabnanny", "tarfile", "telnetlib", "tempfile", "termios",
"test", "textwrap", "threading", "time", "timeit", "tkinter",
"token", "tokenize", "tomllib", "trace", "traceback", "tracemalloc",
"tty", "turtle", "turtledemo", "types", "typing", "unicodedata",
"unittest", "urllib", "uu", "uuid", "venv", "warnings", "wave",
"weakref", "webbrowser", "winreg", "winsound", "wsgiref", "xdrlib",
"xml", "xmlrpc", "zipapp", "zipfile", "zipimport", "zlib",
// Common sub-packages and stdlib-adjacent names that appear as top-level imports
"os.path", "collections.abc", "concurrent.futures", "typing_extensions",
];
lazy_static::lazy_static! {
static ref BUILTIN_PACKAGES: Vec<(PackageCategory, Vec<&'static str>)> = vec![
(PackageCategory::Http, vec![
"requests", "httpx", "aiohttp", "fastapi", "flask", "django",
"starlette", "uvicorn", "gunicorn", "tornado", "sanic", "bottle",
"falcon", "quart", "werkzeug", "httptools", "uvloop", "hypercorn",
"grpcio", "grpc", "graphene", "strawberry", "ariadne",
"pydantic", "marshmallow", "connexion", "responder", "hug",
]),
(PackageCategory::Database, vec![
"sqlalchemy", "psycopg2", "psycopg", "asyncpg", "pymongo",
"mongoengine", "peewee", "tortoise", "databases",
"alembic", "pymysql", "opensearch", "opensearchpy", "elasticsearch",
"motor", "beanie", "odmantic", "sqlmodel",
"piccolo", "edgedb", "cassandra", "clickhouse_driver", "sqlite3",
"neo4j", "arango", "influxdb", "timescaledb",
]),
(PackageCategory::Queue, vec![
"celery", "pika", "aio_pika", "kafka", "confluent_kafka",
"kombu", "dramatiq", "huey", "rq", "nats", "redis", "aioredis",
"aiokafka", "taskiq", "arq",
]),
(PackageCategory::Storage, vec![
"minio", "boto3", "botocore", "google.cloud.storage",
"azure.storage.blob", "s3fs", "fsspec", "smart_open",
]),
(PackageCategory::AiMl, vec![
"torch", "tensorflow", "transformers", "langchain",
"langchain_core", "langchain_openai", "langchain_community",
"openai", "anthropic", "scikit_learn", "sklearn",
"numpy", "pandas", "scipy", "matplotlib", "keras",
"whisper", "sentence_transformers", "qdrant_client",
"chromadb", "pinecone", "faiss", "xgboost", "lightgbm",
"catboost", "spacy", "nltk", "gensim", "huggingface_hub",
"diffusers", "accelerate", "datasets", "tokenizers",
"tiktoken", "llama_index", "autogen", "crewai",
"seaborn", "plotly", "bokeh",
]),
(PackageCategory::Testing, vec![
"pytest", "mock", "faker", "hypothesis",
"factory_boy", "factory", "responses", "httpretty",
"vcrpy", "freezegun", "time_machine", "pytest_asyncio",
"pytest_mock", "pytest_cov", "coverage", "tox", "nox",
"behave", "robot", "selenium", "playwright", "locust",
]),
(PackageCategory::Auth, vec![
"pyjwt", "jwt", "python_jose", "jose", "passlib",
"authlib", "oauthlib", "itsdangerous", "bcrypt",
"cryptography", "paramiko",
]),
(PackageCategory::Logging, vec![
"loguru", "structlog", "sentry_sdk", "watchtower",
"python_json_logger", "colorlog", "rich", "prometheus_client",
]),
(PackageCategory::AiMl, vec![
"pyannote", "soundfile", "librosa", "audioread", "webrtcvad",
]),
(PackageCategory::Queue, vec![
"aiormq",
]),
(PackageCategory::Http, vec![
"pydantic_settings", "pydantic_extra_types", "fastapi_mail",
]),
(PackageCategory::Database, vec![
"peewee_async", "peewee_migrate",
]),
];
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_stdlib_detection() {
assert!(is_stdlib("os"));
assert!(is_stdlib("sys"));
assert!(is_stdlib("json"));
assert!(is_stdlib("asyncio"));
assert!(!is_stdlib("requests"));
assert!(!is_stdlib("fastapi"));
}
#[test]
fn test_builtin_lookup() {
assert_eq!(builtin_lookup("requests"), Some(PackageCategory::Http));
assert_eq!(builtin_lookup("sqlalchemy"), Some(PackageCategory::Database));
assert_eq!(builtin_lookup("celery"), Some(PackageCategory::Queue));
assert_eq!(builtin_lookup("minio"), Some(PackageCategory::Storage));
assert_eq!(builtin_lookup("torch"), Some(PackageCategory::AiMl));
assert_eq!(builtin_lookup("pytest"), Some(PackageCategory::Testing));
assert_eq!(builtin_lookup("loguru"), Some(PackageCategory::Logging));
assert_eq!(builtin_lookup("pyjwt"), Some(PackageCategory::Auth));
assert_eq!(builtin_lookup("nonexistent_pkg"), None);
}
#[test]
fn test_top_level_package() {
assert_eq!(top_level_package("sqlalchemy.orm.Session"), "sqlalchemy");
assert_eq!(top_level_package("os.path"), "os");
assert_eq!(top_level_package("requests"), "requests");
}
#[test]
fn test_normalize_package_name() {
assert_eq!(normalize_package_name("aio-pika"), "aio_pika");
assert_eq!(normalize_package_name("scikit-learn"), "scikit_learn");
assert_eq!(normalize_package_name("FastAPI"), "fastapi");
}
#[test]
fn test_classify_offline() {
let mut classifier = PackageClassifier::new(true, None);
assert_eq!(classifier.classify("os"), PackageCategory::Stdlib);
assert_eq!(classifier.classify("requests"), PackageCategory::Http);
assert_eq!(classifier.classify("my_internal_pkg"), PackageCategory::Internal);
}
}

View File

@@ -1,36 +0,0 @@
//! Tests for the renderer functionality
use wtismycode_core::{
model::ProjectModel,
renderer::Renderer,
};
#[test]
fn test_render_with_integrations() {
let mut project_model = ProjectModel::new();
// Add classified integrations (new format)
project_model.classified_integrations.insert(
"Database".to_string(),
vec!["sqlalchemy".to_string(), "asyncpg".to_string()],
);
project_model.classified_integrations.insert(
"HTTP".to_string(),
vec!["fastapi".to_string(), "requests".to_string()],
);
let renderer = Renderer::new();
let result = renderer.render_architecture_md(&project_model, None);
assert!(result.is_ok());
let rendered = result.unwrap();
println!("Rendered:\n{}", rendered);
assert!(rendered.contains("## Integrations"));
assert!(rendered.contains("### Database"));
assert!(rendered.contains("- sqlalchemy"));
assert!(rendered.contains("- asyncpg"));
assert!(rendered.contains("### HTTP"));
assert!(rendered.contains("- fastapi"));
assert!(rendered.contains("- requests"));
}
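
Based on the assertions above, the architecture Markdown produced by `render_architecture_md` presumably contains a fragment roughly like the sketch below; the exact ordering of categories and packages is an assumption, since the test only checks for the listed substrings:

    ## Integrations

    ### Database
    - sqlalchemy
    - asyncpg

    ### HTTP
    - fastapi
    - requests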