Compare commits
9 commits: a3ee003947 ... feature/im
| Author | SHA1 | Date |
|---|---|---|
| | 5c93cbfb3a | |
| | 0396a53e0c | |
| | 0617f24744 | |
| | 1229235ac7 | |
| | d9457018fd | |
| | b3eb591809 | |
| | f4f8b8fa34 | |
| | 136697caf0 | |
| | 8e79e3950f | |
3  .gitignore  vendored
@@ -10,3 +10,6 @@
 .roo/
 PLANS/
 target/
+.wtismycode/
+docs/
+ARCHITECTURE.md

@@ -1,6 +1,6 @@
 # Changelog

-All notable changes to ArchDoc are documented in this file.
+All notable changes to WTIsMyCode are documented in this file.

 Format follows [Keep a Changelog](https://keepachangelog.com/).

@@ -12,8 +12,8 @@ Format follows [Keep a Changelog](https://keepachangelog.com/).
 - **Dependency cycle detection** (`cycle_detector.rs`) — DFS-based algorithm to find circular module dependencies
 - **Cycle detection in renderer** — Critical points section now shows detected dependency cycles
 - **Full pipeline integration tests** — Tests for config validation, scanning, cycle detection, and rendering
-- **Stats command** — `archdoc stats` displays project-level statistics (files, modules, symbols, edges)
-- **Check command** — `archdoc check` verifies documentation consistency with code
+- **Stats command** — `wtismycode stats` displays project-level statistics (files, modules, symbols, edges)
+- **Check command** — `wtismycode check` verifies documentation consistency with code
 - **Colored CLI output** — Progress bars and colored status messages
 - **Comprehensive README** — Badges, configuration reference table, command documentation, architecture overview

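The changelog hunk above credits `cycle_detector.rs` with a DFS-based search for circular module dependencies. That file is not part of this diff, so the following is only a minimal illustration of the general technique (three-color depth-first search over an import graph), not the crate's actual implementation:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq)]
enum Mark { Unvisited, InProgress, Done }

/// Return one dependency cycle (as a list of module names) if any exists.
fn find_cycle(graph: &HashMap<&str, Vec<&str>>) -> Option<Vec<String>> {
    fn dfs<'a>(
        node: &'a str,
        graph: &HashMap<&'a str, Vec<&'a str>>,
        marks: &mut HashMap<&'a str, Mark>,
        stack: &mut Vec<&'a str>,
    ) -> Option<Vec<String>> {
        marks.insert(node, Mark::InProgress);
        stack.push(node);
        for &dep in graph.get(node).map(|v| v.as_slice()).unwrap_or(&[]) {
            match marks.get(dep).copied().unwrap_or(Mark::Unvisited) {
                // A back edge to a node still on the stack closes a cycle.
                Mark::InProgress => {
                    let start = stack.iter().position(|&n| n == dep).unwrap();
                    return Some(stack[start..].iter().map(|s| s.to_string()).collect());
                }
                Mark::Unvisited => {
                    if let Some(cycle) = dfs(dep, graph, marks, stack) {
                        return Some(cycle);
                    }
                }
                Mark::Done => {}
            }
        }
        stack.pop();
        marks.insert(node, Mark::Done);
        None
    }

    let mut marks = HashMap::new();
    let mut stack = Vec::new();
    for &node in graph.keys() {
        if marks.get(node).copied().unwrap_or(Mark::Unvisited) == Mark::Unvisited {
            if let Some(cycle) = dfs(node, graph, &mut marks, &mut stack) {
                return Some(cycle);
            }
        }
    }
    None
}

fn main() {
    // models -> services -> handlers -> models is a cycle; utils is not.
    let graph = HashMap::from([
        ("models", vec!["services"]),
        ("services", vec!["handlers"]),
        ("handlers", vec!["models"]),
        ("utils", vec![]),
    ]);
    // Prints the cycle, e.g. Some(["models", "services", "handlers"]);
    // the rotation depends on which node the search starts from.
    println!("{:?}", find_cycle(&graph));
}
```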

263  Cargo.lock  generated
@@ -2,6 +2,12 @@
 # It is not intended for manual editing.
 version = 4

+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
 [[package]]
 name = "ahash"
 version = "0.8.12"
@@ -79,48 +85,18 @@ version = "1.0.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"

-[[package]]
-name = "archdoc-cli"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "archdoc-core",
- "clap",
- "colored",
- "indicatif",
- "serde",
- "serde_json",
- "thiserror 1.0.69",
- "tokio",
- "toml 0.8.23",
- "tracing",
- "tracing-subscriber",
-]
-
-[[package]]
-name = "archdoc-core"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "chrono",
- "handlebars",
- "rustpython-ast",
- "rustpython-parser",
- "serde",
- "serde_json",
- "tempfile",
- "thiserror 2.0.18",
- "toml 0.9.12+spec-1.1.0",
- "tracing",
- "walkdir",
-]
-
 [[package]]
 name = "autocfg"
 version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"

+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
 [[package]]
 name = "bitflags"
 version = "2.11.0"
@@ -262,6 +238,15 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "crc32fast"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "crunchy"
 version = "0.2.4"
@@ -415,6 +400,16 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"

+[[package]]
+name = "flate2"
+version = "1.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
 [[package]]
 name = "fnv"
 version = "1.0.7"
@@ -516,6 +511,22 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"

+[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
 [[package]]
 name = "iana-time-zone"
 version = "0.1.65"
@@ -735,6 +746,16 @@ version = "2.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"

+[[package]]
+name = "miniz_oxide"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
+dependencies = [
+ "adler2",
+ "simd-adler32",
+]
+
 [[package]]
 name = "mio"
 version = "1.1.1"
@@ -835,6 +856,12 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"

+[[package]]
+name = "percent-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
 [[package]]
 name = "pest"
 version = "2.8.6"
@@ -1010,6 +1037,20 @@ dependencies = [
  "bitflags",
 ]

+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.17",
+ "libc",
+ "untrusted",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "rustc-hash"
 version = "1.1.0"
@@ -1029,6 +1070,41 @@ dependencies = [
  "windows-sys 0.61.2",
 ]

+[[package]]
+name = "rustls"
+version = "0.23.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b"
+dependencies = [
+ "log",
+ "once_cell",
+ "ring",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
+dependencies = [
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
+dependencies = [
+ "ring",
+ "rustls-pki-types",
+ "untrusted",
+]
+
 [[package]]
 name = "rustpython-ast"
 version = "0.4.0"
@@ -1216,6 +1292,12 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "simd-adler32"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
+
 [[package]]
 name = "siphasher"
 version = "1.0.2"
@@ -1250,6 +1332,12 @@ version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"

+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
 [[package]]
 name = "syn"
 version = "2.0.115"
@@ -1601,6 +1689,47 @@ dependencies = [
  "rand",
 ]

+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "ureq"
+version = "3.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdc97a28575b85cfedf2a7e7d3cc64b3e11bd8ac766666318003abbacc7a21fc"
+dependencies = [
+ "base64",
+ "flate2",
+ "log",
+ "percent-encoding",
+ "rustls",
+ "rustls-pki-types",
+ "ureq-proto",
+ "utf-8",
+ "webpki-roots",
+]
+
+[[package]]
+name = "ureq-proto"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
+dependencies = [
+ "base64",
+ "http",
+ "httparse",
+ "log",
+]
+
+[[package]]
+name = "utf-8"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+
 [[package]]
 name = "utf8parse"
 version = "0.2.2"
@@ -1742,6 +1871,15 @@ dependencies = [
  "wasm-bindgen",
 ]

+[[package]]
+name = "webpki-roots"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed"
+dependencies = [
+ "rustls-pki-types",
+]
+
 [[package]]
 name = "winapi-util"
 version = "0.1.11"
@@ -1810,6 +1948,15 @@ dependencies = [
  "windows-link",
 ]

+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.59.0"
@@ -2063,6 +2210,44 @@ dependencies = [
  "wasmparser",
 ]

+[[package]]
+name = "wtismycode-cli"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "colored",
+ "indicatif",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+ "tokio",
+ "toml 0.8.23",
+ "tracing",
+ "tracing-subscriber",
+ "wtismycode-core",
+]
+
+[[package]]
+name = "wtismycode-core"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "chrono",
+ "handlebars",
+ "lazy_static",
+ "rustpython-ast",
+ "rustpython-parser",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "thiserror 2.0.18",
+ "toml 0.9.12+spec-1.1.0",
+ "tracing",
+ "ureq",
+ "walkdir",
+]
+
 [[package]]
 name = "zerocopy"
 version = "0.8.39"
@@ -2083,6 +2268,12 @@ dependencies = [
  "syn",
 ]

+[[package]]
+name = "zeroize"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
+
 [[package]]
 name = "zmij"
 version = "1.0.21"

@@ -1,3 +1,3 @@
 [workspace]
-members = ["archdoc-cli", "archdoc-core"]
+members = ["wtismycode-cli", "wtismycode-core"]
 resolver = "3"

@@ -1,8 +1,8 @@
-# PR: Major improvements to ArchDoc
+# PR: Major improvements to WTIsMyCode

 ## Summary

-Comprehensive refactoring and feature additions to ArchDoc — the Python architecture documentation generator. This PR improves code quality, adds new features, and significantly enhances the development experience.
+Comprehensive refactoring and feature additions to WTIsMyCode — the Python architecture documentation generator. This PR improves code quality, adds new features, and significantly enhances the development experience.

 **Stats:** 24 files changed, ~3900 insertions, ~1400 deletions, 50 tests

46  README.md
@@ -1,4 +1,4 @@
-# ArchDoc
+# WTIsMyCode

 
 
@@ -6,7 +6,7 @@

 **Automatic architecture documentation generator for Python projects.**

-ArchDoc analyzes your Python codebase using AST parsing and generates comprehensive Markdown documentation covering module structure, dependencies, integration points, and critical hotspots.
+WTIsMyCode analyzes your Python codebase using AST parsing and generates comprehensive Markdown documentation covering module structure, dependencies, integration points, and critical hotspots.

 ## Features

@@ -15,7 +15,7 @@ ArchDoc analyzes your Python codebase using AST parsing and generates comprehens
 - **Integration Detection** — Automatically identifies HTTP, database, and message queue integrations
 - **Diff-Aware Updates** — Preserves manually written sections while regenerating docs
 - **Caching** — Content-hash based caching for fast incremental regeneration
-- **Config Validation** — Comprehensive validation of `archdoc.toml` with helpful error messages
+- **Config Validation** — Comprehensive validation of `wtismycode.toml` with helpful error messages
 - **Statistics** — Project-level stats: file counts, symbol counts, fan-in/fan-out metrics
 - **Consistency Checks** — Verify documentation stays in sync with code changes

@@ -24,33 +24,33 @@ ArchDoc analyzes your Python codebase using AST parsing and generates comprehens
 Requires Rust 1.85+:

 ```bash
-cargo install --path archdoc-cli
+cargo install --path wtismycode-cli
 ```

 ## Quick Start

 ```bash
 # Initialize config in your Python project
-archdoc init
+wtismycode init

 # Generate architecture docs
-archdoc generate
+wtismycode generate

 # View project statistics
-archdoc stats
+wtismycode stats

 # Check docs are up-to-date
-archdoc check
+wtismycode check
 ```

 ## Commands

-### `archdoc generate`
+### `wtismycode generate`

 Scans the project, analyzes Python files, and generates documentation:

 ```
-$ archdoc generate
+$ wtismycode generate
 🔍 Scanning project...
 📂 Found 24 Python files in 6 modules
 🔬 Analyzing dependencies...
@@ -65,12 +65,12 @@ Output includes:
 - **Integration map** — HTTP, database, and queue integration points
 - **Critical points** — High fan-in/fan-out symbols and dependency cycles

-### `archdoc stats`
+### `wtismycode stats`

 Displays project statistics without generating docs:

 ```
-$ archdoc stats
+$ wtismycode stats
 📊 Project Statistics
 Files: 24
 Modules: 6
@@ -80,29 +80,29 @@ $ archdoc stats
 Edges: 134
 ```

-### `archdoc check`
+### `wtismycode check`

 Verifies documentation consistency with the current codebase:

 ```
-$ archdoc check
+$ wtismycode check
 ✅ Documentation is up-to-date
 ```

 Returns non-zero exit code if docs are stale — useful in CI pipelines.

-### `archdoc init`
+### `wtismycode init`

-Creates a default `archdoc.toml` configuration file:
+Creates a default `wtismycode.toml` configuration file:

 ```
-$ archdoc init
-✅ Created archdoc.toml with default settings
+$ wtismycode init
+✅ Created wtismycode.toml with default settings
 ```

 ## Configuration Reference

-ArchDoc is configured via `archdoc.toml`:
+WTIsMyCode is configured via `wtismycode.toml`:

 | Section | Key | Default | Description |
 |---------|-----|---------|-------------|
@@ -125,7 +125,7 @@ ArchDoc is configured via `archdoc.toml`:
 | `thresholds` | `critical_fan_in` | `20` | Fan-in threshold for critical symbols |
 | `thresholds` | `critical_fan_out` | `20` | Fan-out threshold for critical symbols |
 | `caching` | `enabled` | `true` | Enable analysis caching |
-| `caching` | `cache_dir` | `".archdoc/cache"` | Cache directory |
+| `caching` | `cache_dir` | `".wtismycode/cache"` | Cache directory |
 | `caching` | `max_cache_age` | `"24h"` | Cache TTL (supports s, m, h, d, w) |

 ### Example Configuration
@@ -172,12 +172,12 @@ max_cache_age = "24h"
 ## Architecture

 ```
-archdoc/
-├── archdoc-cli/ # CLI binary (commands, output formatting)
+wtismycode/
+├── wtismycode-cli/ # CLI binary (commands, output formatting)
 │ └── src/
 │ ├── main.rs
 │ └── commands/ # generate, check, stats, init
-├── archdoc-core/ # Core library
+├── wtismycode-core/ # Core library
 │ └── src/
 │ ├── config.rs # Config loading & validation
 │ ├── scanner.rs # File discovery
|||||||
@@ -1,89 +0,0 @@
|
|||||||
//! Tests for the renderer functionality
|
|
||||||
|
|
||||||
use archdoc_core::{
|
|
||||||
model::{ProjectModel, Symbol, SymbolKind, IntegrationFlags, SymbolMetrics},
|
|
||||||
renderer::Renderer,
|
|
||||||
};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_render_with_integrations() {
|
|
||||||
// Create a mock project model with integration information
|
|
||||||
let mut project_model = ProjectModel::new();
|
|
||||||
|
|
||||||
// Add a symbol with database integration
|
|
||||||
let db_symbol = Symbol {
|
|
||||||
id: "DatabaseManager".to_string(),
|
|
||||||
kind: SymbolKind::Class,
|
|
||||||
module_id: "test_module".to_string(),
|
|
||||||
file_id: "test_file.py".to_string(),
|
|
||||||
qualname: "DatabaseManager".to_string(),
|
|
||||||
signature: "class DatabaseManager".to_string(),
|
|
||||||
annotations: None,
|
|
||||||
docstring_first_line: None,
|
|
||||||
purpose: "test".to_string(),
|
|
||||||
outbound_calls: vec![],
|
|
||||||
inbound_calls: vec![],
|
|
||||||
integrations_flags: IntegrationFlags {
|
|
||||||
db: true,
|
|
||||||
http: false,
|
|
||||||
queue: false,
|
|
||||||
storage: false,
|
|
||||||
ai: false,
|
|
||||||
},
|
|
||||||
metrics: SymbolMetrics {
|
|
||||||
fan_in: 0,
|
|
||||||
fan_out: 0,
|
|
||||||
is_critical: false,
|
|
||||||
cycle_participant: false,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// Add a symbol with HTTP integration
|
|
||||||
let http_symbol = Symbol {
|
|
||||||
id: "fetch_data".to_string(),
|
|
||||||
kind: SymbolKind::Function,
|
|
||||||
module_id: "test_module".to_string(),
|
|
||||||
file_id: "test_file.py".to_string(),
|
|
||||||
qualname: "fetch_data".to_string(),
|
|
||||||
signature: "def fetch_data()".to_string(),
|
|
||||||
annotations: None,
|
|
||||||
docstring_first_line: None,
|
|
||||||
purpose: "test".to_string(),
|
|
||||||
outbound_calls: vec![],
|
|
||||||
inbound_calls: vec![],
|
|
||||||
integrations_flags: IntegrationFlags {
|
|
||||||
db: false,
|
|
||||||
http: true,
|
|
||||||
queue: false,
|
|
||||||
storage: false,
|
|
||||||
ai: false,
|
|
||||||
},
|
|
||||||
metrics: SymbolMetrics {
|
|
||||||
fan_in: 0,
|
|
||||||
fan_out: 0,
|
|
||||||
is_critical: false,
|
|
||||||
cycle_participant: false,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
project_model.symbols.insert("DatabaseManager".to_string(), db_symbol);
|
|
||||||
project_model.symbols.insert("fetch_data".to_string(), http_symbol);
|
|
||||||
|
|
||||||
// Initialize renderer
|
|
||||||
let renderer = Renderer::new();
|
|
||||||
|
|
||||||
// Render architecture documentation
|
|
||||||
let result = renderer.render_architecture_md(&project_model, None);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let rendered_content = result.unwrap();
|
|
||||||
println!("Rendered content:\n{}", rendered_content);
|
|
||||||
|
|
||||||
// Check that integration sections are present
|
|
||||||
assert!(rendered_content.contains("## Integrations"));
|
|
||||||
assert!(rendered_content.contains("### Database Integrations"));
|
|
||||||
assert!(rendered_content.contains("### HTTP/API Integrations"));
|
|
||||||
assert!(rendered_content.contains("DatabaseManager in test_file.py"));
|
|
||||||
assert!(rendered_content.contains("fetch_data in test_file.py"));
|
|
||||||
}
|
|
||||||

@@ -17,7 +17,7 @@
 ## Document metadata
 - **Created:** 2026-01-25
 - **Updated:** 2026-02-15
-- **Generated by:** archdoc (cli) v0.1
+- **Generated by:** wtismycode (cli) v0.1

 ---


@@ -1,6 +1,6 @@
 # Test Project

-A test project for ArchDoc development and testing.
+A test project for WTIsMyCode development and testing.

 ## Installation


@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "test-project"
 version = "0.1.0"
-description = "A test project for ArchDoc"
+description = "A test project for WTIsMyCode"
 authors = [
     {name = "Test Author", email = "test@example.com"}
 ]

@@ -53,10 +53,10 @@ description_max_length = 200

 [logging]
 level = "info"
-file = "archdoc.log"
+file = "wtismycode.log"
 format = "compact"

 [caching]
 enabled = true
-cache_dir = ".archdoc/cache"
+cache_dir = ".wtismycode/cache"
 max_cache_age = "24h"

@@ -1,10 +1,14 @@
 [package]
-name = "archdoc-cli"
+name = "wtismycode-cli"
 version = "0.1.0"
 edition = "2024"

+[[bin]]
+name = "wtismycode"
+path = "src/main.rs"
+
 [dependencies]
-archdoc-core = { path = "../archdoc-core" }
+wtismycode-core = { path = "../wtismycode-core" }
 clap = { version = "4.0", features = ["derive"] }
 tokio = { version = "1.0", features = ["full"] }
 serde = { version = "1.0", features = ["derive"] }

@@ -1,5 +1,5 @@
 use anyhow::Result;
-use archdoc_core::Config;
+use wtismycode_core::Config;
 use colored::Colorize;

 use super::generate::analyze_project;
@@ -9,7 +9,7 @@ pub fn check_docs_consistency(root: &str, config: &Config) -> Result<()> {

     let model = analyze_project(root, config)?;

-    let renderer = archdoc_core::renderer::Renderer::new();
+    let renderer = wtismycode_core::renderer::Renderer::new();
     let _generated = renderer.render_architecture_md(&model, None)?;

     let architecture_md_path = std::path::Path::new(root).join(&config.project.entry_file);

@@ -1,5 +1,5 @@
 use anyhow::Result;
-use archdoc_core::{Config, ProjectModel, scanner::FileScanner, python_analyzer::PythonAnalyzer};
+use wtismycode_core::{Config, ProjectModel, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 use colored::Colorize;
 use indicatif::{ProgressBar, ProgressStyle};
 use std::path::Path;
@@ -12,6 +12,10 @@ pub fn load_config(config_path: &str) -> Result<Config> {
 }

 pub fn analyze_project(root: &str, config: &Config) -> Result<ProjectModel> {
+    analyze_project_with_options(root, config, false)
+}
+
+pub fn analyze_project_with_options(root: &str, config: &Config, offline: bool) -> Result<ProjectModel> {
     println!("{}", "Scanning project...".cyan());

     let scanner = FileScanner::new(config.clone());
@@ -19,7 +23,7 @@ pub fn analyze_project(root: &str, config: &Config) -> Result<ProjectModel> {

     println!(" Found {} Python files", python_files.len().to_string().yellow());

-    let analyzer = PythonAnalyzer::new(config.clone());
+    let analyzer = PythonAnalyzer::new_with_options(config.clone(), offline);

     let pb = ProgressBar::new(python_files.len() as u64);
     pb.set_style(ProgressStyle::default_bar()
@@ -118,8 +122,8 @@ pub fn generate_docs(model: &ProjectModel, out: &str, verbose: bool, _config: &C
         }
     }

-    let renderer = archdoc_core::renderer::Renderer::new();
-    let writer = archdoc_core::writer::DiffAwareWriter::new();
+    let renderer = wtismycode_core::renderer::Renderer::new();
+    let writer = wtismycode_core::writer::DiffAwareWriter::new();

     let output_path = std::path::Path::new(".").join("ARCHITECTURE.md");

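The new `analyze_project_with_options` above threads an `offline` flag into `PythonAnalyzer::new_with_options`; per the `--offline` help text added further down in the CLI, the flag skips PyPI API lookups in favour of a built-in dictionary. The analyzer internals are not part of this diff, so the following only illustrates that gating pattern in isolation; `classify_package` and `query_pypi` are hypothetical names, not wtismycode-core APIs:

```rust
use std::collections::HashMap;

// Illustrative sketch of an offline-gated lookup: prefer the built-in
// dictionary, and only fall back to a remote query when offline is false.
fn classify_package(
    name: &str,
    offline: bool,
    builtin: &HashMap<&str, &str>,
) -> Option<String> {
    if let Some(category) = builtin.get(name) {
        return Some((*category).to_string()); // dictionary hit: no network needed
    }
    if offline {
        return None; // --offline: never touch the network
    }
    query_pypi(name) // hypothetical remote lookup (e.g. via an HTTP client)
}

// Stand-in for a PyPI metadata request; a real implementation would perform
// an HTTP call and map the response to an integration category.
fn query_pypi(_name: &str) -> Option<String> {
    None
}

fn main() {
    let builtin = HashMap::from([("requests", "HTTP"), ("redis", "Queue")]);
    assert_eq!(classify_package("requests", true, &builtin).as_deref(), Some("HTTP"));
    assert_eq!(classify_package("some-unknown-pkg", true, &builtin), None);
}
```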

@@ -1,8 +1,46 @@
 use anyhow::Result;
 use colored::Colorize;

+/// Detect project name from pyproject.toml or directory basename.
+fn detect_project_name(root: &str) -> String {
+    let root_path = std::path::Path::new(root);
+
+    // Try pyproject.toml
+    let pyproject_path = root_path.join("pyproject.toml");
+    if let Ok(content) = std::fs::read_to_string(&pyproject_path) {
+        let mut in_project = false;
+        for line in content.lines() {
+            let trimmed = line.trim();
+            if trimmed == "[project]" {
+                in_project = true;
+                continue;
+            }
+            if trimmed.starts_with('[') {
+                in_project = false;
+                continue;
+            }
+            if in_project && trimmed.starts_with("name")
+                && let Some(val) = trimmed.split('=').nth(1) {
+                let name = val.trim().trim_matches('"').trim_matches('\'');
+                if !name.is_empty() {
+                    return name.to_string();
+                }
+            }
+        }
+    }
+
+    // Fallback: directory basename
+    root_path
+        .canonicalize()
+        .ok()
+        .and_then(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
+        .unwrap_or_else(|| "Project".to_string())
+}
+
 pub fn init_project(root: &str, out: &str) -> Result<()> {
-    println!("{}", "Initializing archdoc project...".cyan().bold());
+    println!("{}", "Initializing wtismycode project...".cyan().bold());

+    let project_name = detect_project_name(root);
+
     let out_path = std::path::Path::new(out);
     std::fs::create_dir_all(out_path)?;
@@ -45,7 +83,7 @@ pub fn init_project(root: &str, out: &str) -> Result<()> {
 ## Document metadata
 - **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
 - **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
-- **Generated by:** archdoc (cli) v0.1
+- **Generated by:** wtismycode (cli) v0.1

 ---

@@ -95,8 +133,10 @@ pub fn init_project(root: &str, out: &str) -> Result<()> {
 <!-- MANUAL:END -->
 "#;

+    let architecture_md_content = architecture_md_content.replace("<PROJECT_NAME>", &project_name);
+
     let architecture_md_path = std::path::Path::new(root).join("ARCHITECTURE.md");
-    std::fs::write(&architecture_md_path, architecture_md_content)?;
+    std::fs::write(&architecture_md_path, &architecture_md_content)?;

     let config_toml_content = r#"[project]
 root = "."
@@ -153,16 +193,16 @@ description_max_length = 200

 [logging]
 level = "info"
-file = "archdoc.log"
+file = "wtismycode.log"
 format = "compact"

 [caching]
 enabled = true
-cache_dir = ".archdoc/cache"
+cache_dir = ".wtismycode/cache"
 max_cache_age = "24h"
 "#;

-    let config_toml_path = std::path::Path::new(root).join("archdoc.toml");
+    let config_toml_path = std::path::Path::new(root).join("wtismycode.toml");
     if !config_toml_path.exists() {
         std::fs::write(&config_toml_path, config_toml_content)?;
     }
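For orientation, the `detect_project_name` helper added above scans `pyproject.toml` line by line, accepts `name = ...` only inside the `[project]` table, strips either quote style from the value, and otherwise falls back to the directory basename. A minimal standalone sketch of just the quote-stripping step (an illustration, not code from the crate):

```rust
// Mirrors the value extraction used in detect_project_name above, shown in
// isolation: split on '=', trim whitespace, then trim either quote style.
fn main() {
    for line in [r#"name = "demo-app""#, "name = 'demo-app'"] {
        let value = line
            .split('=')
            .nth(1)
            .map(|v| v.trim().trim_matches('"').trim_matches('\'').to_string());
        assert_eq!(value.as_deref(), Some("demo-app"));
    }
    println!("both quote styles parse to demo-app");
}
```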

@@ -1,10 +1,10 @@
-use archdoc_core::ProjectModel;
+use wtismycode_core::ProjectModel;
 use colored::Colorize;

 pub fn print_stats(model: &ProjectModel) {
     println!();
     println!("{}", "╔══════════════════════════════════════╗".cyan());
-    println!("{}", "║ archdoc project statistics ║".cyan().bold());
+    println!("{}", "║ wtismycode project statistics ║".cyan().bold());
     println!("{}", "╚══════════════════════════════════════╝".cyan());
     println!();

@@ -24,10 +24,10 @@ pub fn print_stats(model: &ProjectModel) {
     let mut async_functions = 0;
     for symbol in model.symbols.values() {
         match symbol.kind {
-            archdoc_core::model::SymbolKind::Function => functions += 1,
-            archdoc_core::model::SymbolKind::Method => methods += 1,
-            archdoc_core::model::SymbolKind::Class => classes += 1,
-            archdoc_core::model::SymbolKind::AsyncFunction => async_functions += 1,
+            wtismycode_core::model::SymbolKind::Function => functions += 1,
+            wtismycode_core::model::SymbolKind::Method => methods += 1,
+            wtismycode_core::model::SymbolKind::Class => classes += 1,
+            wtismycode_core::model::SymbolKind::AsyncFunction => async_functions += 1,
         }
     }
     println!("{}", "Symbol breakdown".bold().underline());

@@ -5,7 +5,7 @@ use clap::{Parser, Subcommand};
 use anyhow::Result;

 #[derive(Parser)]
-#[command(name = "archdoc")]
+#[command(name = "wtismycode")]
 #[command(about = "Generate architecture documentation for Python projects")]
 #[command(version = "0.1.0")]
 pub struct Cli {
@@ -19,7 +19,7 @@ pub struct Cli {

 #[derive(Subcommand)]
 enum Commands {
-    /// Initialize archdoc in the project
+    /// Initialize wtismycode in the project
     Init {
         #[arg(short, long, default_value = ".")]
         root: String,
@@ -32,24 +32,27 @@ enum Commands {
         root: String,
         #[arg(short, long, default_value = "docs/architecture")]
         out: String,
-        #[arg(short, long, default_value = "archdoc.toml")]
+        #[arg(short, long, default_value = "wtismycode.toml")]
         config: String,
         /// Show what would be generated without writing files
         #[arg(long)]
         dry_run: bool,
+        /// Skip PyPI API lookups, use only built-in dictionary
+        #[arg(long)]
+        offline: bool,
     },
     /// Check if documentation is up to date
     Check {
         #[arg(short, long, default_value = ".")]
         root: String,
-        #[arg(short, long, default_value = "archdoc.toml")]
+        #[arg(short, long, default_value = "wtismycode.toml")]
         config: String,
     },
     /// Show project statistics
     Stats {
         #[arg(short, long, default_value = ".")]
         root: String,
-        #[arg(short, long, default_value = "archdoc.toml")]
+        #[arg(short, long, default_value = "wtismycode.toml")]
         config: String,
     },
 }
@@ -61,9 +64,9 @@ fn main() -> Result<()> {
         Commands::Init { root, out } => {
             commands::init::init_project(root, out)?;
         }
-        Commands::Generate { root, out, config, dry_run } => {
+        Commands::Generate { root, out, config, dry_run, offline } => {
             let config = commands::generate::load_config(config)?;
-            let model = commands::generate::analyze_project(root, &config)?;
+            let model = commands::generate::analyze_project_with_options(root, &config, *offline)?;
             if *dry_run {
                 commands::generate::dry_run_docs(&model, out, &config)?;
             } else {

@@ -1,7 +1,7 @@
-//! Colored output helpers and filename utilities for ArchDoc CLI
+//! Colored output helpers and filename utilities for WTIsMyCode CLI

 use colored::Colorize;
-use archdoc_core::ProjectModel;
+use wtismycode_core::ProjectModel;

 /// Sanitize a file path into a safe filename for docs.
 /// Removes `./` prefix, replaces `/` with `__`.
@@ -19,17 +19,14 @@ pub fn print_generate_summary(model: &ProjectModel) {
     println!(" {} {}", "Edges:".bold(),
         model.edges.module_import_edges.len() + model.edges.symbol_call_edges.len());

-    let integrations: Vec<&str> = {
-        let mut v = Vec::new();
-        if model.symbols.values().any(|s| s.integrations_flags.http) { v.push("HTTP"); }
-        if model.symbols.values().any(|s| s.integrations_flags.db) { v.push("DB"); }
-        if model.symbols.values().any(|s| s.integrations_flags.queue) { v.push("Queue"); }
-        if model.symbols.values().any(|s| s.integrations_flags.storage) { v.push("Storage"); }
-        if model.symbols.values().any(|s| s.integrations_flags.ai) { v.push("AI/ML"); }
-        v
-    };
-    if !integrations.is_empty() {
-        println!(" {} {}", "Integrations:".bold(), integrations.join(", ").yellow());
+    if !model.classified_integrations.is_empty() {
+        let cats: Vec<String> = model.classified_integrations.iter()
+            .filter(|(_, pkgs)| !pkgs.is_empty())
+            .map(|(cat, pkgs)| format!("{} ({})", cat, pkgs.join(", ")))
+            .collect();
+        if !cats.is_empty() {
+            println!(" {} {}", "Integrations:".bold(), cats.join(" | ").yellow());
+        }
     }
     println!("{}", "─────────────────────────────────────".dimmed());
 }

1  wtismycode-core/.wtismycode/cache/1dd9479f63eeeea5.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.939017204Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmpjrzBI1/test.py","module_path":"/tmp/.tmpjrzBI1/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/22f137dfd1267b44.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.929046662Z","file_modified_at":"2026-02-15T09:12:21.928241645Z","parsed_module":{"path":"/tmp/.tmpucjtMF/test.py","module_path":"/tmp/.tmpucjtMF/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/242d46dd3d930a62.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.901000313Z","file_modified_at":"2026-02-15T09:12:21.900241847Z","parsed_module":{"path":"/tmp/.tmpQwpTTi/test.py","module_path":"/tmp/.tmpQwpTTi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/2d1d3488fad06abc.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:27.638281687Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmp5HECBh/test.py","module_path":"/tmp/.tmp5HECBh/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/323af6c33c893dc9.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.938417589Z","file_modified_at":"2026-02-15T09:12:21.937241580Z","parsed_module":{"path":"/tmp/.tmpHn93FX/test.py","module_path":"/tmp/.tmpHn93FX/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/332464b9176fa65a.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.900267168Z","file_modified_at":"2026-02-15T09:12:21.899241854Z","parsed_module":{"path":"/tmp/.tmpVPUjB4/test.py","module_path":"/tmp/.tmpVPUjB4/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/34c7d0f0a5859bc4.json  vendored  Normal file
File diff suppressed because one or more lines are too long

1  wtismycode-core/.wtismycode/cache/3f48e681f7e81aa3.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.939756459Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmp5yAI8O/test.py","module_path":"/tmp/.tmp5yAI8O/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

1  wtismycode-core/.wtismycode/cache/4427b32031669c3a.json  vendored  Normal file
@@ -0,0 +1 @@
{"created_at":"2026-02-15T09:12:21.949122466Z","file_modified_at":"2026-02-15T00:22:51.124088300Z","parsed_module":{"path":"../test-project/src/utils.py","module_path":"../test-project/src/utils.py","imports":[{"module_name":"json","alias":null,"line_number":54},{"module_name":"os","alias":null,"line_number":66}],"symbols":[{"id":"load_config","kind":"Function","module_id":"","file_id":"","qualname":"load_config","signature":"def load_config(config_path: str)","annotations":null,"docstring_first_line":"Load configuration from a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"save_config","kind":"Function","module_id":"","file_id":"","qualname":"save_config","signature":"def save_config(config: dict, config_path: str)","annotations":null,"docstring_first_line":"Save configuration to a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"get_file_size","kind":"Function","module_id":"","file_id":"","qualname":"get_file_size","signature":"def get_file_size(filepath: str)","annotations":null,"docstring_first_line":"Get the size of a file in bytes.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"format_bytes","kind":"Function","module_id":"","file_id":"","qualname":"format_bytes","signature":"def format_bytes(size: int)","annotations":null,"docstring_first_line":"Format bytes into a human-readable string.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"},{"caller_symbol":"get_file_size","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"}],"file_docstring":"Utility functions for the test project."}}

wtismycode-core/.wtismycode/cache/44b31aff14e80d6b.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.932282950Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpMK4GyS/test.py","module_path":"/tmp/.tmpMK4GyS/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/6b46d7daa9d35ecf.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.646855488Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpXh0uQg/test.py","module_path":"/tmp/.tmpXh0uQg/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/7ff0f715bb184391.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.932289740Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpn1WePQ/test.py","module_path":"/tmp/.tmpn1WePQ/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/80d24a35240626da.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.646347331Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpFFmDl3/test.py","module_path":"/tmp/.tmpFFmDl3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/8e89f71b0bea2e6d.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.937802033Z","file_modified_at":"2026-02-15T09:12:21.936241587Z","parsed_module":{"path":"/tmp/.tmpU9hOcm/test.py","module_path":"/tmp/.tmpU9hOcm/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/90460d6c369f9d4c.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.646167123Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpj84SS2/test.py","module_path":"/tmp/.tmpj84SS2/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/a8dcf5363a5ef953.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.647109436Z","file_modified_at":"2026-02-15T09:12:27.646200502Z","parsed_module":{"path":"/tmp/.tmpTS6Kf7/test.py","module_path":"/tmp/.tmpTS6Kf7/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/ae981a5f144a6f7a.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.906280597Z","file_modified_at":"2026-02-15T00:21:25.872722975Z","parsed_module":{"path":"tests/golden/test_project/src/example.py","module_path":"tests/golden/test_project/src/example.py","imports":[{"module_name":"os","alias":null,"line_number":42},{"module_name":"typing.List","alias":null,"line_number":64}],"symbols":[{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":"A simple calculator class.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.__init__","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.__init__","signature":"def __init__(self)","annotations":null,"docstring_first_line":"Initialize the calculator.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a: int, b: int)","annotations":null,"docstring_first_line":"Add two numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.multiply","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.multiply","signature":"def multiply(self, a: int, b: int)","annotations":null,"docstring_first_line":"Multiply two numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"process_numbers","kind":"Function","module_id":"","file_id":"","qualname":"process_numbers","signature":"def process_numbers(numbers: List[int])","annotations":null,"docstring_first_line":"Process a list of numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"process_numbers","line_number":648,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"print","line_number":677,"call_type":"Unresolved"}],"file_docstring":"Example module for testing."}}

wtismycode-core/.wtismycode/cache/af6c11e9a59f28dd.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.639487788Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7gcSsx/test.py","module_path":"/tmp/.tmp7gcSsx/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/b74dd266405fda26.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.623913794Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpY5jXEG/test.py","module_path":"/tmp/.tmpY5jXEG/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/b967ef0258ec1d92.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.623293468Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpbimwTO/test.py","module_path":"/tmp/.tmpbimwTO/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/ca89f5c4de39cd5c.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.638405646Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmpDqAWXp/test.py","module_path":"/tmp/.tmpDqAWXp/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/cc39a913d23e0148.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.928408667Z","file_modified_at":"2026-02-15T09:12:21.927241652Z","parsed_module":{"path":"/tmp/.tmpkuoSO4/test.py","module_path":"/tmp/.tmpkuoSO4/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/d49cc1c393cf173e.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.642603187Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmplZ7Gfg/test.py","module_path":"/tmp/.tmplZ7Gfg/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/d93abaa965fa2d8d.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.642573298Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmpiVOCMi/test.py","module_path":"/tmp/.tmpiVOCMi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/ddc166202153e62e.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.927910330Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmp1gFjk3/test.py","module_path":"/tmp/.tmp1gFjk3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/e9433f25871e418.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:21.927753122Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmpp9A45l/test.py","module_path":"/tmp/.tmpp9A45l/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/f1a291dc5a093458.json (vendored, new file, 1 line)
{"created_at":"2026-02-15T09:12:27.638896492Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7IEFw5/test.py","module_path":"/tmp/.tmp7IEFw5/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}

wtismycode-core/.wtismycode/cache/f1b45c4f58b2d0dc.json (vendored, new file)
File diff suppressed because one or more lines are too long

wtismycode-core/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
-name = "archdoc-core"
+name = "wtismycode-core"
 version = "0.1.0"
 edition = "2024"
 
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-toml = "0.9.11+spec-1.1.0"
+toml = "0.9.11"
 tracing = "0.1"
 anyhow = "1.0"
 thiserror = "2.0.18"
@@ -16,3 +16,5 @@ rustpython-parser = "0.4"
 rustpython-ast = "0.4"
 chrono = { version = "0.4", features = ["serde"] }
 tempfile = "3.10"
+ureq = "3"
+lazy_static = "1.4"

wtismycode-core/src/cache.rs
@@ -1,10 +1,10 @@
-//! Caching module for ArchDoc
+//! Caching module for WTIsMyCode
 //!
 //! This module provides caching functionality to speed up repeated analysis
 //! by storing parsed ASTs and analysis results.
 
 use crate::config::Config;
-use crate::errors::ArchDocError;
+use crate::errors::WTIsMyCodeError;
 use crate::model::ParsedModule;
 use std::path::Path;
 use std::fs;
@@ -39,7 +39,7 @@ impl CacheManager {
     }
 
     /// Get cached parsed module if available and not expired
-    pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, ArchDocError> {
+    pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, WTIsMyCodeError> {
         if !self.config.caching.enabled {
             return Ok(None);
         }
@@ -53,10 +53,10 @@ impl CacheManager {
 
         // Read cache file
         let content = fs::read_to_string(&cache_file)
-            .map_err(ArchDocError::Io)?;
+            .map_err(WTIsMyCodeError::Io)?;
 
         let cache_entry: CacheEntry = serde_json::from_str(&content)
-            .map_err(|e| ArchDocError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
+            .map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
 
         // Check if cache is expired
         let now = Utc::now();
@@ -73,10 +73,10 @@ impl CacheManager {
 
         // Check if source file has been modified since caching
         let metadata = fs::metadata(file_path)
-            .map_err(ArchDocError::Io)?;
+            .map_err(WTIsMyCodeError::Io)?;
 
         let modified_time = metadata.modified()
-            .map_err(ArchDocError::Io)?;
+            .map_err(WTIsMyCodeError::Io)?;
 
         let modified_time: DateTime<Utc> = modified_time.into();
 
@@ -90,7 +90,7 @@ impl CacheManager {
     }
 
     /// Store parsed module in cache
-    pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), ArchDocError> {
+    pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), WTIsMyCodeError> {
         if !self.config.caching.enabled {
             return Ok(());
         }
@@ -100,10 +100,10 @@ impl CacheManager {
 
         // Get file modification time
         let metadata = fs::metadata(file_path)
-            .map_err(ArchDocError::Io)?;
+            .map_err(WTIsMyCodeError::Io)?;
 
         let modified_time = metadata.modified()
-            .map_err(ArchDocError::Io)?;
+            .map_err(WTIsMyCodeError::Io)?;
 
         let modified_time: DateTime<Utc> = modified_time.into();
 
@@ -114,10 +114,10 @@ impl CacheManager {
         };
 
         let content = serde_json::to_string(&cache_entry)
-            .map_err(|e| ArchDocError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
+            .map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
 
         fs::write(&cache_file, content)
-            .map_err(ArchDocError::Io)
+            .map_err(WTIsMyCodeError::Io)
     }
 
     /// Generate cache key for a file path
@@ -133,7 +133,7 @@ impl CacheManager {
     }
 
     /// Parse duration string like "24h" or "7d" into seconds
-    fn parse_duration(&self, duration_str: &str) -> Result<u64, ArchDocError> {
+    fn parse_duration(&self, duration_str: &str) -> Result<u64, WTIsMyCodeError> {
         if duration_str.is_empty() {
             return Ok(0);
         }
@@ -141,26 +141,26 @@ impl CacheManager {
         let chars: Vec<char> = duration_str.chars().collect();
         let (number_str, unit) = chars.split_at(chars.len() - 1);
         let number: u64 = number_str.iter().collect::<String>().parse()
-            .map_err(|_| ArchDocError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?;
+            .map_err(|_| WTIsMyCodeError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?;
 
         match unit[0] {
            's' => Ok(number),          // seconds
            'm' => Ok(number * 60),     // minutes
            'h' => Ok(number * 3600),   // hours
            'd' => Ok(number * 86400),  // days
-           _ => Err(ArchDocError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))),
+           _ => Err(WTIsMyCodeError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))),
         }
     }
 
     /// Clear all cache entries
-    pub fn clear_cache(&self) -> Result<(), ArchDocError> {
+    pub fn clear_cache(&self) -> Result<(), WTIsMyCodeError> {
         if Path::new(&self.cache_dir).exists() {
             fs::remove_dir_all(&self.cache_dir)
-                .map_err(ArchDocError::Io)?;
+                .map_err(WTIsMyCodeError::Io)?;
 
             // Recreate cache directory
             fs::create_dir_all(&self.cache_dir)
-                .map_err(ArchDocError::Io)?;
+                .map_err(WTIsMyCodeError::Io)?;
         }
 
         Ok(())

wtismycode-core/src/config.rs
@@ -1,10 +1,10 @@
-//! Configuration management for ArchDoc
+//! Configuration management for WTIsMyCode
 //!
-//! This module handles loading and validating the archdoc.toml configuration file.
+//! This module handles loading and validating the wtismycode.toml configuration file.
 
 use serde::{Deserialize, Serialize};
 use std::path::Path;
-use crate::errors::ArchDocError;
+use crate::errors::WTIsMyCodeError;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[derive(Default)]
@@ -383,7 +383,7 @@ fn default_log_level() -> String {
 }
 
 fn default_log_file() -> String {
-    "archdoc.log".to_string()
+    "wtismycode.log".to_string()
 }
 
 fn default_log_format() -> String {
@@ -415,7 +415,7 @@ fn default_caching_enabled() -> bool {
 }
 
 fn default_cache_dir() -> String {
-    ".archdoc/cache".to_string()
+    ".wtismycode/cache".to_string()
 }
 
 fn default_max_cache_age() -> String {
@@ -426,17 +426,17 @@ impl Config {
     /// Validate the configuration for correctness.
     ///
     /// Checks that paths exist, values are parseable, and settings are sensible.
-    pub fn validate(&self) -> Result<(), ArchDocError> {
+    pub fn validate(&self) -> Result<(), WTIsMyCodeError> {
         // Check project.root exists and is a directory
         let root = Path::new(&self.project.root);
         if !root.exists() {
-            return Err(ArchDocError::ConfigError(format!(
+            return Err(WTIsMyCodeError::ConfigError(format!(
                 "project.root '{}' does not exist",
                 self.project.root
             )));
         }
         if !root.is_dir() {
-            return Err(ArchDocError::ConfigError(format!(
+            return Err(WTIsMyCodeError::ConfigError(format!(
                 "project.root '{}' is not a directory",
                 self.project.root
             )));
@@ -444,7 +444,7 @@ impl Config {
 
         // Check language is python
         if self.project.language != "python" {
-            return Err(ArchDocError::ConfigError(format!(
+            return Err(WTIsMyCodeError::ConfigError(format!(
                 "project.language '{}' is not supported. Only 'python' is currently supported",
                 self.project.language
             )));
@@ -452,7 +452,7 @@ impl Config {
 
         // Check scan.include is not empty
         if self.scan.include.is_empty() {
-            return Err(ArchDocError::ConfigError(
+            return Err(WTIsMyCodeError::ConfigError(
                 "scan.include must not be empty — at least one directory must be specified".to_string(),
             ));
         }
@@ -461,7 +461,7 @@ impl Config {
         for src_root in &self.python.src_roots {
             let path = root.join(src_root);
             if !path.exists() {
-                return Err(ArchDocError::ConfigError(format!(
+                return Err(WTIsMyCodeError::ConfigError(format!(
                     "python.src_roots entry '{}' does not exist (resolved to '{}')",
                     src_root,
                     path.display()
@@ -471,7 +471,7 @@ impl Config {
 
         // Parse max_cache_age
         parse_duration(&self.caching.max_cache_age).map_err(|e| {
-            ArchDocError::ConfigError(format!(
+            WTIsMyCodeError::ConfigError(format!(
                 "caching.max_cache_age '{}' is not valid: {}. Use formats like '24h', '7d', '30m'",
                 self.caching.max_cache_age, e
             ))
@@ -479,7 +479,7 @@ impl Config {
 
         // Parse max_file_size
         parse_file_size(&self.scan.max_file_size).map_err(|e| {
-            ArchDocError::ConfigError(format!(
+            WTIsMyCodeError::ConfigError(format!(
                 "scan.max_file_size '{}' is not valid: {}. Use formats like '10MB', '1GB', '500KB'",
                 self.scan.max_file_size, e
             ))
@@ -489,21 +489,21 @@ impl Config {
     }
 
     /// Load configuration from a TOML file
-    pub fn load_from_file(path: &Path) -> Result<Self, ArchDocError> {
+    pub fn load_from_file(path: &Path) -> Result<Self, WTIsMyCodeError> {
         let content = std::fs::read_to_string(path)
-            .map_err(|e| ArchDocError::ConfigError(format!("Failed to read config file: {}", e)))?;
+            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to read config file: {}", e)))?;
 
         toml::from_str(&content)
-            .map_err(|e| ArchDocError::ConfigError(format!("Failed to parse config file: {}", e)))
+            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to parse config file: {}", e)))
     }
 
     /// Save configuration to a TOML file
-    pub fn save_to_file(&self, path: &Path) -> Result<(), ArchDocError> {
+    pub fn save_to_file(&self, path: &Path) -> Result<(), WTIsMyCodeError> {
         let content = toml::to_string_pretty(self)
-            .map_err(|e| ArchDocError::ConfigError(format!("Failed to serialize config: {}", e)))?;
+            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to serialize config: {}", e)))?;
 
         std::fs::write(path, content)
-            .map_err(|e| ArchDocError::ConfigError(format!("Failed to write config file: {}", e)))
+            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to write config file: {}", e)))
     }
 }
 
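
For orientation, the checks in validate() above imply a wtismycode.toml shaped roughly like the following; the section and key names come from the error messages and defaults in this file, while the concrete values are illustrative only:

    [project]
    root = "."              # must exist and be a directory
    language = "python"     # only 'python' is currently supported

    [scan]
    include = ["src"]       # must not be empty
    max_file_size = "10MB"  # accepts formats like '10MB', '1GB', '500KB'

    [python]
    src_roots = ["src"]     # each entry must exist under project.root

    [caching]
    enabled = true
    cache_dir = ".wtismycode/cache"
    max_cache_age = "24h"   # accepts formats like '24h', '7d', '30m'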

wtismycode-core/src/cycle_detector.rs
@@ -114,8 +114,8 @@ fn deduplicate_cycles(cycles: Vec<Vec<String>>) -> Vec<Vec<String>> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::model::{Edges, Module, ProjectModel};
-    use std::collections::HashMap;
+    use crate::model::{Module, ProjectModel};
 
     fn make_module(id: &str, outbound: Vec<&str>) -> Module {
         Module {

wtismycode-core/src/errors.rs
@@ -1,7 +1,7 @@
 use thiserror::Error;
 
 #[derive(Error, Debug)]
-pub enum ArchDocError {
+pub enum WTIsMyCodeError {
     #[error("IO error: {0}")]
     Io(#[from] std::io::Error),
 

wtismycode-core/src/lib.rs
@@ -1,4 +1,4 @@
-//! ArchDoc Core Library
+//! WTIsMyCode Core Library
 //!
 //! This crate provides the core functionality for analyzing Python projects
 //! and generating architecture documentation.
@@ -13,16 +13,17 @@ pub mod renderer;
 pub mod writer;
 pub mod cache;
 pub mod cycle_detector;
+pub mod package_classifier;
 
 // Re-export commonly used types
-pub use errors::ArchDocError;
+pub use errors::WTIsMyCodeError;
 pub use config::Config;
 pub use model::ProjectModel;
 
 
 #[cfg(test)]
 mod tests {
-    use super::*;
 
     #[test]
     fn it_works() {
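
With package_classifier now exported alongside the renamed error type, a downstream caller (the CLI crate or an integration test) can pull the common types from the crate root. A hedged sketch, assuming the library crate is referenced as wtismycode_core and that the constructor shown in the model.rs hunk below is ProjectModel::new():

    use wtismycode_core::{Config, ProjectModel, WTIsMyCodeError};

    fn load(config_path: &std::path::Path) -> Result<(Config, ProjectModel), WTIsMyCodeError> {
        let config = Config::load_from_file(config_path)?; // from the config.rs hunk above
        config.validate()?;
        Ok((config, ProjectModel::new()))                  // empty model, filled in by the scanner
    }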

wtismycode-core/src/model.rs
@@ -1,4 +1,4 @@
-//! Intermediate Representation (IR) for ArchDoc
+//! Intermediate Representation (IR) for WTIsMyCode
 //!
 //! This module defines the data structures that represent the analyzed Python project
 //! and are used for generating documentation.
@@ -12,6 +12,9 @@ pub struct ProjectModel {
     pub files: HashMap<String, FileDoc>,
     pub symbols: HashMap<String, Symbol>,
     pub edges: Edges,
+    /// Classified integrations by category (e.g. "HTTP" -> ["fastapi", "requests"])
+    #[serde(default)]
+    pub classified_integrations: HashMap<String, Vec<String>>,
 }
 
 impl ProjectModel {
@@ -21,6 +24,7 @@ impl ProjectModel {
             files: HashMap::new(),
             symbols: HashMap::new(),
             edges: Edges::new(),
+            classified_integrations: HashMap::new(),
         }
     }
 }
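
Serialized, the new field is a plain category-to-packages map keyed by the display names used elsewhere in this change. A hypothetical fragment of the resulting JSON (package names are examples, not project data):

    "classified_integrations": {
      "HTTP": ["fastapi", "requests"],
      "Database": ["sqlalchemy"],
      "Queue": ["redis"]
    }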

wtismycode-core/src/package_classifier.rs (new file, 462 lines)
//! Package classifier for Python imports
//!
//! Classifies Python packages into categories using:
//! 1. Python stdlib list (hardcoded)
//! 2. Built-in dictionary (~200 popular packages)
//! 3. PyPI API lookup (online mode)
//! 4. Internal package detection (fallback)

use std::collections::HashMap;
use std::path::Path;

#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum PackageCategory {
    Stdlib,
    Http,
    Database,
    Queue,
    Storage,
    AiMl,
    Testing,
    Logging,
    Auth,
    Internal,
    ThirdParty,
}

impl PackageCategory {
    pub fn display_name(&self) -> &'static str {
        match self {
            Self::Stdlib => "Stdlib",
            Self::Http => "HTTP",
            Self::Database => "Database",
            Self::Queue => "Queue",
            Self::Storage => "Storage",
            Self::AiMl => "AI/ML",
            Self::Testing => "Testing",
            Self::Logging => "Logging",
            Self::Auth => "Auth",
            Self::Internal => "Internal",
            Self::ThirdParty => "Third-party",
        }
    }
}

/// Result of classifying all imports in a project
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct ClassifiedIntegrations {
    /// category -> list of package names
    pub by_category: HashMap<String, Vec<String>>,
}

pub struct PackageClassifier {
    offline: bool,
    cache_dir: Option<String>,
    /// user overrides from config integration_patterns
    user_overrides: HashMap<String, PackageCategory>,
    /// PyPI cache: package_name -> Option<PackageCategory> (None = not found)
    pypi_cache: HashMap<String, Option<PackageCategory>>,
}

impl PackageClassifier {
    pub fn new(offline: bool, cache_dir: Option<String>) -> Self {
        let mut classifier = Self {
            offline,
            cache_dir: cache_dir.clone(),
            user_overrides: HashMap::new(),
            pypi_cache: HashMap::new(),
        };
        // Load PyPI cache from disk
        if let Some(ref dir) = cache_dir {
            classifier.load_pypi_cache(dir);
        }
        classifier
    }

    /// Add user overrides from config integration_patterns
    pub fn add_user_overrides(&mut self, patterns: &[(String, Vec<String>)]) {
        for (type_name, pkgs) in patterns {
            let cat = match type_name.as_str() {
                "http" => PackageCategory::Http,
                "db" => PackageCategory::Database,
                "queue" => PackageCategory::Queue,
                "storage" => PackageCategory::Storage,
                "ai" => PackageCategory::AiMl,
                "testing" => PackageCategory::Testing,
                "logging" => PackageCategory::Logging,
                "auth" => PackageCategory::Auth,
                _ => PackageCategory::ThirdParty,
            };
            for pkg in pkgs {
                self.user_overrides.insert(pkg.to_lowercase(), cat.clone());
            }
        }
    }

    /// Classify a single package name (top-level import)
    pub fn classify(&mut self, package_name: &str) -> PackageCategory {
        let normalized = normalize_package_name(package_name);

        // 1. User overrides take priority
        if let Some(cat) = self.user_overrides.get(&normalized) {
            return cat.clone();
        }

        // 2. Built-in dictionary (check BEFORE stdlib, so sqlite3 etc. are categorized properly)
        if let Some(cat) = builtin_lookup(&normalized) {
            return cat;
        }

        // 3. Stdlib
        if is_stdlib(&normalized) {
            return PackageCategory::Stdlib;
        }

        // 4. PyPI lookup (if online)
        if !self.offline {
            if let Some(cached) = self.pypi_cache.get(&normalized) {
                return cached.clone().unwrap_or(PackageCategory::Internal);
            }
            match self.pypi_lookup(&normalized) {
                Some(cat) => {
                    self.pypi_cache.insert(normalized, Some(cat.clone()));
                    return cat;
                }
                None => {
                    self.pypi_cache.insert(normalized, None);
                    return PackageCategory::Internal;
                }
            }
        }

        // 5. Offline fallback: if not in stdlib or dictionary, assume internal
        PackageCategory::Internal
    }

|
/// Classify all imports and return grouped integrations
|
||||||
|
pub fn classify_all(&mut self, import_names: &[String]) -> ClassifiedIntegrations {
|
||||||
|
let mut result = ClassifiedIntegrations::default();
|
||||||
|
let mut seen: HashMap<String, PackageCategory> = HashMap::new();
|
||||||
|
|
||||||
|
for import in import_names {
|
||||||
|
let top_level = top_level_package(import);
|
||||||
|
if seen.contains_key(&top_level) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let cat = self.classify(&top_level);
|
||||||
|
seen.insert(top_level.clone(), cat.clone());
|
||||||
|
|
||||||
|
// Skip stdlib and third-party without category
|
||||||
|
if cat == PackageCategory::Stdlib {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let category_name = cat.display_name().to_string();
|
||||||
|
result.by_category
|
||||||
|
.entry(category_name)
|
||||||
|
.or_default()
|
||||||
|
.push(top_level);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and sort each category
|
||||||
|
for pkgs in result.by_category.values_mut() {
|
||||||
|
pkgs.sort();
|
||||||
|
pkgs.dedup();
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save PyPI cache to disk
|
||||||
|
pub fn save_cache(&self) {
|
||||||
|
if let Some(ref dir) = self.cache_dir {
|
||||||
|
let cache_path = Path::new(dir).join("pypi.json");
|
||||||
|
if let Ok(json) = serde_json::to_string_pretty(&self.pypi_cache) {
|
||||||
|
let _ = std::fs::create_dir_all(dir);
|
||||||
|
let _ = std::fs::write(&cache_path, json);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_pypi_cache(&mut self, dir: &str) {
|
||||||
|
let cache_path = Path::new(dir).join("pypi.json");
|
||||||
|
if let Ok(content) = std::fs::read_to_string(&cache_path)
|
||||||
|
&& let Ok(cache) = serde_json::from_str::<HashMap<String, Option<PackageCategory>>>(&content) {
|
||||||
|
self.pypi_cache = cache;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn pypi_lookup(&self, package_name: &str) -> Option<PackageCategory> {
|
||||||
|
let url = format!("https://pypi.org/pypi/{}/json", package_name);
|
||||||
|
|
||||||
|
let agent = ureq::Agent::new_with_config(
|
||||||
|
ureq::config::Config::builder()
|
||||||
|
.timeout_global(Some(std::time::Duration::from_secs(3)))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = agent.get(&url).call().ok()?;
|
||||||
|
|
||||||
|
if response.status() != 200 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let body_str = response.into_body().read_to_string().ok()?;
|
||||||
|
let body: serde_json::Value = serde_json::from_str(&body_str).ok()?;
|
||||||
|
let info = body.get("info")?;
|
||||||
|
|
||||||
|
// Check classifiers
|
||||||
|
if let Some(classifiers) = info.get("classifiers").and_then(|c: &serde_json::Value| c.as_array()) {
|
||||||
|
for classifier in classifiers {
|
||||||
|
if let Some(s) = classifier.as_str()
|
||||||
|
&& let Some(cat) = classify_from_pypi_classifier(s) {
|
||||||
|
return Some(cat);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check summary and keywords for hints
|
||||||
|
let summary = info.get("summary").and_then(|s: &serde_json::Value| s.as_str()).unwrap_or("");
|
||||||
|
let keywords = info.get("keywords").and_then(|s: &serde_json::Value| s.as_str()).unwrap_or("");
|
||||||
|
let combined = format!("{} {}", summary, keywords).to_lowercase();
|
||||||
|
|
||||||
|
if combined.contains("database") || combined.contains("sql") || combined.contains("orm") {
|
||||||
|
return Some(PackageCategory::Database);
|
||||||
|
}
|
||||||
|
if combined.contains("http") || combined.contains("web framework") || combined.contains("rest api") {
|
||||||
|
return Some(PackageCategory::Http);
|
||||||
|
}
|
||||||
|
if combined.contains("queue") || combined.contains("message broker") || combined.contains("amqp") || combined.contains("kafka") {
|
||||||
|
return Some(PackageCategory::Queue);
|
||||||
|
}
|
||||||
|
if combined.contains("storage") || combined.contains("s3") || combined.contains("blob") {
|
||||||
|
return Some(PackageCategory::Storage);
|
||||||
|
}
|
||||||
|
if combined.contains("machine learning") || combined.contains("deep learning") || combined.contains("neural") || combined.contains("artificial intelligence") {
|
||||||
|
return Some(PackageCategory::AiMl);
|
||||||
|
}
|
||||||
|
if combined.contains("testing") || combined.contains("test framework") {
|
||||||
|
return Some(PackageCategory::Testing);
|
||||||
|
}
|
||||||
|
if combined.contains("logging") || combined.contains("error tracking") {
|
||||||
|
return Some(PackageCategory::Logging);
|
||||||
|
}
|
||||||
|
if combined.contains("authentication") || combined.contains("jwt") || combined.contains("oauth") {
|
||||||
|
return Some(PackageCategory::Auth);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Found on PyPI but no category detected
|
||||||
|
Some(PackageCategory::ThirdParty)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn classify_from_pypi_classifier(classifier: &str) -> Option<PackageCategory> {
|
||||||
|
let c = classifier.to_lowercase();
|
||||||
|
if c.contains("framework :: django") || c.contains("framework :: flask") ||
|
||||||
|
c.contains("framework :: fastapi") || c.contains("framework :: tornado") ||
|
||||||
|
c.contains("framework :: aiohttp") || c.contains("topic :: internet :: www") {
|
||||||
|
return Some(PackageCategory::Http);
|
||||||
|
}
|
||||||
|
if c.contains("topic :: database") {
|
||||||
|
return Some(PackageCategory::Database);
|
||||||
|
}
|
||||||
|
if c.contains("topic :: scientific/engineering :: artificial intelligence") ||
|
||||||
|
c.contains("topic :: scientific/engineering :: machine learning") {
|
||||||
|
return Some(PackageCategory::AiMl);
|
||||||
|
}
|
||||||
|
if c.contains("topic :: software development :: testing") {
|
||||||
|
return Some(PackageCategory::Testing);
|
||||||
|
}
|
||||||
|
if c.contains("topic :: system :: logging") {
|
||||||
|
return Some(PackageCategory::Logging);
|
||||||
|
}
|
||||||
|
if c.contains("topic :: security") && (classifier.contains("auth") || classifier.contains("Auth")) {
|
||||||
|
return Some(PackageCategory::Auth);
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract top-level package name from an import string
|
||||||
|
/// e.g. "sqlalchemy.orm.Session" -> "sqlalchemy"
|
||||||
|
fn top_level_package(import: &str) -> String {
|
||||||
|
import.split('.').next().unwrap_or(import).to_lowercase()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Normalize package name for lookup (lowercase, replace hyphens with underscores)
|
||||||
|
fn normalize_package_name(name: &str) -> String {
|
||||||
|
name.to_lowercase().replace('-', "_")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if a package is in the Python standard library
|
||||||
|
fn is_stdlib(name: &str) -> bool {
|
||||||
|
PYTHON_STDLIB.contains(&name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Look up a package in the built-in dictionary
|
||||||
|
fn builtin_lookup(name: &str) -> Option<PackageCategory> {
|
||||||
|
for (cat, pkgs) in BUILTIN_PACKAGES.iter() {
|
||||||
|
if pkgs.contains(&name) {
|
||||||
|
return Some(cat.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
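Taken together, the helpers above support the lookup order the classifier applies; a minimal offline sketch (the real classify entry point also handles internal modules, the cache, and PyPI lookups, and its exact signature is not shown in this hunk):

fn classify_offline(import: &str) -> PackageCategory {
    // "sqlalchemy.orm.Session" -> "sqlalchemy", "aio-pika" -> "aio_pika"
    let name = normalize_package_name(&top_level_package(import));
    if is_stdlib(&name) {
        return PackageCategory::Stdlib;
    }
    builtin_lookup(&name).unwrap_or(PackageCategory::ThirdParty)
}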
|
|
||||||
|
// Python 3.10+ standard library modules
|
||||||
|
const PYTHON_STDLIB: &[&str] = &[
|
||||||
|
"__future__", "_thread", "abc", "aifc", "argparse", "array", "ast",
|
||||||
|
"asynchat", "asyncio", "asyncore", "atexit", "audioop", "base64",
|
||||||
|
"bdb", "binascii", "binhex", "bisect", "builtins", "bz2",
|
||||||
|
"calendar", "cgi", "cgitb", "chunk", "cmath", "cmd", "code",
|
||||||
|
"codecs", "codeop", "collections", "colorsys", "compileall",
|
||||||
|
"concurrent", "configparser", "contextlib", "contextvars", "copy",
|
||||||
|
"copyreg", "cprofile", "crypt", "csv", "ctypes", "curses",
|
||||||
|
"dataclasses", "datetime", "dbm", "decimal", "difflib", "dis",
|
||||||
|
"distutils", "doctest", "email", "encodings", "enum", "errno",
|
||||||
|
"faulthandler", "fcntl", "filecmp", "fileinput", "fnmatch",
|
||||||
|
"formatter", "fractions", "ftplib", "functools", "gc", "getopt",
|
||||||
|
"getpass", "gettext", "glob", "grp", "gzip", "hashlib", "heapq",
|
||||||
|
"hmac", "html", "http", "idlelib", "imaplib", "imghdr", "imp",
|
||||||
|
"importlib", "inspect", "io", "ipaddress", "itertools", "json",
|
||||||
|
"keyword", "lib2to3", "linecache", "locale", "logging", "lzma",
|
||||||
|
"mailbox", "mailcap", "marshal", "math", "mimetypes", "mmap",
|
||||||
|
"modulefinder", "multiprocessing", "netrc", "nis", "nntplib",
|
||||||
|
"numbers", "operator", "optparse", "os", "ossaudiodev", "parser",
|
||||||
|
"pathlib", "pdb", "pickle", "pickletools", "pipes", "pkgutil",
|
||||||
|
"platform", "plistlib", "poplib", "posix", "posixpath", "pprint",
|
||||||
|
"profile", "pstats", "pty", "pwd", "py_compile", "pyclbr",
|
||||||
|
"pydoc", "queue", "quopri", "random", "re", "readline", "reprlib",
|
||||||
|
"resource", "rlcompleter", "runpy", "sched", "secrets", "select",
|
||||||
|
"selectors", "shelve", "shlex", "shutil", "signal", "site",
|
||||||
|
"smtpd", "smtplib", "sndhdr", "socket", "socketserver", "spwd",
|
||||||
|
"sqlite3", "ssl", "stat", "statistics", "string", "stringprep",
|
||||||
|
"struct", "subprocess", "sunau", "symtable", "sys", "sysconfig",
|
||||||
|
"syslog", "tabnanny", "tarfile", "telnetlib", "tempfile", "termios",
|
||||||
|
"test", "textwrap", "threading", "time", "timeit", "tkinter",
|
||||||
|
"token", "tokenize", "tomllib", "trace", "traceback", "tracemalloc",
|
||||||
|
"tty", "turtle", "turtledemo", "types", "typing", "unicodedata",
|
||||||
|
"unittest", "urllib", "uu", "uuid", "venv", "warnings", "wave",
|
||||||
|
"weakref", "webbrowser", "winreg", "winsound", "wsgiref", "xdrlib",
|
||||||
|
"xml", "xmlrpc", "zipapp", "zipfile", "zipimport", "zlib",
|
||||||
|
// Dotted stdlib sub-modules and stdlib-adjacent names that show up as top-level imports
|
||||||
|
"os.path", "collections.abc", "concurrent.futures", "typing_extensions",
|
||||||
|
];
|
||||||
|
|
||||||
|
lazy_static::lazy_static! {
|
||||||
|
static ref BUILTIN_PACKAGES: Vec<(PackageCategory, Vec<&'static str>)> = vec![
|
||||||
|
(PackageCategory::Http, vec![
|
||||||
|
"requests", "httpx", "aiohttp", "fastapi", "flask", "django",
|
||||||
|
"starlette", "uvicorn", "gunicorn", "tornado", "sanic", "bottle",
|
||||||
|
"falcon", "quart", "werkzeug", "httptools", "uvloop", "hypercorn",
|
||||||
|
"grpcio", "grpc", "graphene", "strawberry", "ariadne",
|
||||||
|
"pydantic", "marshmallow", "connexion", "responder", "hug",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Database, vec![
|
||||||
|
"sqlalchemy", "psycopg2", "psycopg", "asyncpg", "pymongo",
|
||||||
|
"mongoengine", "peewee", "tortoise", "databases",
|
||||||
|
"alembic", "pymysql", "opensearch", "opensearchpy", "elasticsearch",
|
||||||
|
"motor", "beanie", "odmantic", "sqlmodel",
|
||||||
|
"piccolo", "edgedb", "cassandra", "clickhouse_driver", "sqlite3",
|
||||||
|
"neo4j", "arango", "influxdb", "timescaledb",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Queue, vec![
|
||||||
|
"celery", "pika", "aio_pika", "kafka", "confluent_kafka",
|
||||||
|
"kombu", "dramatiq", "huey", "rq", "nats", "redis", "aioredis",
|
||||||
|
"aiokafka", "taskiq", "arq",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Storage, vec![
|
||||||
|
"minio", "boto3", "botocore", "google.cloud.storage",
|
||||||
|
"azure.storage.blob", "s3fs", "fsspec", "smart_open",
|
||||||
|
]),
|
||||||
|
(PackageCategory::AiMl, vec![
|
||||||
|
"torch", "tensorflow", "transformers", "langchain",
|
||||||
|
"langchain_core", "langchain_openai", "langchain_community",
|
||||||
|
"openai", "anthropic", "scikit_learn", "sklearn",
|
||||||
|
"numpy", "pandas", "scipy", "matplotlib", "keras",
|
||||||
|
"whisper", "sentence_transformers", "qdrant_client",
|
||||||
|
"chromadb", "pinecone", "faiss", "xgboost", "lightgbm",
|
||||||
|
"catboost", "spacy", "nltk", "gensim", "huggingface_hub",
|
||||||
|
"diffusers", "accelerate", "datasets", "tokenizers",
|
||||||
|
"tiktoken", "llama_index", "autogen", "crewai",
|
||||||
|
"seaborn", "plotly", "bokeh",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Testing, vec![
|
||||||
|
"pytest", "mock", "faker", "hypothesis",
|
||||||
|
"factory_boy", "factory", "responses", "httpretty",
|
||||||
|
"vcrpy", "freezegun", "time_machine", "pytest_asyncio",
|
||||||
|
"pytest_mock", "pytest_cov", "coverage", "tox", "nox",
|
||||||
|
"behave", "robot", "selenium", "playwright", "locust",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Auth, vec![
|
||||||
|
"pyjwt", "jwt", "python_jose", "jose", "passlib",
|
||||||
|
"authlib", "oauthlib", "itsdangerous", "bcrypt",
|
||||||
|
"cryptography", "paramiko",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Logging, vec![
|
||||||
|
"loguru", "structlog", "sentry_sdk", "watchtower",
|
||||||
|
"python_json_logger", "colorlog", "rich", "prometheus_client",
|
||||||
|
]),
|
||||||
|
(PackageCategory::AiMl, vec![
|
||||||
|
"pyannote", "soundfile", "librosa", "audioread", "webrtcvad",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Queue, vec![
|
||||||
|
"aiormq",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Http, vec![
|
||||||
|
"pydantic_settings", "pydantic_extra_types", "fastapi_mail",
|
||||||
|
]),
|
||||||
|
(PackageCategory::Database, vec![
|
||||||
|
"peewee_async", "peewee_migrate",
|
||||||
|
]),
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_stdlib_detection() {
|
||||||
|
assert!(is_stdlib("os"));
|
||||||
|
assert!(is_stdlib("sys"));
|
||||||
|
assert!(is_stdlib("json"));
|
||||||
|
assert!(is_stdlib("asyncio"));
|
||||||
|
assert!(!is_stdlib("requests"));
|
||||||
|
assert!(!is_stdlib("fastapi"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_builtin_lookup() {
|
||||||
|
assert_eq!(builtin_lookup("requests"), Some(PackageCategory::Http));
|
||||||
|
assert_eq!(builtin_lookup("sqlalchemy"), Some(PackageCategory::Database));
|
||||||
|
assert_eq!(builtin_lookup("celery"), Some(PackageCategory::Queue));
|
||||||
|
assert_eq!(builtin_lookup("minio"), Some(PackageCategory::Storage));
|
||||||
|
assert_eq!(builtin_lookup("torch"), Some(PackageCategory::AiMl));
|
||||||
|
assert_eq!(builtin_lookup("pytest"), Some(PackageCategory::Testing));
|
||||||
|
assert_eq!(builtin_lookup("loguru"), Some(PackageCategory::Logging));
|
||||||
|
assert_eq!(builtin_lookup("pyjwt"), Some(PackageCategory::Auth));
|
||||||
|
assert_eq!(builtin_lookup("nonexistent_pkg"), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_top_level_package() {
|
||||||
|
assert_eq!(top_level_package("sqlalchemy.orm.Session"), "sqlalchemy");
|
||||||
|
assert_eq!(top_level_package("os.path"), "os");
|
||||||
|
assert_eq!(top_level_package("requests"), "requests");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_package_name() {
|
||||||
|
assert_eq!(normalize_package_name("aio-pika"), "aio_pika");
|
||||||
|
assert_eq!(normalize_package_name("scikit-learn"), "scikit_learn");
|
||||||
|
assert_eq!(normalize_package_name("FastAPI"), "fastapi");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_classify_offline() {
|
||||||
|
let mut classifier = PackageClassifier::new(true, None);
|
||||||
|
assert_eq!(classifier.classify("os"), PackageCategory::Stdlib);
|
||||||
|
assert_eq!(classifier.classify("requests"), PackageCategory::Http);
|
||||||
|
assert_eq!(classifier.classify("my_internal_pkg"), PackageCategory::Internal);
|
||||||
|
}
|
||||||
|
}
|
||||||
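For orientation, this is the call pattern the analyzer hunk below relies on; `classify_all` returning a struct with a `by_category` map and the shape of the override tuples are inferred from those call sites, so treat the exact signatures as assumptions:

let mut classifier = PackageClassifier::new(/* offline */ true, /* cache_dir */ None);
classifier.add_user_overrides(&[("Queue".to_string(), vec!["my_internal_bus".to_string()])]);
let result = classifier.classify_all(&["fastapi".to_string(), "my_internal_bus".to_string()]);
for (category, packages) in &result.by_category {
    println!("{}: {:?}", category, packages);
}
classifier.save_cache();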
@@ -1,11 +1,11 @@
|
|||||||
//! Python AST analyzer for ArchDoc
|
//! Python AST analyzer for WTIsMyCode
|
||||||
//!
|
//!
|
||||||
//! This module handles parsing Python files using AST and extracting
|
//! This module handles parsing Python files using AST and extracting
|
||||||
//! imports, definitions, and calls.
|
//! imports, definitions, and calls.
|
||||||
|
|
||||||
use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc};
|
use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc};
|
||||||
use crate::config::Config;
|
use crate::config::Config;
|
||||||
use crate::errors::ArchDocError;
|
use crate::errors::WTIsMyCodeError;
|
||||||
use crate::cache::CacheManager;
|
use crate::cache::CacheManager;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
@@ -15,25 +15,31 @@ use rustpython_ast::{Stmt, Expr, Ranged};
|
|||||||
pub struct PythonAnalyzer {
|
pub struct PythonAnalyzer {
|
||||||
config: Config,
|
config: Config,
|
||||||
cache_manager: CacheManager,
|
cache_manager: CacheManager,
|
||||||
|
offline: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PythonAnalyzer {
|
impl PythonAnalyzer {
|
||||||
pub fn new(config: Config) -> Self {
|
pub fn new(config: Config) -> Self {
|
||||||
let cache_manager = CacheManager::new(config.clone());
|
let cache_manager = CacheManager::new(config.clone());
|
||||||
Self { config, cache_manager }
|
Self { config, cache_manager, offline: false }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, ArchDocError> {
|
pub fn new_with_options(config: Config, offline: bool) -> Self {
|
||||||
|
let cache_manager = CacheManager::new(config.clone());
|
||||||
|
Self { config, cache_manager, offline }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, WTIsMyCodeError> {
|
||||||
// Try to get from cache first
|
// Try to get from cache first
|
||||||
if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? {
|
if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? {
|
||||||
return Ok(cached_module);
|
return Ok(cached_module);
|
||||||
}
|
}
|
||||||
|
|
||||||
let code = fs::read_to_string(file_path)
|
let code = fs::read_to_string(file_path)
|
||||||
.map_err(ArchDocError::Io)?;
|
.map_err(WTIsMyCodeError::Io)?;
|
||||||
|
|
||||||
let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>"))
|
let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>"))
|
||||||
.map_err(|e| ArchDocError::ParseError {
|
.map_err(|e| WTIsMyCodeError::ParseError {
|
||||||
file: file_path.to_string_lossy().to_string(),
|
file: file_path.to_string_lossy().to_string(),
|
||||||
line: 0,
|
line: 0,
|
||||||
message: format!("Failed to parse: {}", e),
|
message: format!("Failed to parse: {}", e),
|
||||||
@@ -589,7 +595,7 @@ impl PythonAnalyzer {
|
|||||||
normalized.to_string()
|
normalized.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, ArchDocError> {
|
pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, WTIsMyCodeError> {
|
||||||
let mut project_model = ProjectModel::new();
|
let mut project_model = ProjectModel::new();
|
||||||
|
|
||||||
// Build import alias map for call resolution
|
// Build import alias map for call resolution
|
||||||
@@ -658,9 +664,9 @@ impl PythonAnalyzer {
|
|||||||
let doc_summary = if is_init {
|
let doc_summary = if is_init {
|
||||||
parsed_module.file_docstring.clone()
|
parsed_module.file_docstring.clone()
|
||||||
} else {
|
} else {
|
||||||
// For non-init files, check if there's an __init__.py docstring for this module's parent
|
// For non-init files, use file docstring first, then check __init__.py
|
||||||
init_docstrings.get(&module_id).cloned()
|
parsed_module.file_docstring.clone()
|
||||||
.or_else(|| parsed_module.file_docstring.clone())
|
.or_else(|| init_docstrings.get(&module_id).cloned())
|
||||||
};
|
};
|
||||||
|
|
||||||
let module = Module {
|
let module = Module {
|
||||||
@@ -679,6 +685,84 @@ impl PythonAnalyzer {
|
|||||||
self.resolve_call_types(&mut project_model, modules, &import_aliases);
|
self.resolve_call_types(&mut project_model, modules, &import_aliases);
|
||||||
self.compute_metrics(&mut project_model)?;
|
self.compute_metrics(&mut project_model)?;
|
||||||
|
|
||||||
|
// Classify all imports using PackageClassifier
|
||||||
|
// Collect all known project module names to filter from integrations
|
||||||
|
let project_modules: std::collections::HashSet<String> = modules.iter()
|
||||||
|
.map(|m| {
|
||||||
|
let mod_path = self.compute_module_path(&m.path);
|
||||||
|
mod_path.split('.').next().unwrap_or(&mod_path).to_lowercase()
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let all_imports: Vec<String> = modules.iter()
|
||||||
|
.flat_map(|m| m.imports.iter().map(|i| i.module_name.clone()))
|
||||||
|
.filter(|import| {
|
||||||
|
let top = import.split('.').next().unwrap_or(import).to_lowercase();
|
||||||
|
// Skip imports that are project's own modules
|
||||||
|
!project_modules.contains(&top)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let cache_dir = if self.config.caching.enabled {
|
||||||
|
Some(self.config.caching.cache_dir.clone())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
let mut classifier = crate::package_classifier::PackageClassifier::new(self.offline, cache_dir);
|
||||||
|
|
||||||
|
// Add user overrides from config integration_patterns
|
||||||
|
if !self.config.analysis.integration_patterns.is_empty() {
|
||||||
|
let overrides: Vec<(String, Vec<String>)> = self.config.analysis.integration_patterns.iter()
|
||||||
|
.map(|p| (p.type_.clone(), p.patterns.clone()))
|
||||||
|
.collect();
|
||||||
|
classifier.add_user_overrides(&overrides);
|
||||||
|
}
|
||||||
|
|
||||||
|
let classified = classifier.classify_all(&all_imports);
|
||||||
|
classifier.save_cache();
|
||||||
|
|
||||||
|
project_model.classified_integrations = classified.by_category;
|
||||||
|
|
||||||
|
// Also update per-symbol integration flags based on classification
|
||||||
|
for parsed_module in modules {
|
||||||
|
let module_id = self.compute_module_path(&parsed_module.path);
|
||||||
|
let import_names: Vec<String> = parsed_module.imports.iter()
|
||||||
|
.map(|i| i.module_name.clone())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut flags = crate::model::IntegrationFlags {
|
||||||
|
http: false, db: false, queue: false, storage: false, ai: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
for import in &import_names {
|
||||||
|
let top = import.split('.').next().unwrap_or(import).to_lowercase().replace('-', "_");
|
||||||
|
{
|
||||||
|
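// Offline lookup only: a throwaway classifier over the builtin table,
// so this per-import loop never touches the network or the cache.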
let cat = crate::package_classifier::PackageClassifier::new(true, None).classify(&top);
|
||||||
|
match cat {
|
||||||
|
crate::package_classifier::PackageCategory::Http => flags.http = true,
|
||||||
|
crate::package_classifier::PackageCategory::Database => flags.db = true,
|
||||||
|
crate::package_classifier::PackageCategory::Queue => flags.queue = true,
|
||||||
|
crate::package_classifier::PackageCategory::Storage => flags.storage = true,
|
||||||
|
crate::package_classifier::PackageCategory::AiMl => flags.ai = true,
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply to all symbols in this module
|
||||||
|
if let Some(module) = project_model.modules.get(&module_id) {
|
||||||
|
for sym_id in &module.symbols {
|
||||||
|
if let Some(sym) = project_model.symbols.get_mut(sym_id) {
|
||||||
|
sym.integrations_flags.http |= flags.http;
|
||||||
|
sym.integrations_flags.db |= flags.db;
|
||||||
|
sym.integrations_flags.queue |= flags.queue;
|
||||||
|
sym.integrations_flags.storage |= flags.storage;
|
||||||
|
sym.integrations_flags.ai |= flags.ai;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(project_model)
|
Ok(project_model)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -729,7 +813,7 @@ impl PythonAnalyzer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), ArchDocError> {
|
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), WTIsMyCodeError> {
|
||||||
// Collect known internal module IDs
|
// Collect known internal module IDs
|
||||||
let known_modules: std::collections::HashSet<String> = project_model.modules.keys().cloned().collect();
|
let known_modules: std::collections::HashSet<String> = project_model.modules.keys().cloned().collect();
|
||||||
|
|
||||||
@@ -784,11 +868,20 @@ impl PythonAnalyzer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for parsed_module in parsed_modules {
|
for parsed_module in parsed_modules {
|
||||||
|
let module_id = self.compute_module_path(&parsed_module.path);
|
||||||
for call in &parsed_module.calls {
|
for call in &parsed_module.calls {
|
||||||
let callee_expr = call.callee_expr.clone();
|
// Qualify from_id with module to match symbol IDs (module::symbol)
|
||||||
|
let from_id = format!("{}::{}", module_id, call.caller_symbol);
|
||||||
|
|
||||||
|
// Try to resolve callee to a qualified symbol ID
|
||||||
|
// If callee_expr is "module.func", try to find it as "resolved_module::func"
|
||||||
|
let to_id = self.resolve_callee_to_symbol_id(
|
||||||
|
&call.callee_expr, &module_id, project_model
|
||||||
|
);
|
||||||
|
|
||||||
let edge = crate::model::Edge {
|
let edge = crate::model::Edge {
|
||||||
from_id: call.caller_symbol.clone(),
|
from_id,
|
||||||
to_id: callee_expr,
|
to_id,
|
||||||
edge_type: crate::model::EdgeType::SymbolCall,
|
edge_type: crate::model::EdgeType::SymbolCall,
|
||||||
meta: None,
|
meta: None,
|
||||||
};
|
};
|
||||||
@@ -799,7 +892,54 @@ impl PythonAnalyzer {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), ArchDocError> {
|
/// Resolve a callee expression to a qualified symbol ID.
|
||||||
|
/// E.g., "SomeClass.method" or "func" -> "module::func"
|
||||||
|
fn resolve_callee_to_symbol_id(&self, callee_expr: &str, from_module: &str, model: &ProjectModel) -> String {
|
||||||
|
// First try: exact match as qualified ID in the same module
|
||||||
|
let same_module_id = format!("{}::{}", from_module, callee_expr);
|
||||||
|
if model.symbols.contains_key(&same_module_id) {
|
||||||
|
return same_module_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try: callee might be "func" and exist in another module via imports
|
||||||
|
// and look for the bare name among the modules this one imports from
|
||||||
|
let parts: Vec<&str> = callee_expr.splitn(2, '.').collect();
|
||||||
|
let bare_name = parts[0];
|
||||||
|
|
||||||
|
// Look through imports of from_module to find resolved target
|
||||||
|
if let Some(module) = model.modules.get(from_module) {
|
||||||
|
for outbound in &module.outbound_modules {
|
||||||
|
let candidate = format!("{}::{}", outbound, bare_name);
|
||||||
|
if model.symbols.contains_key(&candidate) {
|
||||||
|
return candidate;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: return qualified with current module
|
||||||
|
same_module_id
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if a class symbol is a simple data container (dataclass-like).
|
||||||
|
/// A class is considered a dataclass if it has ≤2 methods (typically __init__ and __repr__/__str__).
|
||||||
|
fn is_dataclass_like(symbol_id: &str, project_model: &ProjectModel) -> bool {
|
||||||
|
let symbol = match project_model.symbols.get(symbol_id) {
|
||||||
|
Some(s) => s,
|
||||||
|
None => return false,
|
||||||
|
};
|
||||||
|
if symbol.kind != crate::model::SymbolKind::Class {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// Count methods belonging to this class
|
||||||
|
let class_name = &symbol.qualname;
|
||||||
|
let method_prefix = format!("{}::{}.", symbol.module_id, class_name);
|
||||||
|
let method_count = project_model.symbols.values()
|
||||||
|
.filter(|s| s.kind == crate::model::SymbolKind::Method && s.id.starts_with(&method_prefix))
|
||||||
|
.count();
|
||||||
|
method_count <= 2
|
||||||
|
}
|
||||||
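A few illustrative resolutions for the two helpers above (module and symbol names are made up):

// Same-module call: "parse" from "app.services", where "app.services::parse"
//   exists -> "app.services::parse"
// Cross-module call: "run" from "app.services" with outbound module "app.jobs",
//   where "app.jobs::run" exists -> "app.jobs::run"
// Unresolved call: falls back to "app.services::<callee_expr>".
//
// A class such as "app.models::User" with only __init__ and __repr__ counts as
// dataclass-like, so compute_metrics below will not mark it critical even if
// its fan-in exceeds the configured threshold.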
|
|
||||||
|
fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), WTIsMyCodeError> {
|
||||||
// Collect fan-in/fan-out first to avoid borrow issues
|
// Collect fan-in/fan-out first to avoid borrow issues
|
||||||
let mut metrics: std::collections::HashMap<String, (usize, usize)> = std::collections::HashMap::new();
|
let mut metrics: std::collections::HashMap<String, (usize, usize)> = std::collections::HashMap::new();
|
||||||
|
|
||||||
@@ -815,12 +955,20 @@ impl PythonAnalyzer {
|
|||||||
metrics.insert(symbol_id.clone(), (fan_in, fan_out));
|
metrics.insert(symbol_id.clone(), (fan_in, fan_out));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Pre-compute which symbols are dataclass-like (need immutable borrow)
|
||||||
|
let dataclass_ids: std::collections::HashSet<String> = metrics.keys()
|
||||||
|
.filter(|id| Self::is_dataclass_like(id, project_model))
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
|
||||||
for (symbol_id, (fan_in, fan_out)) in &metrics {
|
for (symbol_id, (fan_in, fan_out)) in &metrics {
|
||||||
if let Some(symbol) = project_model.symbols.get_mut(symbol_id) {
|
if let Some(symbol) = project_model.symbols.get_mut(symbol_id) {
|
||||||
symbol.metrics.fan_in = *fan_in;
|
symbol.metrics.fan_in = *fan_in;
|
||||||
symbol.metrics.fan_out = *fan_out;
|
symbol.metrics.fan_out = *fan_out;
|
||||||
symbol.metrics.is_critical = *fan_in > self.config.thresholds.critical_fan_in
|
// Don't mark dataclass-like classes as critical — they're just data containers
|
||||||
|
let exceeds_threshold = *fan_in > self.config.thresholds.critical_fan_in
|
||||||
|| *fan_out > self.config.thresholds.critical_fan_out;
|
|| *fan_out > self.config.thresholds.critical_fan_out;
|
||||||
|
symbol.metrics.is_critical = exceeds_threshold && !dataclass_ids.contains(symbol_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
//! Markdown renderer for ArchDoc
|
//! Markdown renderer for WTIsMyCode
|
||||||
//!
|
//!
|
||||||
//! This module handles generating Markdown documentation from the project model
|
//! This module handles generating Markdown documentation from the project model
|
||||||
//! using templates.
|
//! using templates.
|
||||||
@@ -8,6 +8,7 @@ use crate::cycle_detector;
|
|||||||
use crate::model::{ProjectModel, SymbolKind};
|
use crate::model::{ProjectModel, SymbolKind};
|
||||||
use chrono::Utc;
|
use chrono::Utc;
|
||||||
use handlebars::Handlebars;
|
use handlebars::Handlebars;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
fn sanitize_for_link(filename: &str) -> String {
|
fn sanitize_for_link(filename: &str) -> String {
|
||||||
let cleaned = filename.strip_prefix("./").unwrap_or(filename);
|
let cleaned = filename.strip_prefix("./").unwrap_or(filename);
|
||||||
@@ -65,7 +66,7 @@ impl Renderer {
|
|||||||
## Document metadata
|
## Document metadata
|
||||||
- **Created:** {{{created_date}}}
|
- **Created:** {{{created_date}}}
|
||||||
- **Updated:** {{{updated_date}}}
|
- **Updated:** {{{updated_date}}}
|
||||||
- **Generated by:** archdoc (cli) v0.1
|
- **Generated by:** wtismycode (cli) v0.1
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -73,29 +74,12 @@ impl Renderer {
|
|||||||
<!-- ARCHDOC:BEGIN section=integrations -->
|
<!-- ARCHDOC:BEGIN section=integrations -->
|
||||||
> Generated. Do not edit inside this block.
|
> Generated. Do not edit inside this block.
|
||||||
|
|
||||||
### Database Integrations
|
{{#each integration_sections}}
|
||||||
{{#each db_integrations}}
|
### {{{category}}}
|
||||||
|
{{#each packages}}
|
||||||
- {{{this}}}
|
- {{{this}}}
|
||||||
{{/each}}
|
{{/each}}
|
||||||
|
|
||||||
### HTTP/API Integrations
|
|
||||||
{{#each http_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### Queue Integrations
|
|
||||||
{{#each queue_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### Storage Integrations
|
|
||||||
{{#each storage_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### AI/ML Integrations
|
|
||||||
{{#each ai_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
{{/each}}
|
||||||
<!-- ARCHDOC:END section=integrations -->
|
<!-- ARCHDOC:END section=integrations -->
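The `{{#each integration_sections}}` block above consumes data shaped like this (the package names are placeholders); each entry becomes a `### <Category>` heading followed by a bullet list:

let integration_sections = serde_json::json!([
    {"category": "Database", "packages": ["asyncpg", "sqlalchemy"]},
    {"category": "Http", "packages": ["fastapi", "httpx"]}
]);
// Renders as:
//   ### Database
//   - asyncpg
//   - sqlalchemy
//   ### Http
//   - fastapi
//   - httpx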
|
||||||
|
|
||||||
@@ -124,10 +108,16 @@ impl Renderer {
|
|||||||
## Modules index
|
## Modules index
|
||||||
<!-- ARCHDOC:BEGIN section=modules_index -->
|
<!-- ARCHDOC:BEGIN section=modules_index -->
|
||||||
> Generated. Do not edit inside this block.
|
> Generated. Do not edit inside this block.
|
||||||
| Module | Symbols | Inbound | Outbound | Link |
|
|
||||||
|--------|---------|---------|----------|------|
|
{{#each module_groups}}
|
||||||
|
### {{{group_name}}} ({{{module_count}}} modules)
|
||||||
|
|
||||||
|
| Module | Tag | Symbols | Inbound | Outbound | Link |
|
||||||
|
|--------|-----|---------|---------|----------|------|
|
||||||
{{#each modules}}
|
{{#each modules}}
|
||||||
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
| {{{name}}} | {{{tag}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
||||||
|
{{/each}}
|
||||||
|
|
||||||
{{/each}}
|
{{/each}}
|
||||||
<!-- ARCHDOC:END section=modules_index -->
|
<!-- ARCHDOC:END section=modules_index -->
|
||||||
|
|
||||||
@@ -258,28 +248,23 @@ impl Renderer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_architecture_md(&self, model: &ProjectModel, config: Option<&Config>) -> Result<String, anyhow::Error> {
|
pub fn render_architecture_md(&self, model: &ProjectModel, config: Option<&Config>) -> Result<String, anyhow::Error> {
|
||||||
// Collect integration information
|
// Build integration sections from classified_integrations
|
||||||
let mut db_integrations = Vec::new();
|
// Filter out "Internal" — those are just cross-module imports, not real integrations
|
||||||
let mut http_integrations = Vec::new();
|
// Sort categories and packages alphabetically for consistent output
|
||||||
let mut queue_integrations = Vec::new();
|
let mut sorted_categories: Vec<(&String, &Vec<String>)> = model.classified_integrations.iter()
|
||||||
let mut storage_integrations = Vec::new();
|
.filter(|(cat, _)| cat.as_str() != "Internal")
|
||||||
let mut ai_integrations = Vec::new();
|
.collect();
|
||||||
|
sorted_categories.sort_by_key(|(cat, _)| cat.to_lowercase());
|
||||||
|
|
||||||
for (symbol_id, symbol) in &model.symbols {
|
let mut integration_sections: Vec<serde_json::Value> = Vec::new();
|
||||||
if symbol.integrations_flags.db {
|
for (cat_name, pkgs) in &sorted_categories {
|
||||||
db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
if !pkgs.is_empty() {
|
||||||
}
|
let mut sorted_pkgs = pkgs.to_vec();
|
||||||
if symbol.integrations_flags.http {
|
sorted_pkgs.sort();
|
||||||
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
integration_sections.push(serde_json::json!({
|
||||||
}
|
"category": cat_name,
|
||||||
if symbol.integrations_flags.queue {
|
"packages": sorted_pkgs,
|
||||||
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
}));
|
||||||
}
|
|
||||||
if symbol.integrations_flags.storage {
|
|
||||||
storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
|
||||||
}
|
|
||||||
if symbol.integrations_flags.ai {
|
|
||||||
ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -309,14 +294,13 @@ impl Renderer {
|
|||||||
in_project = false;
|
in_project = false;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if in_project && trimmed.starts_with("name") {
|
if in_project && trimmed.starts_with("name")
|
||||||
if let Some(val) = trimmed.split('=').nth(1) {
|
&& let Some(val) = trimmed.split('=').nth(1) {
|
||||||
let name = val.trim().trim_matches('"').trim_matches('\'');
|
let name = val.trim().trim_matches('"').trim_matches('\'');
|
||||||
if !name.is_empty() {
|
if !name.is_empty() {
|
||||||
return Some(name.to_string());
|
return Some(name.to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
})
|
})
|
||||||
@@ -363,38 +347,32 @@ impl Renderer {
|
|||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Collect module items for template
|
// Collect module items grouped by top-level directory
|
||||||
let mut modules_list = Vec::new();
|
let module_groups = Self::build_module_groups(model);
|
||||||
for (module_id, module) in &model.modules {
|
|
||||||
modules_list.push(serde_json::json!({
|
|
||||||
"name": module_id,
|
|
||||||
"symbol_count": module.symbols.len(),
|
|
||||||
"inbound_count": module.inbound_modules.len(),
|
|
||||||
"outbound_count": module.outbound_modules.len(),
|
|
||||||
"link": format!("docs/architecture/modules/{}.md", sanitize_for_link(module_id))
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Collect critical points
|
// Collect critical points as tuples (count, symbol_id, is_critical) for sorting
|
||||||
let mut high_fan_in = Vec::new();
|
let mut fan_in_tuples: Vec<(usize, &str, bool)> = Vec::new();
|
||||||
let mut high_fan_out = Vec::new();
|
let mut fan_out_tuples: Vec<(usize, &str, bool)> = Vec::new();
|
||||||
for (symbol_id, symbol) in &model.symbols {
|
for (symbol_id, symbol) in &model.symbols {
|
||||||
if symbol.metrics.fan_in > 5 {
|
if symbol.metrics.fan_in > 5 {
|
||||||
high_fan_in.push(serde_json::json!({
|
fan_in_tuples.push((symbol.metrics.fan_in, symbol_id, symbol.metrics.is_critical));
|
||||||
"symbol": symbol_id,
|
|
||||||
"count": symbol.metrics.fan_in,
|
|
||||||
"critical": symbol.metrics.is_critical,
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
if symbol.metrics.fan_out > 5 {
|
if symbol.metrics.fan_out > 5 {
|
||||||
high_fan_out.push(serde_json::json!({
|
fan_out_tuples.push((symbol.metrics.fan_out, symbol_id, symbol.metrics.is_critical));
|
||||||
"symbol": symbol_id,
|
|
||||||
"count": symbol.metrics.fan_out,
|
|
||||||
"critical": symbol.metrics.is_critical,
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Sort by count descending
|
||||||
|
fan_in_tuples.sort_by(|a, b| b.0.cmp(&a.0));
|
||||||
|
fan_out_tuples.sort_by(|a, b| b.0.cmp(&a.0));
|
||||||
|
|
||||||
|
let high_fan_in: Vec<_> = fan_in_tuples.iter().map(|(count, sym, crit)| {
|
||||||
|
serde_json::json!({"symbol": sym, "count": count, "critical": crit})
|
||||||
|
}).collect();
|
||||||
|
let high_fan_out: Vec<_> = fan_out_tuples.iter().map(|(count, sym, crit)| {
|
||||||
|
serde_json::json!({"symbol": sym, "count": count, "critical": crit})
|
||||||
|
}).collect();
|
||||||
|
|
||||||
let cycles: Vec<_> = cycle_detector::detect_cycles(model)
|
let cycles: Vec<_> = cycle_detector::detect_cycles(model)
|
||||||
.iter()
|
.iter()
|
||||||
.map(|cycle| {
|
.map(|cycle| {
|
||||||
@@ -419,14 +397,10 @@ impl Renderer {
|
|||||||
"key_decisions": ["<FILL_MANUALLY>"],
|
"key_decisions": ["<FILL_MANUALLY>"],
|
||||||
"non_goals": ["<FILL_MANUALLY>"],
|
"non_goals": ["<FILL_MANUALLY>"],
|
||||||
"change_notes": ["<FILL_MANUALLY>"],
|
"change_notes": ["<FILL_MANUALLY>"],
|
||||||
"db_integrations": db_integrations,
|
"integration_sections": integration_sections,
|
||||||
"http_integrations": http_integrations,
|
|
||||||
"queue_integrations": queue_integrations,
|
|
||||||
"storage_integrations": storage_integrations,
|
|
||||||
"ai_integrations": ai_integrations,
|
|
||||||
"rails_summary": "\n\nNo tooling information available.\n",
|
"rails_summary": "\n\nNo tooling information available.\n",
|
||||||
"layout_items": layout_items,
|
"layout_items": layout_items,
|
||||||
"modules": modules_list,
|
"module_groups": module_groups,
|
||||||
"high_fan_in": high_fan_in,
|
"high_fan_in": high_fan_in,
|
||||||
"high_fan_out": high_fan_out,
|
"high_fan_out": high_fan_out,
|
||||||
"cycles": cycles,
|
"cycles": cycles,
|
||||||
@@ -579,66 +553,36 @@ impl Renderer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||||
// Collect integration information
|
// Filter Internal, sort alphabetically
|
||||||
let mut db_integrations = Vec::new();
|
let mut sorted_categories: Vec<(&String, &Vec<String>)> = model.classified_integrations.iter()
|
||||||
let mut http_integrations = Vec::new();
|
.filter(|(cat, _)| cat.as_str() != "Internal")
|
||||||
let mut queue_integrations = Vec::new();
|
.collect();
|
||||||
let mut storage_integrations = Vec::new();
|
sorted_categories.sort_by_key(|(cat, _)| cat.to_lowercase());
|
||||||
let mut ai_integrations = Vec::new();
|
|
||||||
|
|
||||||
for (symbol_id, symbol) in &model.symbols {
|
let mut integration_sections: Vec<serde_json::Value> = Vec::new();
|
||||||
if symbol.integrations_flags.db {
|
for (cat_name, pkgs) in &sorted_categories {
|
||||||
db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
if !pkgs.is_empty() {
|
||||||
}
|
let mut sorted_pkgs = pkgs.to_vec();
|
||||||
if symbol.integrations_flags.http {
|
sorted_pkgs.sort();
|
||||||
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
integration_sections.push(serde_json::json!({
|
||||||
}
|
"category": cat_name,
|
||||||
if symbol.integrations_flags.queue {
|
"packages": sorted_pkgs,
|
||||||
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
}));
|
||||||
}
|
|
||||||
if symbol.integrations_flags.storage {
|
|
||||||
storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
|
||||||
}
|
|
||||||
if symbol.integrations_flags.ai {
|
|
||||||
ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prepare data for integrations section
|
|
||||||
let data = serde_json::json!({
|
let data = serde_json::json!({
|
||||||
"db_integrations": db_integrations,
|
"integration_sections": integration_sections,
|
||||||
"http_integrations": http_integrations,
|
|
||||||
"queue_integrations": queue_integrations,
|
|
||||||
"storage_integrations": storage_integrations,
|
|
||||||
"ai_integrations": ai_integrations,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a smaller template just for the integrations section
|
|
||||||
let integrations_template = r#"
|
let integrations_template = r#"
|
||||||
|
|
||||||
### Database Integrations
|
{{#each integration_sections}}
|
||||||
{{#each db_integrations}}
|
### {{{category}}}
|
||||||
|
{{#each packages}}
|
||||||
- {{{this}}}
|
- {{{this}}}
|
||||||
{{/each}}
|
{{/each}}
|
||||||
|
|
||||||
### HTTP/API Integrations
|
|
||||||
{{#each http_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### Queue Integrations
|
|
||||||
{{#each queue_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### Storage Integrations
|
|
||||||
{{#each storage_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
|
||||||
|
|
||||||
### AI/ML Integrations
|
|
||||||
{{#each ai_integrations}}
|
|
||||||
- {{{this}}}
|
|
||||||
{{/each}}
|
{{/each}}
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
@@ -707,31 +651,23 @@ impl Renderer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_modules_index_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
pub fn render_modules_index_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||||
// Collect module information
|
let module_groups = Self::build_module_groups(model);
|
||||||
let mut modules = Vec::new();
|
|
||||||
|
|
||||||
for (module_id, module) in &model.modules {
|
|
||||||
modules.push(serde_json::json!({
|
|
||||||
"name": module_id,
|
|
||||||
"symbol_count": module.symbols.len(),
|
|
||||||
"inbound_count": module.inbound_modules.len(),
|
|
||||||
"outbound_count": module.outbound_modules.len(),
|
|
||||||
"link": format!("docs/architecture/modules/{}.md", sanitize_for_link(module_id))
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare data for modules index section
|
|
||||||
let data = serde_json::json!({
|
let data = serde_json::json!({
|
||||||
"modules": modules,
|
"module_groups": module_groups,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a smaller template just for the modules index section
|
|
||||||
let modules_template = r#"
|
let modules_template = r#"
|
||||||
|
|
||||||
| Module | Symbols | Inbound | Outbound | Link |
|
{{#each module_groups}}
|
||||||
|--------|---------|---------|----------|------|
|
### {{{group_name}}} ({{{module_count}}} modules)
|
||||||
|
|
||||||
|
| Module | Tag | Symbols | Inbound | Outbound | Link |
|
||||||
|
|--------|-----|---------|---------|----------|------|
|
||||||
{{#each modules}}
|
{{#each modules}}
|
||||||
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
| {{{name}}} | {{{tag}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
||||||
|
{{/each}}
|
||||||
|
|
||||||
{{/each}}
|
{{/each}}
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
@@ -744,27 +680,29 @@ impl Renderer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_critical_points_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
pub fn render_critical_points_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||||
// Collect critical points information
|
// Collect and sort critical points by count descending
|
||||||
let mut high_fan_in = Vec::new();
|
let mut fan_in_items: Vec<(usize, &str, bool)> = Vec::new();
|
||||||
let mut high_fan_out = Vec::new();
|
let mut fan_out_items: Vec<(usize, &str, bool)> = Vec::new();
|
||||||
|
|
||||||
for (symbol_id, symbol) in &model.symbols {
|
for (symbol_id, symbol) in &model.symbols {
|
||||||
if symbol.metrics.fan_in > 5 { // Threshold for high fan-in
|
if symbol.metrics.fan_in > 5 {
|
||||||
high_fan_in.push(serde_json::json!({
|
fan_in_items.push((symbol.metrics.fan_in, symbol_id, symbol.metrics.is_critical));
|
||||||
"symbol": symbol_id,
|
|
||||||
"count": symbol.metrics.fan_in,
|
|
||||||
"critical": symbol.metrics.is_critical,
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
if symbol.metrics.fan_out > 5 { // Threshold for high fan-out
|
if symbol.metrics.fan_out > 5 {
|
||||||
high_fan_out.push(serde_json::json!({
|
fan_out_items.push((symbol.metrics.fan_out, symbol_id, symbol.metrics.is_critical));
|
||||||
"symbol": symbol_id,
|
|
||||||
"count": symbol.metrics.fan_out,
|
|
||||||
"critical": symbol.metrics.is_critical,
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fan_in_items.sort_by(|a, b| b.0.cmp(&a.0));
|
||||||
|
fan_out_items.sort_by(|a, b| b.0.cmp(&a.0));
|
||||||
|
|
||||||
|
let high_fan_in: Vec<_> = fan_in_items.iter().map(|(count, sym, crit)| {
|
||||||
|
serde_json::json!({"symbol": sym, "count": count, "critical": crit})
|
||||||
|
}).collect();
|
||||||
|
let high_fan_out: Vec<_> = fan_out_items.iter().map(|(count, sym, crit)| {
|
||||||
|
serde_json::json!({"symbol": sym, "count": count, "critical": crit})
|
||||||
|
}).collect();
|
||||||
|
|
||||||
// Prepare data for critical points section
|
// Prepare data for critical points section
|
||||||
let data = serde_json::json!({
|
let data = serde_json::json!({
|
||||||
"high_fan_in": high_fan_in,
|
"high_fan_in": high_fan_in,
|
||||||
@@ -872,6 +810,76 @@ impl Renderer {
|
|||||||
.map_err(|e| anyhow::anyhow!("Failed to render layout.md: {}", e))
|
.map_err(|e| anyhow::anyhow!("Failed to render layout.md: {}", e))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Build module groups by top-level directory, with tags for model/dataclass modules.
|
||||||
|
fn build_module_groups(model: &ProjectModel) -> Vec<serde_json::Value> {
|
||||||
|
let mut groups: BTreeMap<String, Vec<serde_json::Value>> = BTreeMap::new();
|
||||||
|
|
||||||
|
let mut sorted_modules: Vec<_> = model.modules.iter().collect();
|
||||||
|
sorted_modules.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||||
|
|
||||||
|
for (module_id, module) in &sorted_modules {
|
||||||
|
let top_level = module_id.split('.').next().unwrap_or(module_id).to_string();
|
||||||
|
|
||||||
|
// Determine tag
|
||||||
|
let tag = Self::classify_module_tag(module_id, module, model);
|
||||||
|
|
||||||
|
let entry = serde_json::json!({
|
||||||
|
"name": module_id,
|
||||||
|
"tag": tag,
|
||||||
|
"symbol_count": module.symbols.len(),
|
||||||
|
"inbound_count": module.inbound_modules.len(),
|
||||||
|
"outbound_count": module.outbound_modules.len(),
|
||||||
|
"link": format!("docs/architecture/modules/{}.md", sanitize_for_link(module_id))
|
||||||
|
});
|
||||||
|
groups.entry(top_level).or_default().push(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
groups.into_iter().map(|(group_name, modules)| {
|
||||||
|
let count = modules.len();
|
||||||
|
serde_json::json!({
|
||||||
|
"group_name": group_name,
|
||||||
|
"module_count": count,
|
||||||
|
"modules": modules,
|
||||||
|
})
|
||||||
|
}).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Classify a module with a tag: [models], [config], [tests], or empty.
|
||||||
|
fn classify_module_tag(module_id: &str, module: &crate::model::Module, model: &ProjectModel) -> String {
|
||||||
|
let parts: Vec<&str> = module_id.split('.').collect();
|
||||||
|
let last_part = parts.last().copied().unwrap_or("");
|
||||||
|
|
||||||
|
// Check if module name suggests models/schemas/dataclasses
|
||||||
|
if last_part == "models" || last_part == "schemas" || last_part == "types"
|
||||||
|
|| parts.contains(&"models") || parts.contains(&"schemas") {
|
||||||
|
return "[models]".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if most symbols are classes with few methods (dataclass-like)
|
||||||
|
let class_count = module.symbols.iter()
|
||||||
|
.filter(|s| model.symbols.get(*s).map(|sym| sym.kind == SymbolKind::Class).unwrap_or(false))
|
||||||
|
.count();
|
||||||
|
let total = module.symbols.len();
|
||||||
|
if class_count > 0 && total > 0 {
|
||||||
|
// If >40% of top-level symbols are classes and the module has few methods per class
|
||||||
|
let method_count = module.symbols.iter()
|
||||||
|
.filter(|s| model.symbols.get(*s).map(|sym| sym.kind == SymbolKind::Method).unwrap_or(false))
|
||||||
|
.count();
|
||||||
|
if class_count as f64 / total as f64 > 0.4 && method_count <= class_count * 3 {
|
||||||
|
return "[models]".to_string();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if parts.contains(&"tests") || last_part.starts_with("test_") {
|
||||||
|
return "[tests]".to_string();
|
||||||
|
}
|
||||||
|
if last_part == "config" || last_part == "settings" {
|
||||||
|
return "[config]".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
|
||||||
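To show what build_module_groups and classify_module_tag produce together, a sketch with invented module names:

// app.models.user      -> group "app",   tag "[models]"
// app.config           -> group "app",   tag "[config]"
// app.services.billing -> group "app",   tag ""          (no tag)
// tests.test_scanner   -> group "tests", tag "[tests]"
//
// which the modules index template renders as, e.g.:
//   ### app (3 modules)
//   | Module | Tag | Symbols | Inbound | Outbound | Link |
//   |--------|-----|---------|---------|----------|------|
//   | app.config | [config] | 4 | 2 | 1 | [details](docs/architecture/modules/app.config.md) |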
pub fn render_symbol_details(&self, model: &ProjectModel, symbol_id: &str) -> Result<String, anyhow::Error> {
|
pub fn render_symbol_details(&self, model: &ProjectModel, symbol_id: &str) -> Result<String, anyhow::Error> {
|
||||||
// Find the symbol in the project model
|
// Find the symbol in the project model
|
||||||
let symbol = model.symbols.get(symbol_id)
|
let symbol = model.symbols.get(symbol_id)
|
||||||
@@ -1,10 +1,10 @@
|
|||||||
//! File scanner for ArchDoc
|
//! File scanner for WTIsMyCode
|
||||||
//!
|
//!
|
||||||
//! This module handles scanning the file system for Python files according to
|
//! This module handles scanning the file system for Python files according to
|
||||||
//! the configuration settings.
|
//! the configuration settings.
|
||||||
|
|
||||||
use crate::config::Config;
|
use crate::config::Config;
|
||||||
use crate::errors::ArchDocError;
|
use crate::errors::WTIsMyCodeError;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use walkdir::WalkDir;
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
@@ -17,17 +17,17 @@ impl FileScanner {
|
|||||||
Self { config }
|
Self { config }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, ArchDocError> {
|
pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, WTIsMyCodeError> {
|
||||||
// Check if root directory exists
|
// Check if root directory exists
|
||||||
if !root.exists() {
|
if !root.exists() {
|
||||||
return Err(ArchDocError::Io(std::io::Error::new(
|
return Err(WTIsMyCodeError::Io(std::io::Error::new(
|
||||||
std::io::ErrorKind::NotFound,
|
std::io::ErrorKind::NotFound,
|
||||||
format!("Root directory does not exist: {}", root.display())
|
format!("Root directory does not exist: {}", root.display())
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
if !root.is_dir() {
|
if !root.is_dir() {
|
||||||
return Err(ArchDocError::Io(std::io::Error::new(
|
return Err(WTIsMyCodeError::Io(std::io::Error::new(
|
||||||
std::io::ErrorKind::InvalidInput,
|
std::io::ErrorKind::InvalidInput,
|
||||||
format!("Root path is not a directory: {}", root.display())
|
format!("Root path is not a directory: {}", root.display())
|
||||||
)));
|
)));
|
||||||
@@ -41,7 +41,7 @@ impl FileScanner {
|
|||||||
.into_iter() {
|
.into_iter() {
|
||||||
|
|
||||||
let entry = entry.map_err(|e| {
|
let entry = entry.map_err(|e| {
|
||||||
ArchDocError::Io(std::io::Error::other(
|
WTIsMyCodeError::Io(std::io::Error::other(
|
||||||
format!("Failed to read directory entry: {}", e)
|
format!("Failed to read directory entry: {}", e)
|
||||||
))
|
))
|
||||||
})?;
|
})?;
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
//! Diff-aware file writer for ArchDoc
|
//! Diff-aware file writer for WTIsMyCode
|
||||||
//!
|
//!
|
||||||
//! This module handles writing generated documentation to files while preserving
|
//! This module handles writing generated documentation to files while preserving
|
||||||
//! manual content and only updating generated sections.
|
//! manual content and only updating generated sections.
|
||||||
|
|
||||||
use crate::errors::ArchDocError;
|
use crate::errors::WTIsMyCodeError;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use chrono::Utc;
|
use chrono::Utc;
|
||||||
@@ -42,17 +42,17 @@ impl DiffAwareWriter {
|
|||||||
file_path: &Path,
|
file_path: &Path,
|
||||||
generated_content: &str,
|
generated_content: &str,
|
||||||
section_name: &str,
|
section_name: &str,
|
||||||
) -> Result<(), ArchDocError> {
|
) -> Result<(), WTIsMyCodeError> {
|
||||||
// Read existing file
|
// Read existing file
|
||||||
let existing_content = if file_path.exists() {
|
let existing_content = if file_path.exists() {
|
||||||
fs::read_to_string(file_path)
|
fs::read_to_string(file_path)
|
||||||
.map_err(ArchDocError::Io)?
|
.map_err(WTIsMyCodeError::Io)?
|
||||||
} else {
|
} else {
|
||||||
// Create new file with template
|
// Create new file with template
|
||||||
let template_content = self.create_template_file(file_path, section_name)?;
|
let template_content = self.create_template_file(file_path, section_name)?;
|
||||||
// Write template to file
|
// Write template to file
|
||||||
fs::write(file_path, &template_content)
|
fs::write(file_path, &template_content)
|
||||||
.map_err(ArchDocError::Io)?;
|
.map_err(WTIsMyCodeError::Io)?;
|
||||||
template_content
|
template_content
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -74,7 +74,7 @@ impl DiffAwareWriter {
|
|||||||
if content_changed {
|
if content_changed {
|
||||||
let updated_content = self.update_timestamp(new_content)?;
|
let updated_content = self.update_timestamp(new_content)?;
|
||||||
fs::write(file_path, updated_content)
|
fs::write(file_path, updated_content)
|
||||||
.map_err(ArchDocError::Io)?;
|
.map_err(WTIsMyCodeError::Io)?;
|
||||||
}
|
}
|
||||||
// If not changed, skip writing entirely
|
// If not changed, skip writing entirely
|
||||||
}
|
}
|
||||||
@@ -87,16 +87,16 @@ impl DiffAwareWriter {
|
|||||||
file_path: &Path,
|
file_path: &Path,
|
||||||
symbol_id: &str,
|
symbol_id: &str,
|
||||||
generated_content: &str,
|
generated_content: &str,
|
||||||
) -> Result<(), ArchDocError> {
|
) -> Result<(), WTIsMyCodeError> {
|
||||||
// Read existing file
|
// Read existing file
|
||||||
let existing_content = if file_path.exists() {
|
let existing_content = if file_path.exists() {
|
||||||
fs::read_to_string(file_path)
|
fs::read_to_string(file_path)
|
||||||
.map_err(ArchDocError::Io)?
|
.map_err(WTIsMyCodeError::Io)?
|
||||||
} else {
|
} else {
|
||||||
// If file doesn't exist, create it with a basic template
|
// If file doesn't exist, create it with a basic template
|
||||||
let template_content = self.create_template_file(file_path, "symbol")?;
|
let template_content = self.create_template_file(file_path, "symbol")?;
|
||||||
fs::write(file_path, &template_content)
|
fs::write(file_path, &template_content)
|
||||||
.map_err(ArchDocError::Io)?;
|
.map_err(WTIsMyCodeError::Io)?;
|
||||||
template_content
|
template_content
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -118,7 +118,7 @@ impl DiffAwareWriter {
|
|||||||
if content_changed {
|
if content_changed {
|
||||||
let updated_content = self.update_timestamp(new_content)?;
|
let updated_content = self.update_timestamp(new_content)?;
|
||||||
fs::write(file_path, updated_content)
|
fs::write(file_path, updated_content)
|
||||||
.map_err(ArchDocError::Io)?;
|
.map_err(WTIsMyCodeError::Io)?;
|
||||||
}
|
}
|
||||||
// If not changed, skip writing entirely
|
// If not changed, skip writing entirely
|
||||||
} else {
|
} else {
|
||||||
@@ -128,7 +128,7 @@ impl DiffAwareWriter {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, ArchDocError> {
|
fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, WTIsMyCodeError> {
|
||||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name);
|
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name);
|
||||||
let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name);
|
let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name);
|
||||||
|
|
||||||
@@ -155,7 +155,7 @@ impl DiffAwareWriter {
|
|||||||
Ok(markers)
|
Ok(markers)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_symbol_markers(&self, content: &str, symbol_id: &str) -> Result<Vec<SymbolMarker>, ArchDocError> {
|
fn find_symbol_markers(&self, content: &str, symbol_id: &str) -> Result<Vec<SymbolMarker>, WTIsMyCodeError> {
|
||||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", symbol_id);
|
let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", symbol_id);
|
||||||
let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", symbol_id);
|
let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", symbol_id);
|
||||||
|
|
||||||
@@ -187,7 +187,7 @@ impl DiffAwareWriter {
|
|||||||
content: &str,
|
content: &str,
|
||||||
marker: &SectionMarker,
|
marker: &SectionMarker,
|
||||||
new_content: &str,
|
new_content: &str,
|
||||||
) -> Result<String, ArchDocError> {
|
) -> Result<String, WTIsMyCodeError> {
|
||||||
let before = &content[..marker.start_pos];
|
let before = &content[..marker.start_pos];
|
||||||
let after = &content[marker.end_pos..];
|
let after = &content[marker.end_pos..];
|
||||||
|
|
||||||
@@ -205,7 +205,7 @@ impl DiffAwareWriter {
|
|||||||
content: &str,
|
content: &str,
|
||||||
marker: &SymbolMarker,
|
marker: &SymbolMarker,
|
||||||
new_content: &str,
|
new_content: &str,
|
||||||
) -> Result<String, ArchDocError> {
|
) -> Result<String, WTIsMyCodeError> {
|
||||||
let before = &content[..marker.start_pos];
|
let before = &content[..marker.start_pos];
|
||||||
let after = &content[marker.end_pos..];
|
let after = &content[marker.end_pos..];
|
||||||
|
|
||||||
@@ -218,7 +218,7 @@ impl DiffAwareWriter {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_timestamp(&self, content: String) -> Result<String, ArchDocError> {
|
fn update_timestamp(&self, content: String) -> Result<String, WTIsMyCodeError> {
|
||||||
// Update the "Updated" field in the document metadata section
|
// Update the "Updated" field in the document metadata section
|
||||||
// Find the metadata section and update the timestamp
|
// Find the metadata section and update the timestamp
|
||||||
let today = Utc::now().format("%Y-%m-%d").to_string();
|
let today = Utc::now().format("%Y-%m-%d").to_string();
|
||||||
@@ -238,7 +238,7 @@ impl DiffAwareWriter {
|
|||||||
Ok(updated_lines.join("\n"))
|
Ok(updated_lines.join("\n"))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, ArchDocError> {
|
fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, WTIsMyCodeError> {
|
||||||
// Create file with appropriate template based on type
|
// Create file with appropriate template based on type
|
||||||
match template_type {
|
match template_type {
|
||||||
"architecture" => {
|
"architecture" => {
|
||||||
@@ -261,7 +261,7 @@ impl DiffAwareWriter {
|
|||||||
## Document metadata
|
## Document metadata
|
||||||
- **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
|
- **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
|
||||||
- **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
|
- **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
|
||||||
- **Generated by:** archdoc (cli) v0.1
|
- **Generated by:** wtismycode (cli) v0.1
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
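A minimal target file for the diff-aware writer, sketching the marker layout it looks for; only the text between a matching BEGIN/END pair is replaced, and everything outside it is preserved:

let existing = r#"# Architecture

Hand-written overview that the generator never touches.

<!-- ARCHDOC:BEGIN section=integrations -->
> Generated. Do not edit inside this block.
<!-- ARCHDOC:END section=integrations -->
"#;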
@@ -1,11 +1,10 @@
|
|||||||
//! Caching tests for ArchDoc
|
//! Caching tests for WTIsMyCode
|
||||||
//!
|
//!
|
||||||
//! These tests verify that the caching functionality works correctly.
|
//! These tests verify that the caching functionality works correctly.
|
||||||
|
|
||||||
use std::path::Path;
|
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use tempfile::TempDir;
|
use tempfile::TempDir;
|
||||||
use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
|
use wtismycode_core::{Config, python_analyzer::PythonAnalyzer};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_cache_store_and_retrieve() {
|
fn test_cache_store_and_retrieve() {
|
||||||
76
wtismycode-core/tests/callee_resolution.rs
Normal file
@@ -0,0 +1,76 @@
+//! Tests for resolve_callee_to_symbol_id functionality
+//!
+//! Verifies that call expressions are correctly resolved to qualified symbol IDs.
+
+use std::path::Path;
+use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
+
+#[test]
+fn test_resolve_callee_to_symbol_id() {
+    let config_path = "tests/golden/test_project/wtismycode.toml";
+    let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");
+    let project_root = Path::new("tests/golden/test_project");
+    let scanner = FileScanner::new(config.clone());
+    let python_files = scanner.scan_python_files(project_root).expect("Failed to scan");
+    let analyzer = PythonAnalyzer::new(config);
+
+    let mut parsed_modules = Vec::new();
+    for file_path in python_files {
+        parsed_modules.push(analyzer.parse_module(&file_path).expect("Failed to parse"));
+    }
+
+    let model = analyzer.resolve_symbols(&parsed_modules).expect("Failed to resolve");
+
+    // Verify that symbol call edges exist and have been resolved
+    assert!(!model.edges.symbol_call_edges.is_empty(), "Should have symbol call edges");
+
+    // Check that at least some edges reference known symbols (resolved correctly)
+    let resolved_count = model.edges.symbol_call_edges.iter()
+        .filter(|edge| model.symbols.contains_key(&edge.to_id))
+        .count();
+
+    println!("Total call edges: {}", model.edges.symbol_call_edges.len());
+    println!("Resolved to known symbols: {}", resolved_count);
+
+    // At least some calls should resolve to known symbols
+    assert!(resolved_count > 0, "At least some calls should resolve to known symbol IDs");
+
+    // Verify that same-module calls are resolved with module:: prefix
+    for edge in &model.edges.symbol_call_edges {
+        assert!(edge.from_id.contains("::"), "from_id should be qualified: {}", edge.from_id);
+        // to_id should also be qualified (module::symbol format)
+        assert!(edge.to_id.contains("::"), "to_id should be qualified: {}", edge.to_id);
+    }
+}
+
+#[test]
+fn test_callee_resolution_cross_module() {
+    let config_path = "tests/golden/test_project/wtismycode.toml";
+    let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");
+    let project_root = Path::new("tests/golden/test_project");
+    let scanner = FileScanner::new(config.clone());
+    let python_files = scanner.scan_python_files(project_root).expect("Failed to scan");
+    let analyzer = PythonAnalyzer::new(config);
+
+    let mut parsed_modules = Vec::new();
+    for file_path in python_files {
+        parsed_modules.push(analyzer.parse_module(&file_path).expect("Failed to parse"));
+    }
+
+    let model = analyzer.resolve_symbols(&parsed_modules).expect("Failed to resolve");
+
+    // Check that modules have outbound/inbound relationships
+    let modules_with_outbound = model.modules.values()
+        .filter(|m| !m.outbound_modules.is_empty())
+        .count();
+
+    println!("Modules with outbound deps: {}", modules_with_outbound);
+
+    // Verify fan-in/fan-out metrics were computed
+    let symbols_with_metrics = model.symbols.values()
+        .filter(|s| s.metrics.fan_in > 0 || s.metrics.fan_out > 0)
+        .count();
+
+    println!("Symbols with non-zero metrics: {}", symbols_with_metrics);
+    assert!(symbols_with_metrics > 0, "Some symbols should have fan-in or fan-out > 0");
+}
@@ -1,11 +1,10 @@
-//! Enhanced analysis tests for ArchDoc
+//! Enhanced analysis tests for WTIsMyCode
 //!
 //! These tests verify that the enhanced analysis functionality works correctly
 //! with complex code that includes integrations, calls, and docstrings.
 
-use std::fs;
 use std::path::Path;
-use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
+use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 
 #[test]
 fn test_enhanced_analysis_with_integrations() {
@@ -15,8 +14,8 @@ fn test_enhanced_analysis_with_integrations() {
 
     // Try different paths for the config file
     let possible_paths = [
-        "tests/golden/test_project/archdoc.toml",
-        "../tests/golden/test_project/archdoc.toml",
+        "tests/golden/test_project/wtismycode.toml",
+        "../tests/golden/test_project/wtismycode.toml",
     ];
 
    let config_path = possible_paths.iter().find(|&path| {
@@ -100,17 +99,17 @@ fn test_enhanced_analysis_with_integrations() {
     // Check that we found the UserService class with DB integration
     let user_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::UserService"));
     assert!(user_service_symbol.is_some());
-    assert_eq!(user_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
+    assert_eq!(user_service_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class);
 
     // Check that we found the NotificationService class with queue integration
     let notification_service_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::NotificationService"));
     assert!(notification_service_symbol.is_some());
-    assert_eq!(notification_service_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
+    assert_eq!(notification_service_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class);
 
     // Check that we found the fetch_external_user_data function with HTTP integration
     let fetch_external_user_data_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::fetch_external_user_data"));
     assert!(fetch_external_user_data_symbol.is_some());
-    assert_eq!(fetch_external_user_data_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
+    assert_eq!(fetch_external_user_data_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Function);
 
     // Check file imports
     let mut found_advanced_file = false;
@@ -1,12 +1,12 @@
-//! Error handling tests for ArchDoc
+//! Error handling tests for WTIsMyCode
 //!
-//! These tests verify that ArchDoc properly handles various error conditions
+//! These tests verify that WTIsMyCode properly handles various error conditions
 //! and edge cases.
 
 use std::path::Path;
 use std::fs;
 use tempfile::TempDir;
-use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
+use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 
 #[test]
 fn test_scanner_nonexistent_directory() {
@@ -19,7 +19,7 @@ fn test_scanner_nonexistent_directory() {
 
     // Check that we get an IO error
     match result.unwrap_err() {
-        archdoc_core::errors::ArchDocError::Io(_) => {},
+        wtismycode_core::errors::WTIsMyCodeError::Io(_) => {},
         _ => panic!("Expected IO error"),
     }
 }
@@ -40,7 +40,7 @@ fn test_scanner_file_instead_of_directory() {
 
     // Check that we get an IO error
     match result.unwrap_err() {
-        archdoc_core::errors::ArchDocError::Io(_) => {},
+        wtismycode_core::errors::WTIsMyCodeError::Io(_) => {},
         _ => panic!("Expected IO error"),
     }
 }
@@ -56,7 +56,7 @@ fn test_analyzer_nonexistent_file() {
 
     // Check that we get an IO error
     match result.unwrap_err() {
-        archdoc_core::errors::ArchDocError::Io(_) => {},
+        wtismycode_core::errors::WTIsMyCodeError::Io(_) => {},
        _ => panic!("Expected IO error"),
     }
 }
@@ -77,7 +77,7 @@ fn test_analyzer_invalid_python_syntax() {
 
     // Check that we get a parse error
     match result.unwrap_err() {
-        archdoc_core::errors::ArchDocError::ParseError { .. } => {},
+        wtismycode_core::errors::WTIsMyCodeError::ParseError { .. } => {},
         _ => panic!("Expected parse error"),
     }
 }
@@ -1,12 +1,12 @@
-//! Full pipeline integration tests for ArchDoc
+//! Full pipeline integration tests for WTIsMyCode
 //!
 //! Tests the complete scan → analyze → render pipeline using test-project/.
 
-use archdoc_core::config::Config;
-use archdoc_core::cycle_detector;
-use archdoc_core::model::{Module, ProjectModel};
-use archdoc_core::renderer::Renderer;
-use archdoc_core::scanner::FileScanner;
+use wtismycode_core::config::Config;
+use wtismycode_core::cycle_detector;
+use wtismycode_core::model::{Module, ProjectModel};
+use wtismycode_core::renderer::Renderer;
+use wtismycode_core::scanner::FileScanner;
 use std::path::Path;
 
 #[test]
@@ -14,7 +14,7 @@ fn test_config_load_and_validate() {
     let config_path = Path::new(env!("CARGO_MANIFEST_DIR"))
         .parent()
         .unwrap()
-        .join("test-project/archdoc.toml");
+        .join("test-project/wtismycode.toml");
 
     let config = Config::load_from_file(&config_path).expect("Failed to load config");
     assert_eq!(config.project.language, "python");
@@ -26,7 +26,7 @@ fn test_config_validate_on_test_project() {
     let config_path = Path::new(env!("CARGO_MANIFEST_DIR"))
         .parent()
         .unwrap()
-        .join("test-project/archdoc.toml");
+        .join("test-project/wtismycode.toml");
 
     let mut config = Config::load_from_file(&config_path).expect("Failed to load config");
     // Set root to actual test-project path so validation passes
@@ -48,7 +48,7 @@ fn test_scan_test_project() {
         .unwrap()
         .join("test-project");
 
-    let config_path = test_project.join("archdoc.toml");
+    let config_path = test_project.join("wtismycode.toml");
     let mut config = Config::load_from_file(&config_path).expect("Failed to load config");
     config.project.root = test_project.to_string_lossy().to_string();
 
@@ -139,7 +139,7 @@ fn test_cycle_detection_no_cycles() {
 
 #[test]
 fn test_renderer_produces_output() {
-    let config = Config::default();
+    let _config = Config::default();
     let model = ProjectModel::new();
     let renderer = Renderer::new();
     let result = renderer.render_architecture_md(&model, None);
@@ -148,7 +148,7 @@ fn test_renderer_produces_output() {
 
 #[test]
 fn test_parse_duration_values() {
-    use archdoc_core::config::{parse_duration, parse_file_size};
+    use wtismycode_core::config::{parse_duration, parse_file_size};
 
     assert_eq!(parse_duration("24h").unwrap(), 86400);
     assert_eq!(parse_duration("7d").unwrap(), 604800);
@@ -1,13 +1,12 @@
-//! Golden tests for ArchDoc
+//! Golden tests for WTIsMyCode
 //!
 //! These tests generate documentation for test projects and compare the output
 //! with expected "golden" files to ensure consistency.
 
 mod test_utils;
 
-use std::fs;
 use std::path::Path;
-use archdoc_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
+use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 
 #[test]
 fn test_simple_project_generation() {
@@ -17,8 +16,8 @@ fn test_simple_project_generation() {
 
     // Try different paths for the config file
     let possible_paths = [
-        "tests/golden/test_project/archdoc.toml",
-        "../tests/golden/test_project/archdoc.toml",
+        "tests/golden/test_project/wtismycode.toml",
+        "../tests/golden/test_project/wtismycode.toml",
     ];
 
     let config_path = possible_paths.iter().find(|&path| {
@@ -92,12 +91,12 @@ fn test_simple_project_generation() {
     // Check that we found the Calculator class
     let calculator_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::Calculator"));
     assert!(calculator_symbol.is_some());
-    assert_eq!(calculator_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Class);
+    assert_eq!(calculator_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Class);
 
     // Check that we found the process_numbers function
     let process_numbers_symbol = project_model.symbols.values().find(|s| s.id.ends_with("::process_numbers"));
     assert!(process_numbers_symbol.is_some());
-    assert_eq!(process_numbers_symbol.unwrap().kind, archdoc_core::model::SymbolKind::Function);
+    assert_eq!(process_numbers_symbol.unwrap().kind, wtismycode_core::model::SymbolKind::Function);
 
     // Check file imports
     assert!(!project_model.files.is_empty());
@@ -17,7 +17,7 @@
 ## Document metadata
 - **Created:** 2026-01-25
 - **Updated:** 2026-01-25
-- **Generated by:** archdoc (cli) v0.1
+- **Generated by:** wtismycode (cli) v0.1
 
 ---
 
@@ -53,10 +53,10 @@ description_max_length = 200
 
 [logging]
 level = "info"
-file = "archdoc.log"
+file = "wtismycode.log"
 format = "compact"
 
 [caching]
 enabled = true
-cache_dir = ".archdoc/cache"
+cache_dir = ".wtismycode/cache"
 max_cache_age = "24h"
@@ -1,19 +1,21 @@
 //! Test utilities for golden tests
 
 use std::fs;
-use std::path::Path;
 
 /// Read a file and return its contents
+#[allow(dead_code)]
 pub fn read_test_file(path: &str) -> String {
-    fs::read_to_string(path).expect(&format!("Failed to read test file: {}", path))
+    fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read test file: {}", path))
 }
 
 /// Write content to a file for testing
+#[allow(dead_code)]
 pub fn write_test_file(path: &str, content: &str) {
-    fs::write(path, content).expect(&format!("Failed to write test file: {}", path))
+    fs::write(path, content).unwrap_or_else(|_| panic!("Failed to write test file: {}", path))
 }
 
 /// Compare two strings and panic if they don't match
+#[allow(dead_code)]
 pub fn assert_strings_equal(actual: &str, expected: &str, message: &str) {
     if actual != expected {
         panic!("{}: Strings do not match\nActual:\n{}\nExpected:\n{}", message, actual, expected);
@@ -1,4 +1,4 @@
-//! Integration detection tests for ArchDoc
+//! Integration detection tests for WTIsMyCode
 //!
 //! These tests verify that the integration detection functionality works correctly.
 //! Integration detection now happens at module level during resolve_symbols,
@@ -6,7 +6,7 @@
 
 use std::fs;
 use tempfile::TempDir;
-use archdoc_core::{Config, python_analyzer::PythonAnalyzer};
+use wtismycode_core::{Config, python_analyzer::PythonAnalyzer};
 
 #[test]
 fn test_http_integration_detection() {
@@ -1,4 +1,4 @@
-//! Integration tests for ArchDoc
+//! Integration tests for WTIsMyCode
 
 // Include golden tests
 mod golden;
@@ -1,6 +1,6 @@
 //! Tests for analyzing the test project
 
-use archdoc_core::{
+use wtismycode_core::{
     config::Config,
     python_analyzer::PythonAnalyzer,
 };
@@ -9,7 +9,7 @@ use std::path::Path;
 #[test]
 fn test_project_analysis() {
     // Load config from test project
-    let config = Config::load_from_file(Path::new("../test-project/archdoc.toml")).unwrap();
+    let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap();
 
     // Initialize analyzer
     let analyzer = PythonAnalyzer::new(config);
@@ -33,9 +33,11 @@ fn test_project_analysis() {
     // Check that we found calls
     assert!(!core_module.calls.is_empty());
 
-    // Check that integrations are detected
-    let db_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.db);
-    let http_integration_found = core_module.symbols.iter().any(|s| s.integrations_flags.http);
+    // Integration flags are now set during resolve_symbols, not parse_module
+    // So we resolve and check there
+    let project_model = analyzer.resolve_symbols(std::slice::from_ref(&core_module)).unwrap();
+    let db_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.db);
+    let http_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.http);
 
     assert!(db_integration_found, "Database integration should be detected");
     assert!(http_integration_found, "HTTP integration should be detected");
@@ -55,7 +57,7 @@ fn test_project_analysis() {
 #[test]
 fn test_full_project_resolution() {
     // Load config from test project
-    let config = Config::load_from_file(Path::new("../test-project/archdoc.toml")).unwrap();
+    let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap();
 
     // Initialize analyzer
     let analyzer = PythonAnalyzer::new(config);
36  wtismycode-core/tests/renderer_tests.rs  Normal file
@@ -0,0 +1,36 @@
+//! Tests for the renderer functionality
+
+use wtismycode_core::{
+    model::ProjectModel,
+    renderer::Renderer,
+};
+
+#[test]
+fn test_render_with_integrations() {
+    let mut project_model = ProjectModel::new();
+
+    // Add classified integrations (new format)
+    project_model.classified_integrations.insert(
+        "Database".to_string(),
+        vec!["sqlalchemy".to_string(), "asyncpg".to_string()],
+    );
+    project_model.classified_integrations.insert(
+        "HTTP".to_string(),
+        vec!["fastapi".to_string(), "requests".to_string()],
+    );
+
+    let renderer = Renderer::new();
+    let result = renderer.render_architecture_md(&project_model, None);
+    assert!(result.is_ok());
+
+    let rendered = result.unwrap();
+    println!("Rendered:\n{}", rendered);
+
+    assert!(rendered.contains("## Integrations"));
+    assert!(rendered.contains("### Database"));
+    assert!(rendered.contains("- sqlalchemy"));
+    assert!(rendered.contains("- asyncpg"));
+    assert!(rendered.contains("### HTTP"));
+    assert!(rendered.contains("- fastapi"));
+    assert!(rendered.contains("- requests"));
+}