rename: archdoc → wtismycode (WTIsMyCode)
This commit is contained in:
12
wtismycode-core/.gitignore
vendored
Normal file
12
wtismycode-core/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# Compiled files
|
||||
target/
|
||||
|
||||
# IDE files
|
||||
*.swp
|
||||
.DS_Store
|
||||
|
||||
# Backup files
|
||||
*.rs.bk
|
||||
|
||||
# Documentation files
|
||||
doc/
|
||||
1
wtismycode-core/.wtismycode/cache/1dd9479f63eeeea5.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/1dd9479f63eeeea5.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.939017204Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmpjrzBI1/test.py","module_path":"/tmp/.tmpjrzBI1/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/22f137dfd1267b44.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/22f137dfd1267b44.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.929046662Z","file_modified_at":"2026-02-15T09:12:21.928241645Z","parsed_module":{"path":"/tmp/.tmpucjtMF/test.py","module_path":"/tmp/.tmpucjtMF/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/242d46dd3d930a62.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/242d46dd3d930a62.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.901000313Z","file_modified_at":"2026-02-15T09:12:21.900241847Z","parsed_module":{"path":"/tmp/.tmpQwpTTi/test.py","module_path":"/tmp/.tmpQwpTTi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/2d1d3488fad06abc.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/2d1d3488fad06abc.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.638281687Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmp5HECBh/test.py","module_path":"/tmp/.tmp5HECBh/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/323af6c33c893dc9.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/323af6c33c893dc9.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.938417589Z","file_modified_at":"2026-02-15T09:12:21.937241580Z","parsed_module":{"path":"/tmp/.tmpHn93FX/test.py","module_path":"/tmp/.tmpHn93FX/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/332464b9176fa65a.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/332464b9176fa65a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.900267168Z","file_modified_at":"2026-02-15T09:12:21.899241854Z","parsed_module":{"path":"/tmp/.tmpVPUjB4/test.py","module_path":"/tmp/.tmpVPUjB4/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/34c7d0f0a5859bc4.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/34c7d0f0a5859bc4.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1
wtismycode-core/.wtismycode/cache/3f48e681f7e81aa3.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/3f48e681f7e81aa3.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.939756459Z","file_modified_at":"2026-02-15T09:12:21.938241573Z","parsed_module":{"path":"/tmp/.tmp5yAI8O/test.py","module_path":"/tmp/.tmp5yAI8O/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/4427b32031669c3a.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/4427b32031669c3a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.949122466Z","file_modified_at":"2026-02-15T00:22:51.124088300Z","parsed_module":{"path":"../test-project/src/utils.py","module_path":"../test-project/src/utils.py","imports":[{"module_name":"json","alias":null,"line_number":54},{"module_name":"os","alias":null,"line_number":66}],"symbols":[{"id":"load_config","kind":"Function","module_id":"","file_id":"","qualname":"load_config","signature":"def load_config(config_path: str)","annotations":null,"docstring_first_line":"Load configuration from a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"save_config","kind":"Function","module_id":"","file_id":"","qualname":"save_config","signature":"def save_config(config: dict, config_path: str)","annotations":null,"docstring_first_line":"Save configuration to a JSON file.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"get_file_size","kind":"Function","module_id":"","file_id":"","qualname":"get_file_size","signature":"def get_file_size(filepath: str)","annotations":null,"docstring_first_line":"Get the size of a file in bytes.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"format_bytes","kind":"Function","module_id":"","file_id":"","qualname":"format_bytes","signature":"def format_bytes(size: int)","annotations":null,"docstring_first_line":"Format bytes into a human-readable string.","purpose":"extracted from 
AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"open","line_number":169,"call_type":"Unresolved"},{"caller_symbol":"load_config","callee_expr":"json.load","line_number":213,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"open","line_number":330,"call_type":"Unresolved"},{"caller_symbol":"save_config","callee_expr":"json.dump","line_number":367,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"},{"caller_symbol":"get_file_size","callee_expr":"os.path.getsize","line_number":494,"call_type":"Unresolved"}],"file_docstring":"Utility functions for the test project."}}
|
||||
1
wtismycode-core/.wtismycode/cache/44b31aff14e80d6b.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/44b31aff14e80d6b.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.932282950Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpMK4GyS/test.py","module_path":"/tmp/.tmpMK4GyS/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/6b46d7daa9d35ecf.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/6b46d7daa9d35ecf.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.646855488Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpXh0uQg/test.py","module_path":"/tmp/.tmpXh0uQg/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/7ff0f715bb184391.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/7ff0f715bb184391.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.932289740Z","file_modified_at":"2026-02-15T09:12:21.931241624Z","parsed_module":{"path":"/tmp/.tmpn1WePQ/test.py","module_path":"/tmp/.tmpn1WePQ/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/80d24a35240626da.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/80d24a35240626da.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.646347331Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpFFmDl3/test.py","module_path":"/tmp/.tmpFFmDl3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/8e89f71b0bea2e6d.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/8e89f71b0bea2e6d.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.937802033Z","file_modified_at":"2026-02-15T09:12:21.936241587Z","parsed_module":{"path":"/tmp/.tmpU9hOcm/test.py","module_path":"/tmp/.tmpU9hOcm/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/90460d6c369f9d4c.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/90460d6c369f9d4c.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.646167123Z","file_modified_at":"2026-02-15T09:12:27.645200509Z","parsed_module":{"path":"/tmp/.tmpj84SS2/test.py","module_path":"/tmp/.tmpj84SS2/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/a8dcf5363a5ef953.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/a8dcf5363a5ef953.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.647109436Z","file_modified_at":"2026-02-15T09:12:27.646200502Z","parsed_module":{"path":"/tmp/.tmpTS6Kf7/test.py","module_path":"/tmp/.tmpTS6Kf7/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/ae981a5f144a6f7a.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/ae981a5f144a6f7a.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.906280597Z","file_modified_at":"2026-02-15T00:21:25.872722975Z","parsed_module":{"path":"tests/golden/test_project/src/example.py","module_path":"tests/golden/test_project/src/example.py","imports":[{"module_name":"os","alias":null,"line_number":42},{"module_name":"typing.List","alias":null,"line_number":64}],"symbols":[{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":"A simple calculator class.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.__init__","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.__init__","signature":"def __init__(self)","annotations":null,"docstring_first_line":"Initialize the calculator.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a: int, b: int)","annotations":null,"docstring_first_line":"Add two numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.multiply","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.multiply","signature":"def multiply(self, a: int, b: int)","annotations":null,"docstring_first_line":"Multiply two numbers.","purpose":"extracted from 
AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"process_numbers","kind":"Function","module_id":"","file_id":"","qualname":"process_numbers","signature":"def process_numbers(numbers: List[int])","annotations":null,"docstring_first_line":"Process a list of numbers.","purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"Calculator","line_number":519,"call_type":"Unresolved"},{"caller_symbol":"process_numbers","callee_expr":"calc.add","line_number":544,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"process_numbers","line_number":648,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"print","line_number":677,"call_type":"Unresolved"}],"file_docstring":"Example module for testing."}}
|
||||
1
wtismycode-core/.wtismycode/cache/af6c11e9a59f28dd.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/af6c11e9a59f28dd.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.639487788Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7gcSsx/test.py","module_path":"/tmp/.tmp7gcSsx/test.py","imports":[{"module_name":"redis","alias":null,"line_number":8}],"symbols":[{"id":"process_job","kind":"Function","module_id":"","file_id":"","qualname":"process_job","signature":"def process_job(job_data)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"redis.Redis","line_number":55,"call_type":"Unresolved"},{"caller_symbol":"process_job","callee_expr":"client.lpush","line_number":73,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/b74dd266405fda26.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/b74dd266405fda26.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.623913794Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpY5jXEG/test.py","module_path":"/tmp/.tmpY5jXEG/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/b967ef0258ec1d92.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/b967ef0258ec1d92.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.623293468Z","file_modified_at":"2026-02-15T09:12:27.622200674Z","parsed_module":{"path":"/tmp/.tmpbimwTO/test.py","module_path":"/tmp/.tmpbimwTO/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/ca89f5c4de39cd5c.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/ca89f5c4de39cd5c.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.638405646Z","file_modified_at":"2026-02-15T09:12:27.637200566Z","parsed_module":{"path":"/tmp/.tmpDqAWXp/test.py","module_path":"/tmp/.tmpDqAWXp/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/cc39a913d23e0148.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/cc39a913d23e0148.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.928408667Z","file_modified_at":"2026-02-15T09:12:21.927241652Z","parsed_module":{"path":"/tmp/.tmpkuoSO4/test.py","module_path":"/tmp/.tmpkuoSO4/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/d49cc1c393cf173e.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/d49cc1c393cf173e.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.642603187Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmplZ7Gfg/test.py","module_path":"/tmp/.tmplZ7Gfg/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator","kind":"Class","module_id":"","file_id":"","qualname":"Calculator","signature":"class Calculator","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"Calculator.add","kind":"Method","module_id":"","file_id":"","qualname":"Calculator.add","signature":"def add(self, a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/d93abaa965fa2d8d.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/d93abaa965fa2d8d.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.642573298Z","file_modified_at":"2026-02-15T09:12:27.641200538Z","parsed_module":{"path":"/tmp/.tmpiVOCMi/test.py","module_path":"/tmp/.tmpiVOCMi/test.py","imports":[],"symbols":[{"id":"hello","kind":"Function","module_id":"","file_id":"","qualname":"hello","signature":"def hello()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}},{"id":"goodbye","kind":"Function","module_id":"","file_id":"","qualname":"goodbye","signature":"def goodbye()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/ddc166202153e62e.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/ddc166202153e62e.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.927910330Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmp1gFjk3/test.py","module_path":"/tmp/.tmp1gFjk3/test.py","imports":[{"module_name":"sqlite3","alias":null,"line_number":8}],"symbols":[{"id":"get_user","kind":"Function","module_id":"","file_id":"","qualname":"get_user","signature":"def get_user(user_id)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"sqlite3.connect","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"conn.cursor","line_number":95,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.execute","line_number":113,"call_type":"Unresolved"},{"caller_symbol":"get_user","callee_expr":"cursor.fetchone","line_number":187,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/e9433f25871e418.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/e9433f25871e418.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:21.927753122Z","file_modified_at":"2026-02-15T09:12:21.926241659Z","parsed_module":{"path":"/tmp/.tmpp9A45l/test.py","module_path":"/tmp/.tmpp9A45l/test.py","imports":[{"module_name":"requests","alias":null,"line_number":8}],"symbols":[{"id":"fetch_data","kind":"Function","module_id":"","file_id":"","qualname":"fetch_data","signature":"def fetch_data()","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[{"caller_symbol":"unknown","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"unknown","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"requests.get","line_number":51,"call_type":"Unresolved"},{"caller_symbol":"fetch_data","callee_expr":"response.json","line_number":107,"call_type":"Unresolved"}],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/f1a291dc5a093458.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/f1a291dc5a093458.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"created_at":"2026-02-15T09:12:27.638896492Z","file_modified_at":"2026-02-15T09:12:27.638200559Z","parsed_module":{"path":"/tmp/.tmp7IEFw5/test.py","module_path":"/tmp/.tmp7IEFw5/test.py","imports":[],"symbols":[{"id":"calculate_sum","kind":"Function","module_id":"","file_id":"","qualname":"calculate_sum","signature":"def calculate_sum(a, b)","annotations":null,"docstring_first_line":null,"purpose":"extracted from AST","outbound_calls":[],"inbound_calls":[],"integrations_flags":{"http":false,"db":false,"queue":false,"storage":false,"ai":false},"metrics":{"fan_in":0,"fan_out":0,"is_critical":false,"cycle_participant":false}}],"calls":[],"file_docstring":null}}
|
||||
1
wtismycode-core/.wtismycode/cache/f1b45c4f58b2d0dc.json
vendored
Normal file
1
wtismycode-core/.wtismycode/cache/f1b45c4f58b2d0dc.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1320
wtismycode-core/Cargo.lock
generated
Normal file
1320
wtismycode-core/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
18
wtismycode-core/Cargo.toml
Normal file
18
wtismycode-core/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
name = "wtismycode-core"
version = "0.1.0"
edition = "2024"

[dependencies]
# Serialization of cache entries and the analysis data model
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Parsing/writing the wtismycode.toml configuration file
toml = "0.9.11+spec-1.1.0"
# Diagnostics / logging facade
tracing = "0.1"
# Error handling: anyhow for app-level, thiserror for typed library errors
anyhow = "1.0"
thiserror = "2.0.18"
# Recursive directory traversal for the scanner
walkdir = "2.3"
# Markdown/document template rendering
handlebars = "6.4.0"
# Python source parsing (AST extraction)
rustpython-parser = "0.4"
rustpython-ast = "0.4"
# Timestamps on cache entries (serde feature for JSON round-tripping)
chrono = { version = "0.4", features = ["serde"] }
# Scratch directories in tests
tempfile = "3.10"
|
||||
168
wtismycode-core/src/cache.rs
Normal file
168
wtismycode-core/src/cache.rs
Normal file
@@ -0,0 +1,168 @@
|
||||
//! Caching module for WTIsMyCode
|
||||
//!
|
||||
//! This module provides caching functionality to speed up repeated analysis
|
||||
//! by storing parsed ASTs and analysis results.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::errors::WTIsMyCodeError;
|
||||
use crate::model::ParsedModule;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// One on-disk cache record: a parsed module plus the two timestamps needed
/// to decide whether the record is still valid (age-based expiry and
/// source-file staleness; see `CacheManager::get_cached_module`).
#[derive(Debug, Serialize, Deserialize)]
struct CacheEntry {
    /// Timestamp when the cache entry was created
    created_at: DateTime<Utc>,
    /// Timestamp when the source file was last modified
    file_modified_at: DateTime<Utc>,
    /// The parsed module data
    parsed_module: ParsedModule,
}

/// Owns the cache directory and reads/writes `CacheEntry` JSON files,
/// one file per analyzed source path.
pub struct CacheManager {
    // Full configuration; only `config.caching` is consulted by this type.
    config: Config,
    // Copy of `config.caching.cache_dir`, kept for convenient path joins.
    cache_dir: String,
}
|
||||
|
||||
impl CacheManager {
|
||||
pub fn new(config: Config) -> Self {
|
||||
let cache_dir = config.caching.cache_dir.clone();
|
||||
|
||||
// Create cache directory if it doesn't exist
|
||||
if config.caching.enabled && !Path::new(&cache_dir).exists() {
|
||||
let _ = fs::create_dir_all(&cache_dir);
|
||||
}
|
||||
|
||||
Self { config, cache_dir }
|
||||
}
|
||||
|
||||
/// Get cached parsed module if available and not expired
|
||||
pub fn get_cached_module(&self, file_path: &Path) -> Result<Option<ParsedModule>, WTIsMyCodeError> {
|
||||
if !self.config.caching.enabled {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let cache_key = self.get_cache_key(file_path);
|
||||
let cache_file = Path::new(&self.cache_dir).join(&cache_key);
|
||||
|
||||
if !cache_file.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Read cache file
|
||||
let content = fs::read_to_string(&cache_file)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
let cache_entry: CacheEntry = serde_json::from_str(&content)
|
||||
.map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to deserialize cache entry: {}", e)))?;
|
||||
|
||||
// Check if cache is expired
|
||||
let now = Utc::now();
|
||||
let cache_age = now.signed_duration_since(cache_entry.created_at);
|
||||
|
||||
// Parse max_cache_age (simple format: "24h", "7d", etc.)
|
||||
let max_age_seconds = self.parse_duration(&self.config.caching.max_cache_age)?;
|
||||
|
||||
if cache_age.num_seconds() > max_age_seconds as i64 {
|
||||
// Cache expired, remove it
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Check if source file has been modified since caching
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
if modified_time > cache_entry.file_modified_at {
|
||||
// Source file is newer than cache, invalidate cache
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
Ok(Some(cache_entry.parsed_module))
|
||||
}
|
||||
|
||||
/// Store parsed module in cache
|
||||
pub fn store_module(&self, file_path: &Path, parsed_module: ParsedModule) -> Result<(), WTIsMyCodeError> {
|
||||
if !self.config.caching.enabled {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let cache_key = self.get_cache_key(file_path);
|
||||
let cache_file = Path::new(&self.cache_dir).join(&cache_key);
|
||||
|
||||
// Get file modification time
|
||||
let metadata = fs::metadata(file_path)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
let modified_time = metadata.modified()
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
let modified_time: DateTime<Utc> = modified_time.into();
|
||||
|
||||
let cache_entry = CacheEntry {
|
||||
created_at: Utc::now(),
|
||||
file_modified_at: modified_time,
|
||||
parsed_module,
|
||||
};
|
||||
|
||||
let content = serde_json::to_string(&cache_entry)
|
||||
.map_err(|e| WTIsMyCodeError::AnalysisError(format!("Failed to serialize cache entry: {}", e)))?;
|
||||
|
||||
fs::write(&cache_file, content)
|
||||
.map_err(WTIsMyCodeError::Io)
|
||||
}
|
||||
|
||||
/// Generate cache key for a file path
|
||||
fn get_cache_key(&self, file_path: &Path) -> String {
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
file_path.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
format!("{:x}.json", hash)
|
||||
}
|
||||
|
||||
/// Parse duration string like "24h" or "7d" into seconds
|
||||
fn parse_duration(&self, duration_str: &str) -> Result<u64, WTIsMyCodeError> {
|
||||
if duration_str.is_empty() {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let chars: Vec<char> = duration_str.chars().collect();
|
||||
let (number_str, unit) = chars.split_at(chars.len() - 1);
|
||||
let number: u64 = number_str.iter().collect::<String>().parse()
|
||||
.map_err(|_| WTIsMyCodeError::AnalysisError(format!("Invalid duration format: {}", duration_str)))?;
|
||||
|
||||
match unit[0] {
|
||||
's' => Ok(number), // seconds
|
||||
'm' => Ok(number * 60), // minutes
|
||||
'h' => Ok(number * 3600), // hours
|
||||
'd' => Ok(number * 86400), // days
|
||||
_ => Err(WTIsMyCodeError::AnalysisError(format!("Unknown duration unit: {}", unit[0]))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear all cache entries
|
||||
pub fn clear_cache(&self) -> Result<(), WTIsMyCodeError> {
|
||||
if Path::new(&self.cache_dir).exists() {
|
||||
fs::remove_dir_all(&self.cache_dir)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
|
||||
// Recreate cache directory
|
||||
fs::create_dir_all(&self.cache_dir)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
635
wtismycode-core/src/config.rs
Normal file
635
wtismycode-core/src/config.rs
Normal file
@@ -0,0 +1,635 @@
|
||||
//! Configuration management for WTIsMyCode
|
||||
//!
|
||||
//! This module handles loading and validating the wtismycode.toml configuration file.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::Path;
|
||||
use crate::errors::WTIsMyCodeError;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Default)]
|
||||
pub struct Config {
|
||||
#[serde(default)]
|
||||
pub project: ProjectConfig,
|
||||
#[serde(default)]
|
||||
pub scan: ScanConfig,
|
||||
#[serde(default)]
|
||||
pub python: PythonConfig,
|
||||
#[serde(default)]
|
||||
pub analysis: AnalysisConfig,
|
||||
#[serde(default)]
|
||||
pub output: OutputConfig,
|
||||
#[serde(default)]
|
||||
pub diff: DiffConfig,
|
||||
#[serde(default)]
|
||||
pub thresholds: ThresholdsConfig,
|
||||
#[serde(default)]
|
||||
pub rendering: RenderingConfig,
|
||||
#[serde(default)]
|
||||
pub logging: LoggingConfig,
|
||||
#[serde(default)]
|
||||
pub caching: CachingConfig,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProjectConfig {
|
||||
#[serde(default = "default_root")]
|
||||
pub root: String,
|
||||
#[serde(default = "default_out_dir")]
|
||||
pub out_dir: String,
|
||||
#[serde(default = "default_entry_file")]
|
||||
pub entry_file: String,
|
||||
#[serde(default = "default_language")]
|
||||
pub language: String,
|
||||
#[serde(default)]
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl Default for ProjectConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
root: default_root(),
|
||||
out_dir: default_out_dir(),
|
||||
entry_file: default_entry_file(),
|
||||
language: default_language(),
|
||||
name: String::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_root() -> String {
|
||||
".".to_string()
|
||||
}
|
||||
|
||||
fn default_out_dir() -> String {
|
||||
"docs/architecture".to_string()
|
||||
}
|
||||
|
||||
fn default_entry_file() -> String {
|
||||
"ARCHITECTURE.md".to_string()
|
||||
}
|
||||
|
||||
fn default_language() -> String {
|
||||
"python".to_string()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ScanConfig {
|
||||
#[serde(default = "default_include")]
|
||||
pub include: Vec<String>,
|
||||
#[serde(default = "default_exclude")]
|
||||
pub exclude: Vec<String>,
|
||||
#[serde(default)]
|
||||
pub follow_symlinks: bool,
|
||||
#[serde(default = "default_max_file_size")]
|
||||
pub max_file_size: String,
|
||||
}
|
||||
|
||||
impl Default for ScanConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
include: default_include(),
|
||||
exclude: default_exclude(),
|
||||
follow_symlinks: false,
|
||||
max_file_size: default_max_file_size(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_include() -> Vec<String> {
|
||||
vec!["src".to_string(), "app".to_string(), "tests".to_string()]
|
||||
}
|
||||
|
||||
fn default_exclude() -> Vec<String> {
|
||||
vec![
|
||||
".venv".to_string(),
|
||||
"venv".to_string(),
|
||||
"__pycache__".to_string(),
|
||||
".git".to_string(),
|
||||
"dist".to_string(),
|
||||
"build".to_string(),
|
||||
".mypy_cache".to_string(),
|
||||
".ruff_cache".to_string(),
|
||||
".pytest_cache".to_string(),
|
||||
"*.egg-info".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
fn default_max_file_size() -> String {
|
||||
"10MB".to_string()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PythonConfig {
|
||||
#[serde(default = "default_src_roots")]
|
||||
pub src_roots: Vec<String>,
|
||||
#[serde(default = "default_include_tests")]
|
||||
pub include_tests: bool,
|
||||
#[serde(default = "default_parse_docstrings")]
|
||||
pub parse_docstrings: bool,
|
||||
#[serde(default = "default_max_parse_errors")]
|
||||
pub max_parse_errors: usize,
|
||||
}
|
||||
|
||||
impl Default for PythonConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
src_roots: default_src_roots(),
|
||||
include_tests: default_include_tests(),
|
||||
parse_docstrings: default_parse_docstrings(),
|
||||
max_parse_errors: default_max_parse_errors(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_src_roots() -> Vec<String> {
|
||||
vec!["src".to_string(), ".".to_string()]
|
||||
}
|
||||
|
||||
fn default_include_tests() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_parse_docstrings() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_max_parse_errors() -> usize {
|
||||
10
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AnalysisConfig {
|
||||
#[serde(default = "default_resolve_calls")]
|
||||
pub resolve_calls: bool,
|
||||
#[serde(default)]
|
||||
pub resolve_inheritance: bool,
|
||||
#[serde(default = "default_detect_integrations")]
|
||||
pub detect_integrations: bool,
|
||||
#[serde(default = "default_integration_patterns")]
|
||||
pub integration_patterns: Vec<IntegrationPattern>,
|
||||
}
|
||||
|
||||
impl Default for AnalysisConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
resolve_calls: default_resolve_calls(),
|
||||
resolve_inheritance: false,
|
||||
detect_integrations: default_detect_integrations(),
|
||||
integration_patterns: default_integration_patterns(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_resolve_calls() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_detect_integrations() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_integration_patterns() -> Vec<IntegrationPattern> {
|
||||
vec![
|
||||
IntegrationPattern {
|
||||
type_: "http".to_string(),
|
||||
patterns: vec!["requests".to_string(), "httpx".to_string(), "aiohttp".to_string()],
|
||||
},
|
||||
IntegrationPattern {
|
||||
type_: "db".to_string(),
|
||||
patterns: vec![
|
||||
"sqlalchemy".to_string(),
|
||||
"psycopg".to_string(),
|
||||
"mysql".to_string(),
|
||||
"sqlite3".to_string(),
|
||||
],
|
||||
},
|
||||
IntegrationPattern {
|
||||
type_: "queue".to_string(),
|
||||
patterns: vec![
|
||||
"celery".to_string(),
|
||||
"kafka".to_string(),
|
||||
"pika".to_string(),
|
||||
"redis".to_string(),
|
||||
],
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/// One integration-detection rule: a category tag plus the module-name
/// patterns that indicate it (built-ins in `default_integration_patterns`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrationPattern {
    /// Category tag such as "http", "db", "queue"; serialized as `type`
    /// in TOML because `type` is a Rust keyword.
    #[serde(rename = "type")]
    pub type_: String,
    /// Module/import names associated with this category.
    /// NOTE(review): exact matching semantics (prefix vs. substring) are
    /// defined by the analyzer, not visible here — confirm before documenting.
    pub patterns: Vec<String>,
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct OutputConfig {
|
||||
#[serde(default)]
|
||||
pub single_file: bool,
|
||||
#[serde(default = "default_per_file_docs")]
|
||||
pub per_file_docs: bool,
|
||||
#[serde(default = "default_create_directories")]
|
||||
pub create_directories: bool,
|
||||
#[serde(default)]
|
||||
pub overwrite_manual_sections: bool,
|
||||
}
|
||||
|
||||
impl Default for OutputConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
single_file: false,
|
||||
per_file_docs: default_per_file_docs(),
|
||||
create_directories: default_create_directories(),
|
||||
overwrite_manual_sections: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_per_file_docs() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_create_directories() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DiffConfig {
|
||||
#[serde(default = "default_update_timestamp_on_change_only")]
|
||||
pub update_timestamp_on_change_only: bool,
|
||||
#[serde(default = "default_hash_algorithm")]
|
||||
pub hash_algorithm: String,
|
||||
#[serde(default = "default_preserve_manual_content")]
|
||||
pub preserve_manual_content: bool,
|
||||
}
|
||||
|
||||
impl Default for DiffConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
update_timestamp_on_change_only: default_update_timestamp_on_change_only(),
|
||||
hash_algorithm: default_hash_algorithm(),
|
||||
preserve_manual_content: default_preserve_manual_content(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_update_timestamp_on_change_only() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_hash_algorithm() -> String {
|
||||
"sha256".to_string()
|
||||
}
|
||||
|
||||
fn default_preserve_manual_content() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ThresholdsConfig {
|
||||
#[serde(default = "default_critical_fan_in")]
|
||||
pub critical_fan_in: usize,
|
||||
#[serde(default = "default_critical_fan_out")]
|
||||
pub critical_fan_out: usize,
|
||||
#[serde(default = "default_high_complexity")]
|
||||
pub high_complexity: usize,
|
||||
}
|
||||
|
||||
impl Default for ThresholdsConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
critical_fan_in: default_critical_fan_in(),
|
||||
critical_fan_out: default_critical_fan_out(),
|
||||
high_complexity: default_high_complexity(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_critical_fan_in() -> usize {
|
||||
20
|
||||
}
|
||||
|
||||
fn default_critical_fan_out() -> usize {
|
||||
20
|
||||
}
|
||||
|
||||
fn default_high_complexity() -> usize {
|
||||
50
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RenderingConfig {
|
||||
#[serde(default = "default_template_engine")]
|
||||
pub template_engine: String,
|
||||
#[serde(default = "default_max_table_rows")]
|
||||
pub max_table_rows: usize,
|
||||
#[serde(default = "default_truncate_long_descriptions")]
|
||||
pub truncate_long_descriptions: bool,
|
||||
#[serde(default = "default_description_max_length")]
|
||||
pub description_max_length: usize,
|
||||
}
|
||||
|
||||
impl Default for RenderingConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
template_engine: default_template_engine(),
|
||||
max_table_rows: default_max_table_rows(),
|
||||
truncate_long_descriptions: default_truncate_long_descriptions(),
|
||||
description_max_length: default_description_max_length(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_template_engine() -> String {
|
||||
"handlebars".to_string()
|
||||
}
|
||||
|
||||
fn default_max_table_rows() -> usize {
|
||||
100
|
||||
}
|
||||
|
||||
fn default_truncate_long_descriptions() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_description_max_length() -> usize {
|
||||
200
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LoggingConfig {
|
||||
#[serde(default = "default_log_level")]
|
||||
pub level: String,
|
||||
#[serde(default = "default_log_file")]
|
||||
pub file: String,
|
||||
#[serde(default = "default_log_format")]
|
||||
pub format: String,
|
||||
}
|
||||
|
||||
impl Default for LoggingConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
level: default_log_level(),
|
||||
file: default_log_file(),
|
||||
format: default_log_format(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_log_level() -> String {
|
||||
"info".to_string()
|
||||
}
|
||||
|
||||
fn default_log_file() -> String {
|
||||
"wtismycode.log".to_string()
|
||||
}
|
||||
|
||||
fn default_log_format() -> String {
|
||||
"compact".to_string()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CachingConfig {
|
||||
#[serde(default = "default_caching_enabled")]
|
||||
pub enabled: bool,
|
||||
#[serde(default = "default_cache_dir")]
|
||||
pub cache_dir: String,
|
||||
#[serde(default = "default_max_cache_age")]
|
||||
pub max_cache_age: String,
|
||||
}
|
||||
|
||||
impl Default for CachingConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: default_caching_enabled(),
|
||||
cache_dir: default_cache_dir(),
|
||||
max_cache_age: default_max_cache_age(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn default_caching_enabled() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_cache_dir() -> String {
|
||||
".wtismycode/cache".to_string()
|
||||
}
|
||||
|
||||
fn default_max_cache_age() -> String {
|
||||
"24h".to_string()
|
||||
}
|
||||
|
||||
impl Config {
    /// Validate the configuration for correctness.
    ///
    /// Checks that paths exist, values are parseable, and settings are sensible.
    ///
    /// NOTE: checks run in a fixed order and return on the first failure;
    /// the unit tests below match on the error text, so keep both the order
    /// and the messages stable.
    pub fn validate(&self) -> Result<(), WTIsMyCodeError> {
        // Check project.root exists and is a directory
        let root = Path::new(&self.project.root);
        if !root.exists() {
            return Err(WTIsMyCodeError::ConfigError(format!(
                "project.root '{}' does not exist",
                self.project.root
            )));
        }
        if !root.is_dir() {
            return Err(WTIsMyCodeError::ConfigError(format!(
                "project.root '{}' is not a directory",
                self.project.root
            )));
        }

        // Check language is python (the only supported language today)
        if self.project.language != "python" {
            return Err(WTIsMyCodeError::ConfigError(format!(
                "project.language '{}' is not supported. Only 'python' is currently supported",
                self.project.language
            )));
        }

        // Check scan.include is not empty
        if self.scan.include.is_empty() {
            return Err(WTIsMyCodeError::ConfigError(
                "scan.include must not be empty — at least one directory must be specified".to_string(),
            ));
        }

        // Check python.src_roots exist relative to project.root
        for src_root in &self.python.src_roots {
            let path = root.join(src_root);
            if !path.exists() {
                return Err(WTIsMyCodeError::ConfigError(format!(
                    "python.src_roots entry '{}' does not exist (resolved to '{}')",
                    src_root,
                    path.display()
                )));
            }
        }

        // Parse max_cache_age — value is discarded; this only proves parseability
        parse_duration(&self.caching.max_cache_age).map_err(|e| {
            WTIsMyCodeError::ConfigError(format!(
                "caching.max_cache_age '{}' is not valid: {}. Use formats like '24h', '7d', '30m'",
                self.caching.max_cache_age, e
            ))
        })?;

        // Parse max_file_size — likewise a parse-only sanity check
        parse_file_size(&self.scan.max_file_size).map_err(|e| {
            WTIsMyCodeError::ConfigError(format!(
                "scan.max_file_size '{}' is not valid: {}. Use formats like '10MB', '1GB', '500KB'",
                self.scan.max_file_size, e
            ))
        })?;

        Ok(())
    }

    /// Load configuration from a TOML file.
    ///
    /// # Errors
    /// `ConfigError` when the file cannot be read or is not valid TOML.
    pub fn load_from_file(path: &Path) -> Result<Self, WTIsMyCodeError> {
        let content = std::fs::read_to_string(path)
            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to read config file: {}", e)))?;

        toml::from_str(&content)
            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to parse config file: {}", e)))
    }

    /// Save configuration to a TOML file.
    ///
    /// # Errors
    /// `ConfigError` when serialization or the filesystem write fails.
    pub fn save_to_file(&self, path: &Path) -> Result<(), WTIsMyCodeError> {
        let content = toml::to_string_pretty(self)
            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to serialize config: {}", e)))?;

        std::fs::write(path, content)
            .map_err(|e| WTIsMyCodeError::ConfigError(format!("Failed to write config file: {}", e)))
    }
}
|
||||
|
||||
/// Convert a human-readable duration such as "24h", "7d" or "30m" into a
/// number of seconds. Leading/trailing whitespace is ignored.
///
/// # Errors
/// Returns a descriptive message when the string is empty, has no numeric
/// prefix, has no unit suffix, or uses an unknown suffix
/// (supported: s, m, h, d, w).
pub fn parse_duration(s: &str) -> Result<u64, String> {
    let s = s.trim();
    if s.is_empty() {
        return Err("empty duration string".to_string());
    }

    // Boundary between the numeric prefix and the unit suffix.
    let pos = match s.find(|c: char| !c.is_ascii_digit()) {
        Some(p) => p,
        None => return Err(format!("no unit suffix found in '{}'", s)),
    };
    if pos == 0 {
        return Err(format!("no numeric value found in '{}'", s));
    }
    let (num_str, suffix) = s.split_at(pos);

    let value: u64 = match num_str.parse() {
        Ok(v) => v,
        Err(_) => return Err(format!("'{}' is not a valid number", num_str)),
    };

    let multiplier: u64 = match suffix {
        "s" => 1,
        "m" => 60,
        "h" => 3600,
        "d" => 86400,
        "w" => 604800,
        _ => return Err(format!("unknown duration suffix '{}'. Use s, m, h, d, or w", suffix)),
    };

    Ok(value * multiplier)
}
|
||||
|
||||
/// Convert a human-readable file size such as "10MB", "1GB" or "500KB" into
/// a number of bytes (1 KB = 1024 B). Suffix matching is case-insensitive;
/// leading/trailing whitespace is ignored.
///
/// # Errors
/// Returns a descriptive message when the string is empty, has no numeric
/// prefix, has no unit suffix, or uses an unknown suffix
/// (supported: B, KB/K, MB/M, GB/G).
pub fn parse_file_size(s: &str) -> Result<u64, String> {
    let s = s.trim();
    if s.is_empty() {
        return Err("empty file size string".to_string());
    }

    // Boundary between the numeric prefix and the unit suffix.
    let pos = match s.find(|c: char| !c.is_ascii_digit()) {
        Some(p) => p,
        None => return Err(format!("no unit suffix found in '{}'", s)),
    };
    if pos == 0 {
        return Err(format!("no numeric value found in '{}'", s));
    }
    let (num_str, suffix) = s.split_at(pos);

    let value: u64 = match num_str.parse() {
        Ok(v) => v,
        Err(_) => return Err(format!("'{}' is not a valid number", num_str)),
    };

    match suffix.to_uppercase().as_str() {
        "B" => Ok(value),
        "KB" | "K" => Ok(value * 1024),
        "MB" | "M" => Ok(value * 1024 * 1024),
        "GB" | "G" => Ok(value * 1024 * 1024 * 1024),
        _ => Err(format!("unknown size suffix '{}'. Use B, KB, MB, or GB", suffix)),
    }
}
|
||||
|
||||
/// Split a string like "10MB" or "24h" into its numeric prefix and its unit
/// suffix. Errors when the string is all digits (no suffix) or starts with a
/// non-digit (no numeric value).
fn split_numeric_suffix(s: &str) -> Result<(&str, &str), String> {
    match s.char_indices().find(|&(_, c)| !c.is_ascii_digit()) {
        None => Err(format!("no unit suffix found in '{}'", s)),
        Some((0, _)) => Err(format!("no numeric value found in '{}'", s)),
        Some((pos, _)) => Ok((&s[..pos], &s[pos..])),
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Every supported duration suffix converts to the expected number of
    /// seconds; malformed strings are rejected.
    #[test]
    fn test_parse_duration() {
        for (input, seconds) in [("24h", 86400), ("7d", 604800), ("30m", 1800), ("60s", 60)] {
            assert_eq!(parse_duration(input).unwrap(), seconds);
        }
        for bad in ["abc", "", "10x"] {
            assert!(parse_duration(bad).is_err());
        }
    }

    /// Binary size suffixes multiply out to the expected byte counts;
    /// malformed strings are rejected.
    #[test]
    fn test_parse_file_size() {
        for (input, bytes) in [
            ("10MB", 10 * 1024 * 1024),
            ("1GB", 1024 * 1024 * 1024),
            ("500KB", 500 * 1024),
        ] {
            assert_eq!(parse_file_size(input).unwrap(), bytes);
        }
        for bad in ["abc", ""] {
            assert!(parse_file_size(bad).is_err());
        }
    }

    /// The default config uses "." as root, which exists and is a
    /// directory, so validation succeeds.
    #[test]
    fn test_validate_default_config() {
        assert!(Config::default().validate().is_ok());
    }

    /// An unsupported language is reported as "not supported".
    #[test]
    fn test_validate_bad_language() {
        let mut config = Config::default();
        config.project.language = "java".to_string();
        let message = config.validate().unwrap_err().to_string();
        assert!(message.contains("not supported"));
    }

    /// An empty include list fails validation.
    #[test]
    fn test_validate_empty_include() {
        let mut config = Config::default();
        config.scan.include = vec![];
        let message = config.validate().unwrap_err().to_string();
        assert!(message.contains("must not be empty"));
    }

    /// A nonexistent root directory fails validation.
    #[test]
    fn test_validate_bad_root() {
        let mut config = Config::default();
        config.project.root = "/nonexistent/path/xyz".to_string();
        let message = config.validate().unwrap_err().to_string();
        assert!(message.contains("does not exist"));
    }

    /// An unparsable max_cache_age fails validation.
    #[test]
    fn test_validate_bad_cache_age() {
        let mut config = Config::default();
        config.caching.max_cache_age = "invalid".to_string();
        let message = config.validate().unwrap_err().to_string();
        assert!(message.contains("not valid"));
    }

    /// An unparsable max_file_size fails validation.
    #[test]
    fn test_validate_bad_file_size() {
        let mut config = Config::default();
        config.scan.max_file_size = "notasize".to_string();
        let message = config.validate().unwrap_err().to_string();
        assert!(message.contains("not valid"));
    }
}
|
||||
183
wtismycode-core/src/cycle_detector.rs
Normal file
183
wtismycode-core/src/cycle_detector.rs
Normal file
@@ -0,0 +1,183 @@
|
||||
//! Dependency cycle detection for module graphs.
|
||||
//!
|
||||
//! Uses DFS-based cycle detection to find circular dependencies
|
||||
//! in the module dependency graph.
|
||||
|
||||
use crate::model::ProjectModel;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
/// Detect cycles in the module dependency graph.
|
||||
///
|
||||
/// Returns a list of cycles, where each cycle is a list of module IDs
|
||||
/// forming a circular dependency chain.
|
||||
pub fn detect_cycles(model: &ProjectModel) -> Vec<Vec<String>> {
|
||||
let mut visited = HashSet::new();
|
||||
let mut rec_stack = HashSet::new();
|
||||
let mut path = Vec::new();
|
||||
let mut cycles = Vec::new();
|
||||
|
||||
// Build adjacency list from model
|
||||
let adj = build_adjacency_list(model);
|
||||
|
||||
for module_id in model.modules.keys() {
|
||||
if !visited.contains(module_id.as_str()) {
|
||||
dfs(
|
||||
module_id,
|
||||
&adj,
|
||||
&mut visited,
|
||||
&mut rec_stack,
|
||||
&mut path,
|
||||
&mut cycles,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate cycles (normalize by rotating to smallest element first)
|
||||
deduplicate_cycles(cycles)
|
||||
}
|
||||
|
||||
fn build_adjacency_list(model: &ProjectModel) -> HashMap<String, Vec<String>> {
|
||||
let mut adj: HashMap<String, Vec<String>> = HashMap::new();
|
||||
|
||||
for (module_id, module) in &model.modules {
|
||||
let neighbors: Vec<String> = module
|
||||
.outbound_modules
|
||||
.iter()
|
||||
.filter(|target| model.modules.contains_key(*target))
|
||||
.cloned()
|
||||
.collect();
|
||||
adj.insert(module_id.clone(), neighbors);
|
||||
}
|
||||
|
||||
adj
|
||||
}
|
||||
|
||||
/// Depth-first search that records every cycle closed by a back edge.
///
/// `rec_stack` holds the nodes on the current DFS path (same contents as
/// `path`, but with O(1) membership checks); when a neighbor is found in
/// it, the slice of `path` from that neighbor onward is one cycle.
fn dfs(
    node: &str,
    adj: &HashMap<String, Vec<String>>,
    visited: &mut HashSet<String>,
    rec_stack: &mut HashSet<String>,
    path: &mut Vec<String>,
    cycles: &mut Vec<Vec<String>>,
) {
    let owned = node.to_string();
    visited.insert(owned.clone());
    rec_stack.insert(owned.clone());
    path.push(owned);

    for neighbor in adj.get(node).map(|v| v.as_slice()).unwrap_or(&[]) {
        if visited.contains(neighbor.as_str()) {
            // Back edge into the active stack closes a cycle.
            if rec_stack.contains(neighbor.as_str()) {
                if let Some(start) = path.iter().position(|n| n == neighbor) {
                    cycles.push(path[start..].to_vec());
                }
            }
        } else {
            dfs(neighbor, adj, visited, rec_stack, path, cycles);
        }
    }

    // Unwind: this node is no longer on the active path.
    path.pop();
    rec_stack.remove(node);
}
|
||||
|
||||
/// Collapse rotations of the same cycle into a single canonical entry.
///
/// Canonical form: the cycle rotated so its lexicographically smallest
/// element comes first. Empty cycles are discarded.
fn deduplicate_cycles(cycles: Vec<Vec<String>>) -> Vec<Vec<String>> {
    let mut seen: HashSet<Vec<String>> = HashSet::new();
    let mut unique = Vec::new();

    for cycle in cycles.into_iter().filter(|c| !c.is_empty()) {
        // Index of the lexicographically smallest element.
        let pivot = cycle
            .iter()
            .enumerate()
            .min_by_key(|&(_, v)| v.as_str())
            .map(|(i, _)| i)
            .unwrap_or(0);

        // Rotate so the smallest element leads.
        let mut canonical: Vec<String> = cycle[pivot..].to_vec();
        canonical.extend_from_slice(&cycle[..pivot]);

        if seen.insert(canonical.clone()) {
            unique.push(canonical);
        }
    }

    unique
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    // The original also imported `Edges` and `std::collections::HashMap`;
    // neither is used here, so they are dropped to avoid unused-import
    // warnings.
    use crate::model::{Module, ProjectModel};

    /// Build a minimal module with the given id and outbound edges.
    fn make_module(id: &str, outbound: Vec<&str>) -> Module {
        Module {
            id: id.to_string(),
            path: format!("{}.py", id),
            files: vec![],
            doc_summary: None,
            outbound_modules: outbound.into_iter().map(String::from).collect(),
            inbound_modules: vec![],
            symbols: vec![],
        }
    }

    /// A linear chain a -> b -> c has no cycles.
    #[test]
    fn test_no_cycles() {
        let mut model = ProjectModel::new();
        model.modules.insert("a".into(), make_module("a", vec!["b"]));
        model.modules.insert("b".into(), make_module("b", vec!["c"]));
        model.modules.insert("c".into(), make_module("c", vec![]));

        let cycles = detect_cycles(&model);
        assert!(cycles.is_empty());
    }

    /// a <-> b is reported once, containing both modules.
    #[test]
    fn test_simple_cycle() {
        let mut model = ProjectModel::new();
        model.modules.insert("a".into(), make_module("a", vec!["b"]));
        model.modules.insert("b".into(), make_module("b", vec!["a"]));

        let cycles = detect_cycles(&model);
        assert_eq!(cycles.len(), 1);
        assert!(cycles[0].contains(&"a".to_string()));
        assert!(cycles[0].contains(&"b".to_string()));
    }

    /// a -> b -> c -> a is one three-node cycle.
    #[test]
    fn test_three_node_cycle() {
        let mut model = ProjectModel::new();
        model.modules.insert("a".into(), make_module("a", vec!["b"]));
        model.modules.insert("b".into(), make_module("b", vec!["c"]));
        model.modules.insert("c".into(), make_module("c", vec!["a"]));

        let cycles = detect_cycles(&model);
        assert_eq!(cycles.len(), 1);
        assert_eq!(cycles[0].len(), 3);
    }

    /// No modules means no cycles.
    #[test]
    fn test_empty_graph() {
        let model = ProjectModel::new();
        let cycles = detect_cycles(&model);
        assert!(cycles.is_empty());
    }

    /// A module importing itself is a single one-node cycle.
    #[test]
    fn test_self_cycle() {
        let mut model = ProjectModel::new();
        model.modules.insert("a".into(), make_module("a", vec!["a"]));

        let cycles = detect_cycles(&model);
        assert_eq!(cycles.len(), 1);
        assert_eq!(cycles[0], vec!["a".to_string()]);
    }
}
|
||||
26
wtismycode-core/src/errors.rs
Normal file
26
wtismycode-core/src/errors.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Unified error type for the wtismycode-core crate.
///
/// The user-facing message of each variant comes from its `thiserror`
/// `#[error]` attribute.
#[derive(Error, Debug)]
pub enum WTIsMyCodeError {
    /// Filesystem/IO failure; created automatically from
    /// `std::io::Error` via `?` thanks to `#[from]`.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    /// A source file could not be parsed.
    #[error("Parse error in {file}:{line}: {message}")]
    ParseError {
        /// Path of the file that failed to parse.
        file: String,
        /// Line of the failure (0 when the parser gives no line).
        line: usize,
        /// Parser-provided description of the problem.
        message: String,
    },

    /// Invalid or inconsistent configuration.
    #[error("Configuration error: {0}")]
    ConfigError(String),

    /// Failure while analyzing the parsed project.
    #[error("Analysis error: {0}")]
    AnalysisError(String),

    /// Failure while rendering documentation output.
    #[error("Rendering error: {0}")]
    RenderingError(String),

    /// A file consistency check failed.
    #[error("File consistency check failed: {0}")]
    ConsistencyError(String),
}
|
||||
32
wtismycode-core/src/lib.rs
Normal file
32
wtismycode-core/src/lib.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
//! WTIsMyCode Core Library
|
||||
//!
|
||||
//! This crate provides the core functionality for analyzing Python projects
|
||||
//! and generating architecture documentation.
|
||||
|
||||
// Public modules
|
||||
pub mod errors;
|
||||
pub mod config;
|
||||
pub mod model;
|
||||
pub mod scanner;
|
||||
pub mod python_analyzer;
|
||||
pub mod renderer;
|
||||
pub mod writer;
|
||||
pub mod cache;
|
||||
pub mod cycle_detector;
|
||||
|
||||
// Re-export commonly used types
|
||||
pub use errors::WTIsMyCodeError;
|
||||
pub use config::Config;
|
||||
pub use model::ProjectModel;
|
||||
|
||||
|
||||
#[cfg(test)]
mod tests {
    // The original `use super::*;` brought nothing into scope that this
    // module uses, producing an unused-import warning; removed.

    /// Smoke test: confirms the crate's test harness builds and runs.
    #[test]
    fn it_works() {
        let result = 2 + 2;
        assert_eq!(result, 4);
    }
}
|
||||
174
wtismycode-core/src/model.rs
Normal file
174
wtismycode-core/src/model.rs
Normal file
@@ -0,0 +1,174 @@
|
||||
//! Intermediate Representation (IR) for WTIsMyCode
|
||||
//!
|
||||
//! This module defines the data structures that represent the analyzed Python project
|
||||
//! and are used for generating documentation.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Top-level intermediate representation of an analyzed project:
/// modules, files, and symbols keyed by id, plus the relation edges
/// between them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectModel {
    /// Module id -> module record.
    pub modules: HashMap<String, Module>,
    /// File id -> per-file documentation record.
    pub files: HashMap<String, FileDoc>,
    /// Symbol id -> function/class/method record.
    pub symbols: HashMap<String, Symbol>,
    /// Import and call edges between modules, files, and symbols.
    pub edges: Edges,
}
|
||||
|
||||
impl ProjectModel {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
modules: HashMap::new(),
|
||||
files: HashMap::new(),
|
||||
symbols: HashMap::new(),
|
||||
edges: Edges::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ProjectModel {
    /// Same as [`ProjectModel::new`]: an empty model.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// A logical module of the analyzed project; the node type used by the
/// dependency-cycle detector.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Module {
    /// Unique module identifier; key in `ProjectModel::modules`.
    pub id: String,
    /// Source path associated with the module.
    pub path: String,
    /// Ids of the files belonging to this module.
    pub files: Vec<String>,
    /// Optional summary text for the module's documentation.
    pub doc_summary: Option<String>,
    /// Ids of modules this module imports (outgoing edges).
    pub outbound_modules: Vec<String>,
    /// Ids of modules that import this one (incoming edges).
    pub inbound_modules: Vec<String>,
    /// Ids of symbols defined in this module.
    pub symbols: Vec<String>,
}
|
||||
|
||||
/// Per-file documentation record within the project model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileDoc {
    /// Unique file identifier; key in `ProjectModel::files`.
    pub id: String,
    /// Filesystem path of the file.
    pub path: String,
    /// Id of the module this file belongs to.
    pub module_id: String,
    /// Normalized import strings found in the file.
    pub imports: Vec<String>,
    /// Module ids this file imports.
    pub outbound_modules: Vec<String>,
    /// Ids of files that import this one.
    pub inbound_files: Vec<String>,
    /// Ids of symbols defined in this file.
    pub symbols: Vec<String>,
    /// Optional one-line description of the file's purpose.
    pub file_purpose: Option<String>,
}
|
||||
|
||||
/// One documented Python symbol: a function, async function, class, or
/// method.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Symbol {
    /// Unique symbol id (the parser uses the qualified name).
    pub id: String,
    /// Which kind of definition this is.
    pub kind: SymbolKind,
    /// Owning module id; empty right after parsing, filled during resolution.
    pub module_id: String,
    /// Owning file id; empty right after parsing, filled during resolution.
    pub file_id: String,
    /// Qualified name, e.g. `ClassName.method`.
    pub qualname: String,
    /// Human-readable signature, e.g. `def f(a, b = 1)`.
    pub signature: String,
    /// Optional name -> type-annotation map (the parser currently leaves
    /// this as `None`).
    pub annotations: Option<HashMap<String, String>>,
    /// Docstring text. NOTE(review): despite the name, the analyzer stores
    /// the full trimmed docstring here, not only its first line — confirm
    /// which is intended.
    pub docstring_first_line: Option<String>,
    /// Short purpose text: docstring or heuristic fallback.
    pub purpose: String,
    /// Ids of symbols this one calls.
    pub outbound_calls: Vec<String>,
    /// Ids of symbols that call this one.
    pub inbound_calls: Vec<String>,
    /// Detected external-integration usage (http/db/queue/storage/ai).
    pub integrations_flags: IntegrationFlags,
    /// Graph metrics (fan-in/out, criticality, cycle membership).
    pub metrics: SymbolMetrics,
}
|
||||
|
||||
/// The kind of Python definition a [`Symbol`] represents.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum SymbolKind {
    /// `def` outside a class body.
    Function,
    /// `async def` outside a class body.
    AsyncFunction,
    /// `class` definition.
    Class,
    /// `def` (or `async def`) nested inside a class body.
    Method,
}
|
||||
|
||||
/// Which external-integration categories were detected (via import
/// pattern matching) for a symbol or module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrationFlags {
    /// HTTP client/server library usage detected.
    pub http: bool,
    /// Database library usage detected.
    pub db: bool,
    /// Message-queue library usage detected.
    pub queue: bool,
    /// Object/file-storage library usage detected.
    // serde(default) lets cached JSON without this field still
    // deserialize (presumably the field was added later).
    #[serde(default)]
    pub storage: bool,
    /// AI/LLM client library usage detected.
    // NOTE(review): only storage/ai carry #[serde(default)]; older cache
    // entries missing http/db/queue would fail to load — confirm intended.
    #[serde(default)]
    pub ai: bool,
}
|
||||
|
||||
/// Dependency-graph metrics computed for a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolMetrics {
    /// Caller count (fan-in).
    pub fan_in: usize,
    /// Callee count (fan-out).
    pub fan_out: usize,
    /// Whether the analysis flagged this symbol as critical.
    pub is_critical: bool,
    /// Whether this symbol participates in a dependency cycle.
    pub cycle_participant: bool,
}
|
||||
|
||||
/// All relation edges of the project, grouped by granularity.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Edges {
    /// Module-to-module import edges.
    pub module_import_edges: Vec<Edge>,
    /// File-to-file import edges.
    pub file_import_edges: Vec<Edge>,
    /// Symbol-to-symbol call edges.
    pub symbol_call_edges: Vec<Edge>,
}
||||
|
||||
impl Edges {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
module_import_edges: Vec::new(),
|
||||
file_import_edges: Vec::new(),
|
||||
symbol_call_edges: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Edges {
    /// Same as [`Edges::new`]: an empty edge set.
    fn default() -> Self {
        Self::new()
    }
}
||||
|
||||
/// A single directed edge in one of the project graphs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Edge {
    /// Source node id.
    pub from_id: String,
    /// Target node id.
    pub to_id: String,
    /// Which relation this edge encodes.
    pub edge_type: EdgeType,
    /// Optional free-form metadata about the edge.
    pub meta: Option<HashMap<String, String>>,
}
|
||||
|
||||
/// Discriminates the relation an [`Edge`] encodes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum EdgeType {
    /// Module imports module.
    ModuleImport,
    /// File imports file.
    FileImport,
    /// Symbol calls a resolved project symbol.
    SymbolCall,
    /// Symbol calls something outside the project.
    ExternalCall,
    /// Call whose target could not be resolved.
    UnresolvedCall,
}
|
||||
|
||||
// Additional structures for Python analysis
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct ParsedModule {
|
||||
pub path: std::path::PathBuf,
|
||||
pub module_path: String,
|
||||
pub imports: Vec<Import>,
|
||||
pub symbols: Vec<Symbol>,
|
||||
pub calls: Vec<Call>,
|
||||
pub file_docstring: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Import {
|
||||
pub module_name: String,
|
||||
pub alias: Option<String>,
|
||||
pub line_number: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Call {
|
||||
pub caller_symbol: String,
|
||||
pub callee_expr: String,
|
||||
pub line_number: usize,
|
||||
pub call_type: CallType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub enum CallType {
|
||||
Local,
|
||||
Imported,
|
||||
External,
|
||||
Unresolved,
|
||||
}
|
||||
856
wtismycode-core/src/python_analyzer.rs
Normal file
856
wtismycode-core/src/python_analyzer.rs
Normal file
@@ -0,0 +1,856 @@
|
||||
//! Python AST analyzer for WTIsMyCode
|
||||
//!
|
||||
//! This module handles parsing Python files using AST and extracting
|
||||
//! imports, definitions, and calls.
|
||||
|
||||
use crate::model::{ParsedModule, ProjectModel, Import, Call, CallType, Symbol, Module, FileDoc};
|
||||
use crate::config::Config;
|
||||
use crate::errors::WTIsMyCodeError;
|
||||
use crate::cache::CacheManager;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use rustpython_parser::{ast, Parse};
|
||||
use rustpython_ast::{Stmt, Expr, Ranged};
|
||||
|
||||
/// Parses Python source files into [`ParsedModule`]s, consulting a cache
/// before re-parsing (see `parse_module`).
pub struct PythonAnalyzer {
    /// Tool configuration (scan + analysis settings).
    config: Config,
    /// Cache used by `parse_module` to fetch and store parse results.
    cache_manager: CacheManager,
}
|
||||
|
||||
impl PythonAnalyzer {
|
||||
pub fn new(config: Config) -> Self {
|
||||
let cache_manager = CacheManager::new(config.clone());
|
||||
Self { config, cache_manager }
|
||||
}
|
||||
|
||||
/// Parse one Python file into a [`ParsedModule`], using the cache when possible.
///
/// Returns the cached result when `CacheManager` has a valid entry for
/// `file_path`; otherwise reads and parses the file, extracts imports,
/// symbols, calls, and the module docstring, stores the result in the
/// cache, and returns it.
///
/// # Errors
/// `WTIsMyCodeError::Io` if the file cannot be read, or
/// `WTIsMyCodeError::ParseError` (with `line` set to 0) if the file is
/// not valid Python.
pub fn parse_module(&self, file_path: &Path) -> Result<ParsedModule, WTIsMyCodeError> {
    // Cache hit: skip reading and parsing entirely.
    if let Some(cached_module) = self.cache_manager.get_cached_module(file_path)? {
        return Ok(cached_module);
    }

    let code = fs::read_to_string(file_path)
        .map_err(WTIsMyCodeError::Io)?;

    // Parse the whole file as a statement suite.
    let ast = ast::Suite::parse(&code, file_path.to_str().unwrap_or("<unknown>"))
        .map_err(|e| WTIsMyCodeError::ParseError {
            file: file_path.to_string_lossy().to_string(),
            line: 0, // no line information is extracted from the parser error
            message: format!("Failed to parse: {}", e),
        })?;

    let mut imports = Vec::new();
    let mut symbols = Vec::new();
    let mut calls = Vec::new();

    // File-level docstring: first statement when it is a string constant.
    let file_docstring = self.extract_docstring(&ast);

    // Walk every top-level statement; extraction recurses into bodies.
    for stmt in &ast {
        self.extract_from_statement(stmt, None, &mut imports, &mut symbols, &mut calls, 0);
    }

    let parsed_module = ParsedModule {
        path: file_path.to_path_buf(),
        module_path: file_path.to_string_lossy().to_string(),
        imports,
        symbols,
        calls,
        file_docstring,
    };

    // Persist for the next run before handing the result back.
    self.cache_manager.store_module(file_path, parsed_module.clone())?;

    Ok(parsed_module)
}
|
||||
|
||||
/// Walk one statement, accumulating imports, symbol definitions, and call
/// sites into the output vectors.
///
/// `parent_class` is `Some(name)` while descending into a class body, so
/// nested `def`s become `Method` symbols with `Class.method` qualnames.
/// `_depth` counts recursion depth but is currently unused.
fn extract_from_statement(
    &self,
    stmt: &Stmt,
    parent_class: Option<&str>,
    imports: &mut Vec<Import>,
    symbols: &mut Vec<Symbol>,
    calls: &mut Vec<Call>,
    _depth: usize,
) {
    match stmt {
        // `import a, b as c` -- one Import record per alias.
        Stmt::Import(import_stmt) => {
            for alias in &import_stmt.names {
                imports.push(Import {
                    module_name: alias.name.to_string(),
                    alias: alias.asname.as_ref().map(|n| n.to_string()),
                    // NOTE(review): range().start() is a byte offset, not a
                    // line number -- confirm the intended semantics.
                    line_number: alias.range().start().into(),
                });
            }
        }
        // `from m import x as y` -- stored as "m.x"; a bare relative
        // `from . import x` has no module part and is stored as just "x".
        Stmt::ImportFrom(import_from_stmt) => {
            let module_name = import_from_stmt.module.as_ref()
                .map(|m| m.to_string())
                .unwrap_or_default();
            for alias in &import_from_stmt.names {
                let full_name = if module_name.is_empty() {
                    alias.name.to_string()
                } else {
                    format!("{}.{}", module_name, alias.name)
                };
                imports.push(Import {
                    module_name: full_name,
                    alias: alias.asname.as_ref().map(|n| n.to_string()),
                    line_number: alias.range().start().into(),
                });
            }
        }
        // `def f(...)` -- Function symbol, or Method when inside a class.
        Stmt::FunctionDef(func_def) => {
            let (kind, qualname) = if let Some(class_name) = parent_class {
                (crate::model::SymbolKind::Method, format!("{}.{}", class_name, func_def.name))
            } else {
                (crate::model::SymbolKind::Function, func_def.name.to_string())
            };

            let signature = self.build_function_signature(&func_def.name, &func_def.args);
            let integrations_flags = self.detect_integrations(&func_def.body, &self.config);
            let docstring = self.extract_docstring(&func_def.body);

            // module_id/file_id, call lists, and metrics are filled in
            // later during resolution; the parser leaves them empty.
            let symbol = Symbol {
                id: qualname.clone(),
                kind,
                module_id: String::new(),
                file_id: String::new(),
                qualname: qualname.clone(),
                signature,
                annotations: None,
                docstring_first_line: docstring,
                purpose: "extracted from AST".to_string(),
                outbound_calls: Vec::new(),
                inbound_calls: Vec::new(),
                integrations_flags,
                metrics: crate::model::SymbolMetrics {
                    fan_in: 0,
                    fan_out: 0,
                    is_critical: false,
                    cycle_participant: false,
                },
            };
            symbols.push(symbol);

            // Recurse for nested defs/imports first, then collect the
            // calls made in this body, attributed to this qualname.
            for body_stmt in &func_def.body {
                self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, _depth + 1);
            }
            self.extract_calls_from_body(&func_def.body, Some(&qualname), calls);
        }
        // `async def f(...)` -- same handling, but the symbol kind is
        // AsyncFunction (unless it is a method) and the signature gets an
        // "async " prefix.
        Stmt::AsyncFunctionDef(func_def) => {
            let (kind, qualname) = if let Some(class_name) = parent_class {
                (crate::model::SymbolKind::Method, format!("{}.{}", class_name, func_def.name))
            } else {
                (crate::model::SymbolKind::AsyncFunction, func_def.name.to_string())
            };

            let signature = format!("async {}", self.build_function_signature(&func_def.name, &func_def.args));
            let integrations_flags = self.detect_integrations(&func_def.body, &self.config);
            let docstring = self.extract_docstring(&func_def.body);

            let symbol = Symbol {
                id: qualname.clone(),
                kind,
                module_id: String::new(),
                file_id: String::new(),
                qualname: qualname.clone(),
                signature,
                annotations: None,
                docstring_first_line: docstring,
                purpose: "extracted from AST".to_string(),
                outbound_calls: Vec::new(),
                inbound_calls: Vec::new(),
                integrations_flags,
                metrics: crate::model::SymbolMetrics {
                    fan_in: 0,
                    fan_out: 0,
                    is_critical: false,
                    cycle_participant: false,
                },
            };
            symbols.push(symbol);

            for body_stmt in &func_def.body {
                self.extract_from_statement(body_stmt, parent_class, imports, symbols, calls, _depth + 1);
            }
            self.extract_calls_from_body(&func_def.body, Some(&qualname), calls);
        }
        // `class C:` -- Class symbol; the body is walked with this class
        // name as parent so nested defs become methods.
        Stmt::ClassDef(class_def) => {
            let integrations_flags = self.detect_integrations(&class_def.body, &self.config);
            let docstring = self.extract_docstring(&class_def.body);

            let symbol = Symbol {
                id: class_def.name.to_string(),
                kind: crate::model::SymbolKind::Class,
                module_id: String::new(),
                file_id: String::new(),
                qualname: class_def.name.to_string(),
                signature: format!("class {}", class_def.name),
                annotations: None,
                docstring_first_line: docstring,
                purpose: "extracted from AST".to_string(),
                outbound_calls: Vec::new(),
                inbound_calls: Vec::new(),
                integrations_flags,
                metrics: crate::model::SymbolMetrics {
                    fan_in: 0,
                    fan_out: 0,
                    is_critical: false,
                    cycle_participant: false,
                },
            };
            symbols.push(symbol);

            // Process class body with class name as parent.
            for body_stmt in &class_def.body {
                self.extract_from_statement(body_stmt, Some(&class_def.name), imports, symbols, calls, _depth + 1);
            }
        }
        // Bare expression at this level: scan it for calls. With no
        // enclosing symbol, the caller falls back to "unknown".
        Stmt::Expr(expr_stmt) => {
            let caller = parent_class.map(|c| c.to_string()).unwrap_or_else(|| "unknown".to_string());
            self.extract_from_expression(&expr_stmt.value, Some(&caller), calls);
        }
        // Compound statements: scan their condition/iterable expressions
        // and recurse into their bodies looking for calls.
        Stmt::If(if_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            self.extract_from_expression(&if_stmt.test, caller.as_deref(), calls);
            self.extract_calls_from_body(&if_stmt.body, caller.as_deref(), calls);
            self.extract_calls_from_body(&if_stmt.orelse, caller.as_deref(), calls);
        }
        Stmt::For(for_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            self.extract_from_expression(&for_stmt.iter, caller.as_deref(), calls);
            self.extract_calls_from_body(&for_stmt.body, caller.as_deref(), calls);
            self.extract_calls_from_body(&for_stmt.orelse, caller.as_deref(), calls);
        }
        Stmt::While(while_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            self.extract_from_expression(&while_stmt.test, caller.as_deref(), calls);
            self.extract_calls_from_body(&while_stmt.body, caller.as_deref(), calls);
            self.extract_calls_from_body(&while_stmt.orelse, caller.as_deref(), calls);
        }
        Stmt::With(with_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            for item in &with_stmt.items {
                self.extract_from_expression(&item.context_expr, caller.as_deref(), calls);
            }
            self.extract_calls_from_body(&with_stmt.body, caller.as_deref(), calls);
        }
        // `return expr` -- a bare `return` carries no value to scan.
        Stmt::Return(return_stmt) => {
            if let Some(value) = &return_stmt.value {
                let caller = parent_class.map(|c| c.to_string());
                self.extract_from_expression(value, caller.as_deref(), calls);
            }
        }
        // `x = expr` -- only the right-hand side can contain calls.
        Stmt::Assign(assign_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            self.extract_from_expression(&assign_stmt.value, caller.as_deref(), calls);
        }
        Stmt::Try(try_stmt) => {
            let caller = parent_class.map(|c| c.to_string());
            self.extract_calls_from_body(&try_stmt.body, caller.as_deref(), calls);
            for handler in &try_stmt.handlers {
                // Irrefutable let: ExceptHandler has a single variant.
                let rustpython_ast::ExceptHandler::ExceptHandler(h) = handler; {
                    self.extract_calls_from_body(&h.body, caller.as_deref(), calls);
                }
            }
            self.extract_calls_from_body(&try_stmt.orelse, caller.as_deref(), calls);
            self.extract_calls_from_body(&try_stmt.finalbody, caller.as_deref(), calls);
        }
        // Other statement kinds carry nothing this pass tracks.
        _ => {}
    }
}
|
||||
|
||||
/// Extract call sites from a body (list of statements), attributing each
/// call to `caller`.
///
/// Mirrors the statement kinds handled by `extract_from_statement`, but
/// only scans expressions -- it records no imports or symbol definitions.
fn extract_calls_from_body(&self, body: &[Stmt], caller: Option<&str>, calls: &mut Vec<Call>) {
    for stmt in body {
        match stmt {
            // Bare expression statement, e.g. `foo()`.
            Stmt::Expr(expr_stmt) => {
                self.extract_from_expression(&expr_stmt.value, caller, calls);
            }
            // `return expr` -- a bare `return` has no value to scan.
            Stmt::Return(return_stmt) => {
                if let Some(value) = &return_stmt.value {
                    self.extract_from_expression(value, caller, calls);
                }
            }
            // `x = expr` -- only the right-hand side can contain calls.
            Stmt::Assign(assign_stmt) => {
                self.extract_from_expression(&assign_stmt.value, caller, calls);
            }
            Stmt::If(if_stmt) => {
                self.extract_from_expression(&if_stmt.test, caller, calls);
                self.extract_calls_from_body(&if_stmt.body, caller, calls);
                self.extract_calls_from_body(&if_stmt.orelse, caller, calls);
            }
            Stmt::For(for_stmt) => {
                self.extract_from_expression(&for_stmt.iter, caller, calls);
                self.extract_calls_from_body(&for_stmt.body, caller, calls);
                self.extract_calls_from_body(&for_stmt.orelse, caller, calls);
            }
            Stmt::While(while_stmt) => {
                self.extract_from_expression(&while_stmt.test, caller, calls);
                self.extract_calls_from_body(&while_stmt.body, caller, calls);
                self.extract_calls_from_body(&while_stmt.orelse, caller, calls);
            }
            Stmt::With(with_stmt) => {
                for item in &with_stmt.items {
                    self.extract_from_expression(&item.context_expr, caller, calls);
                }
                self.extract_calls_from_body(&with_stmt.body, caller, calls);
            }
            Stmt::Try(try_stmt) => {
                self.extract_calls_from_body(&try_stmt.body, caller, calls);
                for handler in &try_stmt.handlers {
                    // Irrefutable let: ExceptHandler has a single variant.
                    let rustpython_ast::ExceptHandler::ExceptHandler(h) = handler; {
                        self.extract_calls_from_body(&h.body, caller, calls);
                    }
                }
                self.extract_calls_from_body(&try_stmt.orelse, caller, calls);
                self.extract_calls_from_body(&try_stmt.finalbody, caller, calls);
            }
            // Other statement kinds carry no calls this pass tracks.
            _ => {}
        }
    }
}
|
||||
|
||||
/// Render a Python-style `def name(params)` signature from the AST
/// argument list.
///
/// Handles positional-only parameters (followed by `/`), regular
/// parameters, `*args` (or a bare `*` separator), keyword-only
/// parameters, and `**kwargs`. The previous implementation silently
/// dropped positional-only and keyword-only parameters.
fn build_function_signature(&self, name: &str, args: &rustpython_ast::Arguments) -> String {
    let mut params = Vec::new();

    // Positional-only parameters, terminated by the `/` marker.
    for arg in &args.posonlyargs {
        params.push(self.format_param(arg));
    }
    if !args.posonlyargs.is_empty() {
        params.push("/".to_string());
    }

    // Regular positional-or-keyword parameters.
    for arg in &args.args {
        params.push(self.format_param(arg));
    }

    // `*args`, or a bare `*` when keyword-only params follow without one.
    if let Some(vararg) = &args.vararg {
        let annotation = vararg.annotation.as_ref()
            .map(|a| format!(": {}", self.expr_to_string(a)))
            .unwrap_or_default();
        params.push(format!("*{}{}", vararg.arg, annotation));
    } else if !args.kwonlyargs.is_empty() {
        params.push("*".to_string());
    }

    // Keyword-only parameters.
    for arg in &args.kwonlyargs {
        params.push(self.format_param(arg));
    }

    // `**kwargs`.
    if let Some(kwarg) = &args.kwarg {
        let annotation = kwarg.annotation.as_ref()
            .map(|a| format!(": {}", self.expr_to_string(a)))
            .unwrap_or_default();
        params.push(format!("**{}{}", kwarg.arg, annotation));
    }

    format!("def {}({})", name, params.join(", "))
}

/// Format one parameter as `name[: annotation][ = default]`.
fn format_param(&self, arg: &rustpython_ast::ArgWithDefault) -> String {
    let param_name = arg.def.arg.to_string();
    let annotation = arg.def.annotation.as_ref()
        .map(|a| format!(": {}", self.expr_to_string(a)))
        .unwrap_or_default();
    match &arg.default {
        Some(default) => format!("{}{} = {}", param_name, annotation, self.expr_to_string(default)),
        None => format!("{}{}", param_name, annotation),
    }
}
|
||||
|
||||
fn extract_docstring(&self, body: &[Stmt]) -> Option<String> {
|
||||
if let Some(first_stmt) = body.first()
|
||||
&& let Stmt::Expr(expr_stmt) = first_stmt
|
||||
&& let Expr::Constant(constant_expr) = &*expr_stmt.value
|
||||
&& let Some(docstring) = constant_expr.value.as_str() {
|
||||
// Return full docstring, trimmed
|
||||
let trimmed = docstring.trim();
|
||||
if trimmed.is_empty() {
|
||||
return None;
|
||||
}
|
||||
return Some(trimmed.to_string());
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Stub kept for call-site compatibility: always returns all-false flags.
///
/// Integration detection is done at module level (see
/// `detect_module_integrations`) based on actual imports, not AST body
/// debug strings.
fn detect_integrations(&self, _body: &[Stmt], _config: &Config) -> crate::model::IntegrationFlags {
    crate::model::IntegrationFlags {
        http: false,
        db: false,
        queue: false,
        storage: false,
        ai: false,
    }
}
|
||||
|
||||
/// Detect integrations for a module based on its actual imports
|
||||
fn detect_module_integrations(&self, imports: &[Import], config: &Config) -> crate::model::IntegrationFlags {
|
||||
let mut flags = crate::model::IntegrationFlags {
|
||||
http: false,
|
||||
db: false,
|
||||
queue: false,
|
||||
storage: false,
|
||||
ai: false,
|
||||
};
|
||||
|
||||
if !config.analysis.detect_integrations {
|
||||
return flags;
|
||||
}
|
||||
|
||||
// Build a set of all import names (both module names and their parts)
|
||||
let import_names: Vec<String> = imports.iter().flat_map(|imp| {
|
||||
let mut names = vec![imp.module_name.clone()];
|
||||
// Also add individual parts: "from minio import Minio" -> module_name is "minio.Minio"
|
||||
for part in imp.module_name.split('.') {
|
||||
names.push(part.to_lowercase());
|
||||
}
|
||||
names
|
||||
}).collect();
|
||||
|
||||
for pattern in &config.analysis.integration_patterns {
|
||||
for lib in &pattern.patterns {
|
||||
let lib_lower = lib.to_lowercase();
|
||||
let matched = import_names.iter().any(|name| {
|
||||
let name_lower = name.to_lowercase();
|
||||
name_lower.contains(&lib_lower)
|
||||
});
|
||||
if matched {
|
||||
match pattern.type_.as_str() {
|
||||
"http" => flags.http = true,
|
||||
"db" => flags.db = true,
|
||||
"queue" => flags.queue = true,
|
||||
"storage" => flags.storage = true,
|
||||
"ai" => flags.ai = true,
|
||||
_ => {}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
flags
|
||||
}
|
||||
|
||||
/// Recursively walk a Python expression, recording every call site found.
///
/// For each `Expr::Call` encountered, pushes a `Call` record (caller id,
/// stringified callee, source line, `CallType::Unresolved`) into `calls`,
/// then descends into the callee, positional args, and keyword args.
/// All other handled variants simply recurse into their sub-expressions;
/// the `_` arm silently ignores variants with no call-bearing children.
///
/// `current_symbol` is the enclosing function/method id, or `None` for
/// module-level code (recorded as the literal caller "unknown").
fn extract_from_expression(&self, expr: &Expr, current_symbol: Option<&str>, calls: &mut Vec<Call>) {
    match expr {
        Expr::Call(call_expr) => {
            let callee_expr = self.expr_to_string(&call_expr.func);
            calls.push(Call {
                caller_symbol: current_symbol.unwrap_or("unknown").to_string(),
                callee_expr,
                // NOTE(review): assumes range().start() converts to a line
                // number — confirm against the AST crate; byte offsets would
                // produce wrong "line_number" values.
                line_number: call_expr.range().start().into(),
                call_type: CallType::Unresolved,
            });

            // Recursively process the function expression itself
            // (e.g. `get_client().send(...)` — the callee is itself a call).
            self.extract_from_expression(&call_expr.func, current_symbol, calls);

            for arg in &call_expr.args {
                self.extract_from_expression(arg, current_symbol, calls);
            }
            for keyword in &call_expr.keywords {
                self.extract_from_expression(&keyword.value, current_symbol, calls);
            }
        }
        Expr::Attribute(attr_expr) => {
            self.extract_from_expression(&attr_expr.value, current_symbol, calls);
        }
        Expr::BoolOp(bool_op) => {
            for value in &bool_op.values {
                self.extract_from_expression(value, current_symbol, calls);
            }
        }
        Expr::BinOp(bin_op) => {
            self.extract_from_expression(&bin_op.left, current_symbol, calls);
            self.extract_from_expression(&bin_op.right, current_symbol, calls);
        }
        Expr::UnaryOp(unary_op) => {
            self.extract_from_expression(&unary_op.operand, current_symbol, calls);
        }
        Expr::IfExp(if_exp) => {
            self.extract_from_expression(&if_exp.test, current_symbol, calls);
            self.extract_from_expression(&if_exp.body, current_symbol, calls);
            self.extract_from_expression(&if_exp.orelse, current_symbol, calls);
        }
        Expr::Dict(dict_expr) => {
            // Keys are Option<Expr> (None for `**spread`), hence flatten().
            for k in dict_expr.keys.iter().flatten() {
                self.extract_from_expression(k, current_symbol, calls);
            }
            for value in &dict_expr.values {
                self.extract_from_expression(value, current_symbol, calls);
            }
        }
        Expr::List(list_expr) => {
            for elt in &list_expr.elts {
                self.extract_from_expression(elt, current_symbol, calls);
            }
        }
        Expr::Tuple(tuple_expr) => {
            for elt in &tuple_expr.elts {
                self.extract_from_expression(elt, current_symbol, calls);
            }
        }
        Expr::ListComp(comp) => {
            // Element, each generator's iterable, and each `if` filter.
            self.extract_from_expression(&comp.elt, current_symbol, calls);
            for generator in &comp.generators {
                self.extract_from_expression(&generator.iter, current_symbol, calls);
                for if_clause in &generator.ifs {
                    self.extract_from_expression(if_clause, current_symbol, calls);
                }
            }
        }
        Expr::Compare(compare) => {
            self.extract_from_expression(&compare.left, current_symbol, calls);
            for comp in &compare.comparators {
                self.extract_from_expression(comp, current_symbol, calls);
            }
        }
        Expr::JoinedStr(joined) => {
            // f-string: each piece may contain embedded expressions.
            for value in &joined.values {
                self.extract_from_expression(value, current_symbol, calls);
            }
        }
        Expr::FormattedValue(fv) => {
            self.extract_from_expression(&fv.value, current_symbol, calls);
        }
        Expr::Subscript(sub) => {
            self.extract_from_expression(&sub.value, current_symbol, calls);
            self.extract_from_expression(&sub.slice, current_symbol, calls);
        }
        Expr::Starred(starred) => {
            self.extract_from_expression(&starred.value, current_symbol, calls);
        }
        Expr::Await(await_expr) => {
            self.extract_from_expression(&await_expr.value, current_symbol, calls);
        }
        // Other variants (names, constants, lambdas, set/dict comps, ...)
        // are not descended into; calls inside them are not recorded.
        _ => {}
    }
}
|
||||
|
||||
fn expr_to_string(&self, expr: &Expr) -> String {
|
||||
match expr {
|
||||
Expr::Name(name_expr) => name_expr.id.to_string(),
|
||||
Expr::Attribute(attr_expr) => {
|
||||
format!("{}.{}", self.expr_to_string(&attr_expr.value), attr_expr.attr)
|
||||
}
|
||||
Expr::Constant(c) => {
|
||||
if let Some(s) = c.value.as_str() {
|
||||
format!("\"{}\"", s)
|
||||
} else {
|
||||
format!("{:?}", c.value)
|
||||
}
|
||||
}
|
||||
Expr::Subscript(sub) => {
|
||||
format!("{}[{}]", self.expr_to_string(&sub.value), self.expr_to_string(&sub.slice))
|
||||
}
|
||||
_ => "<complex_expression>".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute Python module path from file path using src_roots from config.
/// E.g. `./src/core.py` with src_root `src` → `core`
/// `./src/__init__.py` with src_root `src` → `src` (package)
/// `back-end/services/chat/agent.py` with src_root `.` → `back-end.services.chat.agent`
///
/// The first src_root (in configured order) that the path falls under
/// wins; if none match, the normalized path string is returned as-is.
fn compute_module_path(&self, file_path: &Path) -> String {
    let path_str = file_path.to_string_lossy().to_string();
    // Normalize: strip leading ./
    let normalized = path_str.strip_prefix("./").unwrap_or(&path_str);
    let path = std::path::Path::new(normalized);

    for src_root in &self.config.python.src_roots {
        // "." means "project root": treat every path as already relative.
        let root = if src_root == "." {
            std::path::Path::new("")
        } else {
            std::path::Path::new(src_root)
        };

        // None when the path is not under this src_root — try the next one.
        let relative = if root == std::path::Path::new("") {
            Some(path.to_path_buf())
        } else {
            path.strip_prefix(root).ok().map(|p| p.to_path_buf())
        };

        if let Some(rel) = relative {
            let rel_str = rel.to_string_lossy().to_string();
            // Check if it's an __init__.py → use the parent directory name as module
            if rel.file_name().map(|f| f == "__init__.py").unwrap_or(false)
                && let Some(parent) = rel.parent() {
                if parent == std::path::Path::new("") {
                    // __init__.py at src_root level → use src_root as module name
                    if src_root == "." {
                        return "__init__".to_string();
                    }
                    return src_root.replace('/', ".");
                }
                return parent.to_string_lossy().replace(['/', '\\'], ".");
            }

            // Strip .py extension and convert path separators to dots
            let without_ext = rel_str.strip_suffix(".py").unwrap_or(&rel_str);
            let module_path = without_ext.replace(['/', '\\'], ".");
            return module_path;
        }
    }

    // Fallback: use file path as-is
    normalized.to_string()
}
|
||||
|
||||
/// Assemble a [`ProjectModel`] from the parsed modules: register files,
/// modules, and symbols (with module-prefixed unique ids), then run
/// dependency-graph construction, call-type resolution, and metrics.
///
/// Symbol ids are rewritten to `"<module_id>::<symbol_id>"`; module-level
/// integration flags (detected from imports) are OR-ed onto every symbol
/// in the module.
pub fn resolve_symbols(&self, modules: &[ParsedModule]) -> Result<ProjectModel, WTIsMyCodeError> {
    let mut project_model = ProjectModel::new();

    // Build import alias map for call resolution
    // alias_name -> original_module_name
    // NOTE(review): this map pools aliases from ALL modules — an alias
    // declared in one file applies project-wide, and a later module's
    // alias silently overwrites an earlier one. Confirm this is intended.
    let mut import_aliases: std::collections::HashMap<String, String> = std::collections::HashMap::new();
    for parsed_module in modules {
        for import in &parsed_module.imports {
            if let Some(alias) = &import.alias {
                import_aliases.insert(alias.clone(), import.module_name.clone());
            }
        }
    }

    // First pass: collect __init__.py docstrings keyed by module_id
    // (used later as a doc_summary fallback for files in the same package).
    let mut init_docstrings: std::collections::HashMap<String, String> = std::collections::HashMap::new();
    for parsed_module in modules {
        if parsed_module.path.file_name().map(|f| f == "__init__.py").unwrap_or(false)
            && let Some(ref ds) = parsed_module.file_docstring {
            let module_id = self.compute_module_path(&parsed_module.path);
            init_docstrings.insert(module_id, ds.clone());
        }
    }

    for parsed_module in modules {
        let module_id = self.compute_module_path(&parsed_module.path);
        let file_id = parsed_module.path.to_string_lossy().to_string();

        // Use file docstring first line as file purpose
        let file_purpose = parsed_module.file_docstring.as_ref().map(|ds| {
            ds.lines().next().unwrap_or(ds).to_string()
        });

        let file_doc = FileDoc {
            id: file_id.clone(),
            path: parsed_module.path.to_string_lossy().to_string(),
            module_id: module_id.clone(),
            imports: parsed_module.imports.iter().map(|i| i.module_name.clone()).collect(),
            // Dependency lists are filled in by build_dependency_graphs.
            outbound_modules: Vec::new(),
            inbound_files: Vec::new(),
            symbols: parsed_module.symbols.iter().map(|s| format!("{}::{}", module_id, s.id)).collect(),
            file_purpose,
        };
        project_model.files.insert(file_id.clone(), file_doc);

        // Detect integrations based on actual imports
        let module_integrations = self.detect_module_integrations(&parsed_module.imports, &self.config);
        let mut module_symbol_ids = Vec::new();
        for mut symbol in parsed_module.symbols.clone() {
            symbol.module_id = module_id.clone();
            symbol.file_id = file_id.clone();
            // Make symbol ID unique by prefixing with module
            let unique_id = format!("{}::{}", module_id, symbol.id);
            symbol.id = unique_id.clone();
            // Apply module-level integration flags to all symbols
            symbol.integrations_flags.http |= module_integrations.http;
            symbol.integrations_flags.db |= module_integrations.db;
            symbol.integrations_flags.queue |= module_integrations.queue;
            symbol.integrations_flags.storage |= module_integrations.storage;
            symbol.integrations_flags.ai |= module_integrations.ai;
            module_symbol_ids.push(unique_id.clone());
            project_model.symbols.insert(unique_id, symbol);
        }

        // Use __init__.py docstring for module doc_summary, or file docstring for single-file modules
        let is_init = parsed_module.path.file_name().map(|f| f == "__init__.py").unwrap_or(false);
        let doc_summary = if is_init {
            parsed_module.file_docstring.clone()
        } else {
            // For non-init files, use file docstring first, then check __init__.py
            parsed_module.file_docstring.clone()
                .or_else(|| init_docstrings.get(&module_id).cloned())
        };

        let module = Module {
            id: module_id.clone(),
            path: parsed_module.path.to_string_lossy().to_string(),
            files: vec![file_id.clone()],
            doc_summary,
            // Filled in by build_dependency_graphs.
            outbound_modules: Vec::new(),
            inbound_modules: Vec::new(),
            symbols: module_symbol_ids,
        };
        project_model.modules.insert(module_id, module);
    }

    self.build_dependency_graphs(&mut project_model, modules)?;
    self.resolve_call_types(&mut project_model, modules, &import_aliases);
    self.compute_metrics(&mut project_model)?;

    Ok(project_model)
}
|
||||
|
||||
/// Resolve call types using import information.
///
/// Classifies each symbol-call edge as `SymbolCall` (callee matches a known
/// project symbol id), `ExternalCall` (callee's root name matches an import
/// or import alias), or `UnresolvedCall` (neither).
fn resolve_call_types(
    &self,
    project_model: &mut ProjectModel,
    parsed_modules: &[ParsedModule],
    import_aliases: &std::collections::HashMap<String, String>,
) {
    // Collect all known symbol names
    // NOTE(review): these ids are module-prefixed ("mod::name"), while
    // edge.to_id below is the raw callee expression from the AST (e.g.
    // "client.lpush") — verify the contains() check can ever match.
    let known_symbols: std::collections::HashSet<String> = project_model.symbols.keys().cloned().collect();

    for parsed_module in parsed_modules {
        // Alias imports of THIS module: alias -> full module name.
        let import_map: std::collections::HashMap<String, String> = parsed_module.imports.iter()
            .filter_map(|i| {
                i.alias.as_ref().map(|alias| (alias.clone(), i.module_name.clone()))
            })
            .collect();

        // Also map plain imported names
        let mut name_map: std::collections::HashMap<String, String> = import_map;
        for import in &parsed_module.imports {
            // For "from foo.bar import baz", map "baz" -> "foo.bar.baz"
            let parts: Vec<&str> = import.module_name.split('.').collect();
            if let Some(last) = parts.last() {
                name_map.insert(last.to_string(), import.module_name.clone());
            }
        }

        // Update edge call types
        // NOTE(review): this inner loop re-classifies EVERY edge in the
        // project once per parsed module (O(modules × edges)), so the last
        // module's name_map decides ambiguous cases. Consider restricting
        // to edges originating from the current module — confirm intent.
        for edge in &mut project_model.edges.symbol_call_edges {
            let callee = &edge.to_id;

            // Check if callee is a known local symbol
            if known_symbols.contains(callee) {
                edge.edge_type = crate::model::EdgeType::SymbolCall;
            } else {
                // Check if it matches an import alias
                let root_name = callee.split('.').next().unwrap_or(callee);
                if name_map.contains_key(root_name) || import_aliases.contains_key(root_name) {
                    edge.edge_type = crate::model::EdgeType::ExternalCall;
                } else {
                    edge.edge_type = crate::model::EdgeType::UnresolvedCall;
                }
            }
        }
    }
}
|
||||
|
||||
/// Build the module-import and symbol-call edge lists, and populate each
/// module's `outbound_modules` / `inbound_modules` from the import edges.
///
/// Import targets are matched to internal modules by exact id or by the
/// longest dotted prefix that names a known module; external imports
/// produce edges but no module-list entries. Self-imports are skipped.
fn build_dependency_graphs(&self, project_model: &mut ProjectModel, parsed_modules: &[ParsedModule]) -> Result<(), WTIsMyCodeError> {
    // Collect known internal module IDs
    let known_modules: std::collections::HashSet<String> = project_model.modules.keys().cloned().collect();

    for parsed_module in parsed_modules {
        let from_module_id = self.compute_module_path(&parsed_module.path);

        for import in &parsed_module.imports {
            let to_module_id = import.module_name.clone();
            let edge = crate::model::Edge {
                from_id: from_module_id.clone(),
                to_id: to_module_id,
                edge_type: crate::model::EdgeType::ModuleImport,
                meta: None,
            };
            project_model.edges.module_import_edges.push(edge);
        }
    }

    // Populate outbound_modules and inbound_modules from edges
    // Only include internal modules (ones that exist in project_model.modules)
    for edge in &project_model.edges.module_import_edges {
        let from_id = &edge.from_id;
        // Try to match the import to an internal module
        // Import "src.core.SomeClass" should match module "src.core"
        let to_internal = if known_modules.contains(&edge.to_id) {
            Some(edge.to_id.clone())
        } else {
            // Try prefix matching: "foo.bar.baz" -> check "foo.bar", "foo"
            // (longest prefix first, so the most specific module wins).
            let parts: Vec<&str> = edge.to_id.split('.').collect();
            let mut found = None;
            for i in (1..parts.len()).rev() {
                let prefix = parts[..i].join(".");
                if known_modules.contains(&prefix) {
                    found = Some(prefix);
                    break;
                }
            }
            found
        };

        // Record the relationship on both endpoints, deduplicated,
        // ignoring self-references.
        if let Some(ref target_module) = to_internal
            && target_module != from_id {
            if let Some(module) = project_model.modules.get_mut(from_id)
                && !module.outbound_modules.contains(target_module) {
                module.outbound_modules.push(target_module.clone());
            }
            if let Some(module) = project_model.modules.get_mut(target_module)
                && !module.inbound_modules.contains(from_id) {
                module.inbound_modules.push(from_id.clone());
            }
        }
    }

    for parsed_module in parsed_modules {
        for call in &parsed_module.calls {
            let callee_expr = call.callee_expr.clone();
            // NOTE(review): from_id here is the raw caller_symbol (e.g.
            // "process_job" or "unknown"), NOT the module-prefixed id used
            // in project_model.symbols — confirm downstream matching
            // (resolve_call_types, compute_metrics) expects this format.
            let edge = crate::model::Edge {
                from_id: call.caller_symbol.clone(),
                to_id: callee_expr,
                edge_type: crate::model::EdgeType::SymbolCall,
                meta: None,
            };
            project_model.edges.symbol_call_edges.push(edge);
        }
    }

    Ok(())
}
|
||||
|
||||
/// Check if a class symbol is a simple data container (dataclass-like).
|
||||
/// A class is considered a dataclass if it has ≤2 methods (typically __init__ and __repr__/__str__).
|
||||
fn is_dataclass_like(symbol_id: &str, project_model: &ProjectModel) -> bool {
|
||||
let symbol = match project_model.symbols.get(symbol_id) {
|
||||
Some(s) => s,
|
||||
None => return false,
|
||||
};
|
||||
if symbol.kind != crate::model::SymbolKind::Class {
|
||||
return false;
|
||||
}
|
||||
// Count methods belonging to this class
|
||||
let class_name = &symbol.qualname;
|
||||
let method_prefix = format!("{}::{}.", symbol.module_id, class_name);
|
||||
let method_count = project_model.symbols.values()
|
||||
.filter(|s| s.kind == crate::model::SymbolKind::Method && s.id.starts_with(&method_prefix))
|
||||
.count();
|
||||
method_count <= 2
|
||||
}
|
||||
|
||||
fn compute_metrics(&self, project_model: &mut ProjectModel) -> Result<(), WTIsMyCodeError> {
|
||||
// Collect fan-in/fan-out first to avoid borrow issues
|
||||
let mut metrics: std::collections::HashMap<String, (usize, usize)> = std::collections::HashMap::new();
|
||||
|
||||
for symbol_id in project_model.symbols.keys() {
|
||||
let fan_out = project_model.edges.symbol_call_edges
|
||||
.iter()
|
||||
.filter(|edge| edge.from_id == *symbol_id)
|
||||
.count();
|
||||
let fan_in = project_model.edges.symbol_call_edges
|
||||
.iter()
|
||||
.filter(|edge| edge.to_id == *symbol_id)
|
||||
.count();
|
||||
metrics.insert(symbol_id.clone(), (fan_in, fan_out));
|
||||
}
|
||||
|
||||
// Pre-compute which symbols are dataclass-like (need immutable borrow)
|
||||
let dataclass_ids: std::collections::HashSet<String> = metrics.keys()
|
||||
.filter(|id| Self::is_dataclass_like(id, project_model))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
for (symbol_id, (fan_in, fan_out)) in &metrics {
|
||||
if let Some(symbol) = project_model.symbols.get_mut(symbol_id) {
|
||||
symbol.metrics.fan_in = *fan_in;
|
||||
symbol.metrics.fan_out = *fan_out;
|
||||
// Don't mark dataclass-like classes as critical — they're just data containers
|
||||
let exceeds_threshold = *fan_in > self.config.thresholds.critical_fan_in
|
||||
|| *fan_out > self.config.thresholds.critical_fan_out;
|
||||
symbol.metrics.is_critical = exceeds_threshold && !dataclass_ids.contains(symbol_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
961
wtismycode-core/src/renderer.rs
Normal file
961
wtismycode-core/src/renderer.rs
Normal file
@@ -0,0 +1,961 @@
|
||||
//! Markdown renderer for WTIsMyCode
|
||||
//!
|
||||
//! This module handles generating Markdown documentation from the project model
|
||||
//! using templates.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::cycle_detector;
|
||||
use crate::model::{ProjectModel, SymbolKind};
|
||||
use chrono::Utc;
|
||||
use handlebars::Handlebars;
|
||||
|
||||
/// Make a file path safe for use as a single Markdown link segment:
/// drop a leading "./" and flatten path separators into "__".
fn sanitize_for_link(filename: &str) -> String {
    filename
        .strip_prefix("./")
        .unwrap_or(filename)
        .replace('/', "__")
}
|
||||
|
||||
/// Renders the project model to Markdown using pre-registered
/// Handlebars templates (registered in [`Renderer::new`]).
pub struct Renderer {
    // Registered template set; keys used: "architecture_md", "module_md".
    templates: Handlebars<'static>,
}
|
||||
|
||||
// `Default` simply delegates to `new()`, which registers the built-in
// templates (and panics if they fail to compile).
impl Default for Renderer {
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
impl Renderer {
|
||||
/// Create a renderer with the built-in templates registered.
///
/// # Panics
/// Panics if a built-in template string fails to compile — that would be
/// a bug in the hard-coded template source, not a runtime condition.
pub fn new() -> Self {
    let mut handlebars = Handlebars::new();

    // Register templates
    handlebars.register_template_string("architecture_md", Self::architecture_md_template())
        .expect("Failed to register architecture_md template");

    // Register module documentation template
    handlebars.register_template_string("module_md", Self::module_md_template())
        .expect("Failed to register module_md template");

    Self {
        templates: handlebars,
    }
}
|
||||
|
||||
/// Handlebars template for the top-level ARCHITECTURE.md document.
///
/// Sections between `MANUAL:BEGIN/END` markers are preserved for hand
/// editing; sections between `ARCHDOC:BEGIN/END` markers are regenerated.
/// Triple-stash (`{{{...}}}`) is used throughout to avoid HTML escaping.
// NOTE(review): generated-section markers still say "ARCHDOC" after the
// rename to wtismycode — confirm whether the marker-parsing code (and any
// existing generated docs) expect this legacy token before changing it.
fn architecture_md_template() -> &'static str {
    r#"# ARCHITECTURE — {{{project_name}}}

<!-- MANUAL:BEGIN -->
## Project summary
**Name:** {{{project_name}}}
**Description:** {{{project_description}}}

## Key decisions (manual)
{{#each key_decisions}}
- {{{this}}}
{{/each}}

## Non-goals (manual)
{{#each non_goals}}
- {{{this}}}
{{/each}}
<!-- MANUAL:END -->

---

## Document metadata
- **Created:** {{{created_date}}}
- **Updated:** {{{updated_date}}}
- **Generated by:** wtismycode (cli) v0.1

---

## Integrations
<!-- ARCHDOC:BEGIN section=integrations -->
> Generated. Do not edit inside this block.

### Database Integrations
{{#each db_integrations}}
- {{{this}}}
{{/each}}

### HTTP/API Integrations
{{#each http_integrations}}
- {{{this}}}
{{/each}}

### Queue Integrations
{{#each queue_integrations}}
- {{{this}}}
{{/each}}

### Storage Integrations
{{#each storage_integrations}}
- {{{this}}}
{{/each}}

### AI/ML Integrations
{{#each ai_integrations}}
- {{{this}}}
{{/each}}
<!-- ARCHDOC:END section=integrations -->

---

## Rails / Tooling
<!-- ARCHDOC:BEGIN section=rails -->
> Generated. Do not edit inside this block.
{{{rails_summary}}}
<!-- ARCHDOC:END section=rails -->

---

## Repository layout (top-level)
<!-- ARCHDOC:BEGIN section=layout -->
> Generated. Do not edit inside this block.
| Path | Purpose | Link |
|------|---------|------|
{{#each layout_items}}
| {{{path}}} | {{{purpose}}} | [details]({{{link}}}) |
{{/each}}
<!-- ARCHDOC:END section=layout -->

---

## Modules index
<!-- ARCHDOC:BEGIN section=modules_index -->
> Generated. Do not edit inside this block.
| Module | Symbols | Inbound | Outbound | Link |
|--------|---------|---------|----------|------|
{{#each modules}}
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
{{/each}}
<!-- ARCHDOC:END section=modules_index -->

---

## Critical dependency points
<!-- ARCHDOC:BEGIN section=critical_points -->
> Generated. Do not edit inside this block.
### High Fan-in (Most Called)
| Symbol | Fan-in | Critical |
|--------|--------|----------|
{{#each high_fan_in}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### High Fan-out (Calls Many)
| Symbol | Fan-out | Critical |
|--------|---------|----------|
{{#each high_fan_out}}
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
{{/each}}

### Module Cycles
{{#each cycles}}
- {{{cycle_path}}}
{{/each}}
<!-- ARCHDOC:END section=critical_points -->

---

<!-- MANUAL:BEGIN -->
## Change notes (manual)
{{#each change_notes}}
- {{{this}}}
{{/each}}
<!-- MANUAL:END -->
"#
}
|
||||
|
||||
/// Handlebars template for a per-module documentation page.
///
/// Expects: `module_name`, `module_summary`, a `symbols` array (name,
/// signature, docstring, kind, fan_in, fan_out, is_critical), dependency
/// lists, `has_*_integrations` flags with matching `*_symbols` arrays,
/// and `usage_examples` (rendered as fenced python code blocks).
fn module_md_template() -> &'static str {
    r#"# Module: {{{module_name}}}

{{{module_summary}}}

## Symbols

{{#each symbols}}
### {{{name}}}

{{{signature}}}

{{{docstring}}}

**Type:** {{{kind}}}

**Metrics:**
- Fan-in: {{{fan_in}}}
- Fan-out: {{{fan_out}}}
{{#if is_critical}}
- Critical: Yes
{{/if}}

{{/each}}

## Dependencies

### Imports
{{#each imports}}
- {{{this}}}
{{/each}}

### Outbound Modules
{{#each outbound_modules}}
- {{{this}}}
{{/each}}

### Inbound Modules
{{#each inbound_modules}}
- {{{this}}}
{{/each}}

## Integrations

{{#if has_db_integrations}}
### Database Integrations
{{#each db_symbols}}
- {{{this}}}
{{/each}}
{{/if}}

{{#if has_http_integrations}}
### HTTP/API Integrations
{{#each http_symbols}}
- {{{this}}}
{{/each}}
{{/if}}

{{#if has_queue_integrations}}
### Queue Integrations
{{#each queue_symbols}}
- {{{this}}}
{{/each}}
{{/if}}

{{#if has_storage_integrations}}
### Storage Integrations
{{#each storage_symbols}}
- {{{this}}}
{{/each}}
{{/if}}

{{#if has_ai_integrations}}
### AI/ML Integrations
{{#each ai_symbols}}
- {{{this}}}
{{/each}}
{{/if}}

## Usage Examples

{{#each usage_examples}}
```python
{{{this}}}
```

{{/each}}
"#
}
|
||||
|
||||
/// Render the top-level ARCHITECTURE.md from the project model.
///
/// Gathers per-symbol integration lists, resolves the project name
/// (config → pyproject.toml → project directory name → "Project"),
/// groups files by top-level directory for the layout table, collects
/// module/critical-point/cycle data, and renders the "architecture_md"
/// template with it.
///
/// # Errors
/// Returns an error if the Handlebars template fails to render.
pub fn render_architecture_md(&self, model: &ProjectModel, config: Option<&Config>) -> Result<String, anyhow::Error> {
    // Collect integration information: one "symbol in file" entry per
    // flagged symbol, per category.
    let mut db_integrations = Vec::new();
    let mut http_integrations = Vec::new();
    let mut queue_integrations = Vec::new();
    let mut storage_integrations = Vec::new();
    let mut ai_integrations = Vec::new();

    for (symbol_id, symbol) in &model.symbols {
        if symbol.integrations_flags.db {
            db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
        }
        if symbol.integrations_flags.http {
            http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
        }
        if symbol.integrations_flags.queue {
            queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
        }
        if symbol.integrations_flags.storage {
            storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
        }
        if symbol.integrations_flags.ai {
            ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
        }
    }

    // Determine project name: config > pyproject.toml > directory name > fallback
    let project_name = config
        .and_then(|c| {
            if c.project.name.is_empty() {
                None
            } else {
                Some(c.project.name.clone())
            }
        })
        .or_else(|| {
            // Try pyproject.toml
            config.and_then(|c| {
                let pyproject_path = std::path::Path::new(&c.project.root).join("pyproject.toml");
                std::fs::read_to_string(&pyproject_path).ok().and_then(|content| {
                    // Simple TOML parsing for [project] name = "..."
                    // (line-based; deliberately avoids a TOML dependency —
                    // will not handle multi-line or exotic TOML forms).
                    let mut in_project = false;
                    for line in content.lines() {
                        let trimmed = line.trim();
                        if trimmed == "[project]" {
                            in_project = true;
                            continue;
                        }
                        if trimmed.starts_with('[') {
                            in_project = false;
                            continue;
                        }
                        if in_project && trimmed.starts_with("name") {
                            if let Some(val) = trimmed.split('=').nth(1) {
                                let name = val.trim().trim_matches('"').trim_matches('\'');
                                if !name.is_empty() {
                                    return Some(name.to_string());
                                }
                            }
                        }
                    }
                    None
                })
            })
        })
        .or_else(|| {
            // Last resort before the literal fallback: the canonicalized
            // project root's directory name.
            config.map(|c| {
                std::path::Path::new(&c.project.root)
                    .canonicalize()
                    .ok()
                    .and_then(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
                    .unwrap_or_else(|| "Project".to_string())
            })
        })
        .unwrap_or_else(|| "Project".to_string());

    let today = Utc::now().format("%Y-%m-%d").to_string();

    // Collect layout items grouped by top-level directory
    // (BTreeMap keeps the layout table deterministically sorted).
    let mut dir_files: std::collections::BTreeMap<String, Vec<String>> = std::collections::BTreeMap::new();
    for file_doc in model.files.values() {
        let path = file_doc.path.strip_prefix("./").unwrap_or(&file_doc.path);
        let top_dir = path.split('/').next().unwrap_or(path);
        // If file is at root level (no '/'), use the filename itself
        let top = if path.contains('/') {
            format!("{}/", top_dir)
        } else {
            path.to_string()
        };
        dir_files.entry(top).or_default().push(path.to_string());
    }
    let mut layout_items = Vec::new();
    for (dir, files) in &dir_files {
        let file_count = files.len();
        // Trailing '/' marks a directory entry; anything else is a root file.
        let purpose = if dir.ends_with('/') {
            format!("{} files", file_count)
        } else {
            "Root file".to_string()
        };
        layout_items.push(serde_json::json!({
            "path": dir,
            "purpose": purpose,
            "link": format!("docs/architecture/files/{}.md", sanitize_for_link(dir.trim_end_matches('/')))
        }));
    }

    // Collect module items for template
    let mut modules_list = Vec::new();
    for (module_id, module) in &model.modules {
        modules_list.push(serde_json::json!({
            "name": module_id,
            "symbol_count": module.symbols.len(),
            "inbound_count": module.inbound_modules.len(),
            "outbound_count": module.outbound_modules.len(),
            "link": format!("docs/architecture/modules/{}.md", sanitize_for_link(module_id))
        }));
    }

    // Collect critical points
    // NOTE(review): the >5 cutoff here is hard-coded, independent of the
    // config thresholds used by compute_metrics — confirm intended.
    let mut high_fan_in = Vec::new();
    let mut high_fan_out = Vec::new();
    for (symbol_id, symbol) in &model.symbols {
        if symbol.metrics.fan_in > 5 {
            high_fan_in.push(serde_json::json!({
                "symbol": symbol_id,
                "count": symbol.metrics.fan_in,
                "critical": symbol.metrics.is_critical,
            }));
        }
        if symbol.metrics.fan_out > 5 {
            high_fan_out.push(serde_json::json!({
                "symbol": symbol_id,
                "count": symbol.metrics.fan_out,
                "critical": symbol.metrics.is_critical,
            }));
        }
    }

    // Each cycle is displayed closed back onto its first node.
    let cycles: Vec<_> = cycle_detector::detect_cycles(model)
        .iter()
        .map(|cycle| {
            serde_json::json!({
                "cycle_path": format!("{} → {}", cycle.join(" → "), cycle.first().unwrap_or(&String::new()))
            })
        })
        .collect();

    // Project statistics
    let project_description = format!(
        "Python project with {} modules, {} files, and {} symbols.",
        model.modules.len(), model.files.len(), model.symbols.len()
    );

    // Prepare data for template
    let data = serde_json::json!({
        "project_name": project_name,
        "project_description": project_description,
        "created_date": &today,
        "updated_date": &today,
        "key_decisions": ["<FILL_MANUALLY>"],
        "non_goals": ["<FILL_MANUALLY>"],
        "change_notes": ["<FILL_MANUALLY>"],
        "db_integrations": db_integrations,
        "http_integrations": http_integrations,
        "queue_integrations": queue_integrations,
        "storage_integrations": storage_integrations,
        "ai_integrations": ai_integrations,
        "rails_summary": "\n\nNo tooling information available.\n",
        "layout_items": layout_items,
        "modules": modules_list,
        "high_fan_in": high_fan_in,
        "high_fan_out": high_fan_out,
        "cycles": cycles,
    });

    self.templates.render("architecture_md", &data)
        .map_err(|e| anyhow::anyhow!("Failed to render architecture.md: {}", e))
}
|
||||
|
||||
pub fn render_module_md(&self, model: &ProjectModel, module_id: &str) -> Result<String, anyhow::Error> {
|
||||
// Find the module in the project model
|
||||
let module = model.modules.get(module_id)
|
||||
.ok_or_else(|| anyhow::anyhow!("Module {} not found", module_id))?;
|
||||
|
||||
// Collect symbols for this module
|
||||
let mut symbols = Vec::new();
|
||||
for symbol_id in &module.symbols {
|
||||
if let Some(symbol) = model.symbols.get(symbol_id) {
|
||||
symbols.push(serde_json::json!({
|
||||
"name": symbol.qualname,
|
||||
"signature": symbol.signature,
|
||||
"docstring": symbol.docstring_first_line.as_deref().unwrap_or("No documentation available"),
|
||||
"kind": format!("{:?}", symbol.kind),
|
||||
"fan_in": symbol.metrics.fan_in,
|
||||
"fan_out": symbol.metrics.fan_out,
|
||||
"is_critical": symbol.metrics.is_critical,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
// Collect integration information for this module
|
||||
let mut db_symbols = Vec::new();
|
||||
let mut http_symbols = Vec::new();
|
||||
let mut queue_symbols = Vec::new();
|
||||
let mut storage_symbols = Vec::new();
|
||||
let mut ai_symbols = Vec::new();
|
||||
|
||||
for symbol_id in &module.symbols {
|
||||
if let Some(symbol) = model.symbols.get(symbol_id) {
|
||||
if symbol.integrations_flags.db {
|
||||
db_symbols.push(symbol.qualname.clone());
|
||||
}
|
||||
if symbol.integrations_flags.http {
|
||||
http_symbols.push(symbol.qualname.clone());
|
||||
}
|
||||
if symbol.integrations_flags.queue {
|
||||
queue_symbols.push(symbol.qualname.clone());
|
||||
}
|
||||
if symbol.integrations_flags.storage {
|
||||
storage_symbols.push(symbol.qualname.clone());
|
||||
}
|
||||
if symbol.integrations_flags.ai {
|
||||
ai_symbols.push(symbol.qualname.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Generate usage examples from public symbols
|
||||
let mut usage_examples = Vec::new();
|
||||
for symbol_id in &module.symbols {
|
||||
if let Some(symbol) = model.symbols.get(symbol_id) {
|
||||
let short_name = symbol.qualname.rsplit('.').next().unwrap_or(&symbol.qualname);
|
||||
match symbol.kind {
|
||||
SymbolKind::Function | SymbolKind::AsyncFunction => {
|
||||
// Extract args from signature: "def foo(a, b)" -> "a, b"
|
||||
let args = symbol.signature
|
||||
.find('(')
|
||||
.and_then(|start| symbol.signature.rfind(')').map(|end| (start, end)))
|
||||
.map(|(s, e)| &symbol.signature[s+1..e])
|
||||
.unwrap_or("");
|
||||
let clean_args = args.split(',')
|
||||
.map(|a| a.split(':').next().unwrap_or("").trim())
|
||||
.filter(|a| !a.is_empty() && *a != "self" && *a != "cls")
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
let example_args = if clean_args.is_empty() { String::new() } else {
|
||||
clean_args.split(", ").map(|a| {
|
||||
if a.starts_with('*') { "..." } else { a }
|
||||
}).collect::<Vec<_>>().join(", ")
|
||||
};
|
||||
let prefix = if symbol.kind == SymbolKind::AsyncFunction { "await " } else { "" };
|
||||
usage_examples.push(format!(
|
||||
"from {} import {}\nresult = {}{}({})",
|
||||
module_id, short_name, prefix, short_name, example_args
|
||||
));
|
||||
}
|
||||
SymbolKind::Class => {
|
||||
// Find __init__ method to get constructor args
|
||||
let init_name = format!("{}.__init__", short_name);
|
||||
let init_args = module.symbols.iter()
|
||||
.find_map(|sid| {
|
||||
model.symbols.get(sid).and_then(|s| {
|
||||
if s.qualname == init_name || s.id == init_name {
|
||||
// Extract args from __init__ signature
|
||||
let args = s.signature
|
||||
.find('(')
|
||||
.and_then(|start| s.signature.rfind(')').map(|end| (start, end)))
|
||||
.map(|(st, en)| &s.signature[st+1..en])
|
||||
.unwrap_or("");
|
||||
let clean = args.split(',')
|
||||
.map(|a| a.split(':').next().unwrap_or("").split('=').next().unwrap_or("").trim())
|
||||
.filter(|a| !a.is_empty() && *a != "self" && *a != "cls" && !a.starts_with('*'))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
Some(clean)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.unwrap_or_default();
|
||||
usage_examples.push(format!(
|
||||
"from {} import {}\ninstance = {}({})",
|
||||
module_id, short_name, short_name, init_args
|
||||
));
|
||||
}
|
||||
SymbolKind::Method => {
|
||||
// Skip methods - they're shown via class usage
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if usage_examples.is_empty() {
|
||||
usage_examples.push(format!("import {}", module_id));
|
||||
}
|
||||
|
||||
// Prepare data for template
|
||||
let data = serde_json::json!({
|
||||
"module_name": module_id,
|
||||
"module_summary": module.doc_summary.as_deref().unwrap_or("No summary available"),
|
||||
"symbols": symbols,
|
||||
"imports": model.files.get(&module.files[0]).map(|f| f.imports.clone()).unwrap_or_default(),
|
||||
"outbound_modules": module.outbound_modules,
|
||||
"inbound_modules": module.inbound_modules,
|
||||
"has_db_integrations": !db_symbols.is_empty(),
|
||||
"has_http_integrations": !http_symbols.is_empty(),
|
||||
"has_queue_integrations": !queue_symbols.is_empty(),
|
||||
"has_storage_integrations": !storage_symbols.is_empty(),
|
||||
"has_ai_integrations": !ai_symbols.is_empty(),
|
||||
"db_symbols": db_symbols,
|
||||
"http_symbols": http_symbols,
|
||||
"queue_symbols": queue_symbols,
|
||||
"storage_symbols": storage_symbols,
|
||||
"ai_symbols": ai_symbols,
|
||||
"usage_examples": usage_examples,
|
||||
});
|
||||
|
||||
self.templates.render("module_md", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render module.md: {}", e))
|
||||
}
|
||||
|
||||
pub fn render_integrations_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect integration information
|
||||
let mut db_integrations = Vec::new();
|
||||
let mut http_integrations = Vec::new();
|
||||
let mut queue_integrations = Vec::new();
|
||||
let mut storage_integrations = Vec::new();
|
||||
let mut ai_integrations = Vec::new();
|
||||
|
||||
for (symbol_id, symbol) in &model.symbols {
|
||||
if symbol.integrations_flags.db {
|
||||
db_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.http {
|
||||
http_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.queue {
|
||||
queue_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.storage {
|
||||
storage_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
if symbol.integrations_flags.ai {
|
||||
ai_integrations.push(format!("{} in {}", symbol_id, symbol.file_id));
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare data for integrations section
|
||||
let data = serde_json::json!({
|
||||
"db_integrations": db_integrations,
|
||||
"http_integrations": http_integrations,
|
||||
"queue_integrations": queue_integrations,
|
||||
"storage_integrations": storage_integrations,
|
||||
"ai_integrations": ai_integrations,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the integrations section
|
||||
let integrations_template = r#"
|
||||
|
||||
### Database Integrations
|
||||
{{#each db_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### HTTP/API Integrations
|
||||
{{#each http_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### Queue Integrations
|
||||
{{#each queue_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### Storage Integrations
|
||||
{{#each storage_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
|
||||
### AI/ML Integrations
|
||||
{{#each ai_integrations}}
|
||||
- {{{this}}}
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("integrations", integrations_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register integrations template: {}", e))?;
|
||||
|
||||
handlebars.render("integrations", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render integrations section: {}", e))
|
||||
}
|
||||
|
||||
/// Render the "Rails / Tooling" section.
///
/// Currently a stub: tooling detection is not implemented yet, so the
/// model is ignored and a fixed placeholder paragraph is returned.
pub fn render_rails_section(&self, _model: &ProjectModel) -> Result<String, anyhow::Error> {
    Ok(String::from("\n\nNo tooling information available.\n"))
}
|
||||
|
||||
pub fn render_layout_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect layout items grouped by top-level directory
|
||||
let mut dir_files: std::collections::BTreeMap<String, Vec<String>> = std::collections::BTreeMap::new();
|
||||
for file_doc in model.files.values() {
|
||||
let path = file_doc.path.strip_prefix("./").unwrap_or(&file_doc.path);
|
||||
let top_dir = path.split('/').next().unwrap_or(path);
|
||||
let top = if path.contains('/') {
|
||||
format!("{}/", top_dir)
|
||||
} else {
|
||||
path.to_string()
|
||||
};
|
||||
dir_files.entry(top).or_default().push(path.to_string());
|
||||
}
|
||||
let mut layout_items = Vec::new();
|
||||
for (dir, files) in &dir_files {
|
||||
let file_count = files.len();
|
||||
let purpose = if dir.ends_with('/') {
|
||||
format!("{} files", file_count)
|
||||
} else {
|
||||
"Root file".to_string()
|
||||
};
|
||||
layout_items.push(serde_json::json!({
|
||||
"path": dir,
|
||||
"purpose": purpose,
|
||||
"link": format!("docs/architecture/files/{}.md", sanitize_for_link(dir.trim_end_matches('/')))
|
||||
}));
|
||||
}
|
||||
|
||||
// Prepare data for layout section
|
||||
let data = serde_json::json!({
|
||||
"layout_items": layout_items,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the layout section
|
||||
let layout_template = r#"
|
||||
|
||||
| Path | Purpose | Link |
|
||||
|------|---------|------|
|
||||
{{#each layout_items}}
|
||||
| {{{path}}} | {{{purpose}}} | [details]({{{link}}}) |
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("layout", layout_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register layout template: {}", e))?;
|
||||
|
||||
handlebars.render("layout", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render layout section: {}", e))
|
||||
}
|
||||
|
||||
pub fn render_modules_index_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect module information
|
||||
let mut modules = Vec::new();
|
||||
|
||||
for (module_id, module) in &model.modules {
|
||||
modules.push(serde_json::json!({
|
||||
"name": module_id,
|
||||
"symbol_count": module.symbols.len(),
|
||||
"inbound_count": module.inbound_modules.len(),
|
||||
"outbound_count": module.outbound_modules.len(),
|
||||
"link": format!("docs/architecture/modules/{}.md", sanitize_for_link(module_id))
|
||||
}));
|
||||
}
|
||||
|
||||
// Prepare data for modules index section
|
||||
let data = serde_json::json!({
|
||||
"modules": modules,
|
||||
});
|
||||
|
||||
// Create a smaller template just for the modules index section
|
||||
let modules_template = r#"
|
||||
|
||||
| Module | Symbols | Inbound | Outbound | Link |
|
||||
|--------|---------|---------|----------|------|
|
||||
{{#each modules}}
|
||||
| {{{name}}} | {{{symbol_count}}} | {{{inbound_count}}} | {{{outbound_count}}} | [details]({{{link}}}) |
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("modules_index", modules_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register modules_index template: {}", e))?;
|
||||
|
||||
handlebars.render("modules_index", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render modules index section: {}", e))
|
||||
}
|
||||
|
||||
pub fn render_critical_points_section(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect critical points information
|
||||
let mut high_fan_in = Vec::new();
|
||||
let mut high_fan_out = Vec::new();
|
||||
|
||||
for (symbol_id, symbol) in &model.symbols {
|
||||
if symbol.metrics.fan_in > 5 { // Threshold for high fan-in
|
||||
high_fan_in.push(serde_json::json!({
|
||||
"symbol": symbol_id,
|
||||
"count": symbol.metrics.fan_in,
|
||||
"critical": symbol.metrics.is_critical,
|
||||
}));
|
||||
}
|
||||
if symbol.metrics.fan_out > 5 { // Threshold for high fan-out
|
||||
high_fan_out.push(serde_json::json!({
|
||||
"symbol": symbol_id,
|
||||
"count": symbol.metrics.fan_out,
|
||||
"critical": symbol.metrics.is_critical,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare data for critical points section
|
||||
let data = serde_json::json!({
|
||||
"high_fan_in": high_fan_in,
|
||||
"high_fan_out": high_fan_out,
|
||||
"cycles": cycle_detector::detect_cycles(model)
|
||||
.iter()
|
||||
.map(|cycle| {
|
||||
serde_json::json!({
|
||||
"cycle_path": format!("{} → {}", cycle.join(" → "), cycle.first().unwrap_or(&String::new()))
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
});
|
||||
|
||||
// Create a smaller template just for the critical points section
|
||||
let critical_points_template = r#"
|
||||
|
||||
### High Fan-in (Most Called)
|
||||
| Symbol | Fan-in | Critical |
|
||||
|--------|--------|----------|
|
||||
{{#each high_fan_in}}
|
||||
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
|
||||
{{/each}}
|
||||
|
||||
### High Fan-out (Calls Many)
|
||||
| Symbol | Fan-out | Critical |
|
||||
|--------|---------|----------|
|
||||
{{#each high_fan_out}}
|
||||
| {{{symbol}}} | {{{count}}} | {{{critical}}} |
|
||||
{{/each}}
|
||||
|
||||
### Module Cycles
|
||||
{{#each cycles}}
|
||||
- {{{cycle_path}}}
|
||||
{{/each}}
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("critical_points", critical_points_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register critical_points template: {}", e))?;
|
||||
|
||||
handlebars.render("critical_points", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render critical points section: {}", e))
|
||||
}
|
||||
|
||||
pub fn render_layout_md(&self, model: &ProjectModel) -> Result<String, anyhow::Error> {
|
||||
// Collect layout items grouped by top-level directory
|
||||
let mut dir_files: std::collections::BTreeMap<String, Vec<String>> = std::collections::BTreeMap::new();
|
||||
for file_doc in model.files.values() {
|
||||
let path = file_doc.path.strip_prefix("./").unwrap_or(&file_doc.path);
|
||||
let top_dir = path.split('/').next().unwrap_or(path);
|
||||
let top = if path.contains('/') {
|
||||
format!("{}/", top_dir)
|
||||
} else {
|
||||
path.to_string()
|
||||
};
|
||||
dir_files.entry(top).or_default().push(path.to_string());
|
||||
}
|
||||
let mut layout_items = Vec::new();
|
||||
for (dir, files) in &dir_files {
|
||||
let file_count = files.len();
|
||||
let purpose = if dir.ends_with('/') {
|
||||
format!("{} files", file_count)
|
||||
} else {
|
||||
"Root file".to_string()
|
||||
};
|
||||
layout_items.push(serde_json::json!({
|
||||
"path": dir,
|
||||
"purpose": purpose,
|
||||
"link": format!("files/{}.md", sanitize_for_link(dir.trim_end_matches('/')))
|
||||
}));
|
||||
}
|
||||
|
||||
// Prepare data for layout template
|
||||
let data = serde_json::json!({
|
||||
"layout_items": layout_items,
|
||||
});
|
||||
|
||||
// Create template for layout.md
|
||||
let layout_template = r#"# Repository layout
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## Manual overrides
|
||||
- `src/app/` — <FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
|
||||
---
|
||||
|
||||
## Detected structure
|
||||
<!-- ARCHDOC:BEGIN section=layout_detected -->
|
||||
> Generated. Do not edit inside this block.
|
||||
| Path | Purpose | Link |
|
||||
|------|---------|------|
|
||||
{{#each layout_items}}
|
||||
| {{{path}}} | {{{purpose}}} | [details]({{{link}}}) |
|
||||
{{/each}}
|
||||
<!-- ARCHDOC:END section=layout_detected -->
|
||||
"#;
|
||||
|
||||
let mut handlebars = Handlebars::new();
|
||||
handlebars.register_template_string("layout_md", layout_template)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to register layout_md template: {}", e))?;
|
||||
|
||||
handlebars.render("layout_md", &data)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to render layout.md: {}", e))
|
||||
}
|
||||
|
||||
/// Render the markdown details block for a single symbol: anchor, header,
/// and the purpose / relations / integrations / impact sections, each
/// wrapped in ARCHDOC markers, plus a MANUAL notes stub at the bottom.
///
/// # Errors
/// Fails when `symbol_id` is not present in `model.symbols`, or when the
/// inline Handlebars template cannot be registered or rendered.
pub fn render_symbol_details(&self, model: &ProjectModel, symbol_id: &str) -> Result<String, anyhow::Error> {
    // Find the symbol in the project model
    let symbol = model.symbols.get(symbol_id)
        .ok_or_else(|| anyhow::anyhow!("Symbol {} not found", symbol_id))?;

    // Prepare data for symbol template. Missing docstrings get a fixed
    // placeholder; `kind` uses the enum's Debug representation.
    let data = serde_json::json!({
        "symbol_id": symbol_id,
        "qualname": symbol.qualname,
        "kind": format!("{:?}", symbol.kind),
        "signature": symbol.signature,
        "docstring": symbol.docstring_first_line.as_deref().unwrap_or("No documentation available"),
        "purpose": symbol.purpose,
        "integrations": {
            "http": symbol.integrations_flags.http,
            "db": symbol.integrations_flags.db,
            "queue": symbol.integrations_flags.queue,
            "storage": symbol.integrations_flags.storage,
            "ai": symbol.integrations_flags.ai,
        },
        "metrics": {
            "fan_in": symbol.metrics.fan_in,
            "fan_out": symbol.metrics.fan_out,
            "is_critical": symbol.metrics.is_critical,
            "cycle_participant": symbol.metrics.cycle_participant,
        },
        "outbound_calls": symbol.outbound_calls,
        "inbound_calls": symbol.inbound_calls,
    });

    // Create template for symbol details. The ARCHDOC comment pairs let a
    // marker-aware writer later replace individual generated regions
    // without touching the MANUAL block.
    let symbol_template = r#"<a id="{{symbol_id}}"></a>

### `{{qualname}}`
- **Kind:** {{kind}}
- **Signature:** `{{{signature}}}`
- **Docstring:** `{{{docstring}}}`

#### What it does
<!-- ARCHDOC:BEGIN section=purpose -->
{{{purpose}}}
<!-- ARCHDOC:END section=purpose -->

#### Relations
<!-- ARCHDOC:BEGIN section=relations -->
**Outbound calls (best-effort):**
{{#each outbound_calls}}
- {{{this}}}
{{/each}}

**Inbound (used by) (best-effort):**
{{#each inbound_calls}}
- {{{this}}}
{{/each}}
<!-- ARCHDOC:END section=relations -->

#### Integrations (heuristic)
<!-- ARCHDOC:BEGIN section=integrations -->
- HTTP: {{#if integrations.http}}yes{{else}}no{{/if}}
- DB: {{#if integrations.db}}yes{{else}}no{{/if}}
- Queue/Tasks: {{#if integrations.queue}}yes{{else}}no{{/if}}
- Storage: {{#if integrations.storage}}yes{{else}}no{{/if}}
- AI/ML: {{#if integrations.ai}}yes{{else}}no{{/if}}
<!-- ARCHDOC:END section=integrations -->

#### Risk / impact
<!-- ARCHDOC:BEGIN section=impact -->
- fan-in: {{{metrics.fan_in}}}
- fan-out: {{{metrics.fan_out}}}
- cycle participant: {{#if metrics.cycle_participant}}yes{{else}}no{{/if}}
- critical: {{#if metrics.is_critical}}yes{{else}}no{{/if}}
<!-- ARCHDOC:END section=impact -->

<!-- MANUAL:BEGIN -->
#### Manual notes
<FILL_MANUALLY>
<!-- MANUAL:END -->
"#;

    // A fresh registry per call keeps this renderer self-contained.
    let mut handlebars = Handlebars::new();
    handlebars.register_template_string("symbol_details", symbol_template)
        .map_err(|e| anyhow::anyhow!("Failed to register symbol_details template: {}", e))?;

    handlebars.render("symbol_details", &data)
        .map_err(|e| anyhow::anyhow!("Failed to render symbol details: {}", e))
}
|
||||
}
|
||||
81
wtismycode-core/src/scanner.rs
Normal file
81
wtismycode-core/src/scanner.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
//! File scanner for WTIsMyCode
|
||||
//!
|
||||
//! This module handles scanning the file system for Python files according to
|
||||
//! the configuration settings.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::errors::WTIsMyCodeError;
|
||||
use std::path::{Path, PathBuf};
|
||||
use walkdir::WalkDir;
|
||||
|
||||
/// Walks a directory tree and collects Python source files, applying the
/// include/exclude settings from the project configuration.
pub struct FileScanner {
    // Scan settings (exclude patterns, symlink policy) read from `config.scan`.
    config: Config,
}
|
||||
|
||||
impl FileScanner {
|
||||
pub fn new(config: Config) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
|
||||
pub fn scan_python_files(&self, root: &Path) -> Result<Vec<PathBuf>, WTIsMyCodeError> {
|
||||
// Check if root directory exists
|
||||
if !root.exists() {
|
||||
return Err(WTIsMyCodeError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("Root directory does not exist: {}", root.display())
|
||||
)));
|
||||
}
|
||||
|
||||
if !root.is_dir() {
|
||||
return Err(WTIsMyCodeError::Io(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!("Root path is not a directory: {}", root.display())
|
||||
)));
|
||||
}
|
||||
|
||||
let mut python_files = Vec::new();
|
||||
|
||||
// Walk directory tree respecting include/exclude patterns
|
||||
for entry in WalkDir::new(root)
|
||||
.follow_links(self.config.scan.follow_symlinks)
|
||||
.into_iter() {
|
||||
|
||||
let entry = entry.map_err(|e| {
|
||||
WTIsMyCodeError::Io(std::io::Error::other(
|
||||
format!("Failed to read directory entry: {}", e)
|
||||
))
|
||||
})?;
|
||||
|
||||
let path = entry.path();
|
||||
|
||||
// Skip excluded paths
|
||||
if self.is_excluded(path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Include Python files
|
||||
if path.extension().and_then(|s| s.to_str()) == Some("py") {
|
||||
python_files.push(path.to_path_buf());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(python_files)
|
||||
}
|
||||
|
||||
fn is_excluded(&self, path: &Path) -> bool {
|
||||
// Convert path to string for pattern matching
|
||||
let path_str = match path.to_str() {
|
||||
Some(s) => s,
|
||||
None => return false, // If we can't convert to string, don't exclude
|
||||
};
|
||||
|
||||
// Check if path matches any exclude patterns
|
||||
for pattern in &self.config.scan.exclude {
|
||||
if path_str.contains(pattern) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
348
wtismycode-core/src/writer.rs
Normal file
348
wtismycode-core/src/writer.rs
Normal file
@@ -0,0 +1,348 @@
|
||||
//! Diff-aware file writer for WTIsMyCode
|
||||
//!
|
||||
//! This module handles writing generated documentation to files while preserving
|
||||
//! manual content and only updating generated sections.
|
||||
|
||||
use crate::errors::WTIsMyCodeError;
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use chrono::Utc;
|
||||
|
||||
/// Byte-offset span of one generated `ARCHDOC` *section* inside a document.
#[derive(Debug)]
pub struct SectionMarker {
    /// Section name as it appears in the BEGIN/END marker comments.
    pub name: String,
    /// Offset of the first byte of the BEGIN marker.
    pub start_pos: usize,
    /// Offset one past the last byte of the END marker.
    pub end_pos: usize,
}

/// Byte-offset span of one generated per-*symbol* block inside a document.
#[derive(Debug)]
pub struct SymbolMarker {
    /// Symbol id as it appears in the BEGIN/END marker comments.
    pub symbol_id: String,
    /// Offset of the first byte of the BEGIN marker.
    pub start_pos: usize,
    /// Offset one past the last byte of the END marker.
    pub end_pos: usize,
}

/// Writer that updates only the marker-delimited generated regions of a
/// documentation file, preserving all manual content around them.
pub struct DiffAwareWriter {
    // Configuration (none yet; kept as a struct so options can be added
    // without changing the public API)
}
|
||||
|
||||
impl Default for DiffAwareWriter {
    /// Equivalent to [`DiffAwareWriter::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
impl DiffAwareWriter {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
pub fn update_file_with_markers(
|
||||
&self,
|
||||
file_path: &Path,
|
||||
generated_content: &str,
|
||||
section_name: &str,
|
||||
) -> Result<(), WTIsMyCodeError> {
|
||||
// Read existing file
|
||||
let existing_content = if file_path.exists() {
|
||||
fs::read_to_string(file_path)
|
||||
.map_err(WTIsMyCodeError::Io)?
|
||||
} else {
|
||||
// Create new file with template
|
||||
let template_content = self.create_template_file(file_path, section_name)?;
|
||||
// Write template to file
|
||||
fs::write(file_path, &template_content)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
template_content
|
||||
};
|
||||
|
||||
// Find section markers
|
||||
let markers = self.find_section_markers(&existing_content, section_name)?;
|
||||
|
||||
if let Some(marker) = markers.first() {
|
||||
// Replace content between markers
|
||||
let new_content = self.replace_section_content(
|
||||
&existing_content,
|
||||
marker,
|
||||
generated_content,
|
||||
)?;
|
||||
|
||||
// Check if content has changed
|
||||
let content_changed = existing_content != new_content;
|
||||
|
||||
// Only write if content actually changed (optimization)
|
||||
if content_changed {
|
||||
let updated_content = self.update_timestamp(new_content)?;
|
||||
fs::write(file_path, updated_content)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
}
|
||||
// If not changed, skip writing entirely
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_symbol_section(
|
||||
&self,
|
||||
file_path: &Path,
|
||||
symbol_id: &str,
|
||||
generated_content: &str,
|
||||
) -> Result<(), WTIsMyCodeError> {
|
||||
// Read existing file
|
||||
let existing_content = if file_path.exists() {
|
||||
fs::read_to_string(file_path)
|
||||
.map_err(WTIsMyCodeError::Io)?
|
||||
} else {
|
||||
// If file doesn't exist, create it with a basic template
|
||||
let template_content = self.create_template_file(file_path, "symbol")?;
|
||||
fs::write(file_path, &template_content)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
template_content
|
||||
};
|
||||
|
||||
// Find symbol markers
|
||||
let markers = self.find_symbol_markers(&existing_content, symbol_id)?;
|
||||
|
||||
if let Some(marker) = markers.first() {
|
||||
// Replace content between markers
|
||||
let new_content = self.replace_symbol_content(
|
||||
&existing_content,
|
||||
marker,
|
||||
generated_content,
|
||||
)?;
|
||||
|
||||
// Check if content has changed
|
||||
let content_changed = existing_content != new_content;
|
||||
|
||||
// Only write if content actually changed (optimization)
|
||||
if content_changed {
|
||||
let updated_content = self.update_timestamp(new_content)?;
|
||||
fs::write(file_path, updated_content)
|
||||
.map_err(WTIsMyCodeError::Io)?;
|
||||
}
|
||||
// If not changed, skip writing entirely
|
||||
} else {
|
||||
eprintln!("Warning: No symbol marker found for {} in {}", symbol_id, file_path.display());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn find_section_markers(&self, content: &str, section_name: &str) -> Result<Vec<SectionMarker>, WTIsMyCodeError> {
|
||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", section_name);
|
||||
let end_marker = format!("<!-- ARCHDOC:END section={} -->", section_name);
|
||||
|
||||
let mut markers = Vec::new();
|
||||
let mut pos = 0;
|
||||
|
||||
while let Some(begin_pos) = content[pos..].find(&begin_marker) {
|
||||
let absolute_begin = pos + begin_pos;
|
||||
let search_start = absolute_begin + begin_marker.len();
|
||||
|
||||
if let Some(end_pos) = content[search_start..].find(&end_marker) {
|
||||
let absolute_end = search_start + end_pos + end_marker.len();
|
||||
markers.push(SectionMarker {
|
||||
name: section_name.to_string(),
|
||||
start_pos: absolute_begin,
|
||||
end_pos: absolute_end,
|
||||
});
|
||||
pos = absolute_end;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(markers)
|
||||
}
|
||||
|
||||
fn find_symbol_markers(&self, content: &str, symbol_id: &str) -> Result<Vec<SymbolMarker>, WTIsMyCodeError> {
|
||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", symbol_id);
|
||||
let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", symbol_id);
|
||||
|
||||
let mut markers = Vec::new();
|
||||
let mut pos = 0;
|
||||
|
||||
while let Some(begin_pos) = content[pos..].find(&begin_marker) {
|
||||
let absolute_begin = pos + begin_pos;
|
||||
let search_start = absolute_begin + begin_marker.len();
|
||||
|
||||
if let Some(end_pos) = content[search_start..].find(&end_marker) {
|
||||
let absolute_end = search_start + end_pos + end_marker.len();
|
||||
markers.push(SymbolMarker {
|
||||
symbol_id: symbol_id.to_string(),
|
||||
start_pos: absolute_begin,
|
||||
end_pos: absolute_end,
|
||||
});
|
||||
pos = absolute_end;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(markers)
|
||||
}
|
||||
|
||||
fn replace_section_content(
|
||||
&self,
|
||||
content: &str,
|
||||
marker: &SectionMarker,
|
||||
new_content: &str,
|
||||
) -> Result<String, WTIsMyCodeError> {
|
||||
let before = &content[..marker.start_pos];
|
||||
let after = &content[marker.end_pos..];
|
||||
|
||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN section={} -->", marker.name);
|
||||
let end_marker = format!("<!-- ARCHDOC:END section={} -->", marker.name);
|
||||
|
||||
Ok(format!(
|
||||
"{}{}{}{}{}",
|
||||
before, begin_marker, new_content, end_marker, after
|
||||
))
|
||||
}
|
||||
|
||||
fn replace_symbol_content(
|
||||
&self,
|
||||
content: &str,
|
||||
marker: &SymbolMarker,
|
||||
new_content: &str,
|
||||
) -> Result<String, WTIsMyCodeError> {
|
||||
let before = &content[..marker.start_pos];
|
||||
let after = &content[marker.end_pos..];
|
||||
|
||||
let begin_marker = format!("<!-- ARCHDOC:BEGIN symbol id={} -->", marker.symbol_id);
|
||||
let end_marker = format!("<!-- ARCHDOC:END symbol id={} -->", marker.symbol_id);
|
||||
|
||||
Ok(format!(
|
||||
"{}{}{}{}{}",
|
||||
before, begin_marker, new_content, end_marker, after
|
||||
))
|
||||
}
|
||||
|
||||
fn update_timestamp(&self, content: String) -> Result<String, WTIsMyCodeError> {
|
||||
// Update the "Updated" field in the document metadata section
|
||||
// Find the metadata section and update the timestamp
|
||||
let today = Utc::now().format("%Y-%m-%d").to_string();
|
||||
|
||||
// Look for the "Updated:" line and replace it
|
||||
let lines: Vec<&str> = content.lines().collect();
|
||||
let mut updated_lines = Vec::new();
|
||||
|
||||
for line in lines {
|
||||
if line.trim_start().starts_with("- **Updated:**") {
|
||||
updated_lines.push(format!("- **Updated:** {}", today));
|
||||
} else {
|
||||
updated_lines.push(line.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(updated_lines.join("\n"))
|
||||
}
|
||||
|
||||
fn create_template_file(&self, _file_path: &Path, template_type: &str) -> Result<String, WTIsMyCodeError> {
|
||||
// Create file with appropriate template based on type
|
||||
match template_type {
|
||||
"architecture" => {
|
||||
let template = r#"# ARCHITECTURE — <PROJECT_NAME>
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## Project summary
|
||||
**Name:** <PROJECT_NAME>
|
||||
**Description:** <FILL_MANUALLY: what this project does in 3–7 lines>
|
||||
|
||||
## Key decisions (manual)
|
||||
- <FILL_MANUALLY>
|
||||
|
||||
## Non-goals (manual)
|
||||
- <FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
|
||||
---
|
||||
|
||||
## Document metadata
|
||||
- **Created:** <AUTO_ON_INIT: YYYY-MM-DD>
|
||||
- **Updated:** <AUTO_ON_CHANGE: YYYY-MM-DD>
|
||||
- **Generated by:** wtismycode (cli) v0.1
|
||||
|
||||
---
|
||||
|
||||
## Rails / Tooling
|
||||
<!-- ARCHDOC:BEGIN section=rails -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: rails summary + links to config files>
|
||||
<!-- ARCHDOC:END section=rails -->
|
||||
|
||||
---
|
||||
|
||||
## Repository layout (top-level)
|
||||
<!-- ARCHDOC:BEGIN section=layout -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: table of top-level folders + heuristic purpose + link to layout.md>
|
||||
<!-- ARCHDOC:END section=layout -->
|
||||
|
||||
---
|
||||
|
||||
## Modules index
|
||||
<!-- ARCHDOC:BEGIN section=modules_index -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: table modules + deps counts + links to module docs>
|
||||
<!-- ARCHDOC:END section=modules_index -->
|
||||
|
||||
---
|
||||
|
||||
## Critical dependency points
|
||||
<!-- ARCHDOC:BEGIN section=critical_points -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: top fan-in/out symbols + cycles>
|
||||
<!-- ARCHDOC:END section=critical_points -->
|
||||
|
||||
---
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## Change notes (manual)
|
||||
- <FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
"#;
|
||||
Ok(template.to_string())
|
||||
}
|
||||
"symbol" => {
|
||||
// Template for symbol documentation files
|
||||
let template = r#"# File: <relative_path>
|
||||
|
||||
- **Module:** <AUTO: module_id>
|
||||
- **Defined symbols:** <AUTO>
|
||||
- **Imports:** <AUTO>
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## File intent (manual)
|
||||
<FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
|
||||
---
|
||||
|
||||
## Imports & file-level dependencies
|
||||
<!-- ARCHDOC:BEGIN section=file_imports -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: imports list + outbound modules + inbound files>
|
||||
<!-- ARCHDOC:END section=file_imports -->
|
||||
|
||||
---
|
||||
|
||||
## Symbols index
|
||||
<!-- ARCHDOC:BEGIN section=symbols_index -->
|
||||
> Generated. Do not edit inside this block.
|
||||
<AUTO: list of links to symbol anchors>
|
||||
<!-- ARCHDOC:END section=symbols_index -->
|
||||
|
||||
---
|
||||
|
||||
## Symbol details
|
||||
<!-- AUTOGENERATED SYMBOL CONTENT WILL BE INSERTED HERE -->
|
||||
"#;
|
||||
Ok(template.to_string())
|
||||
}
|
||||
_ => {
|
||||
Ok("".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
100
wtismycode-core/tests/caching.rs
Normal file
100
wtismycode-core/tests/caching.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
//! Caching tests for WTIsMyCode
|
||||
//!
|
||||
//! These tests verify that the caching functionality works correctly.
|
||||
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use wtismycode_core::{Config, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
fn test_cache_store_and_retrieve() {
    // Parsing the same, unchanged file twice must yield identical results;
    // the second parse is expected to be served from the cache.
    let analyzer = PythonAnalyzer::new(Config::default());

    // Write a small Python module into a temporary directory.
    let workdir = TempDir::new().expect("Failed to create temp dir");
    let source_path = workdir.path().join("test.py");
    let source = r#"
def hello():
    return "Hello, World!"

class Calculator:
    def add(self, a, b):
        return a + b
"#;
    fs::write(&source_path, source).expect("Failed to write test file");

    let first = analyzer.parse_module(&source_path)
        .expect("Failed to parse module first time");
    let second = analyzer.parse_module(&source_path)
        .expect("Failed to parse module second time");

    // Cached and fresh parses must agree on every top-level collection.
    assert_eq!(first.path, second.path);
    assert_eq!(first.module_path, second.module_path);
    assert_eq!(first.imports.len(), second.imports.len());
    assert_eq!(first.symbols.len(), second.symbols.len());
    assert_eq!(first.calls.len(), second.calls.len());
}
|
||||
|
||||
#[test]
fn test_cache_invalidation_on_file_change() {
    // Regression guard: editing a file must invalidate its cache entry so a
    // re-parse sees the new content.
    let config = Config::default();
    let analyzer = PythonAnalyzer::new(config);

    // Create a temporary Python file with a single function.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let temp_file = temp_dir.path().join("test.py");
    let python_code1 = r#"
def hello():
    return "Hello, World!"
"#;
    fs::write(&temp_file, python_code1).expect("Failed to write test file");

    // First parse populates the cache.
    let parsed_module1 = analyzer.parse_module(&temp_file)
        .expect("Failed to parse module first time");

    // Rewrite the file with one additional function.
    let python_code2 = r#"
def hello():
    return "Hello, World!"

def goodbye():
    return "Goodbye, World!"
"#;
    fs::write(&temp_file, python_code2).expect("Failed to write test file");

    // Parse again — must NOT be served from the stale cache entry.
    let parsed_module2 = analyzer.parse_module(&temp_file)
        .expect("Failed to parse module second time");

    // BUG FIX: the original asserted `>=`, which also holds when the stale
    // cached result is returned (equal symbol counts) — i.e. the test could
    // never detect a broken invalidation. The strict inequality actually
    // proves the newly added `goodbye` symbol was picked up.
    assert!(
        parsed_module2.symbols.len() > parsed_module1.symbols.len(),
        "cache was not invalidated: symbol count did not grow after file change"
    );
}
|
||||
|
||||
#[test]
fn test_cache_disabled() {
    // With caching turned off, parsing must still succeed end to end.
    let mut config = Config::default();
    config.caching.enabled = false;
    let analyzer = PythonAnalyzer::new(config);

    // Write a minimal one-function module to a scratch directory.
    let scratch = TempDir::new().expect("Failed to create temp dir");
    let module_path = scratch.path().join("test.py");
    let source = r#"
def hello():
    return "Hello, World!"
"#;
    fs::write(&module_path, source).expect("Failed to write test file");

    let parsed = analyzer
        .parse_module(&module_path)
        .expect("Failed to parse module with caching disabled");

    // Exactly the single `hello` function should have been extracted.
    assert_eq!(parsed.symbols.len(), 1);
}
|
||||
131
wtismycode-core/tests/enhanced_analysis.rs
Normal file
131
wtismycode-core/tests/enhanced_analysis.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
//! Enhanced analysis tests for WTIsMyCode
|
||||
//!
|
||||
//! These tests verify that the enhanced analysis functionality works correctly
|
||||
//! with complex code that includes integrations, calls, and docstrings.
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
fn test_enhanced_analysis_with_integrations() {
    // End-to-end check over the golden test project: scan → parse → resolve,
    // then verify that integration-bearing symbols are modelled correctly.
    let current_dir = std::env::current_dir().unwrap();
    println!("Current directory: {:?}", current_dir);

    // The test may run from the workspace root or the crate dir, so probe
    // both candidate locations for the config file.
    let possible_paths = [
        "tests/golden/test_project/wtismycode.toml",
        "../tests/golden/test_project/wtismycode.toml",
    ];
    let config_path = possible_paths
        .iter()
        .find(|&path| Path::new(path).exists())
        .expect("Could not find config file in any expected location");
    println!("Using config path: {:?}", config_path);

    let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");

    // Scan the golden project for Python sources.
    let project_root = Path::new("tests/golden/test_project");
    let scanner = FileScanner::new(config.clone());
    let python_files = scanner
        .scan_python_files(project_root)
        .expect("Failed to scan Python files");
    println!("Found Python files: {:?}", python_files);

    // Should find both example.py and advanced_example.py.
    assert_eq!(python_files.len(), 2);

    // Parse every discovered file, failing loudly on the first error.
    let analyzer = PythonAnalyzer::new(config.clone());
    let mut parsed_modules = Vec::new();
    for file_path in python_files {
        println!("Parsing file: {:?}", file_path);
        match analyzer.parse_module(&file_path) {
            Ok(module) => {
                println!("Successfully parsed module: {:?}", module.module_path);
                println!("Imports: {:?}", module.imports);
                println!("Symbols: {:?}", module.symbols.len());
                println!("Calls: {:?}", module.calls.len());
                parsed_modules.push(module);
            }
            Err(e) => panic!("Failed to parse {}: {}", file_path.display(), e),
        }
    }
    println!("Parsed {} modules", parsed_modules.len());

    // Resolve symbols into the project model.
    let project_model = analyzer
        .resolve_symbols(&parsed_modules)
        .expect("Failed to resolve symbols");
    println!("Project model modules: {}", project_model.modules.len());
    println!("Project model files: {}", project_model.files.len());
    println!("Project model symbols: {}", project_model.symbols.len());

    assert!(!project_model.modules.is_empty());
    assert!(!project_model.files.is_empty());
    assert!(!project_model.symbols.is_empty());

    // Two source files => two modules and two file docs.
    assert_eq!(project_model.modules.len(), 2);
    assert_eq!(project_model.files.len(), 2);

    // Symbol count may shrink through deduplication but should stay >= 10.
    assert!(project_model.symbols.len() >= 10);

    // The advanced example module must be present (iterator `any` replaces
    // the original manual found-flag loop).
    assert!(project_model
        .modules
        .values()
        .any(|module| module.path.contains("advanced_example.py")));

    // UserService (DB integration) must be modelled as a class.
    let user_service_symbol = project_model
        .symbols
        .values()
        .find(|s| s.id.ends_with("::UserService"));
    assert!(user_service_symbol.is_some());
    assert_eq!(
        user_service_symbol.unwrap().kind,
        wtismycode_core::model::SymbolKind::Class
    );

    // NotificationService (queue integration) must be modelled as a class.
    let notification_service_symbol = project_model
        .symbols
        .values()
        .find(|s| s.id.ends_with("::NotificationService"));
    assert!(notification_service_symbol.is_some());
    assert_eq!(
        notification_service_symbol.unwrap().kind,
        wtismycode_core::model::SymbolKind::Class
    );

    // fetch_external_user_data (HTTP integration) must be a function.
    let fetch_external_user_data_symbol = project_model
        .symbols
        .values()
        .find(|s| s.id.ends_with("::fetch_external_user_data"));
    assert!(fetch_external_user_data_symbol.is_some());
    assert_eq!(
        fetch_external_user_data_symbol.unwrap().kind,
        wtismycode_core::model::SymbolKind::Function
    );

    // The advanced file's import list must include the integration modules.
    // PERF/IDIOM: `iter().any` compares in place; the original collected a
    // `Vec<&String>` and allocated a fresh `String` per `contains` check.
    let mut found_advanced_file = false;
    for file_doc in project_model.files.values() {
        if file_doc.path.contains("advanced_example.py") {
            found_advanced_file = true;
            assert!(!file_doc.imports.is_empty());
            let has_import = |name: &str| file_doc.imports.iter().any(|i| i.as_str() == name);
            assert!(has_import("requests"));
            assert!(has_import("sqlite3"));
            assert!(has_import("redis"));
            assert!(has_import("typing.List") || has_import("typing"));
            break;
        }
    }
    assert!(found_advanced_file);
}
|
||||
83
wtismycode-core/tests/error_handling.rs
Normal file
83
wtismycode-core/tests/error_handling.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
//! Error handling tests for WTIsMyCode
|
||||
//!
|
||||
//! These tests verify that WTIsMyCode properly handles various error conditions
|
||||
//! and edge cases.
|
||||
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
fn test_scanner_nonexistent_directory() {
    // Scanning a path that does not exist must surface an I/O error.
    let scanner = FileScanner::new(Config::default());

    let result = scanner.scan_python_files(Path::new("/nonexistent/directory"));
    assert!(result.is_err());

    // The failure must be the I/O variant, not a parse/config error.
    let err = result.unwrap_err();
    assert!(
        matches!(err, wtismycode_core::errors::WTIsMyCodeError::Io(_)),
        "Expected IO error"
    );
}
|
||||
|
||||
#[test]
fn test_scanner_file_instead_of_directory() {
    // Handing the scanner a regular file where a directory is expected
    // must fail with an I/O error.
    let scanner = FileScanner::new(Config::default());

    // Materialise a plain (non-Python) file inside a scratch directory.
    let scratch = TempDir::new().expect("Failed to create temp dir");
    let file_path = scratch.path().join("test.txt");
    fs::write(&file_path, "test content").expect("Failed to write test file");

    let result = scanner.scan_python_files(&file_path);
    assert!(result.is_err());

    let err = result.unwrap_err();
    assert!(
        matches!(err, wtismycode_core::errors::WTIsMyCodeError::Io(_)),
        "Expected IO error"
    );
}
|
||||
|
||||
#[test]
fn test_analyzer_nonexistent_file() {
    // Parsing a file that does not exist must surface an I/O error.
    let analyzer = PythonAnalyzer::new(Config::default());

    let result = analyzer.parse_module(Path::new("/nonexistent/file.py"));
    assert!(result.is_err());

    let err = result.unwrap_err();
    assert!(
        matches!(err, wtismycode_core::errors::WTIsMyCodeError::Io(_)),
        "Expected IO error"
    );
}
|
||||
|
||||
#[test]
fn test_analyzer_invalid_python_syntax() {
    // A file that is not valid Python must produce a parse error (not I/O).
    let analyzer = PythonAnalyzer::new(Config::default());

    // Write deliberately malformed source to a scratch file.
    let scratch = TempDir::new().expect("Failed to create temp dir");
    let file_path = scratch.path().join("invalid.py");
    fs::write(&file_path, "invalid python syntax @@#$%").expect("Failed to write test file");

    let result = analyzer.parse_module(&file_path);
    assert!(result.is_err());

    let err = result.unwrap_err();
    assert!(
        matches!(
            err,
            wtismycode_core::errors::WTIsMyCodeError::ParseError { .. }
        ),
        "Expected parse error"
    );
}
|
||||
157
wtismycode-core/tests/full_pipeline.rs
Normal file
157
wtismycode-core/tests/full_pipeline.rs
Normal file
@@ -0,0 +1,157 @@
|
||||
//! Full pipeline integration tests for WTIsMyCode
|
||||
//!
|
||||
//! Tests the complete scan → analyze → render pipeline using test-project/.
|
||||
|
||||
use wtismycode_core::config::Config;
|
||||
use wtismycode_core::cycle_detector;
|
||||
use wtismycode_core::model::{Module, ProjectModel};
|
||||
use wtismycode_core::renderer::Renderer;
|
||||
use wtismycode_core::scanner::FileScanner;
|
||||
use std::path::Path;
|
||||
|
||||
#[test]
fn test_config_load_and_validate() {
    // Loading the checked-in test-project config should succeed and expose
    // the expected language plus a non-empty include list.
    let workspace_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap();
    let config_path = workspace_root.join("test-project/wtismycode.toml");

    let config = Config::load_from_file(&config_path).expect("Failed to load config");
    assert_eq!(config.project.language, "python");
    assert!(!config.scan.include.is_empty());
}
|
||||
|
||||
#[test]
fn test_config_validate_on_test_project() {
    // Validation should pass once `project.root` points at the real
    // test-project directory (the shipped config uses a relative root).
    let workspace_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap();
    let config_path = workspace_root.join("test-project/wtismycode.toml");

    let mut config = Config::load_from_file(&config_path).expect("Failed to load config");
    config.project.root = config_path.parent().unwrap().to_string_lossy().to_string();
    assert!(config.validate().is_ok());
}
|
||||
|
||||
#[test]
fn test_config_validate_rejects_bad_language() {
    // An unsupported analysis language must be rejected by validation.
    let mut config = Config::default();
    config.project.language = String::from("java");
    assert!(config.validate().is_err());
}
|
||||
|
||||
#[test]
fn test_scan_test_project() {
    // Scanning the bundled test-project must discover at least one Python file.
    let test_project = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("test-project");

    // Load the project's config and anchor its root at the real directory.
    let mut config = Config::load_from_file(&test_project.join("wtismycode.toml"))
        .expect("Failed to load config");
    config.project.root = test_project.to_string_lossy().to_string();

    let files = FileScanner::new(config)
        .scan_python_files(&test_project)
        .expect("Scan should succeed");
    assert!(!files.is_empty(), "Should find Python files in test-project");
}
|
||||
|
||||
#[test]
fn test_cycle_detection_with_known_cycles() {
    // Build a three-module import cycle (a → b → c → a) and verify the
    // detector reports exactly that one cycle.
    //
    // DRY: the original repeated the full `Module` literal three times;
    // a local helper keeps the topology readable at a glance.
    fn add_module(
        model: &mut ProjectModel,
        id: &str,
        path: &str,
        outbound: &[&str],
        inbound: &[&str],
    ) {
        model.modules.insert(
            id.into(),
            Module {
                id: id.into(),
                path: path.into(),
                files: vec![],
                doc_summary: None,
                outbound_modules: outbound.iter().map(|m| (*m).into()).collect(),
                inbound_modules: inbound.iter().map(|m| (*m).into()).collect(),
                symbols: vec![],
            },
        );
    }

    let mut model = ProjectModel::new();
    add_module(&mut model, "mod_a", "a.py", &["mod_b"], &["mod_c"]);
    add_module(&mut model, "mod_b", "b.py", &["mod_c"], &["mod_a"]);
    add_module(&mut model, "mod_c", "c.py", &["mod_a"], &["mod_b"]);

    let cycles = cycle_detector::detect_cycles(&model);
    assert_eq!(cycles.len(), 1, "Should detect exactly one cycle");
    assert_eq!(cycles[0].len(), 3, "Cycle should have 3 modules");
}
|
||||
|
||||
#[test]
fn test_cycle_detection_no_cycles() {
    // A simple two-module DAG (a → b) must yield no cycles.
    let mut model = ProjectModel::new();

    let module_a = Module {
        id: "mod_a".into(),
        path: "a.py".into(),
        files: vec![],
        doc_summary: None,
        outbound_modules: vec!["mod_b".into()],
        inbound_modules: vec![],
        symbols: vec![],
    };
    let module_b = Module {
        id: "mod_b".into(),
        path: "b.py".into(),
        files: vec![],
        doc_summary: None,
        outbound_modules: vec![],
        inbound_modules: vec!["mod_a".into()],
        symbols: vec![],
    };
    model.modules.insert("mod_a".into(), module_a);
    model.modules.insert("mod_b".into(), module_b);

    let cycles = cycle_detector::detect_cycles(&model);
    assert!(cycles.is_empty(), "Should detect no cycles in DAG");
}
|
||||
|
||||
#[test]
fn test_renderer_produces_output() {
    // Even an empty project model must render without error.
    // FIX: the original bound `Config::default()` to a local that was never
    // used (the renderer takes no config) — the dead binding is removed.
    let model = ProjectModel::new();
    let renderer = Renderer::new();
    let result = renderer.render_architecture_md(&model, None);
    assert!(result.is_ok(), "Renderer should produce output for empty model");
}
|
||||
|
||||
#[test]
fn test_parse_duration_values() {
    // Spot-check duration and file-size parsing against known conversions.
    use wtismycode_core::config::{parse_duration, parse_file_size};

    // Seconds in one day, spelled out so the expected values are auditable.
    let day_secs = 24 * 60 * 60;
    assert_eq!(parse_duration("24h").unwrap(), day_secs);
    assert_eq!(parse_duration("7d").unwrap(), 7 * day_secs);
    assert_eq!(parse_file_size("10MB").unwrap(), 10 * 1024 * 1024);
    assert_eq!(parse_file_size("1GB").unwrap(), 1024 * 1024 * 1024);
}
|
||||
60
wtismycode-core/tests/golden/files/example_architecture.md
Normal file
60
wtismycode-core/tests/golden/files/example_architecture.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# Architecture Documentation
|
||||
|
||||
Generated at: 1970-01-01 00:00:00 UTC
|
||||
|
||||
## Overview
|
||||
|
||||
This document provides an overview of the architecture for the project.
|
||||
|
||||
## Modules
|
||||
|
||||
### example.py
|
||||
|
||||
File: `example.py`
|
||||
|
||||
#### Imports
|
||||
|
||||
- `os`
|
||||
- `typing.List`
|
||||
|
||||
#### Symbols
|
||||
|
||||
##### Calculator
|
||||
|
||||
- Type: Class
|
||||
- Signature: `class Calculator`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.__init__
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def __init__(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.add
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def add(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### Calculator.multiply
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def multiply(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
##### process_numbers
|
||||
|
||||
- Type: Function
|
||||
- Signature: `def process_numbers(...)`
|
||||
- Purpose: extracted from AST
|
||||
|
||||
## Metrics
|
||||
|
||||
### Critical Components
|
||||
|
||||
No critical components identified.
|
||||
|
||||
### Component Dependencies
|
||||
|
||||
Dependency analysis not yet implemented.
|
||||
107
wtismycode-core/tests/golden/mod.rs
Normal file
107
wtismycode-core/tests/golden/mod.rs
Normal file
@@ -0,0 +1,107 @@
|
||||
//! Golden tests for WTIsMyCode
|
||||
//!
|
||||
//! These tests generate documentation for test projects and compare the output
|
||||
//! with expected "golden" files to ensure consistency.
|
||||
|
||||
mod test_utils;
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
fn test_simple_project_generation() {
    // Drive the scan → parse → resolve pipeline over the golden test project
    // and sanity-check the resulting project model.
    let current_dir = std::env::current_dir().unwrap();
    println!("Current directory: {:?}", current_dir);

    // The config may be reachable from either the crate dir or its parent.
    let possible_paths = [
        "tests/golden/test_project/wtismycode.toml",
        "../tests/golden/test_project/wtismycode.toml",
    ];
    let config_path = possible_paths
        .iter()
        .find(|&path| Path::new(path).exists())
        .expect("Could not find config file in any expected location");
    println!("Using config path: {:?}", config_path);

    let config = Config::load_from_file(Path::new(config_path)).expect("Failed to load config");

    // Discover the project's Python files.
    let project_root = Path::new("tests/golden/test_project");
    let scanner = FileScanner::new(config.clone());
    let python_files = scanner
        .scan_python_files(project_root)
        .expect("Failed to scan Python files");
    println!("Found Python files: {:?}", python_files);

    // Parse every file, failing loudly on the first parse error.
    let analyzer = PythonAnalyzer::new(config.clone());
    let mut parsed_modules = Vec::new();
    for file_path in python_files {
        println!("Parsing file: {:?}", file_path);
        match analyzer.parse_module(&file_path) {
            Ok(module) => {
                println!("Successfully parsed module: {:?}", module.module_path);
                println!("Imports: {:?}", module.imports);
                println!("Symbols: {:?}", module.symbols.len());
                println!("Calls: {:?}", module.calls.len());
                parsed_modules.push(module);
            }
            Err(e) => panic!("Failed to parse {}: {}", file_path.display(), e),
        }
    }
    println!("Parsed {} modules", parsed_modules.len());

    // Build the project model from the parsed modules.
    let project_model = analyzer
        .resolve_symbols(&parsed_modules)
        .expect("Failed to resolve symbols");
    println!("Project model modules: {}", project_model.modules.len());
    println!("Project model files: {}", project_model.files.len());
    println!("Project model symbols: {}", project_model.symbols.len());

    assert!(!project_model.modules.is_empty());
    assert!(!project_model.files.is_empty());
    assert!(!project_model.symbols.is_empty());

    // Two source files => two modules (example.py and advanced_example.py).
    assert_eq!(project_model.modules.len(), 2);

    // example.py must be among the modelled modules.
    let mut found_example_module = false;
    for module in project_model.modules.values() {
        if module.path.contains("example.py") {
            found_example_module = true;
            break;
        }
    }
    assert!(found_example_module);

    // The Calculator class must be present with the right kind.
    let calculator_symbol = project_model
        .symbols
        .values()
        .find(|s| s.id.ends_with("::Calculator"));
    assert!(calculator_symbol.is_some());
    assert_eq!(
        calculator_symbol.unwrap().kind,
        wtismycode_core::model::SymbolKind::Class
    );

    // The free function process_numbers must be present too.
    let process_numbers_symbol = project_model
        .symbols
        .values()
        .find(|s| s.id.ends_with("::process_numbers"));
    assert!(process_numbers_symbol.is_some());
    assert_eq!(
        process_numbers_symbol.unwrap().kind,
        wtismycode_core::model::SymbolKind::Function
    );

    // At least one file doc exists and records its imports.
    assert!(!project_model.files.is_empty());
    let file_entry = project_model.files.iter().next().unwrap();
    let file_doc = file_entry.1;
    assert!(!file_doc.imports.is_empty());
}
|
||||
73
wtismycode-core/tests/golden/test_project/ARCHITECTURE.md
Normal file
73
wtismycode-core/tests/golden/test_project/ARCHITECTURE.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# ARCHITECTURE — New Project
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## Project summary
|
||||
**Name:** New Project
|
||||
**Description:** <FILL_MANUALLY: what this project does in 3–7 lines>
|
||||
|
||||
## Key decisions (manual)
|
||||
- <FILL_MANUALLY>
|
||||
|
||||
## Non-goals (manual)
|
||||
- <FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
|
||||
---
|
||||
|
||||
## Document metadata
|
||||
- **Created:** 2026-01-25
|
||||
- **Updated:** 2026-01-25
|
||||
- **Generated by:** wtismycode (cli) v0.1
|
||||
|
||||
---
|
||||
|
||||
## Rails / Tooling
|
||||
<!-- ARCHDOC:BEGIN section=rails -->
|
||||
|
||||
No tooling information available.
|
||||
<!-- ARCHDOC:END section=rails -->
|
||||
|
||||
---
|
||||
|
||||
## Repository layout (top-level)
|
||||
<!-- ARCHDOC:BEGIN section=layout -->
|
||||
|
||||
| Path | Purpose | Link |
|
||||
|------|---------|------|
|
||||
| ./src/advanced_example.py | Source file | [details](docs/architecture/files/._src_advanced_example.py.md) |
|
||||
| ./src/example.py | Source file | [details](docs/architecture/files/._src_example.py.md) |
|
||||
<!-- ARCHDOC:END section=layout -->
|
||||
|
||||
---
|
||||
|
||||
## Modules index
|
||||
<!-- ARCHDOC:BEGIN section=modules_index -->
|
||||
|
||||
| Module | Symbols | Inbound | Outbound | Link |
|
||||
|--------|---------|---------|----------|------|
|
||||
| ./src/advanced_example.py | 10 | 0 | 0 | [details](docs/architecture/modules/._src_advanced_example.py.md) |
|
||||
| ./src/example.py | 5 | 0 | 0 | [details](docs/architecture/modules/._src_example.py.md) |
|
||||
<!-- ARCHDOC:END section=modules_index -->
|
||||
|
||||
---
|
||||
|
||||
## Critical dependency points
|
||||
<!-- ARCHDOC:BEGIN section=critical_points -->
|
||||
|
||||
### High Fan-in (Most Called)
|
||||
| Symbol | Fan-in | Critical |
|
||||
|--------|--------|----------|
|
||||
|
||||
### High Fan-out (Calls Many)
|
||||
| Symbol | Fan-out | Critical |
|
||||
|--------|---------|----------|
|
||||
|
||||
### Module Cycles
|
||||
<!-- ARCHDOC:END section=critical_points -->
|
||||
|
||||
---
|
||||
|
||||
<!-- MANUAL:BEGIN -->
|
||||
## Change notes (manual)
|
||||
- <FILL_MANUALLY>
|
||||
<!-- MANUAL:END -->
|
||||
@@ -0,0 +1,3 @@
|
||||
# File: ./src/advanced_example.py
|
||||
|
||||
TODO: Add file documentation
|
||||
@@ -0,0 +1,3 @@
|
||||
# File: ./src/example.py
|
||||
|
||||
TODO: Add file documentation
|
||||
@@ -0,0 +1,3 @@
|
||||
# Module: ./src/advanced_example.py
|
||||
|
||||
TODO: Add module documentation
|
||||
@@ -0,0 +1,3 @@
|
||||
# Module: ./src/example.py
|
||||
|
||||
TODO: Add module documentation
|
||||
@@ -0,0 +1,107 @@
|
||||
"""Advanced example module for testing with integrations."""
|
||||
|
||||
import requests
|
||||
import sqlite3
|
||||
import redis
|
||||
from typing import List, Dict
|
||||
|
||||
class UserService:
|
||||
"""A service for managing users with database integration."""
|
||||
|
||||
def __init__(self, db_path: str = "users.db"):
|
||||
"""Initialize the user service with database path."""
|
||||
self.db_path = db_path
|
||||
self._init_db()
|
||||
|
||||
def _init_db(self):
|
||||
"""Initialize the database."""
|
||||
conn = sqlite3.connect(self.db_path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("""
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT UNIQUE NOT NULL
|
||||
)
|
||||
""")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
def create_user(self, name: str, email: str) -> Dict:
|
||||
"""Create a new user in the database."""
|
||||
conn = sqlite3.connect(self.db_path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"INSERT INTO users (name, email) VALUES (?, ?)",
|
||||
(name, email)
|
||||
)
|
||||
user_id = cursor.lastrowid
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
return {"id": user_id, "name": name, "email": email}
|
||||
|
||||
def get_user(self, user_id: int) -> Dict:
|
||||
"""Get a user by ID from the database."""
|
||||
conn = sqlite3.connect(self.db_path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return {"id": row[0], "name": row[1], "email": row[2]}
|
||||
return None
|
||||
|
||||
class NotificationService:
|
||||
"""A service for sending notifications with queue integration."""
|
||||
|
||||
def __init__(self, redis_url: str = "redis://localhost:6379"):
|
||||
"""Initialize the notification service with Redis URL."""
|
||||
self.redis_client = redis.Redis.from_url(redis_url)
|
||||
|
||||
def send_email_notification(self, user_id: int, message: str) -> bool:
|
||||
"""Send an email notification by queuing it."""
|
||||
notification = {
|
||||
"user_id": user_id,
|
||||
"message": message,
|
||||
"type": "email"
|
||||
}
|
||||
|
||||
# Push to Redis queue
|
||||
self.redis_client.lpush("notifications", str(notification))
|
||||
return True
|
||||
|
||||
def fetch_external_user_data(user_id: int) -> Dict:
|
||||
"""Fetch user data from an external API."""
|
||||
response = requests.get(f"https://api.example.com/users/{user_id}")
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
return {}
|
||||
|
||||
def process_users(user_ids: List[int]) -> List[Dict]:
|
||||
"""Process a list of users with various integrations."""
|
||||
# Database integration
|
||||
user_service = UserService()
|
||||
|
||||
# Queue integration
|
||||
notification_service = NotificationService()
|
||||
|
||||
results = []
|
||||
for user_id in user_ids:
|
||||
# Database operation
|
||||
user = user_service.get_user(user_id)
|
||||
if user:
|
||||
# External API integration
|
||||
external_data = fetch_external_user_data(user_id)
|
||||
user.update(external_data)
|
||||
|
||||
# Queue operation
|
||||
notification_service.send_email_notification(
|
||||
user_id,
|
||||
f"Processing user {user['name']}"
|
||||
)
|
||||
|
||||
results.append(user)
|
||||
|
||||
return results
|
||||
29
wtismycode-core/tests/golden/test_project/src/example.py
Normal file
29
wtismycode-core/tests/golden/test_project/src/example.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Example module for testing."""
|
||||
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
class Calculator:
|
||||
"""A simple calculator class."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the calculator."""
|
||||
pass
|
||||
|
||||
def add(self, a: int, b: int) -> int:
|
||||
"""Add two numbers."""
|
||||
return a + b
|
||||
|
||||
def multiply(self, a: int, b: int) -> int:
|
||||
"""Multiply two numbers."""
|
||||
return a * b
|
||||
|
||||
def process_numbers(numbers: List[int]) -> List[int]:
|
||||
"""Process a list of numbers."""
|
||||
calc = Calculator()
|
||||
return [calc.add(n, 1) for n in numbers]
|
||||
|
||||
if __name__ == "__main__":
|
||||
numbers = [1, 2, 3, 4, 5]
|
||||
result = process_numbers(numbers)
|
||||
print(f"Processed numbers: {result}")
|
||||
62
wtismycode-core/tests/golden/test_project/wtismycode.toml
Normal file
62
wtismycode-core/tests/golden/test_project/wtismycode.toml
Normal file
@@ -0,0 +1,62 @@
|
||||
# WTIsMyCode configuration for the golden-test fixture project.
# NOTE(review): this is a golden-test fixture — values are intentionally
# exhaustive so every config section the loader understands is exercised.

[project]
# Directory to analyze (relative to this file).
root = "."
# Where generated documentation is written.
out_dir = "docs/architecture"
# Top-level document produced by the renderer.
entry_file = "ARCHITECTURE.md"
# Source language of the analyzed project.
language = "python"

[scan]
# Directories to walk for source files.
include = ["src", "app", "tests"]
# Directories/globs skipped during the walk (virtualenvs, caches, build output).
exclude = [
    ".venv", "venv", "__pycache__", ".git", "dist", "build",
    ".mypy_cache", ".ruff_cache", ".pytest_cache", "*.egg-info"
]
follow_symlinks = false
# Files larger than this are not parsed.
max_file_size = "10MB"

[python]
# Roots used to resolve module paths to dotted module names.
src_roots = ["src", "."]
include_tests = true
parse_docstrings = true
# Abort analysis after this many parse failures.
max_parse_errors = 10

[analysis]
resolve_calls = true
resolve_inheritance = false
# Flag symbols whose modules import well-known integration libraries.
detect_integrations = true
# Import-name patterns that mark a module as using each integration type.
integration_patterns = [
    { type = "http", patterns = ["requests", "httpx", "aiohttp"] },
    { type = "db", patterns = ["sqlalchemy", "psycopg", "mysql", "sqlite3"] },
    { type = "queue", patterns = ["celery", "kafka", "pika", "redis"] }
]

[output]
single_file = false
# Emit one doc per analyzed source file.
per_file_docs = true
create_directories = true
# Keep hand-written sections intact on regeneration.
overwrite_manual_sections = false

[diff]
# Only bump timestamps when generated content actually changed.
update_timestamp_on_change_only = true
hash_algorithm = "sha256"
preserve_manual_content = true

[thresholds]
# Fan-in/fan-out above these marks a symbol as critical.
critical_fan_in = 20
critical_fan_out = 20
high_complexity = 50

[rendering]
template_engine = "handlebars"
# Tables are truncated beyond this many rows.
max_table_rows = 100
truncate_long_descriptions = true
description_max_length = 200

[logging]
level = "info"
file = "wtismycode.log"
format = "compact"

[caching]
enabled = true
# Parsed-module cache location (see .wtismycode/cache/*.json).
cache_dir = ".wtismycode/cache"
# Cached entries older than this are re-parsed.
max_cache_age = "24h"
|
||||
21
wtismycode-core/tests/golden/test_utils.rs
Normal file
21
wtismycode-core/tests/golden/test_utils.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! Test utilities for golden tests
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Read a file and return its contents.
///
/// # Panics
/// Panics if the file is missing, unreadable, or not valid UTF-8.
pub fn read_test_file(path: &str) -> String {
    // `unwrap_or_else` builds the panic message lazily; the original
    // `expect(&format!(..))` allocated it even on success (clippy::expect_fun_call).
    fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read test file: {}", path))
}
|
||||
|
||||
/// Write content to a file for testing.
///
/// # Panics
/// Panics if the file cannot be created or written.
pub fn write_test_file(path: &str, content: &str) {
    // Lazy panic message: `expect(&format!(..))` formats the string even on
    // the success path (clippy::expect_fun_call).
    fs::write(path, content).unwrap_or_else(|_| panic!("Failed to write test file: {}", path))
}
|
||||
|
||||
/// Compare two strings; panic with a labelled diff dump when they differ.
pub fn assert_strings_equal(actual: &str, expected: &str, message: &str) {
    // Guard clause: matching strings need no further work.
    if actual == expected {
        return;
    }
    panic!("{}: Strings do not match\nActual:\n{}\nExpected:\n{}", message, actual, expected);
}
|
||||
137
wtismycode-core/tests/integration_detection.rs
Normal file
137
wtismycode-core/tests/integration_detection.rs
Normal file
@@ -0,0 +1,137 @@
|
||||
//! Integration detection tests for WTIsMyCode
|
||||
//!
|
||||
//! These tests verify that the integration detection functionality works correctly.
|
||||
//! Integration detection now happens at module level during resolve_symbols,
|
||||
//! based on actual imports rather than AST body inspection.
|
||||
|
||||
use std::fs;
|
||||
use tempfile::TempDir;
|
||||
use wtismycode_core::{Config, python_analyzer::PythonAnalyzer};
|
||||
|
||||
#[test]
fn test_http_integration_detection() {
    // Point the analyzer at a scratch directory so nothing real is scanned.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let mut config = Config::default();
    config.project.root = temp_dir.path().to_string_lossy().to_string();
    config.python.src_roots = vec![".".to_string()];
    let analyzer = PythonAnalyzer::new(config);

    // A module importing `requests` should be flagged as HTTP-integrated.
    let python_code = r#"
import requests

def fetch_data():
    response = requests.get("https://api.example.com/data")
    return response.json()
"#;
    let temp_file = temp_dir.path().join("test.py");
    fs::write(&temp_file, python_code).expect("Failed to write test file");

    let parsed = analyzer
        .parse_module(&temp_file)
        .expect("Failed to parse module");
    let model = analyzer
        .resolve_symbols(&[parsed])
        .expect("Failed to resolve symbols");

    // Symbol ids are module-prefixed, so locate the symbol by qualname.
    let symbol = model
        .symbols
        .values()
        .find(|s| s.qualname == "fetch_data")
        .expect("fetch_data symbol not found");

    // Only the HTTP flag should be set for a requests-importing module.
    assert!(symbol.integrations_flags.http);
    assert!(!symbol.integrations_flags.db);
    assert!(!symbol.integrations_flags.queue);
}
|
||||
|
||||
#[test]
fn test_db_integration_detection() {
    // Analyzer rooted in a throwaway directory.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let mut config = Config::default();
    config.project.root = temp_dir.path().to_string_lossy().to_string();
    config.python.src_roots = vec![".".to_string()];
    let analyzer = PythonAnalyzer::new(config);

    // A module importing `sqlite3` should be flagged as DB-integrated.
    let python_code = r#"
import sqlite3

def get_user(user_id):
    conn = sqlite3.connect("database.db")
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
    return cursor.fetchone()
"#;
    let temp_file = temp_dir.path().join("test.py");
    fs::write(&temp_file, python_code).expect("Failed to write test file");

    let parsed = analyzer
        .parse_module(&temp_file)
        .expect("Failed to parse module");
    let model = analyzer
        .resolve_symbols(&[parsed])
        .expect("Failed to resolve symbols");

    let symbol = model
        .symbols
        .values()
        .find(|s| s.qualname == "get_user")
        .expect("get_user symbol not found");

    // Only the DB flag should fire here.
    assert!(!symbol.integrations_flags.http);
    assert!(symbol.integrations_flags.db);
    assert!(!symbol.integrations_flags.queue);
}
|
||||
|
||||
#[test]
fn test_queue_integration_detection() {
    // Analyzer rooted in a throwaway directory.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let mut config = Config::default();
    config.project.root = temp_dir.path().to_string_lossy().to_string();
    config.python.src_roots = vec![".".to_string()];
    let analyzer = PythonAnalyzer::new(config);

    // A module importing `redis` should be flagged as queue-integrated.
    let python_code = r#"
import redis

def process_job(job_data):
    client = redis.Redis()
    client.lpush("job_queue", job_data)
"#;
    let temp_file = temp_dir.path().join("test.py");
    fs::write(&temp_file, python_code).expect("Failed to write test file");

    let parsed = analyzer
        .parse_module(&temp_file)
        .expect("Failed to parse module");
    let model = analyzer
        .resolve_symbols(&[parsed])
        .expect("Failed to resolve symbols");

    let symbol = model
        .symbols
        .values()
        .find(|s| s.qualname == "process_job")
        .expect("process_job symbol not found");

    // Only the queue flag should fire here.
    assert!(!symbol.integrations_flags.http);
    assert!(!symbol.integrations_flags.db);
    assert!(symbol.integrations_flags.queue);
}
|
||||
|
||||
#[test]
fn test_no_integration_detection() {
    // Analyzer rooted in a throwaway directory.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let mut config = Config::default();
    config.project.root = temp_dir.path().to_string_lossy().to_string();
    config.python.src_roots = vec![".".to_string()];
    let analyzer = PythonAnalyzer::new(config);

    // Negative case: a module with no integration imports sets no flags.
    let python_code = r#"
def calculate_sum(a, b):
    return a + b
"#;
    let temp_file = temp_dir.path().join("test.py");
    fs::write(&temp_file, python_code).expect("Failed to write test file");

    let parsed = analyzer
        .parse_module(&temp_file)
        .expect("Failed to parse module");
    let model = analyzer
        .resolve_symbols(&[parsed])
        .expect("Failed to resolve symbols");

    let symbol = model
        .symbols
        .values()
        .find(|s| s.qualname == "calculate_sum")
        .expect("calculate_sum symbol not found");

    assert!(!symbol.integrations_flags.http);
    assert!(!symbol.integrations_flags.db);
    assert!(!symbol.integrations_flags.queue);
}
|
||||
13
wtismycode-core/tests/integration_tests.rs
Normal file
13
wtismycode-core/tests/integration_tests.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
//! Integration tests for WTIsMyCode
|
||||
|
||||
// Include golden tests
|
||||
mod golden;
|
||||
mod error_handling;
|
||||
mod caching;
|
||||
mod integration_detection;
|
||||
mod enhanced_analysis;
|
||||
|
||||
// Run all tests
|
||||
fn main() {
|
||||
// This is just a placeholder - tests are run by cargo test
|
||||
}
|
||||
93
wtismycode-core/tests/project_analysis.rs
Normal file
93
wtismycode-core/tests/project_analysis.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
//! Tests for analyzing the test project
|
||||
|
||||
use wtismycode_core::{
|
||||
config::Config,
|
||||
python_analyzer::PythonAnalyzer,
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
#[test]
fn test_project_analysis() {
    // Drive the analyzer against the checked-in sample project.
    let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap();
    let analyzer = PythonAnalyzer::new(config);

    // Core module: parse and dump what was found, for debugging on failure.
    let core = analyzer
        .parse_module(Path::new("../test-project/src/core.py"))
        .unwrap();

    println!("Core module symbols: {}", core.symbols.len());
    for sym in &core.symbols {
        println!("  Symbol: {} ({:?}), DB: {}, HTTP: {}", sym.id, sym.kind, sym.integrations_flags.db, sym.integrations_flags.http);
    }

    println!("Core module calls: {}", core.calls.len());
    for call in &core.calls {
        println!("  Call: {} -> {}", call.caller_symbol, call.callee_expr);
    }

    // The core module must yield both symbols and call edges.
    assert!(!core.symbols.is_empty());
    assert!(!core.calls.is_empty());

    // Import-based integration detection must fire for both DB and HTTP.
    let has_db = core.symbols.iter().any(|s| s.integrations_flags.db);
    let has_http = core.symbols.iter().any(|s| s.integrations_flags.http);
    assert!(has_db, "Database integration should be detected");
    assert!(has_http, "HTTP integration should be detected");

    // Utils module: parse and verify symbols exist there too.
    let utils = analyzer
        .parse_module(Path::new("../test-project/src/utils.py"))
        .unwrap();

    println!("Utils module symbols: {}", utils.symbols.len());
    for sym in &utils.symbols {
        println!("  Symbol: {} ({:?}), DB: {}, HTTP: {}", sym.id, sym.kind, sym.integrations_flags.db, sym.integrations_flags.http);
    }

    assert!(!utils.symbols.is_empty());
}
|
||||
|
||||
#[test]
fn test_full_project_resolution() {
    // Analyze the sample project end to end: parse every module, then
    // resolve them together into a single project model.
    let config = Config::load_from_file(Path::new("../test-project/wtismycode.toml")).unwrap();
    let analyzer = PythonAnalyzer::new(config);

    let modules = vec![
        analyzer.parse_module(Path::new("../test-project/src/core.py")).unwrap(),
        analyzer.parse_module(Path::new("../test-project/src/utils.py")).unwrap(),
    ];
    let project_model = analyzer.resolve_symbols(&modules).unwrap();

    // The resolved model must be populated.
    assert!(!project_model.modules.is_empty());
    assert!(!project_model.symbols.is_empty());
    assert!(!project_model.files.is_empty());

    // Integration flags detected at parse time must survive resolution.
    let has_db = project_model.symbols.values().any(|s| s.integrations_flags.db);
    let has_http = project_model.symbols.values().any(|s| s.integrations_flags.http);
    assert!(has_db, "Database integration should be preserved in project model");
    assert!(has_http, "HTTP integration should be preserved in project model");

    println!("Project modules: {:?}", project_model.modules.keys().collect::<Vec<_>>());
    println!("Project symbols: {}", project_model.symbols.len());

    // Dump the flagged symbols for debugging on failure.
    for (id, sym) in &project_model.symbols {
        if sym.integrations_flags.db || sym.integrations_flags.http {
            println!("Symbol {} has DB: {}, HTTP: {}", id, sym.integrations_flags.db, sym.integrations_flags.http);
        }
    }
}
|
||||
89
wtismycode-core/tests/renderer_tests.rs
Normal file
89
wtismycode-core/tests/renderer_tests.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
//! Tests for the renderer functionality
|
||||
|
||||
use wtismycode_core::{
|
||||
model::{ProjectModel, Symbol, SymbolKind, IntegrationFlags, SymbolMetrics},
|
||||
renderer::Renderer,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[test]
fn test_render_with_integrations() {
    // Build a Symbol with the given identity/kind/signature/flags; every
    // other field is identical boilerplate across the fixtures, so the two
    // ~40-line struct literals collapse into one helper.
    fn make_symbol(id: &str, kind: SymbolKind, signature: &str, flags: IntegrationFlags) -> Symbol {
        Symbol {
            id: id.to_string(),
            kind,
            module_id: "test_module".to_string(),
            file_id: "test_file.py".to_string(),
            qualname: id.to_string(),
            signature: signature.to_string(),
            annotations: None,
            docstring_first_line: None,
            purpose: "test".to_string(),
            outbound_calls: vec![],
            inbound_calls: vec![],
            integrations_flags: flags,
            metrics: SymbolMetrics {
                fan_in: 0,
                fan_out: 0,
                is_critical: false,
                cycle_participant: false,
            },
        }
    }

    // Mock project model: one DB-flagged class and one HTTP-flagged function.
    let mut project_model = ProjectModel::new();
    let db_symbol = make_symbol(
        "DatabaseManager",
        SymbolKind::Class,
        "class DatabaseManager",
        IntegrationFlags { db: true, http: false, queue: false, storage: false, ai: false },
    );
    let http_symbol = make_symbol(
        "fetch_data",
        SymbolKind::Function,
        "def fetch_data()",
        IntegrationFlags { db: false, http: true, queue: false, storage: false, ai: false },
    );
    project_model.symbols.insert("DatabaseManager".to_string(), db_symbol);
    project_model.symbols.insert("fetch_data".to_string(), http_symbol);

    // Render architecture documentation from the model.
    let renderer = Renderer::new();
    let result = renderer.render_architecture_md(&project_model, None);
    assert!(result.is_ok());

    let rendered_content = result.unwrap();
    println!("Rendered content:\n{}", rendered_content);

    // The integration sections and both symbols must appear in the output.
    assert!(rendered_content.contains("## Integrations"));
    assert!(rendered_content.contains("### Database Integrations"));
    assert!(rendered_content.contains("### HTTP/API Integrations"));
    assert!(rendered_content.contains("DatabaseManager in test_file.py"));
    assert!(rendered_content.contains("fetch_data in test_file.py"));
}
|
||||
Reference in New Issue
Block a user