fix: resolve all cargo clippy warnings
- Fix toml version requirement metadata warning
- Replace clone() with std::slice::from_ref()
- Collapse nested if statements
- Add #[allow(dead_code)] to test utility functions
- Fix unused imports and variables in tests
- Use unwrap_or_else instead of expect with format!
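Most of the hunks below are the "collapse nested if statements" item: clippy's collapsible_if lint, resolved here with let-chains, which are stable on edition 2024 (Rust 1.88+; this crate's Cargo.toml declares edition = "2024"). A minimal sketch of the pattern, using a hypothetical function rather than code from this repo:

    // Before: nested ifs flagged by clippy::collapsible_if.
    fn name_len_before(line: &str) -> Option<usize> {
        if line.starts_with("name") {
            if let Some(val) = line.split('=').nth(1) {
                return Some(val.trim().len());
            }
        }
        None
    }

    // After: one let-chain, same behavior, one less nesting level.
    fn name_len_after(line: &str) -> Option<usize> {
        if line.starts_with("name")
            && let Some(val) = line.split('=').nth(1)
        {
            return Some(val.trim().len());
        }
        None
    }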
@@ -19,8 +19,8 @@ fn detect_project_name(root: &str) -> String {
                 in_project = false;
                 continue;
             }
-            if in_project && trimmed.starts_with("name") {
-                if let Some(val) = trimmed.split('=').nth(1) {
+            if in_project && trimmed.starts_with("name")
+                && let Some(val) = trimmed.split('=').nth(1) {
                     let name = val.trim().trim_matches('"').trim_matches('\'');
                     if !name.is_empty() {
                         return name.to_string();
@@ -28,7 +28,6 @@ fn detect_project_name(root: &str) -> String {
                     }
-                }
             }
         }
 
     // Fallback: directory basename
     root_path
@@ -6,7 +6,7 @@ edition = "2024"
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-toml = "0.9.11+spec-1.1.0"
+toml = "0.9.11"
 tracing = "0.1"
 anyhow = "1.0"
 thiserror = "2.0.18"
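The toml change above addresses the "version requirement metadata" warning from the commit message: cargo ignores semver build metadata (the +spec-1.1.0 suffix) when resolving version requirements and warns about it, so the plain "0.9.11" requirement is equivalent and warning-free.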
@@ -114,8 +114,8 @@ fn deduplicate_cycles(cycles: Vec<Vec<String>>) -> Vec<Vec<String>> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::model::{Edges, Module, ProjectModel};
-    use std::collections::HashMap;
+    use crate::model::{Module, ProjectModel};
 
     fn make_module(id: &str, outbound: Vec<&str>) -> Module {
         Module {
@@ -23,7 +23,7 @@ pub use model::ProjectModel;
 
 #[cfg(test)]
 mod tests {
-    use super::*;
 
     #[test]
     fn it_works() {
@@ -180,12 +180,11 @@ impl PackageClassifier {
 
     fn load_pypi_cache(&mut self, dir: &str) {
         let cache_path = Path::new(dir).join("pypi.json");
-        if let Ok(content) = std::fs::read_to_string(&cache_path) {
-            if let Ok(cache) = serde_json::from_str::<HashMap<String, Option<PackageCategory>>>(&content) {
+        if let Ok(content) = std::fs::read_to_string(&cache_path)
+            && let Ok(cache) = serde_json::from_str::<HashMap<String, Option<PackageCategory>>>(&content) {
                 self.pypi_cache = cache;
             }
-        }
     }
 
     fn pypi_lookup(&self, package_name: &str) -> Option<PackageCategory> {
         let url = format!("https://pypi.org/pypi/{}/json", package_name);
@@ -209,13 +208,12 @@ impl PackageClassifier {
             // Check classifiers
             if let Some(classifiers) = info.get("classifiers").and_then(|c: &serde_json::Value| c.as_array()) {
                 for classifier in classifiers {
-                    if let Some(s) = classifier.as_str() {
-                        if let Some(cat) = classify_from_pypi_classifier(s) {
+                    if let Some(s) = classifier.as_str()
+                        && let Some(cat) = classify_from_pypi_classifier(s) {
                             return Some(cat);
                         }
-                    }
                 }
             }
 
             // Check summary and keywords for hints
             let summary = info.get("summary").and_then(|s: &serde_json::Value| s.as_str()).unwrap_or("");
@@ -245,15 +245,14 @@ impl Renderer {
         let category_order = ["HTTP", "Database", "Queue", "Storage", "AI/ML", "Auth", "Testing", "Logging", "Internal", "Third-party"];
         let mut integration_sections: Vec<serde_json::Value> = Vec::new();
         for cat_name in &category_order {
-            if let Some(pkgs) = model.classified_integrations.get(*cat_name) {
-                if !pkgs.is_empty() {
+            if let Some(pkgs) = model.classified_integrations.get(*cat_name)
+                && !pkgs.is_empty() {
                     integration_sections.push(serde_json::json!({
                         "category": cat_name,
                         "packages": pkgs,
                     }));
                 }
-            }
         }
 
         // Determine project name: config > pyproject.toml > directory name > fallback
         let project_name = config
@@ -281,15 +280,14 @@ impl Renderer {
                     in_project = false;
                     continue;
                 }
-                if in_project && trimmed.starts_with("name") {
-                    if let Some(val) = trimmed.split('=').nth(1) {
+                if in_project && trimmed.starts_with("name")
+                    && let Some(val) = trimmed.split('=').nth(1) {
                         let name = val.trim().trim_matches('"').trim_matches('\'');
                         if !name.is_empty() {
                             return Some(name.to_string());
                         }
                     }
-                }
                 }
                 None
             })
         })
@@ -550,15 +548,14 @@ impl Renderer {
         let category_order = ["HTTP", "Database", "Queue", "Storage", "AI/ML", "Auth", "Testing", "Logging", "Internal", "Third-party"];
         let mut integration_sections: Vec<serde_json::Value> = Vec::new();
         for cat_name in &category_order {
-            if let Some(pkgs) = model.classified_integrations.get(*cat_name) {
-                if !pkgs.is_empty() {
+            if let Some(pkgs) = model.classified_integrations.get(*cat_name)
+                && !pkgs.is_empty() {
                     integration_sections.push(serde_json::json!({
                         "category": cat_name,
                         "packages": pkgs,
                     }));
                 }
-            }
         }
 
         let data = serde_json::json!({
             "integration_sections": integration_sections,
@@ -2,7 +2,6 @@
 //!
 //! These tests verify that the caching functionality works correctly.
 
-use std::path::Path;
 use std::fs;
 use tempfile::TempDir;
 use wtismycode_core::{Config, python_analyzer::PythonAnalyzer};
@@ -3,7 +3,6 @@
 //! These tests verify that the enhanced analysis functionality works correctly
 //! with complex code that includes integrations, calls, and docstrings.
 
-use std::fs;
 use std::path::Path;
 use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 
@@ -139,7 +139,7 @@ fn test_cycle_detection_no_cycles() {
 
 #[test]
 fn test_renderer_produces_output() {
-    let config = Config::default();
+    let _config = Config::default();
     let model = ProjectModel::new();
     let renderer = Renderer::new();
     let result = renderer.render_architecture_md(&model, None);
@@ -5,7 +5,6 @@
 
 mod test_utils;
 
-use std::fs;
 use std::path::Path;
 use wtismycode_core::{Config, scanner::FileScanner, python_analyzer::PythonAnalyzer};
 
@@ -1,19 +1,21 @@
 //! Test utilities for golden tests
 
 use std::fs;
-use std::path::Path;
 
 /// Read a file and return its contents
+#[allow(dead_code)]
 pub fn read_test_file(path: &str) -> String {
-    fs::read_to_string(path).expect(&format!("Failed to read test file: {}", path))
+    fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read test file: {}", path))
 }
 
 /// Write content to a file for testing
+#[allow(dead_code)]
 pub fn write_test_file(path: &str, content: &str) {
-    fs::write(path, content).expect(&format!("Failed to write test file: {}", path))
+    fs::write(path, content).unwrap_or_else(|_| panic!("Failed to write test file: {}", path))
 }
 
 /// Compare two strings and panic if they don't match
+#[allow(dead_code)]
 pub fn assert_strings_equal(actual: &str, expected: &str, message: &str) {
     if actual != expected {
         panic!("{}: Strings do not match\nActual:\n{}\nExpected:\n{}", message, actual, expected);
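The expect(&format!(...)) to unwrap_or_else(|_| panic!(...)) rewrites above resolve clippy's expect_fun_call lint: the format! argument to expect is built eagerly on every call, even when the Result is Ok, while the closure defers that allocation to the failure path. A standalone sketch of the same pattern (not this crate's code):

    use std::fs;

    fn read_required(path: &str) -> String {
        // Eager: builds the message string even on success.
        //   fs::read_to_string(path).expect(&format!("Failed to read {}", path))
        // Lazy: the message is only formatted if the read fails.
        fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read {}", path))
    }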
@@ -35,7 +35,7 @@ fn test_project_analysis() {
 
     // Integration flags are now set during resolve_symbols, not parse_module
     // So we resolve and check there
-    let project_model = analyzer.resolve_symbols(&[core_module.clone()]).unwrap();
+    let project_model = analyzer.resolve_symbols(std::slice::from_ref(&core_module)).unwrap();
     let db_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.db);
     let http_integration_found = project_model.symbols.values().any(|s| s.integrations_flags.http);
 
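The final hunk swaps a clone for std::slice::from_ref, which turns a single &T into a &[T] of length one without copying the value. A small illustration with a stand-in type (the real Module lives in this crate's model module):

    // Stand-in type for illustration only.
    #[derive(Clone)]
    struct Module { id: String }

    fn analyze(modules: &[Module]) -> usize {
        modules.len()
    }

    fn main() {
        let core_module = Module { id: "core".into() };
        // Before: clones just to build a temporary one-element slice.
        assert_eq!(analyze(&[core_module.clone()]), 1);
        // After: borrows core_module as a one-element slice, no clone.
        assert_eq!(analyze(std::slice::from_ref(&core_module)), 1);
    }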