From 94d77e709aa364e95ca6ff8b366631bd7ed16445 Mon Sep 17 00:00:00 2001 From: Don Jayamanne Date: Wed, 16 Jul 2025 09:36:12 +1000 Subject: [PATCH 1/7] AI ready instructions --- .github/copilot-instructions.md | 128 ++++++++++++++++++++++++++++ .github/prompts/analyze.prompt.md | 38 +++++++++ .github/prompts/explain.prompt.md | 40 +++++++++ .github/prompts/implement.prompt.md | 13 +++ .github/prompts/plan.prompt.md | 40 +++++++++ 5 files changed, 259 insertions(+) create mode 100644 .github/copilot-instructions.md create mode 100644 .github/prompts/analyze.prompt.md create mode 100644 .github/prompts/explain.prompt.md create mode 100644 .github/prompts/implement.prompt.md create mode 100644 .github/prompts/plan.prompt.md diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..e4bccb65 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,128 @@ +# Python Environment Tools (PET) - AI Coding Agent Instructions + +## Project Overview + +This is a high-performance Rust-based tool for discovering Python environments and virtual environments. It operates as a JSONRPC server consumed by the VS Code Python extension to avoid spawning Python processes repeatedly. + +## Architecture + +### Core Concepts + +- **Locators**: Modular environment discovery components implementing the `Locator` trait (`crates/pet-core/src/lib.rs`) +- **JSONRPC Server**: Main communication interface (`crates/pet/src/jsonrpc.rs`) with stdio/stdout protocol +- **Environment Types**: 15+ supported Python installations (Conda, Poetry, PyEnv, Homebrew, Windows Store, etc.) +- **Reporter Pattern**: Asynchronous environment discovery reporting via the `Reporter` trait + +### Key Architecture Files + +- `crates/pet/src/locators.rs` - Ordered locator creation and fallback identification logic +- `crates/pet/src/find.rs` - Multi-threaded environment discovery coordination +- `crates/pet-core/src/lib.rs` - Core traits and configuration structures +- `docs/JSONRPC.md` - Complete API specification with TypeScript interfaces + +## Development Workflow + +### Building & Testing + +```bash +# Standard build +cargo build + +# Release build (optimized for performance) +cargo build --release + +# Run tests with specific CI features +cargo test --features ci +cargo test --features ci-poetry-global + +# Run JSONRPC server +./target/debug/pet server +``` + +### Feature-Gated Testing + +Tests use feature flags for different environments: + +- `ci` - General CI environment tests +- `ci-jupyter-container` - Jupyter container-specific tests +- `ci-homebrew-container` - Homebrew container tests +- `ci-poetry-*` - Poetry-specific test variants + +### Locator Development Pattern + +When adding new environment types: + +1. **Create new crate**: `crates/pet-{name}/` +2. **Implement Locator trait**: Key methods are `try_from()` (identification) and `find()` (discovery) +3. **Add to locator chain**: Update `create_locators()` in `crates/pet/src/locators.rs` - ORDER MATTERS +4. 
**Platform-specific**: Use `#[cfg(windows)]`, `#[cfg(unix)]`, `#[cfg(target_os = "macos")]` + +Example structure: + +```rust +impl Locator for MyLocator { + fn get_kind(&self) -> LocatorKind { LocatorKind::MyType } + fn supported_categories(&self) -> Vec { vec![PythonEnvironmentKind::MyType] } + fn try_from(&self, env: &PythonEnv) -> Option { /* identification logic */ } + fn find(&self, reporter: &dyn Reporter) { /* discovery logic */ } +} +``` + +## Critical Patterns + +### Performance Principles (from `crates/pet/README.md`) + +1. **Avoid spawning processes** - Extract info from files/filesystem when possible +2. **Report immediately** - Use Reporter pattern for async discovery +3. **Complete information** - Gather all environment details in one pass, not incrementally + +### JSONRPC Communication Flow + +1. Client sends `configure` request (must be first) +2. Client sends `refresh` request to discover environments +3. Server sends `environment` notifications as discoveries happen +4. Optional: `resolve` request for individual Python executables + +### Testing Verification Pattern + +Tests validate discovered environments using 4 verification methods: + +1. Spawn Python to verify `sys.prefix` and `sys.version` +2. Use `try_from()` with executable to get same info +3. Test symlink identification +4. Use `resolve` method for consistency + +## Environment-Specific Notes + +### Conda Environments + +- Supports detection from history files and conda-meta directories +- Manager detection via spawning conda executable in background threads +- Complex prefix/name relationships for base vs named environments + +### Poetry Environments + +- Hash-based environment naming: `{project-name}-{hash}-py` +- Project-specific virtual environments in configured cache directories +- Configuration hierarchy: local poetry.toml → global config + +### Platform Differences + +- **Windows**: Registry + Windows Store detection, different path separators +- **macOS**: Xcode Command Line Tools, python.org, Homebrew paths +- **Linux**: Global system paths (`/usr/bin`, `/usr/local/bin`) + +## Common Gotchas + +- **Locator order matters** in `create_locators()` - more specific before generic +- **Thread safety** - Heavy use of Arc/Mutex for concurrent discovery +- **Feature flags** - Many tests only run with specific CI features enabled +- **Path canonicalization** - Symlink resolution varies by platform +- **Caching** - Optional cache directory for expensive operations (conda spawning) + +## Files to Read First + +1. `docs/JSONRPC.md` - Understanding the external API +2. `crates/pet/src/locators.rs` - Core architecture patterns +3. `crates/pet-core/src/lib.rs` - Essential traits and types +4. `crates/pet/tests/ci_test.rs` - Comprehensive testing patterns diff --git a/.github/prompts/analyze.prompt.md b/.github/prompts/analyze.prompt.md new file mode 100644 index 00000000..7bd218a9 --- /dev/null +++ b/.github/prompts/analyze.prompt.md @@ -0,0 +1,38 @@ +--- +mode: agent +description: Root cause analysis for a bug in the codebase. +tools: ['codebase', 'editFiles', 'fetch', 'findTestFiles', 'githubRepo', 'search', 'searchResults', 'usages', 'vscodeAPI', 'github', 'get_file_contents', 'get_issue', 'get_issue_comments', 'list_issues', 'list_pull_requests', 'search_code', 'search_issues', 'memory', 'sequentialthinking', 'activePullRequest', 'websearch'] +--- +You are an expert in this codebase. 
+ +Your goal is to analyze a bug or add the new feature, for this you first need to: +* Understand the context of the bug or feature by reading the issue description and comments. +* Ask for clarification from user only if the issue description is not clear. +* Understand the codebase by reading the relevant instruction files and code. +* If its a bug, then identify the root cause of the bug, and explain this to the user. +* If just a number is provided by the user, assume it is an issue number and fetch the issue details. + +Based on your above understanding generate a summary of your analysis. +Ensure the plan consists of a Markdown document that has the following sections: + +* Overview: A brief description of the bug/feature. If its a bug, then is this bydesign or a bug? +* Root Cause: A detailed explanation of the root cause of the bug, including any relevant code snippets or references to the codebase. (only if it's a bug) +* Requirements: A list of requirements to resolve the bug or add the new feature. +* Additional Considerations: Mention any potential challenges or risks associated with the implementation. +* Proposal: Can and should a solution be implemented? Is it a bug, or is this by design? What are the risks or challenges associated with a solution if it is a feature? + +Do not make any code edits, just generate a plan. Use thinking and reasoning skills to outline the steps needed to achieve the desired outcome. + + +MUST: +- Read instruction file(s) before analyzing code +- Understand codebase, issue and architecture thoroughly +- Perform root cause analysis only if the issue is a bug +- Never make any assumptions, always strive to be thorough and accurate +- Avoid unnecessary repetition and verbosity +- Be concise, but thorough. + +MUST NOT: +- Make code changes +- Mention all new or updated lines of code + diff --git a/.github/prompts/explain.prompt.md b/.github/prompts/explain.prompt.md new file mode 100644 index 00000000..f9b7fe3c --- /dev/null +++ b/.github/prompts/explain.prompt.md @@ -0,0 +1,40 @@ +--- +mode: agent +description: Analyze the codebase and explain a feature/component in detail. +tools: ['codebase', 'editFiles', 'fetch', 'findTestFiles', 'githubRepo', 'search', 'searchResults', 'usages', 'vscodeAPI', 'search_code', 'memory', 'sequentialthinking', 'websearch'] +--- +# Code Explanation Guide +You are an expert in this codebase. +Your task is to analyze the user requests and explain the feature/component in detail. Where possible use diagrams to depict the architecture and or flow. + +Start by first: +* Understand what needs explaining. +- Read instruction files for the relevant area +- Examine code with appropriate tools +- Understand the codebase by reading the relevant instruction files and code. +- Identify design patterns and architectural decisions +- Use available tools to gather information +- Be thorough before presenting any explanation + +Based on your above understanding generate a markdown document that explains the feature/component in detail. +Use thinking and reasoning skills when generating the explanation & ensure the document has the following sections: + +* Overview: Brief summary of the feature/component and its purpose. +* Architecture: High-level architecture diagram (if applicable). +* Key Components: List and describe key components involved. +* Data Flow: Explain how data moves through the system. +* Control Flow: Describe the control flow and how components interact. 
+* Integration Points: Explain how this feature/component integrates with others. +* Additional Considerations: Mention any potential challenges or risks associated with understanding or modifying this feature/component. +Mention any other relevant information that would help in understanding the feature/component. + + + +MUST: +- Do not make any other code edits. +- Read instruction file(s) before analyzing code +- Understand codebase, issue and architecture thoroughly +- Never make any assumptions, always strive to be thorough and accurate +- Avoid unnecessary repetition and verbosity +- Be concise, but thorough. + diff --git a/.github/prompts/implement.prompt.md b/.github/prompts/implement.prompt.md new file mode 100644 index 00000000..7d3c0603 --- /dev/null +++ b/.github/prompts/implement.prompt.md @@ -0,0 +1,13 @@ +--- +mode: agent +description: Executed after a plan has been created to implement a bug fix or feature request. +tools: ['codebase', 'editFiles', 'fetch', 'findTestFiles', 'githubRepo', 'problems', 'runTasks', 'runTests', 'search', 'searchResults', 'terminalLastCommand', 'terminalSelection', 'testFailure', 'usages', 'vscodeAPI', 'github', 'get_file_contents', 'get_issue', 'get_issue_comments', 'list_issues', 'list_pull_requests', 'search_code', 'search_issues', 'memory', 'sequentialthinking', 'activePullRequest', 'copilotCodingAgent', 'websearch'] +--- +You are an expert in this codebase. +Your task is to now implement the solution. + + +MUST: +- Adhere to patterns and best practices of the project +- Add required tests to ensure the fix works + diff --git a/.github/prompts/plan.prompt.md b/.github/prompts/plan.prompt.md new file mode 100644 index 00000000..02813e38 --- /dev/null +++ b/.github/prompts/plan.prompt.md @@ -0,0 +1,40 @@ +--- +mode: agent +description: Analyze a bug/issue in the codebase and report findings without making code changes. +tools: ['codebase', 'editFiles', 'fetch', 'findTestFiles', 'githubRepo', 'search', 'searchResults', 'usages', 'vscodeAPI', 'github', 'get_file_contents', 'get_issue', 'get_issue_comments', 'list_issues', 'list_pull_requests', 'search_code', 'search_issues', 'memory', 'sequentialthinking', 'activePullRequest', 'websearch'] +--- +You are an expert in this codebase. + +Your goal is to prepare a detailed plan to fix the bug or add the new feature, for this you first need to: +* Understand the context of the bug or feature by reading the issue description and comments. +* Ask for clarification from user only if the issue description is not clear. +* Understand the codebase by reading the relevant instruction files and code. +* If its a bug, then identify the root cause of the bug, and explain this to the user. +* If just a number is provided by the user, assume it is an issue number and fetch the issue details. + +Based on your above understanding generate a plan to fix the bug or add the new feature. +Ensure the plan consists of a Markdown document that has the following sections: + +* Overview: A brief description of the bug/feature. +* Problem: A detailed explanation of the root cause of the bug, including any relevant code snippets or references to the codebase. (only if it's a bug) +* Solution: A brief summary of the solution including a list of requirements to resolve the bug or add the new feature. +* Additional Considerations: Mention any potential challenges or risks associated with the implementation. +* Implementation Steps: A detailed list of steps to implement the bug fix or new feature. 
+Note: Limit information to what is necessary for developers and AI assistants to understand the implementation steps. +Note: Adhere to architecture, development and testing patterns in instruction files + +Do not make any code edits, just generate a plan. Use thinking and reasoning skills to outline the steps needed to achieve the desired outcome. + + +MUST: +- Understand codebase, issue and architecture thoroughly +- Adhere to patterns and best practices of the project +- Perform root cause analysis only if the issue is a bug +- Never make any assumptions, always strive to be thorough and accurate +- Avoid unnecessary repetition and verbosity +- Be concise, but thorough. + +MUST NOT: +- Make code changes +- Mention all new or updated lines of code + From 597d9373f85e9819023555660d3645c30be46ff3 Mon Sep 17 00:00:00 2001 From: Don Jayamanne Date: Wed, 16 Jul 2025 09:40:40 +1000 Subject: [PATCH 2/7] Fix linter issues --- crates/pet-python-utils/src/fs_cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pet-python-utils/src/fs_cache.rs b/crates/pet-python-utils/src/fs_cache.rs index e53c75ad..ece5a3b5 100644 --- a/crates/pet-python-utils/src/fs_cache.rs +++ b/crates/pet-python-utils/src/fs_cache.rs @@ -115,7 +115,7 @@ fn generate_hash(executable: &PathBuf) -> String { let h_bytes = hasher.finalize(); // Convert 256 bits => Hext and then take 16 of the hex chars (that should be unique enough) // We will handle collisions if they happen. - format!("{:x}", h_bytes)[..16].to_string() + format!("{h_bytes:x}")[..16].to_string() } #[cfg(test)] From a51ce1d0697b90d606eb43cd832cfdf064841911 Mon Sep 17 00:00:00 2001 From: Don Jayamanne Date: Wed, 16 Jul 2025 09:53:42 +1000 Subject: [PATCH 3/7] Udpates --- .github/copilot-instructions.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index e4bccb65..96368cdb 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -126,3 +126,15 @@ Tests validate discovered environments using 4 verification methods: 2. `crates/pet/src/locators.rs` - Core architecture patterns 3. `crates/pet-core/src/lib.rs` - Essential traits and types 4. 
`crates/pet/tests/ci_test.rs` - Comprehensive testing patterns + + +## Scripts +- Use `cargo fetch` to download all dependencies +- Use `rustup component add clippy` to install Clippy linter +- Use `cargo fmt --all` to format code in all packages +- Use `cargo clippy --all-features -- -Dwarnings` to check for linter issues +- Use `cargo build` to build the project +- Use `cargo test --all` to test all packages (this can take a few seconds) +- Use `cargo test [TESTNAME]` to test a specific test +- Use `cargo test -p [SPEC]` to test a specific package +- Use `cargo test --all` to test all packages From 8d534902c9e3fbd99aaf1797ba0e6d311a1bdceb Mon Sep 17 00:00:00 2001 From: Don Jayamanne Date: Wed, 16 Jul 2025 10:08:30 +1000 Subject: [PATCH 4/7] Updates --- .github/copilot-instructions.md | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 96368cdb..87aaa0e8 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -133,6 +133,7 @@ Tests validate discovered environments using 4 verification methods: - Use `rustup component add clippy` to install Clippy linter - Use `cargo fmt --all` to format code in all packages - Use `cargo clippy --all-features -- -Dwarnings` to check for linter issues +- Use `cargo clippy --all-features --fix --allow-dirty` to automatically fix linter issues - Use `cargo build` to build the project - Use `cargo test --all` to test all packages (this can take a few seconds) - Use `cargo test [TESTNAME]` to test a specific test From 499fa1355ebfab306ecb603e110adae850c1c9e5 Mon Sep 17 00:00:00 2001 From: Don Jayamanne Date: Wed, 16 Jul 2025 10:10:34 +1000 Subject: [PATCH 5/7] Fix formatting and linter issues --- .github/copilot-instructions.md | 2 +- crates/pet-conda/src/conda_rc.rs | 10 +-- crates/pet-conda/src/environments.rs | 6 +- crates/pet-conda/tests/ci_test.rs | 46 +++-------- .../tests/environment_locations_test.rs | 4 +- crates/pet-conda/tests/lib_test.rs | 8 +- crates/pet-conda/tests/manager_test.rs | 2 +- crates/pet-conda/tests/package_test.rs | 10 +-- crates/pet-conda/tests/utils_test.rs | 28 +++---- crates/pet-core/src/python_environment.rs | 1 - crates/pet-poetry/src/config.rs | 68 ++++++---------- crates/pet-poetry/src/pyproject_toml.rs | 2 +- crates/pet-poetry/tests/config_test.rs | 26 +++--- crates/pet-pyenv/tests/pyenv_test.rs | 28 +++---- crates/pet-python-utils/src/executable.rs | 80 +++++++++---------- .../pet-python-utils/tests/executable_test.rs | 16 ++-- .../pet-python-utils/tests/sys_prefix_test.rs | 26 +++--- crates/pet/src/jsonrpc.rs | 4 +- crates/pet/src/lib.rs | 7 +- crates/pet/tests/ci_homebrew_container.rs | 2 +- crates/pet/tests/ci_jupyter_container.rs | 2 +- crates/pet/tests/ci_test.rs | 80 ++++++++----------- 22 files changed, 193 insertions(+), 265 deletions(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 87aaa0e8..03c704d4 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -133,7 +133,7 @@ Tests validate discovered environments using 4 verification methods: - Use `rustup component add clippy` to install Clippy linter - Use `cargo fmt --all` to format code in all packages - Use `cargo clippy --all-features -- -Dwarnings` to check for linter issues -- Use `cargo clippy --all-features --fix --allow-dirty` to automatically fix linter issues +- Use `cargo clippy --all-features --fix --allow-dirty -- -Dwarnings` to automatically fix linter issues - Use `cargo build` to build 
the project - Use `cargo test --all` to test all packages (this can take a few seconds) - Use `cargo test [TESTNAME]` to test a specific test diff --git a/crates/pet-conda/src/conda_rc.rs b/crates/pet-conda/src/conda_rc.rs index 64fa88cd..83374830 100644 --- a/crates/pet-conda/src/conda_rc.rs +++ b/crates/pet-conda/src/conda_rc.rs @@ -353,7 +353,7 @@ envs_path: "#; assert_eq!( - parse_conda_rc_contents(&cfg).unwrap().env_dirs, + parse_conda_rc_contents(cfg).unwrap().env_dirs, [ PathBuf::from("/Users/username/dev/envs"), PathBuf::from("/opt/conda/envs"), @@ -373,7 +373,7 @@ envs_dirs: "#; assert_eq!( - parse_conda_rc_contents(&cfg).unwrap().env_dirs, + parse_conda_rc_contents(cfg).unwrap().env_dirs, ["/Users/username/dev/envs", "/opt/conda/envs",].map(PathBuf::from) ); @@ -388,7 +388,7 @@ envs_path: "#; assert_eq!( - parse_conda_rc_contents(&cfg).unwrap().env_dirs, + parse_conda_rc_contents(cfg).unwrap().env_dirs, [ PathBuf::from("/opt/somep lace/envs"), expand_path(PathBuf::from("~/dev/envs2")) @@ -402,7 +402,7 @@ channels: channel_priority: strict "#; - assert!(parse_conda_rc_contents(&cfg).unwrap().env_dirs.is_empty(),); - assert!(parse_conda_rc_contents(&cfg).unwrap().files.is_empty(),); + assert!(parse_conda_rc_contents(cfg).unwrap().env_dirs.is_empty(),); + assert!(parse_conda_rc_contents(cfg).unwrap().files.is_empty(),); } } diff --git a/crates/pet-conda/src/environments.rs b/crates/pet-conda/src/environments.rs index 3d11cdda..bbffc607 100644 --- a/crates/pet-conda/src/environments.rs +++ b/crates/pet-conda/src/environments.rs @@ -295,8 +295,8 @@ fn is_conda_env_name_in_cmd(cmd_line: String, name: &str) -> bool { // # cmd: /Users/donjayamanne/miniconda3/bin/conda create -n conda1 // # cmd_line: "# cmd: /usr/bin/conda create -p ./prefix-envs/.conda1 python=3.12 -y" // Look for "-n " in the command line - cmd_line.contains(format!("-n {}", name).as_str()) - || cmd_line.contains(format!("--name {}", name).as_str()) + cmd_line.contains(format!("-n {name}").as_str()) + || cmd_line.contains(format!("--name {name}").as_str()) } pub fn get_activation_command( @@ -364,7 +364,7 @@ mod tests { #[test] #[cfg(unix)] fn verify_conda_env_name() { - let mut line = "# cmd: /Users/donjayamanne/.pyenv/versions/mambaforge-22.11.1-3/lib/python3.10/site-packages/conda/__main__.py create --yes --name .conda python=3.12"; + let line = "# cmd: /Users/donjayamanne/.pyenv/versions/mambaforge-22.11.1-3/lib/python3.10/site-packages/conda/__main__.py create --yes --name .conda python=3.12"; assert!(is_conda_env_name_in_cmd(line.to_string(), ".conda")); let mut line = "# cmd: /Users/donjayamanne/.pyenv/versions/mambaforge-22.11.1-3/lib/python3.10/site-packages/conda/__main__.py create --yes -n .conda python=3.12"; diff --git a/crates/pet-conda/tests/ci_test.rs b/crates/pet-conda/tests/ci_test.rs index 0bfd0a6a..03cb4d33 100644 --- a/crates/pet-conda/tests/ci_test.rs +++ b/crates/pet-conda/tests/ci_test.rs @@ -84,7 +84,7 @@ fn detect_conda_root_from_path() { let python_env = PythonEnv::new(exe, Some(conda_dir.clone()), None); let env = conda.try_from(&python_env).unwrap(); - assert_eq!(env.manager.is_some(), true); + assert!(env.manager.is_some()); let manager = env.manager.unwrap(); assert_eq!(manager.executable, conda_dir.join("bin").join("conda")); @@ -132,13 +132,7 @@ fn detect_new_conda_env() { let env = environments .iter() .find(|x| x.name == Some(env_name.into())) - .expect( - format!( - "New Environment not created, detected envs {:?}", - environments - ) - .as_str(), - ); + .unwrap_or_else(|| panic!("New 
Environment not created, detected envs {environments:?}")); let prefix = conda_dir.clone().join("envs").join(env_name); assert_eq!(env.prefix, prefix.clone().into()); @@ -182,7 +176,7 @@ fn detect_conda_env_from_path() { let python_env = PythonEnv::new(exe.clone(), Some(prefix.clone()), None); let env = conda.try_from(&python_env).unwrap(); - assert_eq!(env.manager.is_some(), true); + assert!(env.manager.is_some()); let manager = env.manager.unwrap(); assert_eq!(manager.executable, conda_dir.join("bin").join("conda")); @@ -231,20 +225,14 @@ fn detect_new_conda_env_without_python() { let env = environments .iter() .find(|x| x.name == Some(env_name.into())) - .expect( - format!( - "New Environment not created, detected envs {:?}", - environments - ) - .as_str(), - ); + .unwrap_or_else(|| panic!("New Environment not created, detected envs {environments:?}")); let prefix = conda_dir.clone().join("envs").join(env_name); assert_eq!(env.prefix, prefix.clone().into()); assert_eq!(env.name, Some(env_name.into())); assert_eq!(env.kind, Some(PythonEnvironmentKind::Conda)); - assert_eq!(env.executable.is_none(), true); - assert_eq!(env.version.is_none(), true); + assert!(env.executable.is_none()); + assert!(env.version.is_none()); assert_eq!(env.manager, Some(manager.clone())); } @@ -281,19 +269,15 @@ fn detect_new_conda_env_created_with_p_flag_without_python() { let env = environments .iter() .find(|x| x.prefix == Some(prefix.clone())) - .expect( - format!( - "New Environment ({:?}) not created, detected envs {:?}", - prefix, environments - ) - .as_str(), - ); + .unwrap_or_else(|| { + panic!("New Environment ({prefix:?}) not created, detected envs {environments:?}") + }); assert_eq!(env.prefix, prefix.clone().into()); assert_eq!(env.name, None); assert_eq!(env.kind, Some(PythonEnvironmentKind::Conda)); - assert_eq!(env.executable.is_none(), true); - assert_eq!(env.version.is_none(), true); + assert!(env.executable.is_none()); + assert!(env.version.is_none()); assert_eq!(env.manager, Some(manager.clone())); } @@ -334,13 +318,7 @@ fn detect_new_conda_env_created_with_p_flag_with_python() { let env = environments .iter() .find(|x| x.prefix == Some(prefix.clone())) - .expect( - format!( - "New Environment not created, detected envs {:?}", - environments - ) - .as_str(), - ); + .unwrap_or_else(|| panic!("New Environment not created, detected envs {environments:?}")); assert_eq!(env.prefix, prefix.clone().into()); assert_eq!(env.name, None); diff --git a/crates/pet-conda/tests/environment_locations_test.rs b/crates/pet-conda/tests/environment_locations_test.rs index c438d61d..0d3a61c0 100644 --- a/crates/pet-conda/tests/environment_locations_test.rs +++ b/crates/pet-conda/tests/environment_locations_test.rs @@ -9,8 +9,8 @@ fn non_existent_envrionments_txt() { use common::{create_env_variables, resolve_test_path}; use pet_conda::environment_locations::get_conda_envs_from_environment_txt; - let root = resolve_test_path(&["unix", "root_empty"]).into(); - let home = resolve_test_path(&["unix", "bogus directory"]).into(); + let root = resolve_test_path(&["unix", "root_empty"]); + let home = resolve_test_path(&["unix", "bogus directory"]); let env = create_env_variables(home, root); let environments = get_conda_envs_from_environment_txt(&env); diff --git a/crates/pet-conda/tests/lib_test.rs b/crates/pet-conda/tests/lib_test.rs index 7efa4ff1..ff62f5aa 100644 --- a/crates/pet-conda/tests/lib_test.rs +++ b/crates/pet-conda/tests/lib_test.rs @@ -20,8 +20,8 @@ fn find_conda_env_without_manager() { let env = locator 
.try_from(&PythonEnv::new( - path.join("bin").join("python").into(), - Some(path.clone().into()), + path.join("bin").join("python"), + Some(path.clone()), None, )) .unwrap(); @@ -71,8 +71,8 @@ fn find_conda_env_without_manager_but_detect_manager_from_history() { let env = locator .try_from(&PythonEnv::new( - path.join("bin").join("python").into(), - Some(path.clone().into()), + path.join("bin").join("python"), + Some(path.clone()), None, )) .unwrap(); diff --git a/crates/pet-conda/tests/manager_test.rs b/crates/pet-conda/tests/manager_test.rs index 1face145..f19481ef 100644 --- a/crates/pet-conda/tests/manager_test.rs +++ b/crates/pet-conda/tests/manager_test.rs @@ -48,5 +48,5 @@ fn does_not_find_conda_env_for_bogus_dirs() { let path = resolve_test_path(&["unix", "bogus_directory"]); - assert_eq!(CondaManager::from(&path).is_none(), true); + assert!(CondaManager::from(&path).is_none()); } diff --git a/crates/pet-conda/tests/package_test.rs b/crates/pet-conda/tests/package_test.rs index 828c07a9..0dd83cc5 100644 --- a/crates/pet-conda/tests/package_test.rs +++ b/crates/pet-conda/tests/package_test.rs @@ -10,7 +10,7 @@ use common::resolve_test_path; #[cfg(unix)] #[test] fn empty_result_for_bogus_paths() { - let path: PathBuf = resolve_test_path(&["unix", "bogus_path"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "bogus_path"]); let pkg = CondaPackageInfo::from(&path, &package::Package::Conda); assert!(pkg.is_none()); @@ -19,7 +19,7 @@ fn empty_result_for_bogus_paths() { #[cfg(unix)] #[test] fn get_conda_package_info() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]); let pkg = CondaPackageInfo::from(&path, &package::Package::Conda).unwrap(); assert_eq!(pkg.package, package::Package::Conda); @@ -38,7 +38,7 @@ fn get_conda_package_info() { #[cfg(unix)] #[test] fn get_python_package_info() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]); let pkg = CondaPackageInfo::from(&path, &package::Package::Python).unwrap(); assert_eq!(pkg.package, package::Package::Python); @@ -57,7 +57,7 @@ fn get_python_package_info() { #[cfg(unix)] #[test] fn get_conda_package_info_without_history() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]); let pkg = CondaPackageInfo::from(&path, &package::Package::Conda).unwrap(); assert_eq!(pkg.package, package::Package::Conda); @@ -76,7 +76,7 @@ fn get_conda_package_info_without_history() { #[cfg(unix)] #[test] fn get_python_package_info_without_history() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]); let pkg = CondaPackageInfo::from(&path, &package::Package::Python).unwrap(); assert_eq!(pkg.package, package::Package::Python); diff --git a/crates/pet-conda/tests/utils_test.rs b/crates/pet-conda/tests/utils_test.rs index 94192b97..db56247c 100644 --- a/crates/pet-conda/tests/utils_test.rs +++ b/crates/pet-conda/tests/utils_test.rs @@ -9,48 +9,46 @@ use std::path::PathBuf; #[cfg(unix)] #[test] fn is_conda_install() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]); 
assert!(utils::is_conda_install(&path)); - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]); assert!(utils::is_conda_install(&path)); } #[cfg(unix)] #[test] fn is_not_conda_install() { - let path: PathBuf = resolve_test_path(&["unix", "some bogus directory"]).into(); - assert_eq!(utils::is_conda_install(&path), false); + let path: PathBuf = resolve_test_path(&["unix", "some bogus directory"]); + assert!(!utils::is_conda_install(&path)); // Conda env is not an install location. - let path: PathBuf = - resolve_test_path(&["unix", "anaconda3-2023.03", "envs", "env_python_3"]).into(); - assert_eq!(utils::is_conda_install(&path), false); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03", "envs", "env_python_3"]); + assert!(!utils::is_conda_install(&path)); } #[cfg(unix)] #[test] fn is_conda_env() { - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]); assert!(utils::is_conda_env(&path)); - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]); assert!(utils::is_conda_env(&path)); - let path: PathBuf = - resolve_test_path(&["unix", "anaconda3-2023.03", "envs", "env_python_3"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03", "envs", "env_python_3"]); assert!(utils::is_conda_env(&path)); } #[cfg(unix)] #[test] fn is_not_conda_env() { - let path: PathBuf = resolve_test_path(&["unix", "some bogus directory"]).into(); - assert_eq!(utils::is_conda_env(&path), false); + let path: PathBuf = resolve_test_path(&["unix", "some bogus directory"]); + assert!(!utils::is_conda_env(&path)); - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03"]); assert!(utils::is_conda_env(&path)); - let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "anaconda3-2023.03-without-history"]); assert!(utils::is_conda_env(&path)); } diff --git a/crates/pet-core/src/python_environment.rs b/crates/pet-core/src/python_environment.rs index 9531a70a..2fe0ffa8 100644 --- a/crates/pet-core/src/python_environment.rs +++ b/crates/pet-core/src/python_environment.rs @@ -416,7 +416,6 @@ pub fn get_environment_key(env: &PythonEnvironment) -> Option { #[cfg(test)] mod tests { - use super::*; #[test] #[cfg(windows)] diff --git a/crates/pet-poetry/src/config.rs b/crates/pet-poetry/src/config.rs index 8b4f8632..07e518d2 100644 --- a/crates/pet-poetry/src/config.rs +++ b/crates/pet-poetry/src/config.rs @@ -241,13 +241,10 @@ create = false "#; - assert_eq!( - parse_contents(&cfg.to_string()) - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - false - ); + assert!(!parse_contents(cfg) + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); let cfg = r#" [virtualenvs] @@ -255,47 +252,35 @@ in-project = true create = false "#; - assert_eq!( - parse_contents(&cfg.to_string()) - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - true - ); + assert!(parse_contents(cfg) + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); let cfg = r#" [virtualenvs] create = false "#; - assert_eq!( - parse_contents(&cfg.to_string()) - .unwrap() - 
.virtualenvs_in_project - .unwrap_or_default(), - false - ); + assert!(!parse_contents(cfg) + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); let cfg = r#" virtualenvs.in-project = true # comment "#; - assert_eq!( - parse_contents(&cfg.to_string()) - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - true - ); + assert!(parse_contents(cfg) + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); let cfg = r#" "#; - assert_eq!( - parse_contents(&cfg.to_string()) - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - false - ); + assert!(!parse_contents(cfg) + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); } #[test] @@ -305,7 +290,7 @@ cache-dir = "/path/to/cache/directory" "#; assert_eq!( - parse_contents(&cfg.to_string()).unwrap().cache_dir, + parse_contents(cfg).unwrap().cache_dir, Some(PathBuf::from("/path/to/cache/directory".to_string())) ); @@ -313,7 +298,7 @@ cache-dir = "/path/to/cache/directory" some-other-value = 1234 "#; - assert_eq!(parse_contents(&cfg.to_string()).unwrap().cache_dir, None); + assert_eq!(parse_contents(cfg).unwrap().cache_dir, None); } #[test] @@ -323,7 +308,7 @@ virtualenvs.path = "/path/to/virtualenvs" "#; assert_eq!( - parse_contents(&cfg.to_string()).unwrap().virtualenvs_path, + parse_contents(cfg).unwrap().virtualenvs_path, Some(PathBuf::from("/path/to/virtualenvs".to_string())) ); @@ -331,10 +316,7 @@ virtualenvs.path = "/path/to/virtualenvs" some-other-value = 1234 "#; - assert_eq!( - parse_contents(&cfg.to_string()).unwrap().virtualenvs_path, - None - ); + assert_eq!(parse_contents(cfg).unwrap().virtualenvs_path, None); } #[test] @@ -343,7 +325,7 @@ some-other-value = 1234 cache-dir = "/path/to/cache/directory" "#; assert_eq!( - parse_contents(&cfg.to_string()).unwrap().virtualenvs_path, + parse_contents(cfg).unwrap().virtualenvs_path, Some(PathBuf::from("/path/to/cache/directory/virtualenvs")) ); } diff --git a/crates/pet-poetry/src/pyproject_toml.rs b/crates/pet-poetry/src/pyproject_toml.rs index de5dc69c..0bd785ad 100644 --- a/crates/pet-poetry/src/pyproject_toml.rs +++ b/crates/pet-poetry/src/pyproject_toml.rs @@ -90,7 +90,7 @@ requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" "#; assert_eq!( - parse_contents(&cfg.to_string(), Path::new("pyproject.toml")) + parse_contents(cfg, Path::new("pyproject.toml")) .unwrap() .name, "poetry-demo" diff --git a/crates/pet-poetry/tests/config_test.rs b/crates/pet-poetry/tests/config_test.rs index 19d100d2..92172b6c 100644 --- a/crates/pet-poetry/tests/config_test.rs +++ b/crates/pet-poetry/tests/config_test.rs @@ -69,14 +69,11 @@ fn global_config_with_specific_values() { "config.toml" ])) ); - assert_eq!( - config - .clone() - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - true - ); + assert!(config + .clone() + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); assert_eq!( config.clone().unwrap().virtualenvs_path, PathBuf::from("some/path/virtualenvs".to_string()) @@ -117,14 +114,11 @@ fn local_config_with_specific_values() { "poetry.toml" ])) ); - assert_eq!( - config - .clone() - .unwrap() - .virtualenvs_in_project - .unwrap_or_default(), - false - ); + assert!(!config + .clone() + .unwrap() + .virtualenvs_in_project + .unwrap_or_default()); assert_eq!( config.clone().unwrap().virtualenvs_path, PathBuf::from("/directory/virtualenvs".to_string()) diff --git a/crates/pet-pyenv/tests/pyenv_test.rs b/crates/pet-pyenv/tests/pyenv_test.rs index ea46aee5..210d42e3 100644 --- a/crates/pet-pyenv/tests/pyenv_test.rs +++ 
b/crates/pet-pyenv/tests/pyenv_test.rs @@ -29,8 +29,8 @@ fn does_not_find_any_pyenv_envs() { let environments = reporter.environments.lock().unwrap().clone(); let managers = reporter.managers.lock().unwrap().clone(); - assert_eq!(managers.is_empty(), true); - assert_eq!(environments.is_empty(), true); + assert!(managers.is_empty()); + assert!(environments.is_empty()); } #[test] @@ -48,7 +48,7 @@ fn does_not_find_any_pyenv_envs_even_with_pyenv_installed() { use pet_pyenv::PyEnv; use pet_reporter::{cache::CacheReporter, collect}; use serde_json::json; - use std::{collections::HashMap, path::PathBuf, sync::Arc}; + use std::{collections::HashMap, sync::Arc}; let home = resolve_test_path(&["unix", "pyenv_without_envs", "user_home"]); let homebrew_bin = resolve_test_path(&[ @@ -60,12 +60,8 @@ fn does_not_find_any_pyenv_envs_even_with_pyenv_installed() { "bin", ]); let pyenv_exe = resolve_test_path(&[homebrew_bin.to_str().unwrap(), "pyenv"]); - let environment = create_test_environment( - HashMap::new(), - Some(home.clone()), - vec![PathBuf::from(homebrew_bin)], - None, - ); + let environment = + create_test_environment(HashMap::new(), Some(home.clone()), vec![homebrew_bin], None); let conda = Arc::new(Conda::from(&environment)); let locator = PyEnv::from(&environment, conda); @@ -101,7 +97,7 @@ fn find_pyenv_envs() { use pet_pyenv::PyEnv; use pet_reporter::{cache::CacheReporter, collect}; use serde_json::json; - use std::{collections::HashMap, path::PathBuf, sync::Arc}; + use std::{collections::HashMap, sync::Arc}; let home = resolve_test_path(&["unix", "pyenv", "user_home"]); let homebrew_bin = resolve_test_path(&["unix", "pyenv", "home", "opt", "homebrew", "bin"]); @@ -116,12 +112,8 @@ fn find_pyenv_envs() { ]); let conda_exe = conda_dir.join("bin").join("conda"); - let environment = create_test_environment( - HashMap::new(), - Some(home.clone()), - vec![PathBuf::from(homebrew_bin)], - None, - ); + let environment = + create_test_environment(HashMap::new(), Some(home.clone()), vec![homebrew_bin], None); let conda = Arc::new(Conda::from(&environment)); let locator = PyEnv::from(&environment, conda); @@ -479,7 +471,7 @@ fn resolve_pyenv_environment() { None, )); - assert_eq!(result.is_none(), true); + assert!(result.is_none()); // Should not resolve conda envs using Conda Locator let result = conda.try_from(&PythonEnv::new( @@ -494,6 +486,6 @@ fn resolve_pyenv_environment() { None, )); - assert_eq!(result.is_some(), true); + assert!(result.is_some()); assert_eq!(result.unwrap().kind, Some(PythonEnvironmentKind::Conda)); } diff --git a/crates/pet-python-utils/src/executable.rs b/crates/pet-python-utils/src/executable.rs index 9808584b..84b9286a 100644 --- a/crates/pet-python-utils/src/executable.rs +++ b/crates/pet-python-utils/src/executable.rs @@ -119,6 +119,46 @@ fn is_python_executable_name(exe: &Path) -> bool { } } +pub fn should_search_for_environments_in_path>(path: &P) -> bool { + // Never search in the .git folder + // Never search in the node_modules folder + // Mostly copied from https://github.com/github/gitignore/blob/main/Python.gitignore + let folders_to_ignore = [ + "node_modules", + ".cargo", + ".devcontainer", + ".github", + ".git", + ".tox", + ".nox", + ".hypothesis", + ".ipynb_checkpoints", + ".eggs", + ".coverage", + ".cache", + ".pyre", + ".ptype", + ".pytest_cache", + ".vscode", + "__pycache__", + "__pypackages__", + ".mypy_cache", + "cython_debug", + "env.bak", + "venv.bak", + "Scripts", // If the folder ends bin/scripts, then ignore it, as the parent is most likely an 
env. + "bin", // If the folder ends bin/scripts, then ignore it, as the parent is most likely an env. + ]; + for folder in folders_to_ignore.iter() { + if path.as_ref().ends_with(folder) { + trace!("Ignoring folder: {:?}", path.as_ref()); + return false; + } + } + + true +} + #[cfg(test)] mod tests { use super::*; @@ -186,43 +226,3 @@ mod tests { )); } } - -pub fn should_search_for_environments_in_path>(path: &P) -> bool { - // Never search in the .git folder - // Never search in the node_modules folder - // Mostly copied from https://github.com/github/gitignore/blob/main/Python.gitignore - let folders_to_ignore = [ - "node_modules", - ".cargo", - ".devcontainer", - ".github", - ".git", - ".tox", - ".nox", - ".hypothesis", - ".ipynb_checkpoints", - ".eggs", - ".coverage", - ".cache", - ".pyre", - ".ptype", - ".pytest_cache", - ".vscode", - "__pycache__", - "__pypackages__", - ".mypy_cache", - "cython_debug", - "env.bak", - "venv.bak", - "Scripts", // If the folder ends bin/scripts, then ignore it, as the parent is most likely an env. - "bin", // If the folder ends bin/scripts, then ignore it, as the parent is most likely an env. - ]; - for folder in folders_to_ignore.iter() { - if path.as_ref().ends_with(folder) { - trace!("Ignoring folder: {:?}", path.as_ref()); - return false; - } - } - - true -} diff --git a/crates/pet-python-utils/tests/executable_test.rs b/crates/pet-python-utils/tests/executable_test.rs index bfbcd76c..a578142c 100644 --- a/crates/pet-python-utils/tests/executable_test.rs +++ b/crates/pet-python-utils/tests/executable_test.rs @@ -11,8 +11,8 @@ use common::resolve_test_path; #[test] fn find_executables() { // .venv - let path: PathBuf = resolve_test_path(&["unix", "executables", ".venv"]).into(); - let mut executables = executable::find_executables(&path.clone()); + let path: PathBuf = resolve_test_path(&["unix", "executables", ".venv"]); + let mut executables = executable::find_executables(path.clone()); executables.sort(); assert_eq!( @@ -24,8 +24,8 @@ fn find_executables() { ); // Python3.9.9 - let path: PathBuf = resolve_test_path(&["unix", "executables", "python3.9.9"]).into(); - let mut executables = executable::find_executables(&path.clone()); + let path: PathBuf = resolve_test_path(&["unix", "executables", "python3.9.9"]); + let mut executables = executable::find_executables(path.clone()); executables.sort(); assert_eq!( @@ -37,14 +37,14 @@ fn find_executables() { ); // Conda without Python. 
- let path: PathBuf = resolve_test_path(&["unix", "executables", "conda_without_python"]).into(); - let executables = executable::find_executables(&path.clone()); + let path: PathBuf = resolve_test_path(&["unix", "executables", "conda_without_python"]); + let executables = executable::find_executables(path.clone()); assert_eq!(executables.len(), 0); // Bogus dir - let path: PathBuf = resolve_test_path(&["unix_bogus_dir"]).into(); - let executables = executable::find_executables(&path.clone()); + let path: PathBuf = resolve_test_path(&["unix_bogus_dir"]); + let executables = executable::find_executables(path.clone()); assert_eq!(executables.len(), 0); } diff --git a/crates/pet-python-utils/tests/sys_prefix_test.rs b/crates/pet-python-utils/tests/sys_prefix_test.rs index 379c2cf4..ba5c5f71 100644 --- a/crates/pet-python-utils/tests/sys_prefix_test.rs +++ b/crates/pet-python-utils/tests/sys_prefix_test.rs @@ -10,11 +10,11 @@ use common::resolve_test_path; #[cfg(unix)] #[test] fn version_from_sys_prefix() { - let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", ".venv"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", ".venv"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.12.1"); - let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", ".venv", "bin"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", ".venv", "bin"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.12.1"); } @@ -22,11 +22,11 @@ fn version_from_sys_prefix() { #[cfg(unix)] #[test] fn version_from_sys_prefix_using_version_info_format() { - let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", "hatch_env"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", "hatch_env"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.9.6.final.0"); - let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", "hatch_env", "bin"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", "hatch_env", "bin"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.9.6.final.0"); } @@ -34,13 +34,12 @@ fn version_from_sys_prefix_using_version_info_format() { #[cfg(unix)] #[test] fn no_version_without_pyvenv_cfg_and_without_headers() { - let path: PathBuf = - resolve_test_path(&["unix", "pyvenv_cfg", "python3.9.9_without_headers"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "pyvenv_cfg", "python3.9.9_without_headers"]); let version = version::from_prefix(&path); assert!(version.is_none()); let path: PathBuf = - resolve_test_path(&["unix", "pyvenv_cfg", "python3.9.9_without_headers", "bin"]).into(); + resolve_test_path(&["unix", "pyvenv_cfg", "python3.9.9_without_headers", "bin"]); let version = version::from_prefix(&path); assert!(version.is_none()); @@ -50,8 +49,7 @@ fn no_version_without_pyvenv_cfg_and_without_headers() { "python3.9.9_without_headers", "bin", "python", - ]) - .into(); + ]); let version = version::from_prefix(&path); assert!(version.is_none()); } @@ -59,7 +57,7 @@ fn no_version_without_pyvenv_cfg_and_without_headers() { #[cfg(unix)] #[test] fn no_version_for_invalid_paths() { - let path: PathBuf = resolve_test_path(&["unix_1234"]).into(); + let path: PathBuf = resolve_test_path(&["unix_1234"]); let version = version::from_prefix(&path); assert!(version.is_none()); } @@ -67,19 +65,19 @@ fn no_version_for_invalid_paths() { #[cfg(unix)] #[test] fn version_from_header_files() { - let path: PathBuf = 
resolve_test_path(&["unix", "headers", "python3.9.9"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.9.9"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.9.9"); - let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.9.9", "bin"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.9.9", "bin"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.9.9"); - let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.10-dev", "bin"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.10-dev", "bin"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.10.14+"); - let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.13", "bin"]).into(); + let path: PathBuf = resolve_test_path(&["unix", "headers", "python3.13", "bin"]); let version = version::from_prefix(&path).unwrap(); assert_eq!(version, "3.13.0a5"); } diff --git a/crates/pet/src/jsonrpc.rs b/crates/pet/src/jsonrpc.rs index 8a805ace..eaf87446 100644 --- a/crates/pet/src/jsonrpc.rs +++ b/crates/pet/src/jsonrpc.rs @@ -245,7 +245,7 @@ pub fn handle_refresh(context: Arc, id: u32, params: Value) { .locators .clone() .iter() - .map(|(k, v)| (format!("{:?}", k), v.as_millis())) + .map(|(k, v)| (format!("{k:?}"), v.as_millis())) .collect::>(), breakdown: summary .breakdown @@ -450,7 +450,7 @@ pub fn handle_clear_cache(_context: Arc, id: u32, _params: Value) { thread::spawn(move || { if let Err(e) = clear_cache() { error!("Failed to clear cache {:?}", e); - send_error(Some(id), -4, format!("Failed to clear cache {:?}", e)); + send_error(Some(id), -4, format!("Failed to clear cache {e:?}")); } else { info!("Cleared cache"); send_reply(id, None::<()>); diff --git a/crates/pet/src/lib.rs b/crates/pet/src/lib.rs index 735d36ab..68f9aed4 100644 --- a/crates/pet/src/lib.rs +++ b/crates/pet/src/lib.rs @@ -182,8 +182,7 @@ fn find_envs( .into_iter() .map(|(k, v)| { ( - k.map(|v| format!("{:?}", v)) - .unwrap_or("Unknown".to_string()), + k.map(|v| format!("{v:?}")).unwrap_or("Unknown".to_string()), v, ) }) @@ -226,14 +225,14 @@ pub fn resolve_report_stdio(executable: PathBuf, verbose: bool, cache_directory: if let Some(result) = resolve_environment(&executable, &locators, &environment) { // - println!("Environment found for {:?}", executable); + println!("Environment found for {executable:?}"); let env = &result.resolved.unwrap_or(result.discovered); if let Some(manager) = &env.manager { reporter.report_manager(manager); } reporter.report_environment(env); } else { - println!("No environment found for {:?}", executable); + println!("No environment found for {executable:?}"); } println!( diff --git a/crates/pet/tests/ci_homebrew_container.rs b/crates/pet/tests/ci_homebrew_container.rs index 9b0e67ea..3777d453 100644 --- a/crates/pet/tests/ci_homebrew_container.rs +++ b/crates/pet/tests/ci_homebrew_container.rs @@ -107,7 +107,7 @@ fn verify_python_in_homebrew_contaner() { let python_env = environments .iter() .find(|e| e.executable == env.executable) - .expect(format!("Expected to find python environment {:?}", env.executable).as_str()); + .unwrap_or_else(|| panic!("Expected to find python environment {:?}", env.executable)); assert_eq!(python_env.executable, env.executable); assert_eq!(python_env.kind, env.kind); assert_eq!(python_env.manager, env.manager); diff --git a/crates/pet/tests/ci_jupyter_container.rs b/crates/pet/tests/ci_jupyter_container.rs index 
4ecdbb18..0e61efd2 100644 --- a/crates/pet/tests/ci_jupyter_container.rs +++ b/crates/pet/tests/ci_jupyter_container.rs @@ -137,7 +137,7 @@ fn verify_python_in_jupyter_contaner() { let python_env = environments .iter() .find(|e| e.executable == env.executable) - .expect(format!("Expected to find python environment {:?}", env.executable).as_str()); + .unwrap_or_else(|| panic!("Expected to find python environment {:?}", env.executable)); assert_eq!( python_env.executable, env.executable, "Expected exe to be same when comparing {python_env:?} and {env:?}" diff --git a/crates/pet/tests/ci_test.rs b/crates/pet/tests/ci_test.rs index b4d432aa..7c315efa 100644 --- a/crates/pet/tests/ci_test.rs +++ b/crates/pet/tests/ci_test.rs @@ -66,7 +66,7 @@ fn verify_validity_of_discovered_envs() { use pet::{find::find_and_report_envs, locators::create_locators}; use pet_conda::Conda; use pet_core::{os_environment::EnvironmentApi, Configuration}; - use std::{env, sync::Arc, thread}; + use std::{env, sync::Arc}; setup(); @@ -205,7 +205,7 @@ fn check_if_pipenv_exists() { env.kind == Some(PythonEnvironmentKind::Pipenv) && env.project == Some(workspace_dir.clone()) }) - .expect(format!("Pipenv environment not found, found {environments:?}").as_str()); + .unwrap_or_else(|| panic!("Pipenv environment not found, found {environments:?}")); } #[cfg(unix)] @@ -268,7 +268,7 @@ fn verify_validity_of_interpreter_info(environment: PythonEnvironment) { .symlinks .clone() .unwrap_or_default() - .contains(&PathBuf::from(expected_executable)), + .contains(&expected_executable), "Executable mismatch for {:?}", environment.clone() ); @@ -367,10 +367,9 @@ fn verify_we_can_get_same_env_info_using_from_with_exe( let env = PythonEnv::new(executable.clone(), None, None); let resolved = identify_python_environment_using_locators(&env, &locators, &global_env_search_paths) - .expect( - format!("Failed to resolve environment using `resolve` for {environment:?}") - .as_str(), - ); + .unwrap_or_else(|| { + panic!("Failed to resolve environment using `resolve` for {environment:?}") + }); trace!( "For exe {:?} we got Environment = {:?}, To compare against {:?}", executable, @@ -428,10 +427,7 @@ fn verify_we_can_get_same_env_info_using_find_with_exe( let envs = collect_reporter.environments.lock().unwrap().clone(); if envs.is_empty() { - panic!( - "Failed to find Python environment {:?}, details => {:?}", - executable, environment - ); + panic!("Failed to find Python environment {executable:?}, details => {environment:?}"); } trace!( "For exe {:?} we got Environment = {:?}, To compare against {:?}", @@ -519,13 +515,8 @@ fn compare_environments(actual: PythonEnvironment, expected: PythonEnvironment, .iter() .filter(|p| { // This is in the path, but not easy to figure out, unless we add support for codespaces or CI. - if p.starts_with("/Users/runner/hostedtoolcache/Python") - && p.to_string_lossy().contains("arm64") - { - false - } else { - true - } + !(p.starts_with("/Users/runner/hostedtoolcache/Python") + && p.to_string_lossy().contains("arm64")) }) .map(|p| p.to_path_buf()) .collect::>(), @@ -538,37 +529,34 @@ fn compare_environments(actual: PythonEnvironment, expected: PythonEnvironment, .iter() .filter(|p| { // This is in the path, but not easy to figure out, unless we add support for codespaces or CI. 
- if p.starts_with("/Users/runner/hostedtoolcache/Python") - && p.to_string_lossy().contains("arm64") - { - false - } else { - true - } + !(p.starts_with("/Users/runner/hostedtoolcache/Python") + && p.to_string_lossy().contains("arm64")) }) .map(|p| p.to_path_buf()) .collect::>(), ); // if we know the arch, then verify it - if expected.arch.as_ref().is_some() && actual.arch.as_ref().is_some() { - if actual.arch.as_ref() != expected.arch.as_ref() { - error!( - "Arch mismatch when using {} for {:?} and {:?}", - method, expected, actual - ); - } + if expected.arch.as_ref().is_some() + && actual.arch.as_ref().is_some() + && actual.arch.as_ref() != expected.arch.as_ref() + { + error!( + "Arch mismatch when using {} for {:?} and {:?}", + method, expected, actual + ); } actual.arch = expected.clone().arch; // if we know the prefix, then verify it - if expected.prefix.as_ref().is_some() && actual.prefix.as_ref().is_some() { - if actual.prefix.as_ref() != expected.prefix.as_ref() { - error!( - "Prefirx mismatch when using {} for {:?} and {:?}", - method, expected, actual - ); - } + if expected.prefix.as_ref().is_some() + && actual.prefix.as_ref().is_some() + && actual.prefix.as_ref() != expected.prefix.as_ref() + { + error!( + "Prefirx mismatch when using {} for {:?} and {:?}", + method, expected, actual + ); } actual.prefix = expected.clone().prefix; @@ -614,9 +602,9 @@ fn verify_we_can_get_same_env_info_using_resolve_with_exe( locator.configure(&config); } - let env = resolve_environment(&executable, &locators, &os_environment).expect( - format!("Failed to resolve environment using `resolve` for {environment:?}").as_str(), - ); + let env = resolve_environment(executable, &locators, &os_environment).unwrap_or_else(|| { + panic!("Failed to resolve environment using `resolve` for {environment:?}") + }); trace!( "For exe {:?} we got Environment = {:?}, To compare against {:?}", executable, @@ -724,13 +712,13 @@ fn get_python_run_command(env: &PythonEnvironment) -> Vec { None => get_conda_exe().to_string(), }; if let Some(name) = env.name.clone() { - return vec![ + vec![ conda_exe, "run".to_string(), "-n".to_string(), name, "python".to_string(), - ]; + ] } else if let Some(prefix) = env.prefix.clone() { return vec![ conda_exe, @@ -765,13 +753,13 @@ fn get_python_interpreter_info(cli: &Vec) -> InterpreterInfo { let output = std::process::Command::new(cli.first().unwrap()) .args(&cli[1..]) .output() - .expect(format!("Failed to execute command {cli:?}").as_str()); + .unwrap_or_else(|_| panic!("Failed to execute command {cli:?}")); let output = String::from_utf8(output.stdout).unwrap(); trace!("Get Interpreter Info: {:?} => {:?}", cli, output); let output = output .split_once("503bebe7-c838-4cea-a1bc-0f2963bcb657") .unwrap() .1; - let info: InterpreterInfo = serde_json::from_str(&output).unwrap(); + let info: InterpreterInfo = serde_json::from_str(output).unwrap(); info } From a39a918a1f2b59b594dd3eab4542991df2f8e35a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 16 Jul 2025 00:12:50 +0000 Subject: [PATCH 6/7] Initial plan From f0c94075fb6147851b1a8fcf1390e2f0415bd2f3 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 16 Jul 2025 00:26:04 +0000 Subject: [PATCH 7/7] Implement UV virtual environment support Co-authored-by: DonJayamanne <1948812+DonJayamanne@users.noreply.github.com> --- Cargo.lock | 13 +- crates/pet-uv/Cargo.toml | 11 ++ crates/pet-uv/src/lib.rs | 155 
 crates/pet/Cargo.toml                       |   1 +
 crates/pet/src/find.rs                      |   6 +
 crates/pet/src/lib.rs                       |   3 +
 crates/pet/src/tests.rs                     |   4 +
 crates/pet/src/tests/test_uv_integration.rs |  34 +++++
 8 files changed, 226 insertions(+), 1 deletion(-)
 create mode 100644 crates/pet-uv/Cargo.toml
 create mode 100644 crates/pet-uv/src/lib.rs
 create mode 100644 crates/pet/src/tests.rs
 create mode 100644 crates/pet/src/tests/test_uv_integration.rs

diff --git a/Cargo.lock b/Cargo.lock
index 3e0837f9..656f6d99 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "ahash"
@@ -360,6 +360,7 @@ dependencies = [
  "pet-python-utils",
  "pet-reporter",
  "pet-telemetry",
+ "pet-uv",
  "pet-venv",
  "pet-virtualenv",
  "pet-virtualenvwrapper",
@@ -617,6 +618,16 @@ dependencies = [
  "regex",
 ]
 
+[[package]]
+name = "pet-uv"
+version = "0.1.0"
+dependencies = [
+ "log",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+]
+
 [[package]]
 name = "pet-venv"
 version = "0.1.0"
diff --git a/crates/pet-uv/Cargo.toml b/crates/pet-uv/Cargo.toml
new file mode 100644
index 00000000..4f3205b1
--- /dev/null
+++ b/crates/pet-uv/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "pet-uv"
+version = "0.1.0"
+edition = "2021"
+license = "MIT"
+
+[dependencies]
+pet-core = { path = "../pet-core" }
+pet-fs = { path = "../pet-fs" }
+pet-conda = { path = "../pet-conda" }
+log = "0.4.21"
\ No newline at end of file
diff --git a/crates/pet-uv/src/lib.rs b/crates/pet-uv/src/lib.rs
new file mode 100644
index 00000000..44946b21
--- /dev/null
+++ b/crates/pet-uv/src/lib.rs
@@ -0,0 +1,155 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+use pet_conda::utils::is_conda_env;
+use pet_fs::path::{expand_path, norm_case};
+use std::{fs, path::PathBuf};
+
+/// Get the UV cache directory.
+/// UV uses the following priority order:
+/// 1. UV_CACHE_DIR environment variable
+/// 2. XDG cache directories on Unix / %LOCALAPPDATA% on Windows
+/// 3. Platform-specific cache directories
+fn get_uv_cache_dir(
+    uv_cache_dir_env_var: Option<String>,
+    xdg_cache_home: Option<String>,
+    user_home: Option<PathBuf>,
+) -> Option<PathBuf> {
+    // 1. Check UV_CACHE_DIR environment variable
+    if let Some(cache_dir) = uv_cache_dir_env_var {
+        let cache_dir = norm_case(expand_path(PathBuf::from(cache_dir)));
+        if cache_dir.exists() {
+            return Some(cache_dir);
+        }
+    }
+
+    // 2. Check XDG_CACHE_HOME on Unix
+    if let Some(xdg_cache) = xdg_cache_home.map(|d| PathBuf::from(d).join("uv")) {
+        if xdg_cache.exists() {
+            return Some(xdg_cache);
+        }
+    }
+
+    // 3. Platform-specific cache directories
+    if let Some(home) = user_home {
+        let cache_dirs = if cfg!(target_os = "windows") {
+            // On Windows: %LOCALAPPDATA%\uv
+            vec![home.join("AppData").join("Local").join("uv")]
+        } else if cfg!(target_os = "macos") {
+            // On macOS: ~/Library/Caches/uv
+            vec![home.join("Library").join("Caches").join("uv")]
+        } else {
+            // On other Unix systems: ~/.cache/uv
+            vec![home.join(".cache").join("uv")]
+        };
+
+        for cache_dir in cache_dirs {
+            if cache_dir.exists() {
+                return Some(cache_dir);
+            }
+        }
+    }
+
+    None
+}
+
+/// Get UV environment cache directories.
+/// UV stores virtual environments in {cache_dir}/environments-v2/
+fn get_uv_environment_dirs(
+    uv_cache_dir_env_var: Option<String>,
+    xdg_cache_home: Option<String>,
+    user_home: Option<PathBuf>,
+) -> Vec<PathBuf> {
+    let mut env_dirs = Vec::new();
+
+    if let Some(cache_dir) = get_uv_cache_dir(uv_cache_dir_env_var, xdg_cache_home, user_home) {
+        let environments_dir = cache_dir.join("environments-v2");
+        if environments_dir.exists() {
+            env_dirs.push(environments_dir);
+        }
+    }
+
+    env_dirs
+}
+
+/// List UV virtual environment paths.
+/// This function discovers UV cache directories and enumerates the virtual environments within them.
+/// It filters out conda environments to avoid conflicts.
+pub fn list_uv_virtual_envs_paths(
+    uv_cache_dir_env_var: Option<String>,
+    xdg_cache_home: Option<String>,
+    user_home: Option<PathBuf>,
+) -> Vec<PathBuf> {
+    let mut python_envs: Vec<PathBuf> = vec![];
+
+    for env_cache_dir in get_uv_environment_dirs(uv_cache_dir_env_var, xdg_cache_home, user_home) {
+        if let Ok(dirs) = fs::read_dir(&env_cache_dir) {
+            python_envs.append(
+                &mut dirs
+                    .filter_map(Result::ok)
+                    .map(|e| e.path())
+                    .filter(|p| p.is_dir() && !is_conda_env(p))
+                    .collect(),
+            );
+        }
+    }
+
+    python_envs.sort();
+    python_envs.dedup();
+
+    python_envs
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::fs;
+
+    #[test]
+    fn test_uv_cache_dir_from_env_var() {
+        let temp_dir = std::env::temp_dir().join("test_uv_cache");
+        fs::create_dir_all(&temp_dir).unwrap();
+
+        let cache_dir = get_uv_cache_dir(
+            Some(temp_dir.to_string_lossy().to_string()),
+            None,
+            None,
+        );
+
+        assert_eq!(cache_dir, Some(temp_dir.clone()));
+        fs::remove_dir_all(&temp_dir).ok();
+    }
+
+    #[test]
+    fn test_uv_environment_dirs() {
+        let temp_dir = std::env::temp_dir().join("test_uv_env");
+        let env_dir = temp_dir.join("environments-v2");
+        fs::create_dir_all(&env_dir).unwrap();
+
+        let env_dirs = get_uv_environment_dirs(
+            Some(temp_dir.to_string_lossy().to_string()),
+            None,
+            None,
+        );
+
+        assert_eq!(env_dirs, vec![env_dir.clone()]);
+        fs::remove_dir_all(&temp_dir).ok();
+    }
+
+    #[test]
+    fn test_list_uv_virtual_envs_paths() {
+        let temp_dir = std::env::temp_dir().join("test_uv_list");
+        let env_dir = temp_dir.join("environments-v2");
+        let test_env = env_dir.join("test-venv");
+        fs::create_dir_all(&test_env).unwrap();
+
+        let envs = list_uv_virtual_envs_paths(
+            Some(temp_dir.to_string_lossy().to_string()),
+            None,
+            None,
+        );
+
+        assert!(envs.contains(&test_env));
+        fs::remove_dir_all(&temp_dir).ok();
+    }
+}
\ No newline at end of file
diff --git a/crates/pet/Cargo.toml b/crates/pet/Cargo.toml
index ee2efdf7..99f7b23c 100644
--- a/crates/pet/Cargo.toml
+++ b/crates/pet/Cargo.toml
@@ -35,6 +35,7 @@ pet-virtualenv = { path = "../pet-virtualenv" }
 pet-pipenv = { path = "../pet-pipenv" }
 pet-telemetry = { path = "../pet-telemetry" }
 pet-global-virtualenvs = { path = "../pet-global-virtualenvs" }
+pet-uv = { path = "../pet-uv" }
 log = "0.4.21"
 clap = { version = "4.5.4", features = ["derive", "cargo"] }
 serde = { version = "1.0.152", features = ["derive"] }
diff --git a/crates/pet/src/find.rs b/crates/pet/src/find.rs
index 96929684..018b9994 100644
--- a/crates/pet/src/find.rs
+++ b/crates/pet/src/find.rs
@@ -11,6 +11,7 @@ use pet_core::{Configuration, Locator, LocatorKind};
 use pet_env_var_path::get_search_paths_from_env_variables;
 use pet_global_virtualenvs::list_global_virtual_envs_paths;
 use pet_pixi::is_pixi_env;
+use pet_uv::list_uv_virtual_envs_paths;
 use pet_python_utils::executable::{
     find_executable, find_executables, should_search_for_environments_in_path,
 };
@@ -165,6 +166,11 @@ pub fn find_and_report_envs(
             environment.get_env_var("XDG_DATA_HOME".into()),
             environment.get_user_home(),
         ),
+        list_uv_virtual_envs_paths(
+            environment.get_env_var("UV_CACHE_DIR".into()),
+            environment.get_env_var("XDG_CACHE_HOME".into()),
+            environment.get_user_home(),
+        ),
         possible_environments,
     ]
     .concat();
diff --git a/crates/pet/src/lib.rs b/crates/pet/src/lib.rs
index 68f9aed4..f8f59d52 100644
--- a/crates/pet/src/lib.rs
+++ b/crates/pet/src/lib.rs
@@ -22,6 +22,9 @@ pub mod find;
 pub mod locators;
 pub mod resolve;
 
+#[cfg(test)]
+mod tests;
+
 #[derive(Debug, Clone)]
 pub struct FindOptions {
     pub print_list: bool,
diff --git a/crates/pet/src/tests.rs b/crates/pet/src/tests.rs
new file mode 100644
index 00000000..2b0bd82f
--- /dev/null
+++ b/crates/pet/src/tests.rs
@@ -0,0 +1,4 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+mod test_uv_integration;
\ No newline at end of file
diff --git a/crates/pet/src/tests/test_uv_integration.rs b/crates/pet/src/tests/test_uv_integration.rs
new file mode 100644
index 00000000..f4a68eef
--- /dev/null
+++ b/crates/pet/src/tests/test_uv_integration.rs
@@ -0,0 +1,34 @@
+use std::fs;
+use pet_uv::list_uv_virtual_envs_paths;
+
+#[test]
+fn test_uv_environment_discovery() {
+    // Set up a temporary UV cache structure
+    let temp_dir = std::env::temp_dir().join("test_pet_uv_integration");
+    let cache_dir = temp_dir.join("uv");
+    let env_dir = cache_dir.join("environments-v2");
+    let test_env = env_dir.join("my-project-abc123-py3.12");
+    let bin_dir = test_env.join("bin");
+
+    // Create the directory structure
+    fs::create_dir_all(&bin_dir).unwrap();
+
+    // Create python executable and activate script to make it look like a virtual environment
+    let python_exe = bin_dir.join("python");
+    fs::write(&python_exe, "#!/bin/bash\necho 'python'").unwrap();
+    let activate_script = bin_dir.join("activate");
+    fs::write(&activate_script, "# Activate script").unwrap();
+
+    // Test UV path discovery
+    let uv_paths = list_uv_virtual_envs_paths(
+        Some(cache_dir.to_string_lossy().to_string()),
+        None,
+        None,
+    );
+
+    // Verify that our test environment is discovered
+    assert!(uv_paths.contains(&test_env), "UV environment should be discovered: {:?}", uv_paths);
+
+    // Clean up
+    fs::remove_dir_all(&temp_dir).ok();
+}
\ No newline at end of file
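
A minimal usage sketch for exercising the new `pet-uv` API from inside this workspace. It assumes the `list_uv_virtual_envs_paths(Option<String>, Option<String>, Option<PathBuf>)` signature introduced in PATCH 7/7 and reads the same environment variables that `find.rs` now forwards; the `HOME`/`USERPROFILE` lookup below is a simplified stand-in for the `get_user_home()` call used there.

```rust
// Sketch only: list candidate UV environments the same way find_and_report_envs
// now feeds them into environment identification.
use std::{env, path::PathBuf};

use pet_uv::list_uv_virtual_envs_paths;

fn main() {
    // Simplified home lookup; the real code goes through the OS environment abstraction.
    let user_home = env::var("HOME")
        .or_else(|_| env::var("USERPROFILE"))
        .ok()
        .map(PathBuf::from);

    let uv_env_paths = list_uv_virtual_envs_paths(
        env::var("UV_CACHE_DIR").ok(),   // 1. explicit override
        env::var("XDG_CACHE_HOME").ok(), // 2. XDG cache on Unix
        user_home,                       // 3. platform-specific fallback
    );

    for path in uv_env_paths {
        println!("Candidate UV environment: {}", path.display());
    }
}
```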