Mirror of https://github.com/tw93/Mole.git (synced 2026-02-15 16:20:08 +00:00)

Merge branch 'dev' into dev
.gitignore (vendored): 1 line changed

@@ -47,6 +47,7 @@ tests/tmp-*
 .kiro/
 CLAUDE.md
 GEMINI.md
+ANTIGRAVITY.md
 .cursorrules

 # Go build artifacts (development)
AGENTS.md: 41 lines changed

@@ -35,12 +35,14 @@ Before any operation:
 - Validate syntax before suggesting changes: `bash -n <file>`
 - Use `gh` CLI for all GitHub operations (issues, PRs, releases, etc.)
 - Never commit code unless explicitly requested by user
+- Review and update `SECURITY_AUDIT.md` when modifying `clean` or `optimize` logic

 ---

 ## Quick Reference

 ### Build Commands

 ```bash
 # Build Go binaries for current platform
 make build
@@ -54,6 +56,7 @@ make clean
 ```

 ### Test Commands

 ```bash
 # Run full test suite (recommended before commits)
 ./scripts/test.sh
@@ -79,6 +82,7 @@ shellcheck --rcfile .shellcheckrc lib/**/*.sh bin/**/*.sh
 ```

 ### Development Commands

 ```bash
 # Test cleanup in dry-run mode
 MO_DRY_RUN=1 ./mole clean
@@ -108,11 +112,17 @@ mole/ # Main CLI entrypoint (menu + routing)
 │   ├── purge.sh      # Aggressive cleanup mode
 │   ├── touchid.sh    # Touch ID sudo enabler
 │   ├── analyze.sh    # Disk usage explorer wrapper
-│   └── status.sh     # System health dashboard wrapper
+│   ├── status.sh     # System health dashboard wrapper
+│   ├── installer.sh  # Core installation logic
+│   └── completion.sh # Shell completion support
 ├── lib/              # Reusable shell logic
 │   ├── core/         # base.sh, log.sh, sudo.sh, ui.sh
-│   ├── clean/        # Cleanup modules (user, apps, dev, caches, system)
-│   └── ui/           # Confirmation dialogs, progress bars
+│   ├── clean/        # Cleanup modules (user, apps, brew, system...)
+│   ├── optimize/     # Optimization modules
+│   ├── check/        # Health check modules
+│   ├── manage/       # Management utilities
+│   ├── ui/           # UI components (balloons, spinners)
+│   └── uninstall/    # Uninstallation logic
 ├── cmd/              # Go applications
 │   ├── analyze/      # Disk analysis tool
 │   └── status/       # Real-time monitoring
@@ -130,6 +140,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 - Tests → `tests/<test>.bats`

 ### Language Stack

 - **Shell (Bash 3.2)**: Core cleanup and system operations (`lib/`, `bin/`)
 - **Go**: Performance-critical tools (`cmd/analyze/`, `cmd/status/`)
 - **BATS**: Integration testing (`tests/`)
@@ -139,6 +150,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## Code Style Guidelines

 ### Shell Scripts

 - **Indentation**: 4 spaces (configured in .editorconfig)
 - **Variables**: `lowercase_with_underscores`
 - **Functions**: `verb_noun` format (e.g., `clean_caches`, `get_size`)
@@ -149,12 +161,14 @@ mole/ # Main CLI entrypoint (menu + routing)
 - **Error handling**: Use `set -euo pipefail` at top of files

 ### Go Code

 - **Formatting**: Follow standard Go conventions (`gofmt`, `go vet`)
 - **Package docs**: Add package-level documentation for exported functions
 - **Error handling**: Never ignore errors, always handle them explicitly
 - **Build tags**: Use `//go:build darwin` for macOS-specific code

 ### Comments

 - **Language**: English only
 - **Focus**: Explain "why" not "what" (code should be self-documenting)
 - **Safety**: Document safety boundaries explicitly
@@ -165,12 +179,14 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## Key Helper Functions

 ### Safety Helpers (lib/core/base.sh)

 - `safe_rm <path>`: Safe deletion with validation
 - `safe_find_delete <base> <pattern> <days> <type>`: Protected find+delete
 - `is_protected <path>`: Check if path is system-protected
 - `is_whitelisted <name>`: Check user whitelist

 ### Logging (lib/core/log.sh)

 - `log_info <msg>`: Informational messages
 - `log_success <msg>`: Success notifications
 - `log_warn <msg>`: Warnings
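For readers unfamiliar with these helpers, here is a minimal sketch (mine, not part of the commit) of how a cleanup step might combine them; the cache path and label are placeholders, not paths Mole actually cleans.

```bash
# Illustrative only: combining the documented safety and logging helpers.
clean_example_cache() {
    local target="$HOME/Library/Caches/com.example.tool"   # placeholder path

    if is_protected "$target" || is_whitelisted "com.example.tool"; then
        log_warn "Skipping protected or whitelisted path: $target"
        return 0
    fi

    log_info "Cleaning example tool cache"
    safe_rm "$target"
    log_success "Example tool cache removed"
}
```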
@@ -178,6 +194,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 - `debug <msg>`: Debug output (requires MO_DEBUG=1)

 ### UI Helpers (lib/core/ui.sh)

 - `confirm <prompt>`: Yes/no confirmation
 - `show_progress <current> <total> <msg>`: Progress display

@@ -186,6 +203,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## Testing Strategy

 ### Test Types

 1. **Syntax Validation**: `bash -n <file>` - catches basic errors
 2. **Unit Tests**: BATS tests for individual functions
 3. **Integration Tests**: Full command execution with BATS
@@ -193,6 +211,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 5. **Go Tests**: `go test -v ./cmd/...`

 ### Test Environment Variables

 - `MO_DRY_RUN=1`: Preview changes without execution
 - `MO_DEBUG=1`: Enable detailed debug logging
 - `BATS_FORMATTER=pretty`: Use pretty output for BATS (default)
@@ -203,6 +222,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## Common Development Tasks

 ### Adding New Cleanup Module

 1. Create `lib/clean/new_module.sh`
 2. Implement cleanup logic using `safe_*` helpers
 3. Source it in `bin/clean.sh`
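A skeletal sketch of steps 1 through 3 above; the module name, target path, and the `MOLE_ROOT` variable are hypothetical illustrations, not names taken from the repo.

```bash
# Step 1-2: lib/clean/new_module.sh (hypothetical module and cache path)
clean_example_tool() {
    # Use the safe_* helpers rather than raw rm
    safe_clean ~/Library/Caches/com.example.tool/* "Example tool cache"
}

# Step 3: in bin/clean.sh, source the module and call it
# source "$MOLE_ROOT/lib/clean/new_module.sh"   # MOLE_ROOT is illustrative
# clean_example_tool
```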
@@ -211,6 +231,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 6. Test with `MO_DRY_RUN=1` first

 ### Modifying Go Tools

 1. Navigate to `cmd/<tool>/`
 2. Make changes to Go files
 3. Test with `go run .` or `make build && ./bin/<tool>-go`
@@ -218,6 +239,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 5. Check integration: `./mole <command>`

 ### Debugging Issues

 1. Enable debug mode: `MO_DEBUG=1 ./mole clean`
 2. Check logs for error messages
 3. Verify sudo permissions: `sudo -n true` or `./mole touchid`
@@ -229,15 +251,18 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## Linting and Quality

 ### Shell Script Linting

 - **Tool**: shellcheck with custom `.shellcheckrc`
 - **Disabled rules**: SC2155, SC2034, SC2059, SC1091, SC2038
 - **Command**: `shellcheck --rcfile .shellcheckrc lib/**/*.sh bin/**/*.sh`

 ### Go Code Quality

 - **Tools**: `go vet`, `go fmt`, `go test`
 - **Command**: `go vet ./cmd/... && go test ./cmd/...`

 ### CI/CD Pipeline

 - **Triggers**: Push/PR to main, dev branches
 - **Platforms**: macOS 14, macOS 15
 - **Tools**: bats-core, shellcheck, Go 1.24.6
@@ -248,12 +273,14 @@ mole/ # Main CLI entrypoint (menu + routing)
 ## File Organization Patterns

 ### Shell Modules

 - Entry scripts in `bin/` should be thin wrappers
 - Reusable logic goes in `lib/`
 - Core utilities in `lib/core/`
 - Feature-specific modules in `lib/clean/`, `lib/ui/`, etc.

 ### Go Packages

 - Each tool in its own `cmd/<tool>/` directory
 - Main entry point in `main.go`
 - Use standard Go project layout
@@ -266,6 +293,7 @@ mole/ # Main CLI entrypoint (menu + routing)
 ### Use gh CLI for All GitHub Work

 **Preferred Commands**:

 ```bash
 # Issues
 gh issue view 123                # View issue details
@@ -286,6 +314,7 @@ gh api repos/owner/repo/issues # Raw API access
 ```

 **NEVER use raw git commands for GitHub operations** when `gh` is available:

 - ❌ `git log --oneline origin/main..HEAD` → ✅ `gh pr view`
 - ❌ `git remote get-url origin` → ✅ `gh repo view`
 - ❌ Manual GitHub API curl commands → ✅ `gh api`
@@ -293,12 +322,14 @@ gh api repos/owner/repo/issues # Raw API access
 ## Error Handling Patterns

 ### Shell Scripts

 - Use `set -euo pipefail` for strict error handling
 - Check command exit codes: `if command; then ...`
 - Provide meaningful error messages with `log_error`
 - Use cleanup traps for temporary resources

 ### Go Code

 - Never ignore errors: `if err != nil { return err }`
 - Use structured error messages
 - Handle context cancellation appropriately
@@ -309,12 +340,14 @@ gh api repos/owner/repo/issues # Raw API access
 ## Performance Considerations

 ### Shell Optimization

 - Use built-in shell operations over external commands
 - Prefer `find -delete` over `-exec rm`
 - Minimize subprocess creation
 - Use appropriate timeout mechanisms

 ### Go Optimization

 - Use concurrency for I/O-bound operations
 - Implement proper caching for expensive operations
 - Profile memory usage in scanning operations
@@ -325,12 +358,14 @@ gh api repos/owner/repo/issues # Raw API access
 ## Security Best Practices

 ### Path Validation

 - Always validate user-provided paths
 - Check against protection lists before operations
 - Use absolute paths to prevent directory traversal
 - Implement proper sandboxing for destructive operations

 ### Permission Management

 - Request sudo only when necessary
 - Use `sudo -n true` to check sudo availability
 - Implement proper Touch ID integration
@@ -342,7 +342,11 @@ safe_clean() {
         valid_targets+=("$target")
     done

+    if [[ ${#valid_targets[@]} -gt 0 ]]; then
     targets=("${valid_targets[@]}")
+    else
+        targets=()
+    fi
     if [[ ${#targets[@]} -eq 0 ]]; then
         return 0
     fi
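The guard above matters on the Bash 3.2 that ships with macOS: under `set -u`, expanding an empty array is treated as referencing an unset name and aborts the script. A minimal sketch of the failure mode (mine, not from the repo):

```bash
#!/bin/bash
# Sketch: why empty-array expansion needs a guard under `set -u` on Bash 3.2.
set -u

empty=()

# On Bash 3.2 this aborts with "unbound variable", so it is left commented out:
# copy=("${empty[@]}")

# Guarded form, mirroring the pattern added in the diff:
if [[ ${#empty[@]} -gt 0 ]]; then
    copy=("${empty[@]}")
else
    copy=()
fi
echo "copied ${#copy[@]} entries"
```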
bin/purge.sh: 13 lines changed

@@ -130,15 +130,20 @@ perform_purge() {

         # Show title on first line, spinner and scanning info on second line
         if [[ -n "$display_path" ]]; then
-            printf '\r%s\n%s %sScanning %s\033[K\033[A' \
-                "${PURPLE_BOLD}Purge Project Artifacts${NC}" \
+            # Line 1: Move to start, clear, print title
+            printf '\r\033[K%s\n' "${PURPLE_BOLD}Purge Project Artifacts${NC}"
+            # Line 2: Move to start, clear, print scanning info
+            printf '\r\033[K%s %sScanning %s' \
                 "${BLUE}${spin_char}${NC}" \
                 "${GRAY}" "$display_path"
+            # Move up THEN to start (important order!)
+            printf '\033[A\r'
         else
-            printf '\r%s\n%s %sScanning...\033[K\033[A' \
-                "${PURPLE_BOLD}Purge Project Artifacts${NC}" \
+            printf '\r\033[K%s\n' "${PURPLE_BOLD}Purge Project Artifacts${NC}"
+            printf '\r\033[K%s %sScanning...' \
                 "${BLUE}${spin_char}${NC}" \
                 "${GRAY}"
+            printf '\033[A\r'
         fi

         sleep 0.05
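A compact standalone sketch (mine, not from the repo) of the two-line redraw pattern the new printf sequence uses: clear and reprint each line with `\r\033[K`, then jump back up with `\033[A` so the next tick overwrites in place.

```bash
#!/bin/bash
# Sketch of a two-line spinner redraw, illustrative only.
for spin_char in '|' '/' '-' '+'; do
    printf '\r\033[K%s\n' "Purge Project Artifacts"   # line 1: clear, print title
    printf '\r\033[K%s Scanning...' "$spin_char"      # line 2: clear, print status
    printf '\033[A\r'                                 # move back up for the next tick
    sleep 0.1
done
printf '\n\n'
```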
@@ -135,9 +135,11 @@ scan_installed_apps() {
     ) &
     pids+=($!)
     debug_log "Waiting for ${#pids[@]} background processes: ${pids[*]}"
+    if [[ ${#pids[@]} -gt 0 ]]; then
     for pid in "${pids[@]}"; do
         wait "$pid" 2> /dev/null || true
     done
+    fi
     debug_log "All background processes completed"
     cat "$scan_tmp_dir"/*.txt >> "$installed_bundles" 2> /dev/null || true
     safe_remove "$scan_tmp_dir" true
@@ -279,6 +281,7 @@ clean_orphaned_app_data() {
         for pat in "${pattern_arr[@]}"; do
             file_patterns+=("$base_path/$pat")
         done
+        if [[ ${#file_patterns[@]} -gt 0 ]]; then
         for item_path in "${file_patterns[@]}"; do
             local iteration_count=0
             for match in $item_path; do
@@ -302,6 +305,7 @@ clean_orphaned_app_data() {
                 fi
             done
         done
+        fi
     done
     stop_section_spinner
     if [[ $orphaned_count -gt 0 ]]; then
@@ -75,6 +75,41 @@ clean_dev_rust() {
     safe_clean ~/.cargo/git/* "Cargo git cache"
     safe_clean ~/.rustup/downloads/* "Rust downloads cache"
 }

+# Helper: Check for multiple versions in a directory.
+# Args: $1=directory, $2=tool_name, $3+=additional_lines
+check_multiple_versions() {
+    local dir="$1"
+    local tool_name="$2"
+    shift 2
+    local -a additional_lines=("$@")
+
+    if [[ ! -d "$dir" ]]; then
+        return 0
+    fi
+
+    local count
+    count=$(find "$dir" -mindepth 1 -maxdepth 1 -type d 2> /dev/null | wc -l | tr -d ' ')
+
+    if [[ "$count" -gt 1 ]]; then
+        note_activity
+        echo -e "  Found ${GREEN}${count}${NC} ${tool_name}"
+        for line in "${additional_lines[@]}"; do
+            echo -e "  $line"
+        done
+    fi
+}
+
+# Check for multiple Rust toolchains.
+check_rust_toolchains() {
+    command -v rustup > /dev/null 2>&1 || return 0
+
+    check_multiple_versions \
+        "$HOME/.rustup/toolchains" \
+        "Rust toolchains" \
+        "You can list them with: ${GRAY}rustup toolchain list${NC}" \
+        "Remove unused with: ${GRAY}rustup toolchain uninstall <name>${NC}"
+}
 # Docker caches (guarded by daemon check).
 clean_dev_docker() {
     if command -v docker > /dev/null 2>&1; then
@@ -130,19 +165,13 @@ clean_dev_frontend() {
     safe_clean ~/.cache/eslint/* "ESLint cache"
     safe_clean ~/.cache/prettier/* "Prettier cache"
 }
-# Mobile dev caches (can be large).
 # Check for multiple Android NDK versions.
 check_android_ndk() {
-    local ndk_dir="$HOME/Library/Android/sdk/ndk"
-    if [[ -d "$ndk_dir" ]]; then
-        local count
-        count=$(find "$ndk_dir" -mindepth 1 -maxdepth 1 -type d 2> /dev/null | wc -l | tr -d ' ')
-        if [[ "$count" -gt 1 ]]; then
-            note_activity
-            echo -e "  Found ${GREEN}${count}${NC} Android NDK versions"
-            echo -e "  You can delete unused versions manually: ${ndk_dir}"
-        fi
-    fi
+    check_multiple_versions \
+        "$HOME/Library/Android/sdk/ndk" \
+        "Android NDK versions" \
+        "Manage in: ${GRAY}Android Studio → SDK Manager${NC}" \
+        "Or manually at: ${GRAY}\$HOME/Library/Android/sdk/ndk${NC}"
 }

 clean_dev_mobile() {
@@ -233,6 +262,23 @@ clean_dev_misc() {
     safe_clean ~/Library/Caches/SentryCrash/* "Sentry crash reports"
     safe_clean ~/Library/Caches/KSCrash/* "KSCrash reports"
     safe_clean ~/Library/Caches/com.crashlytics.data/* "Crashlytics data"
+    safe_clean ~/Library/Application\ Support/Antigravity/Cache/* "Antigravity cache"
+    safe_clean ~/Library/Application\ Support/Antigravity/Code\ Cache/* "Antigravity code cache"
+    safe_clean ~/Library/Application\ Support/Antigravity/GPUCache/* "Antigravity GPU cache"
+    safe_clean ~/Library/Application\ Support/Antigravity/DawnGraphiteCache/* "Antigravity Dawn cache"
+    safe_clean ~/Library/Application\ Support/Antigravity/DawnWebGPUCache/* "Antigravity WebGPU cache"
+    # Filo (Electron)
+    safe_clean ~/Library/Application\ Support/Filo/production/Cache/* "Filo cache"
+    safe_clean ~/Library/Application\ Support/Filo/production/Code\ Cache/* "Filo code cache"
+    safe_clean ~/Library/Application\ Support/Filo/production/GPUCache/* "Filo GPU cache"
+    safe_clean ~/Library/Application\ Support/Filo/production/DawnGraphiteCache/* "Filo Dawn cache"
+    safe_clean ~/Library/Application\ Support/Filo/production/DawnWebGPUCache/* "Filo WebGPU cache"
+    # Claude (Electron)
+    safe_clean ~/Library/Application\ Support/Claude/Cache/* "Claude cache"
+    safe_clean ~/Library/Application\ Support/Claude/Code\ Cache/* "Claude code cache"
+    safe_clean ~/Library/Application\ Support/Claude/GPUCache/* "Claude GPU cache"
+    safe_clean ~/Library/Application\ Support/Claude/DawnGraphiteCache/* "Claude Dawn cache"
+    safe_clean ~/Library/Application\ Support/Claude/DawnWebGPUCache/* "Claude WebGPU cache"
 }
 # Shell and VCS leftovers.
 clean_dev_shell() {
@@ -256,24 +302,28 @@ clean_sqlite_temp_files() {
     return 0
 }
 # Elixir/Erlang ecosystem.
+# Note: ~/.mix/archives contains installed Mix tools - excluded from cleanup
 clean_dev_elixir() {
-    # safe_clean ~/.mix/archives/* "Mix cache"
     safe_clean ~/.hex/cache/* "Hex cache"
 }
 # Haskell ecosystem.
+# Note: ~/.stack/programs contains Stack-installed GHC compilers - excluded from cleanup
 clean_dev_haskell() {
     safe_clean ~/.cabal/packages/* "Cabal install cache"
-    # safe_clean ~/.stack/programs/* "Stack cache"
 }
 # OCaml ecosystem.
 clean_dev_ocaml() {
     safe_clean ~/.opam/download-cache/* "Opam cache"
 }
 # Editor caches.
+# Note: ~/Library/Application Support/Code/User/workspaceStorage contains workspace settings - excluded from cleanup
 clean_dev_editors() {
     safe_clean ~/Library/Caches/com.microsoft.VSCode/Cache/* "VS Code cached data"
     safe_clean ~/Library/Application\ Support/Code/CachedData/* "VS Code cached data"
-    # safe_clean ~/Library/Application\ Support/Code/User/workspaceStorage/* "VS Code workspace storage"
+    safe_clean ~/Library/Application\ Support/Code/DawnGraphiteCache/* "VS Code Dawn cache"
+    safe_clean ~/Library/Application\ Support/Code/DawnWebGPUCache/* "VS Code WebGPU cache"
+    safe_clean ~/Library/Application\ Support/Code/GPUCache/* "VS Code GPU cache"
+    safe_clean ~/Library/Application\ Support/Code/CachedExtensionVSIXs/* "VS Code extension cache"
    safe_clean ~/Library/Caches/Zed/* "Zed cache"
 }
 # Main developer tools cleanup sequence.
@@ -284,6 +334,7 @@ clean_developer_tools() {
     clean_dev_python
     clean_dev_go
     clean_dev_rust
+    check_rust_toolchains
     clean_dev_docker
     clean_dev_cloud
     clean_dev_nix
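The new `check_multiple_versions` helper factors out the "count subdirectories and print hints" pattern that `check_rust_toolchains` and the rewritten `check_android_ndk` now share. A hypothetical call site shaped the same way (the directory and messages below are my own illustration, not something this commit adds):

```bash
# Hypothetical example mirroring check_rust_toolchains above; illustrative only.
check_example_toolchains() {
    check_multiple_versions \
        "$HOME/.example-tool/versions" \
        "Example tool versions" \
        "List them with: ${GRAY}example-tool list${NC}"
}
```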
@@ -65,6 +65,14 @@ readonly PURGE_CONFIG_FILE="$HOME/.config/mole/purge_paths"
 PURGE_SEARCH_PATHS=()

 # Project indicators for container detection.
+# Monorepo indicators (higher priority)
+readonly MONOREPO_INDICATORS=(
+    "lerna.json"
+    "pnpm-workspace.yaml"
+    "nx.json"
+    "rush.json"
+)
+
 readonly PROJECT_INDICATORS=(
     "package.json"
     "Cargo.toml"
@@ -348,7 +356,7 @@ scan_purge_targets() {
     # Escape regex special characters in target names for fd patterns
     local escaped_targets=()
     for target in "${PURGE_TARGETS[@]}"; do
-        escaped_targets+=("$(printf '%s' "$target" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g')")
+        escaped_targets+=("^$(printf '%s' "$target" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g')\$")
    done
     local pattern="($(
         IFS='|'
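The one-line change above wraps each escaped target in `^…$` so the generated alternation only matches whole directory names. A small sketch (mine, not from the repo) showing the effect, using plain `grep -E` as a stand-in for the fd pattern:

```bash
#!/bin/bash
# Sketch: anchoring escaped target names so "target" does not also match "subtarget".
escape() { printf '%s' "$1" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g'; }

unanchored="($(escape "target")|$(escape "node_modules"))"
anchored="(^$(escape "target")\$|^$(escape "node_modules")\$)"

printf '%s\n' "subtarget" "target" "node_modules" | grep -E "$unanchored"   # matches all three
printf '%s\n' "subtarget" "target" "node_modules" | grep -E "$anchored"     # matches only exact names
```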
@@ -762,6 +770,18 @@ clean_project_artifacts() {
     for pid in "${scan_pids[@]+"${scan_pids[@]}"}"; do
         wait "$pid" 2> /dev/null || true
     done
+
+    # Stop the scanning monitor (removes purge_scanning file to signal completion)
+    local stats_dir="${XDG_CACHE_HOME:-$HOME/.cache}/mole"
+    rm -f "$stats_dir/purge_scanning" 2> /dev/null || true
+
+    # Give monitor process time to exit and clear its output
+    if [[ -t 1 ]]; then
+        sleep 0.2
+        # Clear the scanning line but preserve the title
+        printf '\n\033[K'
+    fi
+
     # Collect all results
     for scan_output in "${scan_temps[@]+"${scan_temps[@]}"}"; do
         if [[ -f "$scan_output" ]]; then
@@ -803,27 +823,57 @@ clean_project_artifacts() {
 # Strategy: Find the nearest ancestor directory containing a project indicator file
 get_project_name() {
     local path="$1"
-    local artifact_name
-    artifact_name=$(basename "$path")

-    # Start from the parent of the artifact and walk up
     local current_dir
     current_dir=$(dirname "$path")
+    local monorepo_root=""
+    local project_root=""
+
+    # Single pass: check both monorepo and project indicators
     while [[ "$current_dir" != "/" && "$current_dir" != "$HOME" && -n "$current_dir" ]]; do
-        # Check if current directory contains any project indicator
-        for indicator in "${PROJECT_INDICATORS[@]}"; do
+        # First check for monorepo indicators (higher priority)
+        if [[ -z "$monorepo_root" ]]; then
+            for indicator in "${MONOREPO_INDICATORS[@]}"; do
                 if [[ -e "$current_dir/$indicator" ]]; then
-                # Found a project root, return its name
-                basename "$current_dir"
-                return 0
+                    monorepo_root="$current_dir"
+                    break
                 fi
             done
-        # Move up one level
+        fi
+
+        # Then check for project indicators (save first match)
+        if [[ -z "$project_root" ]]; then
+            for indicator in "${PROJECT_INDICATORS[@]}"; do
+                if [[ -e "$current_dir/$indicator" ]]; then
+                    project_root="$current_dir"
+                    break
+                fi
+            done
+        fi
+
+        # If we found monorepo, we can stop (monorepo always wins)
+        if [[ -n "$monorepo_root" ]]; then
+            break
+        fi
+
+        # If we found project but still checking for monorepo above
+        # (only stop if we're beyond reasonable depth)
+        local depth=$(echo "${current_dir#"$HOME"}" | LC_ALL=C tr -cd '/' | wc -c | tr -d ' ')
+        if [[ -n "$project_root" && $depth -lt 2 ]]; then
+            break
+        fi
+
         current_dir=$(dirname "$current_dir")
     done

-    # Fallback: try the old logic (first directory under search root)
+    # Determine result: monorepo > project > fallback
+    local result=""
+    if [[ -n "$monorepo_root" ]]; then
+        result=$(basename "$monorepo_root")
+    elif [[ -n "$project_root" ]]; then
+        result=$(basename "$project_root")
+    else
+        # Fallback: first directory under search root
     local search_roots=()
     if [[ ${#PURGE_SEARCH_PATHS[@]} -gt 0 ]]; then
         search_roots=("${PURGE_SEARCH_PATHS[@]}")
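Worked example of the resolution order the new walk implements, "monorepo > project > fallback"; the paths and files below are a hypothetical fixture, not data from the repo.

```bash
#!/bin/bash
# Hypothetical fixture illustrating the resolution order in get_project_name above.
root=$(mktemp -d)
mkdir -p "$root/shop/packages/web/node_modules"
touch "$root/shop/pnpm-workspace.yaml"          # monorepo indicator
touch "$root/shop/packages/web/package.json"    # plain project indicator

# Walking up from .../web/node_modules, the loop records project_root=.../packages/web
# first, keeps climbing, then finds pnpm-workspace.yaml in .../shop and stops there,
# so "shop" (the monorepo root) is reported as the project name.
rm -rf "$root"
```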
@@ -834,40 +884,143 @@ clean_project_artifacts() {
         root="${root%/}"
         if [[ -n "$root" && "$path" == "$root/"* ]]; then
             local relative_path="${path#"$root"/}"
-            echo "$relative_path" | cut -d'/' -f1
-            return 0
+            result=$(echo "$relative_path" | cut -d'/' -f1)
+            break
         fi
     done

     # Final fallback: use grandparent directory
-    dirname "$(dirname "$path")" | xargs basename
+        if [[ -z "$result" ]]; then
+            result=$(dirname "$(dirname "$path")" | xargs basename)
+        fi
+    fi
+
+    echo "$result"
+}
+
+# Helper to get project path (more complete than just project name)
+# For ~/www/pake/src-tauri/target -> returns "~/www/pake"
+# For ~/work/code/MyProject/node_modules -> returns "~/work/code/MyProject"
+# Shows the full path relative to HOME with ~ prefix for better clarity
+get_project_path() {
+    local path="$1"
+
+    local current_dir
+    current_dir=$(dirname "$path")
+    local monorepo_root=""
+    local project_root=""
+
+    # Single pass: check both monorepo and project indicators
+    while [[ "$current_dir" != "/" && "$current_dir" != "$HOME" && -n "$current_dir" ]]; do
+        # First check for monorepo indicators (higher priority)
+        if [[ -z "$monorepo_root" ]]; then
+            for indicator in "${MONOREPO_INDICATORS[@]}"; do
+                if [[ -e "$current_dir/$indicator" ]]; then
+                    monorepo_root="$current_dir"
+                    break
+                fi
+            done
+        fi
+
+        # Then check for project indicators (save first match)
+        if [[ -z "$project_root" ]]; then
+            for indicator in "${PROJECT_INDICATORS[@]}"; do
+                if [[ -e "$current_dir/$indicator" ]]; then
+                    project_root="$current_dir"
+                    break
+                fi
+            done
+        fi
+
+        # If we found monorepo, we can stop (monorepo always wins)
+        if [[ -n "$monorepo_root" ]]; then
+            break
+        fi
+
+        # If we found project but still checking for monorepo above
+        local depth=$(echo "${current_dir#"$HOME"}" | LC_ALL=C tr -cd '/' | wc -c | tr -d ' ')
+        if [[ -n "$project_root" && $depth -lt 2 ]]; then
+            break
+        fi
+
+        current_dir=$(dirname "$current_dir")
+    done
+
+    # Determine result: monorepo > project > fallback
+    local result=""
+    if [[ -n "$monorepo_root" ]]; then
+        result="$monorepo_root"
+    elif [[ -n "$project_root" ]]; then
+        result="$project_root"
+    else
+        # Fallback: use parent directory of artifact
+        result=$(dirname "$path")
+    fi
+
+    # Convert to ~ format for cleaner display
+    result="${result/#$HOME/~}"
+    echo "$result"
+}
+
+# Helper to get artifact display name
+# For duplicate artifact names within same project, include parent directory for context
+get_artifact_display_name() {
+    local path="$1"
+    local artifact_name=$(basename "$path")
+    local project_name=$(get_project_name "$path")
+    local parent_name=$(basename "$(dirname "$path")")
+
+    # Check if there are other items with same artifact name AND same project
+    local has_duplicate=false
+    for other_item in "${safe_to_clean[@]}"; do
+        if [[ "$other_item" != "$path" && "$(basename "$other_item")" == "$artifact_name" ]]; then
+            # Same artifact name, check if same project
+            if [[ "$(get_project_name "$other_item")" == "$project_name" ]]; then
+                has_duplicate=true
+                break
+            fi
+        fi
+    done
+
+    # If duplicate exists in same project and parent is not the project itself, show parent/artifact
+    if [[ "$has_duplicate" == "true" && "$parent_name" != "$project_name" && "$parent_name" != "." && "$parent_name" != "/" ]]; then
+        echo "$parent_name/$artifact_name"
+    else
+        echo "$artifact_name"
+    fi
 }
 # Format display with alignment (like app_selector)
 format_purge_display() {
-    local project_name="$1"
+    local project_path="$1"
     local artifact_type="$2"
     local size_str="$3"
     # Terminal width for alignment
     local terminal_width=$(tput cols 2> /dev/null || echo 80)
-    local fixed_width=28 # Reserve for type and size
+    local fixed_width=28 # Reserve for size and artifact type (9 + 3 + 16)
     local available_width=$((terminal_width - fixed_width))
-    # Bounds: 24-35 chars for project name
-    [[ $available_width -lt 24 ]] && available_width=24
-    [[ $available_width -gt 35 ]] && available_width=35
-    # Truncate project name if needed
-    local truncated_name=$(truncate_by_display_width "$project_name" "$available_width")
-    local current_width=$(get_display_width "$truncated_name")
-    local char_count=${#truncated_name}
+    # Bounds: 30-50 chars for project path (increased to accommodate full paths)
+    [[ $available_width -lt 30 ]] && available_width=30
+    [[ $available_width -gt 50 ]] && available_width=50
+    # Truncate project path if needed
+    local truncated_path=$(truncate_by_display_width "$project_path" "$available_width")
+    local current_width=$(get_display_width "$truncated_path")
+    local char_count=${#truncated_path}
     local padding=$((available_width - current_width))
     local printf_width=$((char_count + padding))
-    # Format: "project_name size | artifact_type"
-    printf "%-*s %9s | %-13s" "$printf_width" "$truncated_name" "$size_str" "$artifact_type"
+    # Format: "project_path size | artifact_type"
+    printf "%-*s %9s | %-13s" "$printf_width" "$truncated_path" "$size_str" "$artifact_type"
 }
 # Build menu options - one line per artifact
 for item in "${safe_to_clean[@]}"; do
-    local project_name=$(get_project_name "$item")
-    local artifact_type=$(basename "$item")
+    local project_path=$(get_project_path "$item")
+    local artifact_type=$(get_artifact_display_name "$item")
     local size_kb=$(get_dir_size_kb "$item")
+
+    # Skip empty directories (0 bytes)
+    if [[ $size_kb -eq 0 ]]; then
+        continue
+    fi
+
     local size_human=$(bytes_to_human "$((size_kb * 1024))")
     # Check if recent
     local is_recent=false
@@ -877,11 +1030,48 @@ clean_project_artifacts() {
             break
         fi
     done
-    menu_options+=("$(format_purge_display "$project_name" "$artifact_type" "$size_human")")
+    menu_options+=("$(format_purge_display "$project_path" "$artifact_type" "$size_human")")
     item_paths+=("$item")
     item_sizes+=("$size_kb")
     item_recent_flags+=("$is_recent")
 done
+
+# Sort by size descending (largest first) - requested in issue #311
+# Use external sort for better performance with many items
+if [[ ${#item_sizes[@]} -gt 0 ]]; then
+    # Create temporary file with index|size pairs
+    local sort_temp
+    sort_temp=$(mktemp)
+    for ((i = 0; i < ${#item_sizes[@]}; i++)); do
+        printf '%d|%d\n' "$i" "${item_sizes[i]}"
+    done > "$sort_temp"
+
+    # Sort by size (field 2) descending, extract indices
+    local -a sorted_indices=()
+    while IFS='|' read -r idx size; do
+        sorted_indices+=("$idx")
+    done < <(sort -t'|' -k2,2nr "$sort_temp")
+    rm -f "$sort_temp"
+
+    # Rebuild arrays in sorted order
+    local -a sorted_menu_options=()
+    local -a sorted_item_paths=()
+    local -a sorted_item_sizes=()
+    local -a sorted_item_recent_flags=()
+
+    for idx in "${sorted_indices[@]}"; do
+        sorted_menu_options+=("${menu_options[idx]}")
+        sorted_item_paths+=("${item_paths[idx]}")
+        sorted_item_sizes+=("${item_sizes[idx]}")
+        sorted_item_recent_flags+=("${item_recent_flags[idx]}")
+    done
+
+    # Replace original arrays with sorted versions
+    menu_options=("${sorted_menu_options[@]}")
+    item_paths=("${sorted_item_paths[@]}")
+    item_sizes=("${sorted_item_sizes[@]}")
+    item_recent_flags=("${sorted_item_recent_flags[@]}")
+fi
 if [[ -t 1 ]]; then
     stop_inline_spinner
 fi
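The size sort keeps four parallel arrays aligned by sorting only an index column (a decorate-sort-undecorate pass through `sort`), which avoids comparing inside Bash. A standalone sketch of the same pattern (mine, not from the repo):

```bash
#!/bin/bash
# Sketch: the "index|size" decorate-sort-undecorate pattern used above.
sizes=(120 4096 8)
names=(a b c)

tmp=$(mktemp)
for ((i = 0; i < ${#sizes[@]}; i++)); do
    printf '%d|%d\n' "$i" "${sizes[i]}"
done > "$tmp"

sorted=()
while IFS='|' read -r idx _; do
    sorted+=("${names[idx]}")
done < <(sort -t'|' -k2,2nr "$tmp")
rm -f "$tmp"

printf '%s\n' "${sorted[@]}"   # prints: b a c (largest first)
```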
@@ -925,7 +1115,7 @@ clean_project_artifacts() {
     for idx in "${selected_indices[@]}"; do
         local item_path="${item_paths[idx]}"
         local artifact_type=$(basename "$item_path")
-        local project_name=$(get_project_name "$item_path")
+        local project_path=$(get_project_path "$item_path")
         local size_kb="${item_sizes[idx]}"
         local size_human=$(bytes_to_human "$((size_kb * 1024))")
         # Safety checks
@@ -933,7 +1123,7 @@ clean_project_artifacts() {
             continue
         fi
         if [[ -t 1 ]]; then
-            start_inline_spinner "Cleaning $project_name/$artifact_type..."
+            start_inline_spinner "Cleaning $project_path/$artifact_type..."
         fi
         if [[ -e "$item_path" ]]; then
             safe_remove "$item_path" true
@@ -945,7 +1135,7 @@ clean_project_artifacts() {
         fi
         if [[ -t 1 ]]; then
             stop_inline_spinner
-            echo -e "${GREEN}${ICON_SUCCESS}${NC} $project_name - $artifact_type ${GREEN}($size_human)${NC}"
+            echo -e "${GREEN}${ICON_SUCCESS}${NC} $project_path - $artifact_type ${GREEN}($size_human)${NC}"
         fi
     done
     # Update count
@@ -22,7 +22,7 @@ clean_empty_library_items() {
         return 0
     fi

-    # 1. Clean top-level empty directories in Library
+    # 1. Clean top-level empty directories and files in Library
     local -a empty_dirs=()
     while IFS= read -r -d '' dir; do
         [[ -d "$dir" ]] && empty_dirs+=("$dir")
@@ -32,6 +32,24 @@ clean_empty_library_items() {
         safe_clean "${empty_dirs[@]}" "Empty Library folders"
     fi
+
+    # Clean empty files in Library root (skipping .localized and other sentinels)
+    local -a empty_files=()
+    while IFS= read -r -d '' file; do
+        [[ -f "$file" ]] || continue
+        # Protect .localized and potential system sentinels
+        if [[ "$(basename "$file")" == ".localized" ]]; then
+            continue
+        fi
+        if is_path_whitelisted "$file"; then
+            continue
+        fi
+        empty_files+=("$file")
+    done < <(find "$HOME/Library" -mindepth 1 -maxdepth 1 -type f -empty -print0 2> /dev/null)
+
+    if [[ ${#empty_files[@]} -gt 0 ]]; then
+        safe_clean "${empty_files[@]}" "Empty Library files"
+    fi
+
     # 2. Clean empty subdirectories in Application Support and other key locations
     # Iteratively remove empty directories until no more are found
     local -a key_locations=(
@@ -99,91 +99,119 @@ update_via_homebrew() {
     rm -f "$HOME/.cache/mole/version_check" "$HOME/.cache/mole/update_message" 2> /dev/null || true
 }
+
+# Get Homebrew cask name for an application bundle
+get_brew_cask_name() {
+    local app_path="$1"
+    [[ -z "$app_path" || ! -d "$app_path" ]] && return 1
+
+    # Check if brew command exists
+    command -v brew > /dev/null 2>&1 || return 1
+
+    local app_bundle_name
+    app_bundle_name=$(basename "$app_path")
+
+    # 1. Search in Homebrew Caskroom for the app bundle (most reliable for name mismatches)
+    # Checks /opt/homebrew (Apple Silicon) and /usr/local (Intel)
+    # Note: Modern Homebrew uses symlinks in Caskroom, not directories
+    local cask_match
+    for room in "/opt/homebrew/Caskroom" "/usr/local/Caskroom"; do
+        [[ -d "$room" ]] || continue
+        # Path is room/token/version/App.app (can be directory or symlink)
+        cask_match=$(find "$room" -maxdepth 3 -name "$app_bundle_name" 2> /dev/null | head -1 || echo "")
+        if [[ -n "$cask_match" ]]; then
+            local relative="${cask_match#"$room"/}"
+            echo "${relative%%/*}"
+            return 0
+        fi
+    done
+
+    # 2. Check for symlink from Caskroom
+    if [[ -L "$app_path" ]]; then
+        local target
+        target=$(readlink "$app_path")
+        for room in "/opt/homebrew/Caskroom" "/usr/local/Caskroom"; do
+            if [[ "$target" == "$room/"* ]]; then
+                local relative="${target#"$room"/}"
+                echo "${relative%%/*}"
+                return 0
+            fi
+        done
+    fi
+
+    # 3. Fallback: Direct list check (handles some cases where app is moved)
+    local app_name_only="${app_bundle_name%.app}"
+    local cask_name
+    cask_name=$(brew list --cask 2> /dev/null | grep -Fx "$(echo "$app_name_only" | LC_ALL=C tr '[:upper:]' '[:lower:]')" || echo "")
+    if [[ -n "$cask_name" ]]; then
+        if brew info --cask "$cask_name" 2> /dev/null | grep -q "$app_path"; then
+            echo "$cask_name"
+            return 0
+        fi
+    fi
+
+    return 1
+}
+
 # Remove applications from Dock
 remove_apps_from_dock() {
     if [[ $# -eq 0 ]]; then
         return 0
     fi

-    local plist="$HOME/Library/Preferences/com.apple.dock.plist"
-    [[ -f "$plist" ]] || return 0
+    local -a targets=()
+    for arg in "$@"; do
+        [[ -n "$arg" ]] && targets+=("$arg")
+    done

-    if ! command -v python3 > /dev/null 2>&1; then
+    if [[ ${#targets[@]} -eq 0 ]]; then
         return 0
     fi

-    # Prune dock entries using Python helper
-    python3 - "$@" << 'PY' 2> /dev/null || return 0
-import os
-import plistlib
-import subprocess
-import sys
-import urllib.parse
-
-plist_path = os.path.expanduser('~/Library/Preferences/com.apple.dock.plist')
-if not os.path.exists(plist_path):
-    sys.exit(0)
-
-def normalise(path):
-    if not path:
-        return ''
-    return os.path.normpath(os.path.realpath(path.rstrip('/')))
-
-targets = {normalise(arg) for arg in sys.argv[1:] if arg}
-targets = {t for t in targets if t}
-if not targets:
-    sys.exit(0)
-
-with open(plist_path, 'rb') as fh:
-    try:
-        data = plistlib.load(fh)
-    except Exception:
-        sys.exit(0)
-
-apps = data.get('persistent-apps')
-if not isinstance(apps, list):
-    sys.exit(0)
-
-changed = False
-filtered = []
-for item in apps:
-    try:
-        url = item['tile-data']['file-data']['_CFURLString']
-    except (KeyError, TypeError):
-        filtered.append(item)
-        continue
-
-    if not isinstance(url, str):
-        filtered.append(item)
-        continue
-
-    parsed = urllib.parse.urlparse(url)
-    path = urllib.parse.unquote(parsed.path or '')
-    if not path:
-        filtered.append(item)
-        continue
-
-    candidate = normalise(path)
-    if any(candidate == t or candidate.startswith(t + os.sep) for t in targets):
-        changed = True
-        continue
-
-    filtered.append(item)
-
-if not changed:
-    sys.exit(0)
-
-data['persistent-apps'] = filtered
-with open(plist_path, 'wb') as fh:
-    try:
-        plistlib.dump(data, fh, fmt=plistlib.FMT_BINARY)
-    except Exception:
-        plistlib.dump(data, fh)
-
-# Restart Dock to apply changes
-try:
-    subprocess.run(['killall', 'Dock'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False)
-except Exception:
-    pass
-PY
+    # Use pure shell (PlistBuddy) to remove items from Dock
+    # This avoids dependencies on Python 3 or osascript (AppleScript)
+    local plist="$HOME/Library/Preferences/com.apple.dock.plist"
+    [[ -f "$plist" ]] || return 0
+
+    command -v PlistBuddy > /dev/null 2>&1 || return 0
+
+    local changed=false
+    for target in "${targets[@]}"; do
+        local app_path="$target"
+        local app_name
+        app_name=$(basename "$app_path" .app)
+
+        # Normalize path for comparison - realpath might fail if app is already deleted
+        local full_path
+        full_path=$(cd "$(dirname "$app_path")" 2> /dev/null && pwd || echo "")
+        [[ -n "$full_path" ]] && full_path="$full_path/$(basename "$app_path")"
+
+        # Find the index of the app in persistent-apps
+        local i=0
+        while true; do
+            local label
+            label=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-label" "$plist" 2> /dev/null || echo "")
+            [[ -z "$label" ]] && break
+
+            local url
+            url=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-data:_CFURLString" "$plist" 2> /dev/null || echo "")
+
+            # Match by label or by path (parsing the CFURLString which is usually a file:// URL)
+            if [[ "$label" == "$app_name" ]] || [[ "$url" == *"$app_name.app"* ]]; then
+                # Double check path if possible to avoid false positives for similarly named apps
+                if [[ -n "$full_path" && "$url" == *"$full_path"* ]] || [[ "$label" == "$app_name" ]]; then
+                    if /usr/libexec/PlistBuddy -c "Delete :persistent-apps:$i" "$plist" 2> /dev/null; then
+                        changed=true
+                        # After deletion, current index i now points to the next item
+                        continue
+                    fi
+                fi
+            fi
+            ((i++))
+        done
+    done
+
+    if [[ "$changed" == "true" ]]; then
+        # Restart Dock to apply changes from the plist
+        killall Dock 2> /dev/null || true
+    fi
 }
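For context, a minimal read-only sketch (mine, not part of the commit) of the PlistBuddy loop the new implementation relies on: walk `persistent-apps` by index until `Print` fails, which is also where a matching index would be passed to `Delete`.

```bash
#!/bin/bash
# Sketch: list Dock tiles by walking persistent-apps with PlistBuddy.
# Prints "index<TAB>label" for each tile; purely illustrative.
plist="$HOME/Library/Preferences/com.apple.dock.plist"
i=0
while true; do
    label=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-label" "$plist" 2> /dev/null) || break
    printf '%d\t%s\n' "$i" "$label"
    ((i++))
done
```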
@@ -632,10 +632,29 @@ paginated_multi_select() {
|
|||||||
prev_cursor_pos=$cursor_pos
|
prev_cursor_pos=$cursor_pos
|
||||||
continue # Skip full redraw
|
continue # Skip full redraw
|
||||||
elif [[ $top_index -gt 0 ]]; then
|
elif [[ $top_index -gt 0 ]]; then
|
||||||
|
# Scroll up - redraw visible items only
|
||||||
((top_index--))
|
((top_index--))
|
||||||
|
|
||||||
|
# Redraw all visible items (faster than full screen redraw)
|
||||||
|
local start_idx=$top_index
|
||||||
|
local end_idx=$((top_index + items_per_page - 1))
|
||||||
|
local visible_total=${#view_indices[@]}
|
||||||
|
[[ $end_idx -ge $visible_total ]] && end_idx=$((visible_total - 1))
|
||||||
|
|
||||||
|
for ((i = start_idx; i <= end_idx; i++)); do
|
||||||
|
local row=$((i - start_idx + 3)) # +3 for header
|
||||||
|
printf "\033[%d;1H" "$row" >&2
|
||||||
|
local is_current=false
|
||||||
|
[[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
|
||||||
|
render_item $((i - start_idx)) $is_current
|
||||||
|
done
|
||||||
|
|
||||||
|
# Move cursor to footer
|
||||||
|
printf "\033[%d;1H" "$((items_per_page + 4))" >&2
|
||||||
|
|
||||||
prev_cursor_pos=$cursor_pos
|
prev_cursor_pos=$cursor_pos
|
||||||
prev_top_index=$top_index
|
prev_top_index=$top_index
|
||||||
need_full_redraw=true # Scrolling requires full redraw
|
continue
|
||||||
fi
|
fi
|
||||||
;;
|
;;
|
||||||
"DOWN")
|
"DOWN")
|
||||||
@@ -670,15 +689,34 @@ paginated_multi_select() {
     prev_cursor_pos=$cursor_pos
     continue # Skip full redraw
 elif [[ $((top_index + visible_count)) -lt ${#view_indices[@]} ]]; then
+    # Scroll down - redraw visible items only
     ((top_index++))
     visible_count=$((${#view_indices[@]} - top_index))
     [[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
     if [[ $cursor_pos -ge $visible_count ]]; then
         cursor_pos=$((visible_count - 1))
     fi

+    # Redraw all visible items (faster than full screen redraw)
+    local start_idx=$top_index
+    local end_idx=$((top_index + items_per_page - 1))
+    local visible_total=${#view_indices[@]}
+    [[ $end_idx -ge $visible_total ]] && end_idx=$((visible_total - 1))
+
+    for ((i = start_idx; i <= end_idx; i++)); do
+        local row=$((i - start_idx + 3)) # +3 for header
+        printf "\033[%d;1H" "$row" >&2
+        local is_current=false
+        [[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
+        render_item $((i - start_idx)) $is_current
+    done
+
+    # Move cursor to footer
+    printf "\033[%d;1H" "$((items_per_page + 4))" >&2
+
     prev_cursor_pos=$cursor_pos
     prev_top_index=$top_index
-    need_full_redraw=true # Scrolling requires full redraw
+    continue
 fi
 fi
 fi
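The two scroll hunks above replace the full-screen redraw with targeted repaints: the cursor is moved with the ANSI `ESC[<row>;1H` sequence and only the rows inside the visible window are re-rendered before control jumps back to the input loop via `continue`. A standalone sketch of that pattern, assuming a 2-line header and a hypothetical render_line helper (these are illustrative stand-ins, not the actual paginated_multi_select internals):

#!/usr/bin/env bash
# Sketch: repaint only the visible window after a scroll.
items=(one two three four five six seven eight nine ten)
items_per_page=5
top_index=0
cursor_pos=0

render_line() { # args: screen_row text is_current
    printf '\033[%d;1H\033[K' "$1" >&2   # move to the row, clear it
    if [[ "$3" == true ]]; then
        printf '> %s' "$2" >&2
    else
        printf '  %s' "$2" >&2
    fi
}

redraw_window() {
    local end=$((top_index + items_per_page - 1))
    [[ $end -ge ${#items[@]} ]] && end=$((${#items[@]} - 1))
    local i
    for ((i = top_index; i <= end; i++)); do
        local row=$((i - top_index + 3))   # +3: rows 1-2 hold the header
        local is_current=false
        [[ $((i - top_index)) -eq $cursor_pos ]] && is_current=true
        render_line "$row" "${items[i]}" "$is_current"
    done
    printf '\033[%d;1H' "$((items_per_page + 4))" >&2   # park cursor at the footer
}

redraw_window

Skipping the clear-and-repaint of the whole screen avoids visible flicker when the user holds an arrow key, which is why the `continue # Skip full redraw` branch is kept and the scroll branches now `continue` as well.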
@@ -146,7 +146,29 @@ batch_uninstall_applications() {
     running_apps+=("$app_name")
 fi

-# Sudo needed if bundle owner/dir is not writable or system files exist.
+# Check if it's a Homebrew cask
+local cask_name=""
+cask_name=$(get_brew_cask_name "$app_path" || echo "")
+local is_brew_cask="false"
+[[ -n "$cask_name" ]] && is_brew_cask="true"
+
+# For Homebrew casks, skip detailed file scanning since brew handles it
+if [[ "$is_brew_cask" == "true" ]]; then
+    local app_size_kb=$(get_path_size_kb "$app_path")
+    local total_kb=$app_size_kb
+    ((total_estimated_size += total_kb))
+
+    # Homebrew may need sudo for system-wide installations
+    local needs_sudo=false
+    if [[ "$app_path" == "/Applications/"* ]]; then
+        needs_sudo=true
+        sudo_apps+=("$app_name")
+    fi
+
+    # Store minimal details for Homebrew apps
+    app_details+=("$app_name|$app_path|$bundle_id|$total_kb|||false|$needs_sudo|$is_brew_cask|$cask_name")
+else
+    # For non-Homebrew apps, do full file scanning
     local needs_sudo=false
     local app_owner=$(get_file_owner "$app_path")
     local current_user=$(whoami)
@@ -188,7 +210,8 @@ batch_uninstall_applications() {
     encoded_files=$(printf '%s' "$related_files" | base64 | tr -d '\n')
     local encoded_system_files
     encoded_system_files=$(printf '%s' "$system_files" | base64 | tr -d '\n')
-    app_details+=("$app_name|$app_path|$bundle_id|$total_kb|$encoded_files|$encoded_system_files|$has_sensitive_data|$needs_sudo")
+    app_details+=("$app_name|$app_path|$bundle_id|$total_kb|$encoded_files|$encoded_system_files|$has_sensitive_data|$needs_sudo|$is_brew_cask|$cask_name")
+fi
 done
 if [[ -t 1 ]]; then stop_inline_spinner; fi

@@ -214,12 +237,19 @@ batch_uninstall_applications() {
 fi

 for detail in "${app_details[@]}"; do
-    IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo_flag <<< "$detail"
+    IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo_flag is_brew_cask cask_name <<< "$detail"
-    local related_files=$(decode_file_list "$encoded_files" "$app_name")
-    local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
     local app_size_display=$(bytes_to_human "$((total_kb * 1024))")

-    echo -e "${BLUE}${ICON_CONFIRM}${NC} ${app_name} ${GRAY}(${app_size_display})${NC}"
+    local brew_tag=""
+    [[ "$is_brew_cask" == "true" ]] && brew_tag=" ${CYAN}[Brew]${NC}"
+    echo -e "${BLUE}${ICON_CONFIRM}${NC} ${app_name}${brew_tag} ${GRAY}(${app_size_display})${NC}"
+
+    # For Homebrew apps, [Brew] tag is enough indication
+    # For non-Homebrew apps, show detailed file list
+    if [[ "$is_brew_cask" != "true" ]]; then
+        local related_files=$(decode_file_list "$encoded_files" "$app_name")
+        local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
+
         echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${app_path/$HOME/~}"

         # Show related files (limit to 5).
@@ -250,6 +280,7 @@ batch_uninstall_applications() {
         if [[ $total_hidden -gt 0 ]]; then
             echo -e " ${GRAY} ... and ${total_hidden} more files${NC}"
         fi
+    fi
 done

 # Confirmation before requesting sudo.
@@ -275,7 +306,7 @@ batch_uninstall_applications() {
         return 0
         ;;
     "" | $'\n' | $'\r' | y | Y)
-        printf "\r\033[K" # Clear the prompt line
+        echo "" # Move to next line
         ;;
     *)
         echo ""
@@ -305,19 +336,29 @@ batch_uninstall_applications() {
     sudo_keepalive_pid=$!
 fi

-if [[ -t 1 ]]; then start_inline_spinner "Uninstalling apps..."; fi
+# Perform uninstallations with per-app progress feedback

-# Perform uninstallations (silent mode, show results at end).
-if [[ -t 1 ]]; then stop_inline_spinner; fi
 local success_count=0 failed_count=0
 local -a failed_items=()
 local -a success_items=()
+local current_index=0
 for detail in "${app_details[@]}"; do
-    IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo <<< "$detail"
+    ((current_index++))
+    IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo is_brew_cask cask_name <<< "$detail"
     local related_files=$(decode_file_list "$encoded_files" "$app_name")
     local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
     local reason=""
+
+    # Show progress for current app
+    local brew_tag=""
+    [[ "$is_brew_cask" == "true" ]] && brew_tag=" ${CYAN}[Brew]${NC}"
+    if [[ -t 1 ]]; then
+        if [[ ${#app_details[@]} -gt 1 ]]; then
+            start_inline_spinner "[$current_index/${#app_details[@]}] Uninstalling ${app_name}${brew_tag}..."
+        else
+            start_inline_spinner "Uninstalling ${app_name}${brew_tag}..."
+        fi
+    fi

     # Stop Launch Agents/Daemons before removal.
     local has_system_files="false"
     [[ -n "$system_files" ]] && has_system_files="true"
@@ -329,7 +370,19 @@ batch_uninstall_applications() {

     # Remove the application only if not running.
     if [[ -z "$reason" ]]; then
+        if [[ "$is_brew_cask" == "true" && -n "$cask_name" ]]; then
+            # Use brew uninstall --cask with progress indicator
+            local brew_output_file=$(mktemp)
+            if ! run_with_timeout 120 brew uninstall --cask "$cask_name" > "$brew_output_file" 2>&1; then
+                # Fallback to manual removal if brew fails
                 if [[ "$needs_sudo" == true ]]; then
+                    safe_sudo_remove "$app_path" || reason="remove failed"
+                else
+                    safe_remove "$app_path" true || reason="remove failed"
+                fi
+            fi
+            rm -f "$brew_output_file"
+        elif [[ "$needs_sudo" == true ]]; then
             if ! safe_sudo_remove "$app_path"; then
                 local app_owner=$(get_file_owner "$app_path")
                 local current_user=$(whoami)
@@ -361,12 +414,32 @@ batch_uninstall_applications() {
             fi
         fi

+        # Stop spinner and show success
+        if [[ -t 1 ]]; then
+            stop_inline_spinner
+            if [[ ${#app_details[@]} -gt 1 ]]; then
+                echo -e "\r\033[K${GREEN}✓${NC} [$current_index/${#app_details[@]}] ${app_name}"
+            else
+                echo -e "\r\033[K${GREEN}✓${NC} ${app_name}"
+            fi
+        fi
+
         ((total_size_freed += total_kb))
         ((success_count++))
         ((files_cleaned++))
         ((total_items++))
         success_items+=("$app_name")
     else
+        # Stop spinner and show failure
+        if [[ -t 1 ]]; then
+            stop_inline_spinner
+            if [[ ${#app_details[@]} -gt 1 ]]; then
+                echo -e "\r\033[K${RED}✗${NC} [$current_index/${#app_details[@]}] ${app_name} ${GRAY}($reason)${NC}"
+            else
+                echo -e "\r\033[K${RED}✗${NC} ${app_name} failed: $reason"
+            fi
+        fi
+
         ((failed_count++))
         failed_items+=("$app_name:$reason")
     fi
@@ -454,6 +527,7 @@ batch_uninstall_applications() {
     title="Uninstall incomplete"
 fi

+echo ""
 print_summary_block "$title" "${summary_details[@]}"
 printf '\n'

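Taken together, the hunks above make the uninstaller cask-aware: each selected app is first mapped to a Homebrew cask token, brew-managed apps skip the per-file scan, and removal goes through `brew uninstall --cask` with a manual fallback. A condensed, hypothetical sketch of that flow follows; the real code routes removals through get_brew_cask_name, run_with_timeout, and safe_remove/safe_sudo_remove as shown in the diff, while the plain rm -rf and the Caskroom location derived from `brew --prefix` are simplifications for this sketch.

#!/usr/bin/env bash
# Sketch only: cask detection plus uninstall-with-fallback.

# Map /Applications/Foo.app to a cask token by looking for the bundle
# name inside the Caskroom (assumes the default Caskroom location).
detect_cask() {
    local app_path="$1" caskroom match
    command -v brew >/dev/null 2>&1 || return 1
    caskroom="$(brew --prefix 2>/dev/null)/Caskroom"
    match=$(find "$caskroom" -maxdepth 3 -name "$(basename "$app_path")" 2>/dev/null | head -1)
    [[ -n "$match" ]] || return 1
    local relative="${match#"$caskroom"/}"
    echo "${relative%%/*}"
}

uninstall_app() {
    local app_path="$1" cask
    if cask=$(detect_cask "$app_path"); then
        # Let Homebrew drive the uninstall; fall back to removing the
        # bundle directly if brew fails (the diff also applies a timeout).
        brew uninstall --cask "$cask" || rm -rf "$app_path"
    else
        rm -rf "$app_path"
    fi
}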
122 tests/brew_uninstall.bats (new file)
@@ -0,0 +1,122 @@
+#!/usr/bin/env bats
+
+setup_file() {
+    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
+    export PROJECT_ROOT
+
+    ORIGINAL_HOME="${HOME:-}"
+    export ORIGINAL_HOME
+
+    HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-brew-uninstall-home.XXXXXX")"
+    export HOME
+}
+
+teardown_file() {
+    rm -rf "$HOME"
+    export HOME="$ORIGINAL_HOME"
+}
+
+setup() {
+    mkdir -p "$HOME/Applications"
+    mkdir -p "$HOME/Library/Caches"
+    # Create fake Caskroom
+    mkdir -p "$HOME/Caskroom/test-app/1.2.3/TestApp.app"
+}
+
+@test "get_brew_cask_name detects app in Caskroom (simulated)" {
+    # Create fake Caskroom structure with symlink (modern Homebrew style)
+    mkdir -p "$HOME/Caskroom/test-app/1.0.0"
+    mkdir -p "$HOME/Applications/TestApp.app"
+    ln -s "$HOME/Applications/TestApp.app" "$HOME/Caskroom/test-app/1.0.0/TestApp.app"
+
+    run bash <<EOF
+source "$PROJECT_ROOT/lib/core/common.sh"
+
+# Override the function to use our test Caskroom
+get_brew_cask_name() {
+    local app_path="\$1"
+    [[ -z "\$app_path" || ! -d "\$app_path" ]] && return 1
+    command -v brew > /dev/null 2>&1 || return 1
+
+    local app_bundle_name=\$(basename "\$app_path")
+    local cask_match
+    # Use test Caskroom
+    cask_match=\$(find "$HOME/Caskroom" -maxdepth 3 -name "\$app_bundle_name" 2> /dev/null | head -1 || echo "")
+    if [[ -n "\$cask_match" ]]; then
+        local relative="\${cask_match#$HOME/Caskroom/}"
+        echo "\${relative%%/*}"
+        return 0
+    fi
+    return 1
+}
+
+get_brew_cask_name "$HOME/Applications/TestApp.app"
+EOF
+
+    [ "$status" -eq 0 ]
+    [[ "$output" == "test-app" ]]
+}
+
+@test "get_brew_cask_name handles non-brew apps" {
+    mkdir -p "$HOME/Applications/ManualApp.app"
+
+    result=$(bash <<EOF
+source "$PROJECT_ROOT/lib/core/common.sh"
+# Mock brew to return nothing for this
+brew() { return 1; }
+export -f brew
+get_brew_cask_name "$HOME/Applications/ManualApp.app" || echo "not_found"
+EOF
+    )
+
+    [[ "$result" == "not_found" ]]
+}
+
+@test "batch_uninstall_applications uses brew uninstall for casks (mocked)" {
+    # Setup fake app
+    local app_bundle="$HOME/Applications/BrewApp.app"
+    mkdir -p "$app_bundle"
+
+    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
+set -euo pipefail
+source "$PROJECT_ROOT/lib/core/common.sh"
+source "$PROJECT_ROOT/lib/uninstall/batch.sh"
+
+# Mock dependencies
+request_sudo_access() { return 0; }
+start_inline_spinner() { :; }
+stop_inline_spinner() { :; }
+get_file_owner() { whoami; }
+get_path_size_kb() { echo "100"; }
+bytes_to_human() { echo "$1"; }
+drain_pending_input() { :; }
+print_summary_block() { :; }
+remove_apps_from_dock() { :; }
+force_kill_app() { return 0; }
+run_with_timeout() { shift; "$@"; }
+export -f run_with_timeout
+
+# Mock brew to track calls
+brew() {
+    echo "brew call: $*" >> "$HOME/brew_calls.log"
+    return 0
+}
+export -f brew
+
+# Mock get_brew_cask_name to return a name
+get_brew_cask_name() { echo "brew-app-cask"; return 0; }
+export -f get_brew_cask_name
+
+selected_apps=("0|$HOME/Applications/BrewApp.app|BrewApp|com.example.brewapp|0|Never")
+files_cleaned=0
+total_items=0
+total_size_cleaned=0
+
+# Simulate 'Enter' for confirmation
+printf '\n' | batch_uninstall_applications > /dev/null 2>&1
+
+grep -q "uninstall --cask brew-app-cask" "$HOME/brew_calls.log"
+EOF
+
+    [ "$status" -eq 0 ]
+}
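For quick iteration on just this file, bats can be pointed at it directly instead of running the whole suite (assumes bats-core is installed):

# Run only the new brew uninstall tests
bats tests/brew_uninstall.bats

# Or narrow to a single test by regex
bats --filter "get_brew_cask_name detects app in Caskroom" tests/brew_uninstall.bats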
@@ -83,6 +83,8 @@ clean_project_caches() { :; }
 clean_dev_python() { :; }
 clean_dev_go() { :; }
 clean_dev_rust() { :; }
+check_rust_toolchains() { :; }
+check_android_ndk() { :; }
 clean_dev_docker() { :; }
 clean_dev_cloud() { :; }
 clean_dev_nix() { :; }
@@ -96,6 +98,10 @@ clean_dev_database() { :; }
 clean_dev_api_tools() { :; }
 clean_dev_network() { :; }
 clean_dev_misc() { :; }
+clean_dev_elixir() { :; }
+clean_dev_haskell() { :; }
+clean_dev_ocaml() { :; }
+clean_dev_editors() { :; }
 safe_clean() { :; }
 debug_log() { :; }
 clean_developer_tools
@@ -20,7 +20,7 @@ teardown_file() {
     fi
 }

-@test "clean_dev_elixir cleans mix and hex caches" {
+@test "clean_dev_elixir cleans hex cache" {
     mkdir -p "$HOME/.mix" "$HOME/.hex"
     run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
 set -euo pipefail
@@ -31,11 +31,25 @@ clean_dev_elixir
 EOF

     [ "$status" -eq 0 ]

     [[ "$output" == *"Hex cache"* ]]
 }

-@test "clean_dev_haskell cleans cabal install and stack caches" {
+@test "clean_dev_elixir does not clean mix archives" {
+    mkdir -p "$HOME/.mix/archives"
+    touch "$HOME/.mix/archives/test_tool.ez"
+
+    # Source and run the function
+    source "$PROJECT_ROOT/lib/core/common.sh"
+    source "$PROJECT_ROOT/lib/clean/dev.sh"
+    # shellcheck disable=SC2329
+    safe_clean() { :; }
+    clean_dev_elixir > /dev/null 2>&1 || true
+
+    # Verify the file still exists
+    [ -f "$HOME/.mix/archives/test_tool.ez" ]
+}
+
+@test "clean_dev_haskell cleans cabal install cache" {
     mkdir -p "$HOME/.cabal" "$HOME/.stack"
     run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
 set -euo pipefail
@@ -47,7 +61,21 @@ EOF

     [ "$status" -eq 0 ]
     [[ "$output" == *"Cabal install cache"* ]]
+}
+
+@test "clean_dev_haskell does not clean stack programs" {
+    mkdir -p "$HOME/.stack/programs/x86_64-osx"
+    touch "$HOME/.stack/programs/x86_64-osx/ghc-9.2.8.tar.xz"
+
+    # Source and run the function
+    source "$PROJECT_ROOT/lib/core/common.sh"
+    source "$PROJECT_ROOT/lib/clean/dev.sh"
+    # shellcheck disable=SC2329
+    safe_clean() { :; }
+    clean_dev_haskell > /dev/null 2>&1 || true
+
+    # Verify the file still exists
+    [ -f "$HOME/.stack/programs/x86_64-osx/ghc-9.2.8.tar.xz" ]
 }

 @test "clean_dev_ocaml cleans opam cache" {
@@ -76,6 +104,48 @@ EOF

     [ "$status" -eq 0 ]
     [[ "$output" == *"VS Code cached data"* ]]

     [[ "$output" == *"Zed cache"* ]]
 }

+@test "clean_dev_editors does not clean VS Code workspace storage" {
+    mkdir -p "$HOME/Library/Application Support/Code/User/workspaceStorage/abc123"
+    touch "$HOME/Library/Application Support/Code/User/workspaceStorage/abc123/workspace.json"
+
+    # Source and run the function
+    source "$PROJECT_ROOT/lib/core/common.sh"
+    source "$PROJECT_ROOT/lib/clean/dev.sh"
+    # shellcheck disable=SC2329
+    safe_clean() { :; }
+    clean_dev_editors > /dev/null 2>&1 || true
+
+    # Verify the file still exists
+    [ -f "$HOME/Library/Application Support/Code/User/workspaceStorage/abc123/workspace.json" ]
+}
+
+@test "check_android_ndk reports multiple NDK versions" {
+    run bash -c 'HOME=$(mktemp -d) && mkdir -p "$HOME/Library/Android/sdk/ndk"/{21.0.1,22.0.0,20.0.0} && source "$0" && note_activity() { :; } && NC="" && GREEN="" && GRAY="" && check_android_ndk' "$PROJECT_ROOT/lib/clean/dev.sh"
+
+    [ "$status" -eq 0 ]
+    [[ "$output" == *"Found 3 Android NDK versions"* ]]
+}
+
+@test "check_android_ndk silent when only one NDK" {
+    run bash -c 'HOME=$(mktemp -d) && mkdir -p "$HOME/Library/Android/sdk/ndk/22.0.0" && source "$0" && note_activity() { :; } && NC="" && GREEN="" && GRAY="" && check_android_ndk' "$PROJECT_ROOT/lib/clean/dev.sh"
+
+    [ "$status" -eq 0 ]
+    [[ "$output" != *"Found"*"NDK"* ]]
+}
+
+@test "check_rust_toolchains reports multiple toolchains" {
+    run bash -c 'HOME=$(mktemp -d) && mkdir -p "$HOME/.rustup/toolchains"/{stable,nightly,1.75.0}-aarch64-apple-darwin && source "$0" && note_activity() { :; } && NC="" && GREEN="" && GRAY="" && rustup() { :; } && export -f rustup && check_rust_toolchains' "$PROJECT_ROOT/lib/clean/dev.sh"
+
+    [ "$status" -eq 0 ]
+    [[ "$output" == *"Found 3 Rust toolchains"* ]]
+}
+
+@test "check_rust_toolchains silent when only one toolchain" {
+    run bash -c 'HOME=$(mktemp -d) && mkdir -p "$HOME/.rustup/toolchains/stable-aarch64-apple-darwin" && source "$0" && note_activity() { :; } && NC="" && GREEN="" && GRAY="" && rustup() { :; } && export -f rustup && check_rust_toolchains' "$PROJECT_ROOT/lib/clean/dev.sh"
+
+    [ "$status" -eq 0 ]
+    [[ "$output" != *"Found"*"Rust"* ]]
+}
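The new check_android_ndk and check_rust_toolchains tests only pin the observable behaviour: print a "Found N ..." line when more than one version is installed, and stay silent otherwise. A minimal sketch of a check in that shape, assuming the default SDK location; this is not the actual lib/clean/dev.sh implementation and the message suffix is illustrative:

#!/usr/bin/env bash
# Sketch of the "report only when multiple versions exist" pattern.
check_android_ndk_sketch() {
    local ndk_dir="$HOME/Library/Android/sdk/ndk"
    [[ -d "$ndk_dir" ]] || return 0
    local count
    count=$(find "$ndk_dir" -mindepth 1 -maxdepth 1 -type d 2>/dev/null | wc -l | tr -d ' ')
    if [[ "$count" -gt 1 ]]; then
        echo "Found $count Android NDK versions (older ones can usually be removed)"
    fi
}
check_android_ndk_sketch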