mirror of https://github.com/tw93/Mole.git synced 2026-02-04 09:46:44 +00:00

Commit cf821cdc4b (parent d111f18cea) by Tw93
2025-10-12 20:49:10 +08:00

Code support format detection

25 changed files with 1482 additions and 1152 deletions

.editorconfig (new file, +25 lines)

@@ -0,0 +1,25 @@
# EditorConfig for Mole project
# https://editorconfig.org
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.{sh,bash}]
indent_style = space
indent_size = 4
# shfmt will use these settings automatically
[*.{yml,yaml}]
indent_style = space
indent_size = 2
[*.md]
trim_trailing_whitespace = false
[Makefile]
indent_style = tab
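
The comment above says shfmt will pick these settings up automatically. As a rough illustration, assuming shfmt 3.3 or newer (which falls back to `.editorconfig` for indentation when it is run without explicit formatting flags), the formatter can be pointed at the same shell sources the other tooling targets:

```bash
# Show what would change (diff only); indent width comes from .editorconfig.
shfmt -d mole bin/*.sh lib/*.sh

# Rewrite files in place; passing flags like -i 2 here would override .editorconfig.
shfmt -w mole bin/*.sh lib/*.sh
```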

GitHub Actions test workflow (modified)

@@ -1,4 +1,4 @@
-name: Mole Tests
+name: Tests
 on:
   push:
@@ -13,10 +13,17 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v4
-      - name: Install bats-core
-        run: |
-          brew update
-          brew install bats-core
-      - name: Run test suite
+      - name: Install tools
+        run: brew install bats-core shfmt shellcheck
+      - name: Check formatting
+        run: ./scripts/format.sh --check
+      - name: Run shellcheck
+        run: |
+          find . -type f \( -name "*.sh" -o -name "mole" \) \
+            ! -path "./.git/*" \
+            -exec shellcheck -S warning {} +
+      - name: Run tests
         run: tests/run.sh

CONTRIBUTING.md (new file, +54 lines)

@@ -0,0 +1,54 @@
# Contributing to Mole
## Setup
```bash
# Install tools
brew install shfmt shellcheck bats-core
# Install git hooks (optional)
./scripts/install-hooks.sh
```
## Development
```bash
# Format code
./scripts/format.sh
# Run tests
./tests/run.sh
# Check quality
shellcheck -S warning mole bin/*.sh lib/*.sh
```
## Git Hooks
Pre-commit hook will auto-format your code. Install with:
```bash
./scripts/install-hooks.sh
```
Skip if needed: `git commit --no-verify`
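
The hook script itself is not part of this diff. Purely as an illustration of the auto-format behavior described above, a hypothetical pre-commit hook could look like the sketch below; the real hook installed by `./scripts/install-hooks.sh` may differ:

```bash
#!/usr/bin/env bash
# Hypothetical pre-commit hook: illustration only, not the repo's actual hook.
set -euo pipefail

# Collect staged shell sources; nothing to do if none are staged.
staged=$(git diff --cached --name-only --diff-filter=ACM -- '*.sh' mole || true)
if [[ -z "$staged" ]]; then
    exit 0
fi

# Auto-format, then re-stage anything the formatter touched.
./scripts/format.sh
echo "$staged" | xargs git add
```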
## Code Style
- Bash 3.2+ compatible
- 4 spaces indent
- Use `set -euo pipefail`
- Quote all variables
- BSD commands not GNU
Config: `.editorconfig` and `.shellcheckrc`
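
As a compact illustration of these rules together (an example written for this document, not code taken from the repository): `set -euo pipefail` at the top, every expansion quoted, declarations separated from command-substitution assignments, and the BSD `du`/`stat` forms that ship with macOS:

```bash
#!/bin/bash
# Style illustration only (not from the repo).
set -euo pipefail

dir_size_kb() {
    local dir="$1"      # quote all variables
    local kb            # declare and assign separately so failures are not masked
    kb=$(du -sk "$dir" 2>/dev/null | awk '{print $1}')   # BSD du; no GNU-only flags
    echo "${kb:-0}"
}

dir_size_kb "$HOME/Library/Caches"
```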
## Pull Requests
1. Fork and create branch
2. Make changes
3. Format: `./scripts/format.sh`
4. Test: `./tests/run.sh`
5. Commit and push
6. Open PR
CI will check formatting, lint, and run tests.

Disk analyze script (modified)

@@ -19,9 +19,9 @@ source "$LIB_DIR/common.sh"
# Constants # Constants
readonly CACHE_DIR="${HOME}/.config/mole/cache" readonly CACHE_DIR="${HOME}/.config/mole/cache"
readonly TEMP_PREFIX="/tmp/mole_analyze_$$" readonly TEMP_PREFIX="/tmp/mole_analyze_$$"
readonly MIN_LARGE_FILE_SIZE="1000000000" # 1GB readonly MIN_LARGE_FILE_SIZE="1000000000" # 1GB
readonly MIN_MEDIUM_FILE_SIZE="100000000" # 100MB readonly MIN_MEDIUM_FILE_SIZE="100000000" # 100MB
readonly MIN_SMALL_FILE_SIZE="10000000" # 10MB readonly MIN_SMALL_FILE_SIZE="10000000" # 10MB
# Emoji badges for list displays only # Emoji badges for list displays only
readonly BADGE_DIR="🍞" readonly BADGE_DIR="🍞"
@@ -42,16 +42,16 @@ declare CURRENT_DEPTH=1
# UI State # UI State
declare CURSOR_POS=0 declare CURSOR_POS=0
declare SORT_MODE="size" # size, name, time declare SORT_MODE="size" # size, name, time
declare VIEW_MODE="overview" # overview, detail, files declare VIEW_MODE="overview" # overview, detail, files
# Cleanup on exit # Cleanup on exit
cleanup() { cleanup() {
show_cursor show_cursor
# Cleanup temp files using glob pattern (analyze uses many temp files) # Cleanup temp files using glob pattern (analyze uses many temp files)
rm -f "$TEMP_PREFIX"* 2>/dev/null || true rm -f "$TEMP_PREFIX"* 2> /dev/null || true
if [[ -n "$SCAN_PID" ]] && kill -0 "$SCAN_PID" 2>/dev/null; then if [[ -n "$SCAN_PID" ]] && kill -0 "$SCAN_PID" 2> /dev/null; then
kill "$SCAN_PID" 2>/dev/null || true kill "$SCAN_PID" 2> /dev/null || true
fi fi
} }
@@ -66,7 +66,7 @@ scan_large_files() {
local target_path="$1" local target_path="$1"
local output_file="$2" local output_file="$2"
if ! command -v mdfind &>/dev/null; then if ! command -v mdfind &> /dev/null; then
return 1 return 1
fi fi
@@ -75,10 +75,10 @@ scan_large_files() {
while IFS= read -r file; do while IFS= read -r file; do
if [[ -f "$file" ]]; then if [[ -f "$file" ]]; then
local size local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0") size=$(stat -f%z "$file" 2> /dev/null || echo "0")
echo "$size|$file" echo "$size|$file"
fi fi
done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \ done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
sort -t'|' -k1 -rn > "$output_file" sort -t'|' -k1 -rn > "$output_file"
} }
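
The function above leans on Spotlight's metadata index (`mdfind`) instead of walking the filesystem with `find`, which is why it can return quickly on large volumes. As a rough usage illustration (assuming Spotlight indexing is enabled for the target path), essentially the same query can be tried directly in a terminal:

```bash
# List the ten largest files over 1 GB under $HOME as "size|path", largest first;
# roughly what scan_large_files writes to its temp file.
mdfind -onlyin "$HOME" "kMDItemFSSize > 1000000000" 2>/dev/null |
    while IFS= read -r f; do
        [[ -f "$f" ]] && printf '%s|%s\n' "$(stat -f%z "$f" 2>/dev/null || echo 0)" "$f"
    done | sort -t'|' -k1 -rn | head -10
```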
@@ -87,7 +87,7 @@ scan_medium_files() {
local target_path="$1" local target_path="$1"
local output_file="$2" local output_file="$2"
if ! command -v mdfind &>/dev/null; then if ! command -v mdfind &> /dev/null; then
return 1 return 1
fi fi
@@ -95,11 +95,11 @@ scan_medium_files() {
while IFS= read -r file; do while IFS= read -r file; do
if [[ -f "$file" ]]; then if [[ -f "$file" ]]; then
local size local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0") size=$(stat -f%z "$file" 2> /dev/null || echo "0")
echo "$size|$file" echo "$size|$file"
fi fi
done < <(mdfind -onlyin "$target_path" \ done < <(mdfind -onlyin "$target_path" \
"kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \ "kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
sort -t'|' -k1 -rn > "$output_file" sort -t'|' -k1 -rn > "$output_file"
} }
@@ -110,18 +110,18 @@ scan_directories() {
local depth="${3:-1}" local depth="${3:-1}"
# Check if we can use parallel processing # Check if we can use parallel processing
if command -v xargs &>/dev/null && [[ $depth -eq 1 ]]; then if command -v xargs &> /dev/null && [[ $depth -eq 1 ]]; then
# Fast parallel scan for depth 1 # Fast parallel scan for depth 1
find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \ find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
xargs -0 -P 4 -I {} du -sk {} 2>/dev/null | \ xargs -0 -P 4 -I {} du -sk {} 2> /dev/null |
sort -rn | \ sort -rn |
while IFS=$'\t' read -r size path; do while IFS=$'\t' read -r size path; do
echo "$((size * 1024))|$path" echo "$((size * 1024))|$path"
done > "$output_file" done > "$output_file"
else else
# Standard du scan # Standard du scan
du -d "$depth" -k "$target_path" 2>/dev/null | \ du -d "$depth" -k "$target_path" 2> /dev/null |
sort -rn | \ sort -rn |
while IFS=$'\t' read -r size path; do while IFS=$'\t' read -r size path; do
# Skip if path is the target itself at depth > 0 # Skip if path is the target itself at depth > 0
if [[ "$path" != "$target_path" ]]; then if [[ "$path" != "$target_path" ]]; then
@@ -161,21 +161,21 @@ aggregate_by_directory() {
get_cache_file() { get_cache_file() {
local target_path="$1" local target_path="$1"
local path_hash local path_hash
path_hash=$(echo "$target_path" | md5 2>/dev/null || echo "$target_path" | shasum | cut -d' ' -f1) path_hash=$(echo "$target_path" | md5 2> /dev/null || echo "$target_path" | shasum | cut -d' ' -f1)
echo "$CACHE_DIR/scan_${path_hash}.cache" echo "$CACHE_DIR/scan_${path_hash}.cache"
} }
# Check if cache is valid (less than 1 hour old) # Check if cache is valid (less than 1 hour old)
is_cache_valid() { is_cache_valid() {
local cache_file="$1" local cache_file="$1"
local max_age="${2:-3600}" # Default 1 hour local max_age="${2:-3600}" # Default 1 hour
if [[ ! -f "$cache_file" ]]; then if [[ ! -f "$cache_file" ]]; then
return 1 return 1
fi fi
local cache_age local cache_age
cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2>/dev/null || echo 0))) cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2> /dev/null || echo 0)))
if [[ $cache_age -lt $max_age ]]; then if [[ $cache_age -lt $max_age ]]; then
return 0 return 0
fi fi
@@ -192,7 +192,7 @@ save_to_cache() {
local temp_agg="$TEMP_PREFIX.agg" local temp_agg="$TEMP_PREFIX.agg"
# Create cache directory # Create cache directory
mkdir -p "$(dirname "$cache_file")" 2>/dev/null || return 1 mkdir -p "$(dirname "$cache_file")" 2> /dev/null || return 1
# Bundle all scan results into cache file # Bundle all scan results into cache file
{ {
@@ -204,7 +204,7 @@ save_to_cache() {
[[ -f "$temp_dirs" ]] && cat "$temp_dirs" [[ -f "$temp_dirs" ]] && cat "$temp_dirs"
echo "### AGG ###" echo "### AGG ###"
[[ -f "$temp_agg" ]] && cat "$temp_agg" [[ -f "$temp_agg" ]] && cat "$temp_agg"
} > "$cache_file" 2>/dev/null } > "$cache_file" 2> /dev/null
} }
# Load scan results from cache # Load scan results from cache
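
The matching loader is not shown in this hunk. Purely as a sketch of how the `### ... ###` section markers written above could be split apart again (only the `### AGG ###` marker is visible in the diff, the other section names are assumed to follow the same pattern, and this is not the project's actual implementation):

```bash
# Sketch only: print the lines belonging to one named section of the bundled cache.
extract_section() {
    local cache_file="$1"
    local section="$2"     # e.g. "AGG"
    awk -v want="### ${section} ###" '
        /^### .* ###$/ { keep = ($0 == want); next }
        keep { print }
    ' "$cache_file"
}

extract_section "$CACHE_DIR/scan_<hash>.cache" "AGG"   # <hash> stands in for the md5 value
```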
@@ -283,7 +283,7 @@ perform_scan() {
) )
local msg_idx=0 local msg_idx=0
while kill -0 "$SCAN_PID" 2>/dev/null; do while kill -0 "$SCAN_PID" 2> /dev/null; do
# Show different messages based on elapsed time # Show different messages based on elapsed time
local current_msg="" local current_msg=""
if [[ $elapsed -lt 5 ]]; then if [[ $elapsed -lt 5 ]]; then
@@ -299,12 +299,12 @@ perform_scan() {
printf "\r${BLUE}%s${NC} %s" \ printf "\r${BLUE}%s${NC} %s" \
"${spinner_chars:$i:1}" "$current_msg" "${spinner_chars:$i:1}" "$current_msg"
i=$(( (i + 1) % 10 )) i=$(((i + 1) % 10))
((elapsed++)) ((elapsed++))
sleep 0.1 sleep 0.1
done done
wait "$SCAN_PID" 2>/dev/null || true wait "$SCAN_PID" 2> /dev/null || true
printf "\r%80s\r" "" # Clear spinner line printf "\r%80s\r" "" # Clear spinner line
show_cursor show_cursor
# Aggregate results # Aggregate results
@@ -508,7 +508,7 @@ display_directories_compact() {
# Simple bar (10 chars) # Simple bar (10 chars)
local bar_width=10 local bar_width=10
local percentage_int=${percentage%.*} # Remove decimal part local percentage_int=${percentage%.*} # Remove decimal part
local filled local filled
filled=$((percentage_int * bar_width / 100)) filled=$((percentage_int * bar_width / 100))
[[ $filled -gt $bar_width ]] && filled=$bar_width [[ $filled -gt $bar_width ]] && filled=$bar_width
@@ -622,8 +622,8 @@ display_cleanup_suggestions_compact() {
if [[ "$CURRENT_PATH" == "$HOME/Library/Caches"* ]] || [[ "$CURRENT_PATH" == "$HOME/Library"* ]]; then if [[ "$CURRENT_PATH" == "$HOME/Library/Caches"* ]] || [[ "$CURRENT_PATH" == "$HOME/Library"* ]]; then
if [[ -d "$HOME/Library/Caches" ]]; then if [[ -d "$HOME/Library/Caches" ]]; then
local cache_size local cache_size
cache_size=$(du -sk "$HOME/Library/Caches" 2>/dev/null | cut -f1) cache_size=$(du -sk "$HOME/Library/Caches" 2> /dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB if [[ $cache_size -gt 1048576 ]]; then # > 1GB
local human local human
human=$(bytes_to_human $((cache_size * 1024))) human=$(bytes_to_human $((cache_size * 1024)))
top_suggestion="Clear app caches ($human)" top_suggestion="Clear app caches ($human)"
@@ -637,7 +637,7 @@ display_cleanup_suggestions_compact() {
# Check Downloads folder (only if analyzing Downloads) # Check Downloads folder (only if analyzing Downloads)
if [[ "$CURRENT_PATH" == "$HOME/Downloads"* ]]; then if [[ "$CURRENT_PATH" == "$HOME/Downloads"* ]]; then
local old_files local old_files
old_files=$(find "$CURRENT_PATH" -type f -mtime +90 2>/dev/null | wc -l | tr -d ' ') old_files=$(find "$CURRENT_PATH" -type f -mtime +90 2> /dev/null | wc -l | tr -d ' ')
if [[ $old_files -gt 0 ]]; then if [[ $old_files -gt 0 ]]; then
[[ -z "$top_suggestion" ]] && top_suggestion="$old_files files older than 90 days found" [[ -z "$top_suggestion" ]] && top_suggestion="$old_files files older than 90 days found"
[[ -z "$action_command" ]] && action_command="manually review old files" [[ -z "$action_command" ]] && action_command="manually review old files"
@@ -646,13 +646,13 @@ display_cleanup_suggestions_compact() {
fi fi
# Check for large disk images in current path # Check for large disk images in current path
if command -v mdfind &>/dev/null; then if command -v mdfind &> /dev/null; then
local dmg_count=$(mdfind -onlyin "$CURRENT_PATH" \ local dmg_count=$(mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | wc -l | tr -d ' ') "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null | wc -l | tr -d ' ')
if [[ $dmg_count -gt 0 ]]; then if [[ $dmg_count -gt 0 ]]; then
local dmg_size=$(mdfind -onlyin "$CURRENT_PATH" \ local dmg_size=$(mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | \ "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null |
xargs stat -f%z 2>/dev/null | awk '{sum+=$1} END {print sum}') xargs stat -f%z 2> /dev/null | awk '{sum+=$1} END {print sum}')
local dmg_human local dmg_human
dmg_human=$(bytes_to_human "$dmg_size") dmg_human=$(bytes_to_human "$dmg_size")
[[ -z "$top_suggestion" ]] && top_suggestion="$dmg_count DMG files ($dmg_human) can be removed" [[ -z "$top_suggestion" ]] && top_suggestion="$dmg_count DMG files ($dmg_human) can be removed"
@@ -665,7 +665,7 @@ display_cleanup_suggestions_compact() {
# Check Xcode (only if in developer paths) # Check Xcode (only if in developer paths)
if [[ "$CURRENT_PATH" == "$HOME/Library/Developer"* ]] && [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then if [[ "$CURRENT_PATH" == "$HOME/Library/Developer"* ]] && [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then
local xcode_size local xcode_size
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2>/dev/null | cut -f1) xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2> /dev/null | cut -f1)
if [[ $xcode_size -gt 10485760 ]]; then if [[ $xcode_size -gt 10485760 ]]; then
local xcode_human local xcode_human
xcode_human=$(bytes_to_human $((xcode_size * 1024))) xcode_human=$(bytes_to_human $((xcode_size * 1024)))
@@ -677,9 +677,9 @@ display_cleanup_suggestions_compact() {
fi fi
# Check for duplicates in current path # Check for duplicates in current path
if command -v mdfind &>/dev/null; then if command -v mdfind &> /dev/null; then
local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \ local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
xargs -I {} stat -f "%z" {} 2>/dev/null | sort | uniq -d | wc -l | tr -d ' ') xargs -I {} stat -f "%z" {} 2> /dev/null | sort | uniq -d | wc -l | tr -d ' ')
if [[ $dup_count -gt 5 ]]; then if [[ $dup_count -gt 5 ]]; then
[[ -z "$top_suggestion" ]] && top_suggestion="$dup_count potential duplicate files detected" [[ -z "$top_suggestion" ]] && top_suggestion="$dup_count potential duplicate files detected"
((suggestions_count++)) ((suggestions_count++))
@@ -720,8 +720,8 @@ display_cleanup_suggestions() {
# Check common cache locations # Check common cache locations
if [[ -d "$HOME/Library/Caches" ]]; then if [[ -d "$HOME/Library/Caches" ]]; then
local cache_size local cache_size
cache_size=$(du -sk "$HOME/Library/Caches" 2>/dev/null | cut -f1) cache_size=$(du -sk "$HOME/Library/Caches" 2> /dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB if [[ $cache_size -gt 1048576 ]]; then # > 1GB
local human local human
human=$(bytes_to_human $((cache_size * 1024))) human=$(bytes_to_human $((cache_size * 1024)))
suggestions+=(" Clear application caches: $human") suggestions+=(" Clear application caches: $human")
@@ -731,16 +731,16 @@ display_cleanup_suggestions() {
# Check Downloads folder # Check Downloads folder
if [[ -d "$HOME/Downloads" ]]; then if [[ -d "$HOME/Downloads" ]]; then
local old_files local old_files
old_files=$(find "$HOME/Downloads" -type f -mtime +90 2>/dev/null | wc -l | tr -d ' ') old_files=$(find "$HOME/Downloads" -type f -mtime +90 2> /dev/null | wc -l | tr -d ' ')
if [[ $old_files -gt 0 ]]; then if [[ $old_files -gt 0 ]]; then
suggestions+=(" Clean old downloads: $old_files files older than 90 days") suggestions+=(" Clean old downloads: $old_files files older than 90 days")
fi fi
fi fi
# Check for large disk images # Check for large disk images
if command -v mdfind &>/dev/null; then if command -v mdfind &> /dev/null; then
local dmg_count=$(mdfind -onlyin "$HOME" \ local dmg_count=$(mdfind -onlyin "$HOME" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | wc -l | tr -d ' ') "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null | wc -l | tr -d ' ')
if [[ $dmg_count -gt 0 ]]; then if [[ $dmg_count -gt 0 ]]; then
suggestions+=(" Remove disk images: $dmg_count DMG files >500MB") suggestions+=(" Remove disk images: $dmg_count DMG files >500MB")
fi fi
@@ -749,8 +749,8 @@ display_cleanup_suggestions() {
# Check Xcode derived data # Check Xcode derived data
if [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then if [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then
local xcode_size local xcode_size
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2>/dev/null | cut -f1) xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2> /dev/null | cut -f1)
if [[ $xcode_size -gt 10485760 ]]; then # > 10GB if [[ $xcode_size -gt 10485760 ]]; then # > 10GB
local human local human
human=$(bytes_to_human $((xcode_size * 1024))) human=$(bytes_to_human $((xcode_size * 1024)))
suggestions+=(" Clear Xcode cache: $human") suggestions+=(" Clear Xcode cache: $human")
@@ -760,8 +760,8 @@ display_cleanup_suggestions() {
# Check iOS device backups # Check iOS device backups
if [[ -d "$HOME/Library/Application Support/MobileSync/Backup" ]]; then if [[ -d "$HOME/Library/Application Support/MobileSync/Backup" ]]; then
local backup_size local backup_size
backup_size=$(du -sk "$HOME/Library/Application Support/MobileSync/Backup" 2>/dev/null | cut -f1) backup_size=$(du -sk "$HOME/Library/Application Support/MobileSync/Backup" 2> /dev/null | cut -f1)
if [[ $backup_size -gt 5242880 ]]; then # > 5GB if [[ $backup_size -gt 5242880 ]]; then # > 5GB
local human local human
human=$(bytes_to_human $((backup_size * 1024))) human=$(bytes_to_human $((backup_size * 1024)))
suggestions+=(" 📱 Review iOS backups: $human") suggestions+=(" 📱 Review iOS backups: $human")
@@ -769,13 +769,13 @@ display_cleanup_suggestions() {
fi fi
# Check for duplicate files (by size, quick heuristic) # Check for duplicate files (by size, quick heuristic)
if command -v mdfind &>/dev/null; then if command -v mdfind &> /dev/null; then
local temp_dup="$TEMP_PREFIX.dup_check" local temp_dup="$TEMP_PREFIX.dup_check"
mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \ mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
xargs -I {} stat -f "%z" {} 2>/dev/null | \ xargs -I {} stat -f "%z" {} 2> /dev/null |
sort | uniq -d | wc -l | tr -d ' ' > "$temp_dup" 2>/dev/null || echo "0" > "$temp_dup" sort | uniq -d | wc -l | tr -d ' ' > "$temp_dup" 2> /dev/null || echo "0" > "$temp_dup"
local dup_count local dup_count
dup_count=$(cat "$temp_dup" 2>/dev/null || echo "0") dup_count=$(cat "$temp_dup" 2> /dev/null || echo "0")
if [[ $dup_count -gt 5 ]]; then if [[ $dup_count -gt 5 ]]; then
suggestions+=(" ♻️ Possible duplicates: $dup_count size matches in large files (>10MB)") suggestions+=(" ♻️ Possible duplicates: $dup_count size matches in large files (>10MB)")
fi fi
@@ -804,14 +804,14 @@ display_disk_summary() {
local total_dirs_count=0 local total_dirs_count=0
if [[ -f "$temp_large" ]]; then if [[ -f "$temp_large" ]]; then
total_large_count=$(wc -l < "$temp_large" 2>/dev/null | tr -d ' ') total_large_count=$(wc -l < "$temp_large" 2> /dev/null | tr -d ' ')
while IFS='|' read -r size path; do while IFS='|' read -r size path; do
((total_large_size += size)) ((total_large_size += size))
done < "$temp_large" done < "$temp_large"
fi fi
if [[ -f "$temp_dirs" ]]; then if [[ -f "$temp_dirs" ]]; then
total_dirs_count=$(wc -l < "$temp_dirs" 2>/dev/null | tr -d ' ') total_dirs_count=$(wc -l < "$temp_dirs" 2> /dev/null | tr -d ' ')
while IFS='|' read -r size path; do while IFS='|' read -r size path; do
((total_dirs_size += size)) ((total_dirs_size += size))
done < "$temp_dirs" done < "$temp_dirs"
@@ -841,20 +841,24 @@ get_file_info() {
local type="File" local type="File"
case "$ext" in case "$ext" in
dmg|iso|pkg|zip|tar|gz|rar|7z) dmg | iso | pkg | zip | tar | gz | rar | 7z)
badge="$BADGE_BUNDLE" ; type="Bundle" badge="$BADGE_BUNDLE"
type="Bundle"
;; ;;
mov|mp4|avi|mkv|webm|jpg|jpeg|png|gif|heic) mov | mp4 | avi | mkv | webm | jpg | jpeg | png | gif | heic)
badge="$BADGE_MEDIA" ; type="Media" badge="$BADGE_MEDIA"
type="Media"
;; ;;
pdf|key|ppt|pptx) pdf | key | ppt | pptx)
type="Document" type="Document"
;; ;;
log) log)
badge="$BADGE_LOG" ; type="Log" badge="$BADGE_LOG"
type="Log"
;; ;;
app) app)
badge="$BADGE_APP" ; type="App" badge="$BADGE_APP"
type="App"
;; ;;
esac esac
@@ -870,7 +874,7 @@ get_file_age() {
fi fi
local mtime local mtime
mtime=$(stat -f%m "$path" 2>/dev/null || echo "0") mtime=$(stat -f%m "$path" 2> /dev/null || echo "0")
local now local now
now=$(date +%s) now=$(date +%s)
local diff local diff
@@ -936,8 +940,8 @@ display_large_files_table() {
# Color based on file type # Color based on file type
local color="" local color=""
case "$ext" in case "$ext" in
dmg|iso|pkg) color="${RED}" ;; dmg | iso | pkg) color="${RED}" ;;
mov|mp4|avi|mkv|webm|zip|tar|gz|rar|7z) color="${YELLOW}" ;; mov | mp4 | avi | mkv | webm | zip | tar | gz | rar | 7z) color="${YELLOW}" ;;
log) color="${GRAY}" ;; log) color="${GRAY}" ;;
*) color="${NC}" ;; *) color="${NC}" ;;
esac esac
@@ -1104,7 +1108,7 @@ display_recent_large_files() {
log_header "Recent Large Files (Last 30 Days)" log_header "Recent Large Files (Last 30 Days)"
echo "" echo ""
if ! command -v mdfind &>/dev/null; then if ! command -v mdfind &> /dev/null; then
echo " ${YELLOW}Note: mdfind not available${NC}" echo " ${YELLOW}Note: mdfind not available${NC}"
echo "" echo ""
return return
@@ -1114,13 +1118,13 @@ display_recent_large_files() {
# Find files created in last 30 days, larger than 100MB # Find files created in last 30 days, larger than 100MB
mdfind -onlyin "$CURRENT_PATH" \ mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2>/dev/null | \ "kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2> /dev/null |
while IFS= read -r file; do while IFS= read -r file; do
if [[ -f "$file" ]]; then if [[ -f "$file" ]]; then
local size local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0") size=$(stat -f%z "$file" 2> /dev/null || echo "0")
local mtime local mtime
mtime=$(stat -f%m "$file" 2>/dev/null || echo "0") mtime=$(stat -f%m "$file" 2> /dev/null || echo "0")
echo "$size|$mtime|$file" echo "$size|$mtime|$file"
fi fi
done | sort -t'|' -k1 -rn | head -10 > "$temp_recent" done | sort -t'|' -k1 -rn | head -10 > "$temp_recent"
@@ -1140,7 +1144,7 @@ display_recent_large_files() {
local dirname local dirname
dirname=$(dirname "$path" | sed "s|^$HOME|~|") dirname=$(dirname "$path" | sed "s|^$HOME|~|")
local days_ago local days_ago
days_ago=$(( ($(date +%s) - mtime) / 86400 )) days_ago=$((($(date +%s) - mtime) / 86400))
local info local info
info=$(get_file_info "$path") info=$(get_file_info "$path")
@@ -1162,10 +1166,10 @@ get_subdirectories() {
local target="$1" local target="$1"
local temp_file="$2" local temp_file="$2"
find "$target" -mindepth 1 -maxdepth 1 -type d 2>/dev/null | \ find "$target" -mindepth 1 -maxdepth 1 -type d 2> /dev/null |
while IFS= read -r dir; do while IFS= read -r dir; do
local size local size
size=$(du -sk "$dir" 2>/dev/null | cut -f1) size=$(du -sk "$dir" 2> /dev/null | cut -f1)
echo "$((size * 1024))|$dir" echo "$((size * 1024))|$dir"
done | sort -t'|' -k1 -rn > "$temp_file" done | sort -t'|' -k1 -rn > "$temp_file"
} }
@@ -1298,7 +1302,7 @@ display_file_types() {
log_header "File Types Analysis" log_header "File Types Analysis"
echo "" echo ""
if ! command -v mdfind &>/dev/null; then if ! command -v mdfind &> /dev/null; then
echo " ${YELLOW}Note: mdfind not available, limited analysis${NC}" echo " ${YELLOW}Note: mdfind not available, limited analysis${NC}"
return return
fi fi
@@ -1336,7 +1340,7 @@ display_file_types() {
esac esac
local files local files
files=$(mdfind -onlyin "$CURRENT_PATH" "$query" 2>/dev/null) files=$(mdfind -onlyin "$CURRENT_PATH" "$query" 2> /dev/null)
local count local count
count=$(echo "$files" | grep -c . || echo "0") count=$(echo "$files" | grep -c . || echo "0")
local total_size=0 local total_size=0
@@ -1345,7 +1349,7 @@ display_file_types() {
while IFS= read -r file; do while IFS= read -r file; do
if [[ -f "$file" ]]; then if [[ -f "$file" ]]; then
local fsize local fsize
fsize=$(stat -f%z "$file" 2>/dev/null || echo "0") fsize=$(stat -f%z "$file" 2> /dev/null || echo "0")
((total_size += fsize)) ((total_size += fsize))
fi fi
done <<< "$files" done <<< "$files"
@@ -1364,7 +1368,7 @@ display_file_types() {
read_single_key() { read_single_key() {
local key="" local key=""
# Read single character without waiting for Enter # Read single character without waiting for Enter
if read -rsn1 key 2>/dev/null; then if read -rsn1 key 2> /dev/null; then
echo "$key" echo "$key"
else else
echo "q" echo "q"
@@ -1396,13 +1400,13 @@ scan_directory_contents_fast() {
fi fi
# Ultra-fast file scanning - batch stat for maximum speed # Ultra-fast file scanning - batch stat for maximum speed
find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2>/dev/null | \ find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2> /dev/null |
xargs -0 -n 20 -P "$num_jobs" stat -f "%z|file|%N" 2>/dev/null > "$temp_files" & xargs -0 -n 20 -P "$num_jobs" stat -f "%z|file|%N" 2> /dev/null > "$temp_files" &
local file_pid=$! local file_pid=$!
# Smart directory scanning with aggressive optimization # Smart directory scanning with aggressive optimization
# Strategy: Fast estimation first, accurate on-demand # Strategy: Fast estimation first, accurate on-demand
find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \ find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
xargs -0 -n 1 -P "$num_jobs" sh -c ' xargs -0 -n 1 -P "$num_jobs" sh -c '
dir="$1" dir="$1"
size="" size=""
@@ -1436,7 +1440,7 @@ scan_directory_contents_fast() {
[[ -z "$size" ]] || [[ "$size" -eq 0 ]] && size=1 [[ -z "$size" ]] || [[ "$size" -eq 0 ]] && size=1
fi fi
echo "$((size * 1024))|dir|$dir" echo "$((size * 1024))|dir|$dir"
' _ > "$temp_dirs" 2>/dev/null & ' _ > "$temp_dirs" 2> /dev/null &
local dir_pid=$! local dir_pid=$!
# Show progress while waiting # Show progress while waiting
@@ -1448,22 +1452,22 @@ scan_directory_contents_fast() {
local spinner_chars local spinner_chars
spinner_chars="$(mo_spinner_chars)" spinner_chars="$(mo_spinner_chars)"
local chars_len=${#spinner_chars} local chars_len=${#spinner_chars}
for ((idx=0; idx<chars_len; idx++)); do for ((idx = 0; idx < chars_len; idx++)); do
spinner+=("${spinner_chars:idx:1}") spinner+=("${spinner_chars:idx:1}")
done done
fi fi
[[ ${#spinner[@]} -eq 0 ]] && spinner=('|' '/' '-' '\\') [[ ${#spinner[@]} -eq 0 ]] && spinner=('|' '/' '-' '\\')
local i=0 local i=0
local max_wait=30 # Reduced to 30 seconds (fast fail) local max_wait=30 # Reduced to 30 seconds (fast fail)
local elapsed=0 local elapsed=0
local tick=0 local tick=0
local spin_len=${#spinner[@]} local spin_len=${#spinner[@]}
(( spin_len == 0 )) && spinner=('|' '/' '-' '\\') && spin_len=${#spinner[@]} ((spin_len == 0)) && spinner=('|' '/' '-' '\\') && spin_len=${#spinner[@]}
while ( kill -0 "$dir_pid" 2>/dev/null || kill -0 "$file_pid" 2>/dev/null ); do while (kill -0 "$dir_pid" 2> /dev/null || kill -0 "$file_pid" 2> /dev/null); do
printf "\r ${BLUE}Scanning${NC} ${spinner[$((i % spin_len))]} (%ds)" "$elapsed" >&2 printf "\r ${BLUE}Scanning${NC} ${spinner[$((i % spin_len))]} (%ds)" "$elapsed" >&2
((i++)) ((i++))
sleep 0.1 # Faster animation (100ms per frame) sleep 0.1 # Faster animation (100ms per frame)
((tick++)) ((tick++))
# Update elapsed seconds every 10 ticks (1 second) # Update elapsed seconds every 10 ticks (1 second)
@@ -1473,10 +1477,10 @@ scan_directory_contents_fast() {
# Force kill if taking too long (30 seconds for fast response) # Force kill if taking too long (30 seconds for fast response)
if [[ $elapsed -ge $max_wait ]]; then if [[ $elapsed -ge $max_wait ]]; then
kill -9 "$dir_pid" 2>/dev/null || true kill -9 "$dir_pid" 2> /dev/null || true
kill -9 "$file_pid" 2>/dev/null || true kill -9 "$file_pid" 2> /dev/null || true
wait "$dir_pid" 2>/dev/null || true wait "$dir_pid" 2> /dev/null || true
wait "$file_pid" 2>/dev/null || true wait "$file_pid" 2> /dev/null || true
printf "\r ${YELLOW}Large directory - showing estimated sizes${NC}\n" >&2 printf "\r ${YELLOW}Large directory - showing estimated sizes${NC}\n" >&2
sleep 0.3 sleep 0.3
break break
@@ -1488,8 +1492,8 @@ scan_directory_contents_fast() {
fi fi
# Wait for completion (non-blocking if already killed) # Wait for completion (non-blocking if already killed)
wait "$file_pid" 2>/dev/null || true wait "$file_pid" 2> /dev/null || true
wait "$dir_pid" 2>/dev/null || true wait "$dir_pid" 2> /dev/null || true
# Small delay only if scan was very fast (let user see the spinner briefly) # Small delay only if scan was very fast (let user see the spinner briefly)
if [[ "$show_progress" == "true" ]] && [[ ${elapsed:-0} -lt 1 ]]; then if [[ "$show_progress" == "true" ]] && [[ ${elapsed:-0} -lt 1 ]]; then
@@ -1498,19 +1502,19 @@ scan_directory_contents_fast() {
# Combine and sort - only keep top items # Combine and sort - only keep top items
# Ensure we handle empty files gracefully # Ensure we handle empty files gracefully
> "$output_file" true > "$output_file"
if [[ -f "$temp_dirs" ]] || [[ -f "$temp_files" ]]; then if [[ -f "$temp_dirs" ]] || [[ -f "$temp_files" ]]; then
cat "$temp_dirs" "$temp_files" 2>/dev/null | sort -t'|' -k1 -rn | head -"$max_items" > "$output_file" || true cat "$temp_dirs" "$temp_files" 2> /dev/null | sort -t'|' -k1 -rn | head -"$max_items" > "$output_file" || true
fi fi
# Cleanup # Cleanup
rm -f "$temp_dirs" "$temp_files" 2>/dev/null rm -f "$temp_dirs" "$temp_files" 2> /dev/null
} }
# Calculate directory sizes and update (now only used for deep refresh) # Calculate directory sizes and update (now only used for deep refresh)
calculate_dir_sizes() { calculate_dir_sizes() {
local items_file="$1" local items_file="$1"
local max_items="${2:-15}" # Only recalculate first 15 by default local max_items="${2:-15}" # Only recalculate first 15 by default
local temp_file="${items_file}.calc" local temp_file="${items_file}.calc"
# Since we now scan with actual sizes, this function is mainly for refresh # Since we now scan with actual sizes, this function is mainly for refresh
@@ -1519,9 +1523,9 @@ calculate_dir_sizes() {
# Only update if source file still exists (might have been deleted if user quit) # Only update if source file still exists (might have been deleted if user quit)
if [[ -f "$items_file" ]]; then if [[ -f "$items_file" ]]; then
mv "$temp_file" "$items_file" 2>/dev/null || true mv "$temp_file" "$items_file" 2> /dev/null || true
else else
rm -f "$temp_file" 2>/dev/null || true rm -f "$temp_file" 2> /dev/null || true
fi fi
} }
@@ -1531,7 +1535,7 @@ combine_initial_scan_results() {
local temp_large="$TEMP_PREFIX.large" local temp_large="$TEMP_PREFIX.large"
local temp_dirs="$TEMP_PREFIX.dirs" local temp_dirs="$TEMP_PREFIX.dirs"
> "$output_file" true > "$output_file"
# Add directories # Add directories
if [[ -f "$temp_dirs" ]]; then if [[ -f "$temp_dirs" ]]; then
@@ -1572,7 +1576,7 @@ show_volumes_overview() {
# External volumes (if any) # External volumes (if any)
if [[ -d "/Volumes" ]]; then if [[ -d "/Volumes" ]]; then
local vol_priority=500 local vol_priority=500
find /Volumes -mindepth 1 -maxdepth 1 -type d 2>/dev/null | while IFS= read -r vol; do find /Volumes -mindepth 1 -maxdepth 1 -type d 2> /dev/null | while IFS= read -r vol; do
local vol_name local vol_name
vol_name=$(basename "$vol") vol_name=$(basename "$vol")
echo "$((vol_priority))|$vol|Volume: $vol_name" echo "$((vol_priority))|$vol|Volume: $vol_name"
@@ -1582,17 +1586,17 @@ show_volumes_overview() {
} | sort -t'|' -k1 -rn > "$temp_volumes" } | sort -t'|' -k1 -rn > "$temp_volumes"
# Setup alternate screen and hide cursor (keep hidden throughout) # Setup alternate screen and hide cursor (keep hidden throughout)
tput smcup 2>/dev/null || true tput smcup 2> /dev/null || true
printf "\033[?25l" >&2 # Hide cursor printf "\033[?25l" >&2 # Hide cursor
cleanup_volumes() { cleanup_volumes() {
printf "\033[?25h" >&2 # Show cursor printf "\033[?25h" >&2 # Show cursor
tput rmcup 2>/dev/null || true tput rmcup 2> /dev/null || true
} }
trap cleanup_volumes EXIT INT TERM trap cleanup_volumes EXIT INT TERM
# Force cursor hidden at the start # Force cursor hidden at the start
stty -echo 2>/dev/null || true stty -echo 2> /dev/null || true
local cursor=0 local cursor=0
local total_items local total_items
@@ -1603,10 +1607,10 @@ show_volumes_overview() {
printf "\033[?25l" >&2 printf "\033[?25l" >&2
# Drain burst input (trackpad scroll -> many arrows) # Drain burst input (trackpad scroll -> many arrows)
type drain_pending_input >/dev/null 2>&1 && drain_pending_input type drain_pending_input > /dev/null 2>&1 && drain_pending_input
# Build output buffer to reduce flicker # Build output buffer to reduce flicker
local output="" local output=""
output+="\033[?25l" # Hide cursor output+="\033[?25l" # Hide cursor
output+="\033[H\033[J" output+="\033[H\033[J"
output+=$'\n' output+=$'\n'
output+="\033[0;35mSelect a location to explore\033[0m"$'\n' output+="\033[0;35mSelect a location to explore\033[0m"$'\n'
@@ -1633,7 +1637,7 @@ show_volumes_overview() {
# Read key (suppress any escape sequences that might leak) # Read key (suppress any escape sequences that might leak)
local key local key
key=$(read_key 2>/dev/null || echo "OTHER") key=$(read_key 2> /dev/null || echo "OTHER")
case "$key" in case "$key" in
"UP") "UP")
@@ -1642,7 +1646,7 @@ show_volumes_overview() {
"DOWN") "DOWN")
((cursor < total_items - 1)) && ((cursor++)) ((cursor < total_items - 1)) && ((cursor++))
;; ;;
"ENTER"|"RIGHT") "ENTER" | "RIGHT")
# Get selected path and enter it # Get selected path and enter it
local selected_path="" local selected_path=""
idx=0 idx=0
@@ -1679,7 +1683,7 @@ show_volumes_overview() {
# In volumes view, LEFT does nothing (already at top level) # In volumes view, LEFT does nothing (already at top level)
# User must press q/ESC to quit # User must press q/ESC to quit
;; ;;
"QUIT"|"q") "QUIT" | "q")
# Quit the volumes view # Quit the volumes view
break break
;; ;;
@@ -1693,13 +1697,13 @@ show_volumes_overview() {
# Interactive drill-down mode # Interactive drill-down mode
interactive_drill_down() { interactive_drill_down() {
local start_path="$1" local start_path="$1"
local initial_items="${2:-}" # Pre-scanned items for first level local initial_items="${2:-}" # Pre-scanned items for first level
local current_path="$start_path" local current_path="$start_path"
local path_stack=() local path_stack=()
local cursor=0 local cursor=0
local scroll_offset=0 # New: for scrolling local scroll_offset=0 # New: for scrolling
local need_scan=true local need_scan=true
local wait_for_calc=false # Don't wait on first load, let user press 'r' local wait_for_calc=false # Don't wait on first load, let user press 'r'
local temp_items="$TEMP_PREFIX.items" local temp_items="$TEMP_PREFIX.items"
local status_message="" local status_message=""
@@ -1711,33 +1715,33 @@ interactive_drill_down() {
# Directory cache: store scan results for each visited directory # Directory cache: store scan results for each visited directory
# Use temp files because bash 3.2 doesn't have associative arrays # Use temp files because bash 3.2 doesn't have associative arrays
local cache_dir="$TEMP_PREFIX.cache.$$" local cache_dir="$TEMP_PREFIX.cache.$$"
mkdir -p "$cache_dir" 2>/dev/null || true mkdir -p "$cache_dir" 2> /dev/null || true
# Note: We're already in alternate screen from show_volumes_overview # Note: We're already in alternate screen from show_volumes_overview
# Just hide cursor, don't re-enter alternate screen # Just hide cursor, don't re-enter alternate screen
printf "\033[?25l" # Hide cursor printf "\033[?25l" # Hide cursor
# Save terminal settings and disable echo # Save terminal settings and disable echo
local old_tty_settings="" local old_tty_settings=""
if [[ -t 0 ]]; then if [[ -t 0 ]]; then
old_tty_settings=$(stty -g 2>/dev/null || echo "") old_tty_settings=$(stty -g 2> /dev/null || echo "")
stty -echo 2>/dev/null || true stty -echo 2> /dev/null || true
fi fi
# Cleanup on exit (but don't exit alternate screen - may return to menu) # Cleanup on exit (but don't exit alternate screen - may return to menu)
cleanup_drill_down() { cleanup_drill_down() {
# Restore terminal settings # Restore terminal settings
if [[ -n "${old_tty_settings:-}" ]]; then if [[ -n "${old_tty_settings:-}" ]]; then
stty "$old_tty_settings" 2>/dev/null || true stty "$old_tty_settings" 2> /dev/null || true
fi fi
printf "\033[?25h" # Show cursor printf "\033[?25h" # Show cursor
# Don't call tput rmcup - we may be returning to volumes menu # Don't call tput rmcup - we may be returning to volumes menu
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2>/dev/null || true # Clean up cache [[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2> /dev/null || true # Clean up cache
} }
trap cleanup_drill_down EXIT INT TERM trap cleanup_drill_down EXIT INT TERM
# Drain any input that accumulated before entering interactive mode # Drain any input that accumulated before entering interactive mode
type drain_pending_input >/dev/null 2>&1 && drain_pending_input type drain_pending_input > /dev/null 2>&1 && drain_pending_input
while true; do while true; do
# Ensure cursor is always hidden during navigation # Ensure cursor is always hidden during navigation
@@ -1747,7 +1751,7 @@ interactive_drill_down() {
if [[ "$need_scan" == "true" ]]; then if [[ "$need_scan" == "true" ]]; then
# Generate cache key (use md5 hash of path) # Generate cache key (use md5 hash of path)
local cache_key local cache_key
cache_key=$(echo "$current_path" | md5 2>/dev/null || echo "$current_path" | shasum | cut -d' ' -f1) cache_key=$(echo "$current_path" | md5 2> /dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
local cache_file="$cache_dir/$cache_key" local cache_file="$cache_dir/$cache_key"
# Check if we have cached results for this directory # Check if we have cached results for this directory
@@ -1760,12 +1764,12 @@ interactive_drill_down() {
# Use || true to prevent exit on scan failure # Use || true to prevent exit on scan failure
scan_directory_contents_fast "$current_path" "$temp_items" 50 true || { scan_directory_contents_fast "$current_path" "$temp_items" 50 true || {
# Scan failed - create empty result file # Scan failed - create empty result file
> "$temp_items" true > "$temp_items"
} }
# Save to cache for next time (only if not empty) # Save to cache for next time (only if not empty)
if [[ -s "$temp_items" ]]; then if [[ -s "$temp_items" ]]; then
cp "$temp_items" "$cache_file" 2>/dev/null || true cp "$temp_items" "$cache_file" 2> /dev/null || true
fi fi
fi fi
@@ -1787,7 +1791,7 @@ interactive_drill_down() {
scroll_offset=0 scroll_offset=0
# Drain any input accumulated during scanning # Drain any input accumulated during scanning
type drain_pending_input >/dev/null 2>&1 && drain_pending_input type drain_pending_input > /dev/null 2>&1 && drain_pending_input
# Check if empty or scan failed # Check if empty or scan failed
if [[ $total_items -eq 0 ]]; then if [[ $total_items -eq 0 ]]; then
@@ -1800,7 +1804,7 @@ interactive_drill_down() {
echo " ${GRAY}Path: $current_path${NC}" >&2 echo " ${GRAY}Path: $current_path${NC}" >&2
echo "" >&2 echo "" >&2
echo " ${GRAY}Press any key to go back...${NC}" >&2 echo " ${GRAY}Press any key to go back...${NC}" >&2
read_key >/dev/null 2>&1 read_key > /dev/null 2>&1
else else
# Directory exists but scan returned nothing (timeout or empty) # Directory exists but scan returned nothing (timeout or empty)
printf "\033[H\033[J" >&2 printf "\033[H\033[J" >&2
@@ -1811,7 +1815,7 @@ interactive_drill_down() {
echo " ${GRAY}Press ${NC}${GREEN}R${NC}${GRAY} to retry, any other key to go back${NC}" >&2 echo " ${GRAY}Press ${NC}${GREEN}R${NC}${GRAY} to retry, any other key to go back${NC}" >&2
local retry_key local retry_key
retry_key=$(read_key 2>/dev/null || echo "OTHER") retry_key=$(read_key 2> /dev/null || echo "OTHER")
if [[ "$retry_key" == "RETRY" ]]; then if [[ "$retry_key" == "RETRY" ]]; then
# Retry scan # Retry scan
@@ -1842,13 +1846,13 @@ interactive_drill_down() {
# Build output buffer once for smooth rendering # Build output buffer once for smooth rendering
local output="" local output=""
output+="\033[?25l" # Hide cursor output+="\033[?25l" # Hide cursor
output+="\033[H\033[J" # Clear screen output+="\033[H\033[J" # Clear screen
output+=$'\n' output+=$'\n'
output+="\033[0;35mDisk space explorer > $(echo "$current_path" | sed "s|^$HOME|~|")\033[0m"$'\n' output+="\033[0;35mDisk space explorer > $(echo "$current_path" | sed "s|^$HOME|~|")\033[0m"$'\n'
output+=$'\n' output+=$'\n'
local max_show=15 # Show 15 items per page local max_show=15 # Show 15 items per page
local page_start=$scroll_offset local page_start=$scroll_offset
local page_end local page_end
page_end=$((scroll_offset + max_show)) page_end=$((scroll_offset + max_show))
@@ -1886,8 +1890,10 @@ interactive_drill_down() {
local badge="$BADGE_FILE" color="${NC}" local badge="$BADGE_FILE" color="${NC}"
if [[ "$type" == "dir" ]]; then if [[ "$type" == "dir" ]]; then
badge="$BADGE_DIR" color="${BLUE}" badge="$BADGE_DIR" color="${BLUE}"
if [[ $size -gt 10737418240 ]]; then color="${RED}" if [[ $size -gt 10737418240 ]]; then
elif [[ $size -gt 1073741824 ]]; then color="${YELLOW}" color="${RED}"
elif [[ $size -gt 1073741824 ]]; then
color="${YELLOW}"
fi fi
else else
local ext="${name##*.}" local ext="${name##*.}"
@@ -1895,10 +1901,10 @@ interactive_drill_down() {
info=$(get_file_info "$path") info=$(get_file_info "$path")
badge="${info%|*}" badge="${info%|*}"
case "$ext" in case "$ext" in
dmg|iso|pkg|zip|tar|gz|rar|7z) dmg | iso | pkg | zip | tar | gz | rar | 7z)
color="${YELLOW}" color="${YELLOW}"
;; ;;
mov|mp4|avi|mkv|webm|jpg|jpeg|png|gif|heic) mov | mp4 | avi | mkv | webm | jpg | jpeg | png | gif | heic)
color="${YELLOW}" color="${YELLOW}"
;; ;;
log) log)
@@ -1945,7 +1951,7 @@ interactive_drill_down() {
# Read key directly without draining (to preserve all user input) # Read key directly without draining (to preserve all user input)
local key local key
key=$(read_key 2>/dev/null || echo "OTHER") key=$(read_key 2> /dev/null || echo "OTHER")
# Debug: uncomment to see what keys are being received # Debug: uncomment to see what keys are being received
# printf "\rDEBUG: Received key=[%s] " "$key" >&2 # printf "\rDEBUG: Received key=[%s] " "$key" >&2
@@ -1974,7 +1980,7 @@ interactive_drill_down() {
fi fi
fi fi
;; ;;
"ENTER"|"RIGHT") "ENTER" | "RIGHT")
# Enter selected item - directory or file # Enter selected item - directory or file
if [[ $cursor -lt ${#items[@]} ]]; then if [[ $cursor -lt ${#items[@]} ]]; then
local selected="${items[$cursor]}" local selected="${items[$cursor]}"
@@ -1998,7 +2004,7 @@ interactive_drill_down() {
# For text-like files, use less or fallback to open # For text-like files, use less or fallback to open
case "$file_ext" in case "$file_ext" in
txt|log|md|json|xml|yaml|yml|conf|cfg|ini|sh|bash|zsh|py|js|ts|go|rs|c|cpp|h|java|rb|php|html|css|sql) txt | log | md | json | xml | yaml | yml | conf | cfg | ini | sh | bash | zsh | py | js | ts | go | rs | c | cpp | h | java | rb | php | html | css | sql)
# Clear screen and show loading message # Clear screen and show loading message
printf "\033[H\033[J" printf "\033[H\033[J"
echo "" echo ""
@@ -2006,21 +2012,21 @@ interactive_drill_down() {
echo "" echo ""
# Try less first (best for text viewing) # Try less first (best for text viewing)
if command -v less &>/dev/null; then if command -v less &> /dev/null; then
# Exit alternate screen only for less # Exit alternate screen only for less
printf "\033[?25h" # Show cursor printf "\033[?25h" # Show cursor
tput rmcup 2>/dev/null || true tput rmcup 2> /dev/null || true
less -F "$selected_path" 2>/dev/null && open_success=true less -F "$selected_path" 2> /dev/null && open_success=true
# Return to alternate screen # Return to alternate screen
tput smcup 2>/dev/null || true tput smcup 2> /dev/null || true
printf "\033[?25l" # Hide cursor printf "\033[?25l" # Hide cursor
else else
# Fallback to system open if less is not available # Fallback to system open if less is not available
echo " ${GRAY}Launching default application...${NC}" echo " ${GRAY}Launching default application...${NC}"
if command -v open &>/dev/null; then if command -v open &> /dev/null; then
open "$selected_path" 2>/dev/null && open_success=true open "$selected_path" 2> /dev/null && open_success=true
if [[ "$open_success" == "true" ]]; then if [[ "$open_success" == "true" ]]; then
echo "" echo ""
echo " ${GREEN}${ICON_SUCCESS}${NC} File opened in external app" echo " ${GREEN}${ICON_SUCCESS}${NC} File opened in external app"
@@ -2038,8 +2044,8 @@ interactive_drill_down() {
echo "" echo ""
echo " ${GRAY}Launching default application...${NC}" echo " ${GRAY}Launching default application...${NC}"
if command -v open &>/dev/null; then if command -v open &> /dev/null; then
open "$selected_path" 2>/dev/null && open_success=true open "$selected_path" 2> /dev/null && open_success=true
# Show brief success message # Show brief success message
if [[ "$open_success" == "true" ]]; then if [[ "$open_success" == "true" ]]; then
@@ -2059,7 +2065,7 @@ interactive_drill_down() {
echo "" echo ""
echo " ${GRAY}File: $selected_path${NC}" echo " ${GRAY}File: $selected_path${NC}"
echo " ${GRAY}Press any key to return...${NC}" echo " ${GRAY}Press any key to return...${NC}"
read -n 1 -s 2>/dev/null read -n 1 -s 2> /dev/null
fi fi
fi fi
fi fi
@@ -2081,16 +2087,16 @@ interactive_drill_down() {
# Already at start path - return to volumes menu # Already at start path - return to volumes menu
# Don't show cursor or exit screen - menu will handle it # Don't show cursor or exit screen - menu will handle it
if [[ -n "${old_tty_settings:-}" ]]; then if [[ -n "${old_tty_settings:-}" ]]; then
stty "$old_tty_settings" 2>/dev/null || true stty "$old_tty_settings" 2> /dev/null || true
fi fi
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2>/dev/null || true [[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2> /dev/null || true
trap - EXIT INT TERM trap - EXIT INT TERM
return 1 # Return to menu return 1 # Return to menu
fi fi
;; ;;
"OPEN") "OPEN")
if command -v open >/dev/null 2>&1; then if command -v open > /dev/null 2>&1; then
if open "$current_path" >/dev/null 2>&1; then if open "$current_path" > /dev/null 2>&1; then
status_message="${GREEN}${ICON_SUCCESS}${NC} Finder opened: ${GRAY}$current_path${NC}" status_message="${GREEN}${ICON_SUCCESS}${NC} Finder opened: ${GRAY}$current_path${NC}"
else else
status_message="${YELLOW}Warning:${NC} Could not open ${GRAY}$current_path${NC}" status_message="${YELLOW}Warning:${NC} Could not open ${GRAY}$current_path${NC}"
@@ -2155,7 +2161,7 @@ interactive_drill_down() {
# Read confirmation # Read confirmation
local confirm local confirm
confirm=$(read_key 2>/dev/null || echo "QUIT") confirm=$(read_key 2> /dev/null || echo "QUIT")
if [[ "$confirm" == "ENTER" ]]; then if [[ "$confirm" == "ENTER" ]]; then
# Request sudo if needed before deletion # Request sudo if needed before deletion
@@ -2180,11 +2186,11 @@ interactive_drill_down() {
# Try to delete with sudo if needed # Try to delete with sudo if needed
local delete_success=false local delete_success=false
if [[ "$needs_sudo" == "true" ]]; then if [[ "$needs_sudo" == "true" ]]; then
if sudo rm -rf "$selected_path" 2>/dev/null; then if sudo rm -rf "$selected_path" 2> /dev/null; then
delete_success=true delete_success=true
fi fi
else else
if rm -rf "$selected_path" 2>/dev/null; then if rm -rf "$selected_path" 2> /dev/null; then
delete_success=true delete_success=true
fi fi
fi fi
@@ -2195,9 +2201,9 @@ interactive_drill_down() {
# Clear cache to force rescan # Clear cache to force rescan
local cache_key local cache_key
cache_key=$(echo "$current_path" | md5 2>/dev/null || echo "$current_path" | shasum | cut -d' ' -f1) cache_key=$(echo "$current_path" | md5 2> /dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
local cache_file="$cache_dir/$cache_key" local cache_file="$cache_dir/$cache_key"
rm -f "$cache_file" 2>/dev/null || true rm -f "$cache_file" 2> /dev/null || true
# Refresh the view # Refresh the view
need_scan=true need_scan=true
@@ -2215,16 +2221,16 @@ interactive_drill_down() {
echo " ${ICON_LIST} System protection (SIP) prevents deletion" echo " ${ICON_LIST} System protection (SIP) prevents deletion"
echo "" echo ""
echo " ${GRAY}Press any key to continue...${NC}" echo " ${GRAY}Press any key to continue...${NC}"
read_key >/dev/null 2>&1 read_key > /dev/null 2>&1
fi fi
fi fi
fi fi
;; ;;
"QUIT"|"q") "QUIT" | "q")
# Quit the explorer # Quit the explorer
cleanup_drill_down cleanup_drill_down
trap - EXIT INT TERM trap - EXIT INT TERM
return 0 # Return true to indicate normal exit return 0 # Return true to indicate normal exit
;; ;;
*) *)
# Unknown key - ignore it # Unknown key - ignore it
@@ -2233,7 +2239,7 @@ interactive_drill_down() {
done done
# Cleanup is handled by trap # Cleanup is handled by trap
return 0 # Normal exit if loop ends return 0 # Normal exit if loop ends
} }
# Main interactive loop # Main interactive loop
@@ -2242,7 +2248,7 @@ interactive_mode() {
VIEW_MODE="overview" VIEW_MODE="overview"
while true; do while true; do
type drain_pending_input >/dev/null 2>&1 && drain_pending_input type drain_pending_input > /dev/null 2>&1 && drain_pending_input
display_interactive_menu display_interactive_menu
local key local key
@@ -2291,10 +2297,10 @@ interactive_mode() {
VIEW_MODE="overview" VIEW_MODE="overview"
fi fi
;; ;;
"f"|"F") "f" | "F")
VIEW_MODE="files" VIEW_MODE="files"
;; ;;
"t"|"T") "t" | "T")
VIEW_MODE="types" VIEW_MODE="types"
;; ;;
"ENTER") "ENTER")
@@ -2402,7 +2408,7 @@ main() {
# Parse arguments - only support --help # Parse arguments - only support --help
while [[ $# -gt 0 ]]; do while [[ $# -gt 0 ]]; do
case "$1" in case "$1" in
-h|--help) -h | --help)
echo "Usage: mole analyze" echo "Usage: mole analyze"
echo "" echo ""
echo "Interactive disk space explorer - Navigate folders sorted by size" echo "Interactive disk space explorer - Navigate folders sorted by size"
@@ -2446,7 +2452,7 @@ main() {
CURRENT_PATH="$target_path" CURRENT_PATH="$target_path"
# Create cache directory # Create cache directory
mkdir -p "$CACHE_DIR" 2>/dev/null || true mkdir -p "$CACHE_DIR" 2> /dev/null || true
# Start with volumes overview to let user choose location # Start with volumes overview to let user choose location
show_volumes_overview show_volumes_overview

Clean script (modified)

@@ -18,11 +18,11 @@ DRY_RUN=false
IS_M_SERIES=$([ "$(uname -m)" = "arm64" ] && echo "true" || echo "false") IS_M_SERIES=$([ "$(uname -m)" = "arm64" ] && echo "true" || echo "false")
# Constants # Constants
readonly MAX_PARALLEL_JOBS=15 # Maximum parallel background jobs readonly MAX_PARALLEL_JOBS=15 # Maximum parallel background jobs
readonly TEMP_FILE_AGE_DAYS=7 # Age threshold for temp file cleanup readonly TEMP_FILE_AGE_DAYS=7 # Age threshold for temp file cleanup
readonly ORPHAN_AGE_DAYS=60 # Age threshold for orphaned data readonly ORPHAN_AGE_DAYS=60 # Age threshold for orphaned data
readonly SIZE_1GB_KB=1048576 # 1GB in kilobytes readonly SIZE_1GB_KB=1048576 # 1GB in kilobytes
readonly SIZE_1MB_KB=1024 # 1MB in kilobytes readonly SIZE_1MB_KB=1024 # 1MB in kilobytes
# Default whitelist patterns (preselected, user can disable) # Default whitelist patterns (preselected, user can disable)
declare -a DEFAULT_WHITELIST_PATTERNS=( declare -a DEFAULT_WHITELIST_PATTERNS=(
"$HOME/Library/Caches/ms-playwright*" "$HOME/Library/Caches/ms-playwright*"
@@ -52,7 +52,7 @@ if [[ -f "$HOME/.config/mole/whitelist" ]]; then
# Prevent absolute path to critical system directories # Prevent absolute path to critical system directories
case "$line" in case "$line" in
/System/*|/bin/*|/sbin/*|/usr/bin/*|/usr/sbin/*) /System/* | /bin/* | /sbin/* | /usr/bin/* | /usr/sbin/*)
WHITELIST_WARNINGS+=("System path: $line") WHITELIST_WARNINGS+=("System path: $line")
continue continue
;; ;;
@@ -104,14 +104,14 @@ cleanup() {
# Stop all spinners and clear the line # Stop all spinners and clear the line
if [[ -n "$SPINNER_PID" ]]; then if [[ -n "$SPINNER_PID" ]]; then
kill "$SPINNER_PID" 2>/dev/null || true kill "$SPINNER_PID" 2> /dev/null || true
wait "$SPINNER_PID" 2>/dev/null || true wait "$SPINNER_PID" 2> /dev/null || true
SPINNER_PID="" SPINNER_PID=""
fi fi
if [[ -n "$INLINE_SPINNER_PID" ]]; then if [[ -n "$INLINE_SPINNER_PID" ]]; then
kill "$INLINE_SPINNER_PID" 2>/dev/null || true kill "$INLINE_SPINNER_PID" 2> /dev/null || true
wait "$INLINE_SPINNER_PID" 2>/dev/null || true wait "$INLINE_SPINNER_PID" 2> /dev/null || true
INLINE_SPINNER_PID="" INLINE_SPINNER_PID=""
fi fi
@@ -122,8 +122,8 @@ cleanup() {
# Stop sudo keepalive # Stop sudo keepalive
if [[ -n "$SUDO_KEEPALIVE_PID" ]]; then if [[ -n "$SUDO_KEEPALIVE_PID" ]]; then
kill "$SUDO_KEEPALIVE_PID" 2>/dev/null || true kill "$SUDO_KEEPALIVE_PID" 2> /dev/null || true
wait "$SUDO_KEEPALIVE_PID" 2>/dev/null || true wait "$SUDO_KEEPALIVE_PID" 2> /dev/null || true
SUDO_KEEPALIVE_PID="" SUDO_KEEPALIVE_PID=""
fi fi
@@ -176,8 +176,8 @@ stop_spinner() {
fi fi
if [[ -n "$SPINNER_PID" ]]; then if [[ -n "$SPINNER_PID" ]]; then
kill "$SPINNER_PID" 2>/dev/null kill "$SPINNER_PID" 2> /dev/null
wait "$SPINNER_PID" 2>/dev/null wait "$SPINNER_PID" 2> /dev/null
SPINNER_PID="" SPINNER_PID=""
printf "\r ${GREEN}${ICON_SUCCESS}${NC} %s\n" "$result_message" printf "\r ${GREEN}${ICON_SUCCESS}${NC} %s\n" "$result_message"
else else
@@ -229,7 +229,7 @@ safe_clean() {
if [[ ${#WHITELIST_PATTERNS[@]} -gt 0 ]]; then if [[ ${#WHITELIST_PATTERNS[@]} -gt 0 ]]; then
for w in "${WHITELIST_PATTERNS[@]}"; do for w in "${WHITELIST_PATTERNS[@]}"; do
# Match both exact path and glob pattern # Match both exact path and glob pattern
if [[ "$path" == "$w" ]] || [[ "$path" == $w ]]; then if [[ "$path" == "$w" ]] || [[ "$path" == "$w" ]]; then
skip=true skip=true
((skipped_count++)) ((skipped_count++))
break break
@@ -239,7 +239,7 @@ safe_clean() {
[[ "$skip" == "true" ]] && continue [[ "$skip" == "true" ]] && continue
[[ -e "$path" ]] && existing_paths+=("$path") [[ -e "$path" ]] && existing_paths+=("$path")
done done
# Update global whitelist skip counter # Update global whitelist skip counter
if [[ $skipped_count -gt 0 ]]; then if [[ $skipped_count -gt 0 ]]; then
((whitelist_skipped_count += skipped_count)) ((whitelist_skipped_count += skipped_count))
@@ -253,31 +253,34 @@ safe_clean() {
# Show progress indicator for potentially slow operations # Show progress indicator for potentially slow operations
if [[ ${#existing_paths[@]} -gt 3 ]]; then if [[ ${#existing_paths[@]} -gt 3 ]]; then
if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi
local temp_dir=$(create_temp_dir) local temp_dir
temp_dir=$(create_temp_dir)
# Parallel processing (bash 3.2 compatible) # Parallel processing (bash 3.2 compatible)
local -a pids=() local -a pids=()
local idx=0 local idx=0
for path in "${existing_paths[@]}"; do for path in "${existing_paths[@]}"; do
( (
local size=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0") local size
local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ') size=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
local count
count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
# Use index + PID for unique filename # Use index + PID for unique filename
local tmp_file="$temp_dir/result_${idx}.$$" local tmp_file="$temp_dir/result_${idx}.$$"
echo "$size $count" > "$tmp_file" echo "$size $count" > "$tmp_file"
mv "$tmp_file" "$temp_dir/result_${idx}" 2>/dev/null || true mv "$tmp_file" "$temp_dir/result_${idx}" 2> /dev/null || true
) & ) &
pids+=($!) pids+=($!)
((idx++)) ((idx++))
if (( ${#pids[@]} >= MAX_PARALLEL_JOBS )); then if ((${#pids[@]} >= MAX_PARALLEL_JOBS)); then
wait "${pids[0]}" 2>/dev/null || true wait "${pids[0]}" 2> /dev/null || true
pids=("${pids[@]:1}") pids=("${pids[@]:1}")
fi fi
done done
for pid in "${pids[@]}"; do for pid in "${pids[@]}"; do
wait "$pid" 2>/dev/null || true wait "$pid" 2> /dev/null || true
done done
# Read results using same index # Read results using same index
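
Most of the non-whitespace changes in this hunk split `local var=$(cmd)` into a separate declaration and assignment, which matches ShellCheck's SC2155 warning: `local` returns its own (successful) exit status, so a failure inside the command substitution is silently masked. A small self-contained demo of the difference, written for this document rather than taken from the repo:

```bash
#!/bin/bash
set -euo pipefail

combined() {
    local out=$(false)      # `local` succeeds, so the failed substitution is hidden
    echo "combined form: still running (last status: $?)"
}

separate() {
    local out
    out=$(false)            # failure is no longer masked; set -e aborts here
    echo "separate form: never reached"
}

combined
separate                    # the script exits 1 at this call
echo "never reached either"
```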
@@ -285,10 +288,10 @@ safe_clean() {
for path in "${existing_paths[@]}"; do for path in "${existing_paths[@]}"; do
local result_file="$temp_dir/result_${idx}" local result_file="$temp_dir/result_${idx}"
if [[ -f "$result_file" ]]; then if [[ -f "$result_file" ]]; then
read -r size count < "$result_file" 2>/dev/null || true read -r size count < "$result_file" 2> /dev/null || true
if [[ "$count" -gt 0 && "$size" -gt 0 ]]; then if [[ "$count" -gt 0 && "$size" -gt 0 ]]; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
rm -rf "$path" 2>/dev/null || true rm -rf "$path" 2> /dev/null || true
fi fi
((total_size_bytes += size)) ((total_size_bytes += size))
((total_count += count)) ((total_count += count))
@@ -304,12 +307,14 @@ safe_clean() {
if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi
for path in "${existing_paths[@]}"; do for path in "${existing_paths[@]}"; do
local size_bytes=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0") local size_bytes
local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ') size_bytes=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
local count
count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
if [[ "$count" -gt 0 && "$size_bytes" -gt 0 ]]; then if [[ "$count" -gt 0 && "$size_bytes" -gt 0 ]]; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
rm -rf "$path" 2>/dev/null || true rm -rf "$path" 2> /dev/null || true
fi fi
((total_size_bytes += size_bytes)) ((total_size_bytes += size_bytes))
((total_count += count)) ((total_count += count))
@@ -319,7 +324,10 @@ safe_clean() {
fi fi
# Clear progress / stop spinner before showing result # Clear progress / stop spinner before showing result
if [[ -t 1 ]]; then stop_inline_spinner; echo -ne "\r\033[K"; fi if [[ -t 1 ]]; then
stop_inline_spinner
echo -ne "\r\033[K"
fi
if [[ $removed_any -eq 1 ]]; then if [[ $removed_any -eq 1 ]]; then
# Convert KB to bytes for bytes_to_human() # Convert KB to bytes for bytes_to_human()
@@ -335,8 +343,8 @@ safe_clean() {
else else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $label ${GREEN}($size_human)${NC}" echo -e " ${GREEN}${ICON_SUCCESS}${NC} $label ${GREEN}($size_human)${NC}"
fi fi
((files_cleaned+=total_count)) ((files_cleaned += total_count))
((total_size_cleaned+=total_size_bytes)) ((total_size_cleaned += total_size_bytes))
((total_items++)) ((total_items++))
note_activity note_activity
fi fi
@@ -349,7 +357,7 @@ start_cleanup() {
clear clear
printf '\n' printf '\n'
echo -e "${PURPLE}Clean Your Mac${NC}" echo -e "${PURPLE}Clean Your Mac${NC}"
if [[ "$DRY_RUN" != "true" && -t 0 ]]; then if [[ "$DRY_RUN" != "true" && -t 0 ]]; then
echo "" echo ""
echo -e "${YELLOW}Tip:${NC} Safety first—run 'mo clean --dry-run'. Important Macs should stop." echo -e "${YELLOW}Tip:${NC} Safety first—run 'mo clean --dry-run'. Important Macs should stop."
@@ -384,7 +392,7 @@ start_cleanup() {
# Enter = yes, do system cleanup # Enter = yes, do system cleanup
if [[ -z "$choice" ]] || [[ "$choice" == $'\n' ]]; then if [[ -z "$choice" ]] || [[ "$choice" == $'\n' ]]; then
printf "\r\033[K" # Clear the prompt line printf "\r\033[K" # Clear the prompt line
if request_sudo_access "System cleanup requires admin access"; then if request_sudo_access "System cleanup requires admin access"; then
SYSTEM_CLEAN=true SYSTEM_CLEAN=true
echo -e "${GREEN}${ICON_SUCCESS}${NC} Admin access granted" echo -e "${GREEN}${ICON_SUCCESS}${NC} Admin access granted"
@@ -393,7 +401,7 @@ start_cleanup() {
( (
local retry_count=0 local retry_count=0
while true; do while true; do
if ! sudo -n true 2>/dev/null; then if ! sudo -n true 2> /dev/null; then
((retry_count++)) ((retry_count++))
if [[ $retry_count -ge 3 ]]; then if [[ $retry_count -ge 3 ]]; then
exit 1 exit 1
@@ -403,9 +411,9 @@ start_cleanup() {
fi fi
retry_count=0 retry_count=0
sleep 30 sleep 30
kill -0 "$$" 2>/dev/null || exit kill -0 "$$" 2> /dev/null || exit
done done
) 2>/dev/null & ) 2> /dev/null &
SUDO_KEEPALIVE_PID=$! SUDO_KEEPALIVE_PID=$!
else else
SYSTEM_CLEAN=false SYSTEM_CLEAN=false
@@ -430,7 +438,7 @@ start_cleanup() {
perform_cleanup() { perform_cleanup() {
echo -e "${BLUE}${ICON_ADMIN}${NC} $(detect_architecture) | Free space: $(get_free_space)" echo -e "${BLUE}${ICON_ADMIN}${NC} $(detect_architecture) | Free space: $(get_free_space)"
# Show whitelist info if patterns are active # Show whitelist info if patterns are active
local active_count=${#WHITELIST_PATTERNS[@]} local active_count=${#WHITELIST_PATTERNS[@]}
if [[ $active_count -gt 2 ]]; then if [[ $active_count -gt 2 ]]; then
@@ -453,25 +461,25 @@ perform_cleanup() {
start_section "Deep system-level cleanup" start_section "Deep system-level cleanup"
# Clean system caches more safely # Clean system caches more safely
sudo find /Library/Caches -name "*.cache" -delete 2>/dev/null || true sudo find /Library/Caches -name "*.cache" -delete 2> /dev/null || true
sudo find /Library/Caches -name "*.tmp" -delete 2>/dev/null || true sudo find /Library/Caches -name "*.tmp" -delete 2> /dev/null || true
sudo find /Library/Caches -type f -name "*.log" -delete 2>/dev/null || true sudo find /Library/Caches -type f -name "*.log" -delete 2> /dev/null || true
# Clean old temp files only (avoid breaking running processes) # Clean old temp files only (avoid breaking running processes)
local tmp_cleaned=0 local tmp_cleaned=0
local tmp_count=$(sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2>/dev/null | wc -l | tr -d ' ') local tmp_count=$(sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2> /dev/null | wc -l | tr -d ' ')
if [[ "$tmp_count" -gt 0 ]]; then if [[ "$tmp_count" -gt 0 ]]; then
sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2>/dev/null || true sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2> /dev/null || true
tmp_cleaned=1 tmp_cleaned=1
fi fi
local var_tmp_count=$(sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2>/dev/null | wc -l | tr -d ' ') local var_tmp_count=$(sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2> /dev/null | wc -l | tr -d ' ')
if [[ "$var_tmp_count" -gt 0 ]]; then if [[ "$var_tmp_count" -gt 0 ]]; then
sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2>/dev/null || true sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2> /dev/null || true
tmp_cleaned=1 tmp_cleaned=1
fi fi
[[ $tmp_cleaned -eq 1 ]] && log_success "Old system temp files (${TEMP_FILE_AGE_DAYS}+ days)" [[ $tmp_cleaned -eq 1 ]] && log_success "Old system temp files (${TEMP_FILE_AGE_DAYS}+ days)"
sudo rm -rf /Library/Updates/* 2>/dev/null || true sudo rm -rf /Library/Updates/* 2> /dev/null || true
log_success "System library caches and updates" log_success "System library caches and updates"
end_section end_section
@@ -497,15 +505,15 @@ perform_cleanup() {
[[ -d "$volume" && -d "$volume/.Trashes" && -w "$volume" ]] || continue [[ -d "$volume" && -d "$volume/.Trashes" && -w "$volume" ]] || continue
# Skip network volumes # Skip network volumes
local fs_type=$(df -T "$volume" 2>/dev/null | tail -1 | awk '{print $2}') local fs_type=$(df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}')
case "$fs_type" in case "$fs_type" in
nfs|smbfs|afpfs|cifs|webdav) continue ;; nfs | smbfs | afpfs | cifs | webdav) continue ;;
esac esac
# Verify volume is mounted # Verify volume is mounted
if mount | grep -q "on $volume "; then if mount | grep -q "on $volume "; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
find "$volume/.Trashes" -mindepth 1 -maxdepth 1 -exec rm -rf {} \; 2>/dev/null || true find "$volume/.Trashes" -mindepth 1 -maxdepth 1 -exec rm -rf {} \; 2> /dev/null || true
fi fi
fi fi
done done
@@ -526,7 +534,6 @@ perform_cleanup() {
safe_clean ~/Downloads/*.part "Incomplete downloads (partial)" safe_clean ~/Downloads/*.part "Incomplete downloads (partial)"
end_section end_section
# ===== 3. macOS System Caches ===== # ===== 3. macOS System Caches =====
start_section "macOS system caches" start_section "macOS system caches"
safe_clean ~/Library/Saved\ Application\ State/* "Saved application states" safe_clean ~/Library/Saved\ Application\ State/* "Saved application states"
@@ -542,7 +549,6 @@ perform_cleanup() {
safe_clean ~/Library/Application\ Support/CloudDocs/session/db/* "iCloud session cache" safe_clean ~/Library/Application\ Support/CloudDocs/session/db/* "iCloud session cache"
end_section end_section
# ===== 4. Sandboxed App Caches ===== # ===== 4. Sandboxed App Caches =====
start_section "Sandboxed app caches" start_section "Sandboxed app caches"
# Clean specific high-usage apps first for better user feedback # Clean specific high-usage apps first for better user feedback
@@ -553,7 +559,6 @@ perform_cleanup() {
safe_clean ~/Library/Containers/*/Data/Library/Caches/* "Sandboxed app caches" safe_clean ~/Library/Containers/*/Data/Library/Caches/* "Sandboxed app caches"
end_section end_section
# ===== 5. Browsers ===== # ===== 5. Browsers =====
start_section "Browser cleanup" start_section "Browser cleanup"
# Safari (cache only, NOT local storage or databases to preserve login states) # Safari (cache only, NOT local storage or databases to preserve login states)
@@ -577,7 +582,6 @@ perform_cleanup() {
safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache" safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache"
end_section end_section
# ===== 6. Cloud Storage ===== # ===== 6. Cloud Storage =====
start_section "Cloud storage caches" start_section "Cloud storage caches"
# Only cache files, not sync state or login credentials # Only cache files, not sync state or login credentials
@@ -590,7 +594,6 @@ perform_cleanup() {
safe_clean ~/Library/Caches/com.microsoft.OneDrive "OneDrive cache" safe_clean ~/Library/Caches/com.microsoft.OneDrive "OneDrive cache"
end_section end_section
# ===== 7. Office Applications ===== # ===== 7. Office Applications =====
start_section "Office applications" start_section "Office applications"
safe_clean ~/Library/Caches/com.microsoft.Word "Microsoft Word cache" safe_clean ~/Library/Caches/com.microsoft.Word "Microsoft Word cache"
@@ -603,11 +606,10 @@ perform_cleanup() {
safe_clean ~/Library/Caches/com.apple.mail/* "Apple Mail cache" safe_clean ~/Library/Caches/com.apple.mail/* "Apple Mail cache"
end_section end_section
# ===== 8. Developer tools ===== # ===== 8. Developer tools =====
start_section "Developer tools" start_section "Developer tools"
# Node.js ecosystem # Node.js ecosystem
if command -v npm >/dev/null 2>&1; then if command -v npm > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "npm cache" npm cache clean --force clean_tool_cache "npm cache" npm cache clean --force
else else
@@ -622,7 +624,7 @@ perform_cleanup() {
safe_clean ~/.bun/install/cache/* "Bun cache" safe_clean ~/.bun/install/cache/* "Bun cache"
# Python ecosystem # Python ecosystem
if command -v pip3 >/dev/null 2>&1; then if command -v pip3 > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "pip cache" pip3 cache purge clean_tool_cache "pip cache" pip3 cache purge
else else
@@ -636,7 +638,7 @@ perform_cleanup() {
safe_clean ~/.pyenv/cache/* "pyenv cache" safe_clean ~/.pyenv/cache/* "pyenv cache"
# Go ecosystem # Go ecosystem
if command -v go >/dev/null 2>&1; then if command -v go > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Go cache" bash -c 'go clean -modcache >/dev/null 2>&1 || true; go clean -cache >/dev/null 2>&1 || true' clean_tool_cache "Go cache" bash -c 'go clean -modcache >/dev/null 2>&1 || true; go clean -cache >/dev/null 2>&1 || true'
else else
@@ -652,7 +654,7 @@ perform_cleanup() {
safe_clean ~/.cargo/registry/cache/* "Rust cargo cache" safe_clean ~/.cargo/registry/cache/* "Rust cargo cache"
# Docker (only clean build cache, preserve images and volumes) # Docker (only clean build cache, preserve images and volumes)
if command -v docker >/dev/null 2>&1; then if command -v docker > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Docker build cache" docker builder prune -af clean_tool_cache "Docker build cache" docker builder prune -af
else else
@@ -674,9 +676,10 @@ perform_cleanup() {
safe_clean ~/Library/Caches/Homebrew/* "Homebrew cache" safe_clean ~/Library/Caches/Homebrew/* "Homebrew cache"
safe_clean /opt/homebrew/var/homebrew/locks/* "Homebrew lock files (M series)" safe_clean /opt/homebrew/var/homebrew/locks/* "Homebrew lock files (M series)"
safe_clean /usr/local/var/homebrew/locks/* "Homebrew lock files (Intel)" safe_clean /usr/local/var/homebrew/locks/* "Homebrew lock files (Intel)"
if command -v brew >/dev/null 2>&1; then if command -v brew > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Homebrew cleanup" brew cleanup # Use -s (scrub cache) for faster cleanup, --prune=all removes old versions
MOLE_CMD_TIMEOUT=300 clean_tool_cache "Homebrew cleanup" brew cleanup -s --prune=all
else else
echo -e " ${YELLOW}${NC} Homebrew (would cleanup)" echo -e " ${YELLOW}${NC} Homebrew (would cleanup)"
fi fi
@@ -818,7 +821,6 @@ perform_cleanup() {
end_section end_section
# ===== 10. Applications ===== # ===== 10. Applications =====
start_section "Applications" start_section "Applications"
@@ -983,7 +985,6 @@ perform_cleanup() {
end_section end_section
# ===== 11. Virtualization Tools ===== # ===== 11. Virtualization Tools =====
start_section "Virtualization tools" start_section "Virtualization tools"
safe_clean ~/Library/Caches/com.vmware.fusion "VMware Fusion cache" safe_clean ~/Library/Caches/com.vmware.fusion "VMware Fusion cache"
@@ -992,7 +993,6 @@ perform_cleanup() {
safe_clean ~/.vagrant.d/tmp/* "Vagrant temporary files" safe_clean ~/.vagrant.d/tmp/* "Vagrant temporary files"
end_section end_section
# ===== 12. Application Support logs cleanup ===== # ===== 12. Application Support logs cleanup =====
start_section "Application Support logs" start_section "Application Support logs"
@@ -1003,7 +1003,7 @@ perform_cleanup() {
# Skip system and protected apps # Skip system and protected apps
case "$app_name" in case "$app_name" in
com.apple.*|Adobe*|1Password|Claude) com.apple.* | Adobe* | 1Password | Claude)
continue continue
;; ;;
esac esac
@@ -1022,7 +1022,6 @@ perform_cleanup() {
end_section end_section
# ===== 13. Orphaned app data cleanup ===== # ===== 13. Orphaned app data cleanup =====
# Deep cleanup of leftover files from uninstalled apps # Deep cleanup of leftover files from uninstalled apps
# #
@@ -1074,32 +1073,32 @@ perform_cleanup() {
if [[ -d "$search_path" ]]; then if [[ -d "$search_path" ]]; then
while IFS= read -r app; do while IFS= read -r app; do
[[ -f "$app/Contents/Info.plist" ]] || continue [[ -f "$app/Contents/Info.plist" ]] || continue
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "") bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
[[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles" [[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles"
done < <(find "$search_path" -maxdepth 3 -type d -name "*.app" 2>/dev/null || true) done < <(find "$search_path" -maxdepth 3 -type d -name "*.app" 2> /dev/null || true)
fi fi
done done
# Use Spotlight as fallback to catch apps in unusual locations # Use Spotlight as fallback to catch apps in unusual locations
# This significantly reduces false positives # This significantly reduces false positives
if command -v mdfind >/dev/null 2>&1; then if command -v mdfind > /dev/null 2>&1; then
while IFS= read -r app; do while IFS= read -r app; do
[[ -f "$app/Contents/Info.plist" ]] || continue [[ -f "$app/Contents/Info.plist" ]] || continue
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "") bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
[[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles" [[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles"
done < <(mdfind "kMDItemKind == 'Application'" 2>/dev/null | grep "\.app$" || true) done < <(mdfind "kMDItemKind == 'Application'" 2> /dev/null | grep "\.app$" || true)
fi fi
# Get running applications (if an app is running, it's definitely not orphaned) # Get running applications (if an app is running, it's definitely not orphaned)
local running_apps=$(osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2>/dev/null || echo "") local running_apps=$(osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2> /dev/null || echo "")
echo "$running_apps" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | grep -v '^$' > "$running_bundles" echo "$running_apps" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | grep -v '^$' > "$running_bundles"
# Check LaunchAgents and LaunchDaemons (if app has launch items, it likely exists) # Check LaunchAgents and LaunchDaemons (if app has launch items, it likely exists)
find ~/Library/LaunchAgents /Library/LaunchAgents /Library/LaunchDaemons \ find ~/Library/LaunchAgents /Library/LaunchAgents /Library/LaunchDaemons \
-name "*.plist" -type f 2>/dev/null | while IFS= read -r plist; do -name "*.plist" -type f 2> /dev/null | while IFS= read -r plist; do
bundle_id=$(basename "$plist" .plist) bundle_id=$(basename "$plist" .plist)
echo "$bundle_id" >> "$launch_agents" echo "$bundle_id" >> "$launch_agents"
done 2>/dev/null || true done 2> /dev/null || true
# Combine and deduplicate all bundle IDs # Combine and deduplicate all bundle IDs
sort -u "$installed_bundles" "$running_bundles" "$launch_agents" > "${installed_bundles}.final" sort -u "$installed_bundles" "$running_bundles" "$launch_agents" > "${installed_bundles}.final"
@@ -1117,7 +1116,7 @@ perform_cleanup() {
# Returns 0 (true) only if we are VERY CONFIDENT the app is uninstalled # Returns 0 (true) only if we are VERY CONFIDENT the app is uninstalled
is_orphaned() { is_orphaned() {
local bundle_id="$1" local bundle_id="$1"
local directory_path="$2" # The actual directory we're considering deleting local directory_path="$2" # The actual directory we're considering deleting
# SAFETY CHECK 1: Skip system-critical and protected apps (MOST IMPORTANT) # SAFETY CHECK 1: Skip system-critical and protected apps (MOST IMPORTANT)
if should_protect_data "$bundle_id"; then if should_protect_data "$bundle_id"; then
@@ -1125,13 +1124,13 @@ perform_cleanup() {
fi fi
# SAFETY CHECK 2: Check if app bundle exists in our comprehensive scan # SAFETY CHECK 2: Check if app bundle exists in our comprehensive scan
if grep -q "^$bundle_id$" "$installed_bundles" 2>/dev/null; then if grep -q "^$bundle_id$" "$installed_bundles" 2> /dev/null; then
return 1 return 1
fi fi
# SAFETY CHECK 3: Extra check for system bundles (belt and suspenders) # SAFETY CHECK 3: Extra check for system bundles (belt and suspenders)
case "$bundle_id" in case "$bundle_id" in
com.apple.*|loginwindow|dock|systempreferences|finder|safari) com.apple.* | loginwindow | dock | systempreferences | finder | safari)
return 1 return 1
;; ;;
esac esac
@@ -1139,7 +1138,7 @@ perform_cleanup() {
# SAFETY CHECK 4: If it's a very common/important prefix, be extra careful # SAFETY CHECK 4: If it's a very common/important prefix, be extra careful
# For major vendors, we NEVER auto-clean (too risky) # For major vendors, we NEVER auto-clean (too risky)
case "$bundle_id" in case "$bundle_id" in
com.adobe.*|com.microsoft.*|com.google.*|org.mozilla.*|com.jetbrains.*|com.docker.*) com.adobe.* | com.microsoft.* | com.google.* | org.mozilla.* | com.jetbrains.* | com.docker.*)
return 1 return 1
;; ;;
esac esac
@@ -1149,9 +1148,9 @@ perform_cleanup() {
# This protects against apps in unusual locations we didn't scan # This protects against apps in unusual locations we didn't scan
if [[ -e "$directory_path" ]]; then if [[ -e "$directory_path" ]]; then
# Get last access time (days ago) # Get last access time (days ago)
local last_access_epoch=$(stat -f%a "$directory_path" 2>/dev/null || echo "0") local last_access_epoch=$(stat -f%a "$directory_path" 2> /dev/null || echo "0")
local current_epoch=$(date +%s) local current_epoch=$(date +%s)
local days_since_access=$(( (current_epoch - last_access_epoch) / 86400 )) local days_since_access=$(((current_epoch - last_access_epoch) / 86400))
# If accessed in the last 60 days, DO NOT DELETE # If accessed in the last 60 days, DO NOT DELETE
# This means app is likely still installed somewhere # This means app is likely still installed somewhere
@@ -1167,12 +1166,12 @@ perform_cleanup() {
# Clean orphaned caches # Clean orphaned caches
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned caches..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned caches..."
local cache_found=0 local cache_found=0
if ls ~/Library/Caches/com.* >/dev/null 2>&1; then if ls ~/Library/Caches/com.* > /dev/null 2>&1; then
for cache_dir in ~/Library/Caches/com.* ~/Library/Caches/org.* ~/Library/Caches/net.* ~/Library/Caches/io.*; do for cache_dir in ~/Library/Caches/com.* ~/Library/Caches/org.* ~/Library/Caches/net.* ~/Library/Caches/io.*; do
[[ -d "$cache_dir" ]] || continue [[ -d "$cache_dir" ]] || continue
local bundle_id=$(basename "$cache_dir") local bundle_id=$(basename "$cache_dir")
if is_orphaned "$bundle_id" "$cache_dir"; then if is_orphaned "$bundle_id" "$cache_dir"; then
local size_kb=$(du -sk "$cache_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$cache_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$cache_dir" "Orphaned cache: $bundle_id" safe_clean "$cache_dir" "Orphaned cache: $bundle_id"
((cache_found++)) ((cache_found++))
@@ -1187,12 +1186,12 @@ perform_cleanup() {
# Clean orphaned logs # Clean orphaned logs
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned logs..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned logs..."
local logs_found=0 local logs_found=0
if ls ~/Library/Logs/com.* >/dev/null 2>&1; then if ls ~/Library/Logs/com.* > /dev/null 2>&1; then
for log_dir in ~/Library/Logs/com.* ~/Library/Logs/org.* ~/Library/Logs/net.* ~/Library/Logs/io.*; do for log_dir in ~/Library/Logs/com.* ~/Library/Logs/org.* ~/Library/Logs/net.* ~/Library/Logs/io.*; do
[[ -d "$log_dir" ]] || continue [[ -d "$log_dir" ]] || continue
local bundle_id=$(basename "$log_dir") local bundle_id=$(basename "$log_dir")
if is_orphaned "$bundle_id" "$log_dir"; then if is_orphaned "$bundle_id" "$log_dir"; then
local size_kb=$(du -sk "$log_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$log_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$log_dir" "Orphaned logs: $bundle_id" safe_clean "$log_dir" "Orphaned logs: $bundle_id"
((logs_found++)) ((logs_found++))
@@ -1207,12 +1206,12 @@ perform_cleanup() {
# Clean orphaned saved states # Clean orphaned saved states
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned saved states..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned saved states..."
local states_found=0 local states_found=0
if ls ~/Library/Saved\ Application\ State/*.savedState >/dev/null 2>&1; then if ls ~/Library/Saved\ Application\ State/*.savedState > /dev/null 2>&1; then
for state_dir in ~/Library/Saved\ Application\ State/*.savedState; do for state_dir in ~/Library/Saved\ Application\ State/*.savedState; do
[[ -d "$state_dir" ]] || continue [[ -d "$state_dir" ]] || continue
local bundle_id=$(basename "$state_dir" .savedState) local bundle_id=$(basename "$state_dir" .savedState)
if is_orphaned "$bundle_id" "$state_dir"; then if is_orphaned "$bundle_id" "$state_dir"; then
local size_kb=$(du -sk "$state_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$state_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$state_dir" "Orphaned state: $bundle_id" safe_clean "$state_dir" "Orphaned state: $bundle_id"
((states_found++)) ((states_found++))
@@ -1231,13 +1230,13 @@ perform_cleanup() {
# To avoid deleting data from installed apps, we skip container cleanup. # To avoid deleting data from installed apps, we skip container cleanup.
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned containers..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned containers..."
local containers_found=0 local containers_found=0
if ls ~/Library/Containers/com.* >/dev/null 2>&1; then if ls ~/Library/Containers/com.* > /dev/null 2>&1; then
# Count potential orphaned containers but don't delete them # Count potential orphaned containers but don't delete them
for container_dir in ~/Library/Containers/com.* ~/Library/Containers/org.* ~/Library/Containers/net.* ~/Library/Containers/io.*; do for container_dir in ~/Library/Containers/com.* ~/Library/Containers/org.* ~/Library/Containers/net.* ~/Library/Containers/io.*; do
[[ -d "$container_dir" ]] || continue [[ -d "$container_dir" ]] || continue
local bundle_id=$(basename "$container_dir") local bundle_id=$(basename "$container_dir")
if is_orphaned "$bundle_id" "$container_dir"; then if is_orphaned "$bundle_id" "$container_dir"; then
local size_kb=$(du -sk "$container_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$container_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
# DISABLED: safe_clean "$container_dir" "Orphaned container: $bundle_id" # DISABLED: safe_clean "$container_dir" "Orphaned container: $bundle_id"
((containers_found++)) ((containers_found++))
@@ -1252,12 +1251,12 @@ perform_cleanup() {
# Clean orphaned WebKit data # Clean orphaned WebKit data
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned WebKit data..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned WebKit data..."
local webkit_found=0 local webkit_found=0
if ls ~/Library/WebKit/com.* >/dev/null 2>&1; then if ls ~/Library/WebKit/com.* > /dev/null 2>&1; then
for webkit_dir in ~/Library/WebKit/com.* ~/Library/WebKit/org.* ~/Library/WebKit/net.* ~/Library/WebKit/io.*; do for webkit_dir in ~/Library/WebKit/com.* ~/Library/WebKit/org.* ~/Library/WebKit/net.* ~/Library/WebKit/io.*; do
[[ -d "$webkit_dir" ]] || continue [[ -d "$webkit_dir" ]] || continue
local bundle_id=$(basename "$webkit_dir") local bundle_id=$(basename "$webkit_dir")
if is_orphaned "$bundle_id" "$webkit_dir"; then if is_orphaned "$bundle_id" "$webkit_dir"; then
local size_kb=$(du -sk "$webkit_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$webkit_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$webkit_dir" "Orphaned WebKit: $bundle_id" safe_clean "$webkit_dir" "Orphaned WebKit: $bundle_id"
((webkit_found++)) ((webkit_found++))
@@ -1272,12 +1271,12 @@ perform_cleanup() {
# Clean orphaned HTTP storages # Clean orphaned HTTP storages
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned HTTP storages..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned HTTP storages..."
local http_found=0 local http_found=0
if ls ~/Library/HTTPStorages/com.* >/dev/null 2>&1; then if ls ~/Library/HTTPStorages/com.* > /dev/null 2>&1; then
for http_dir in ~/Library/HTTPStorages/com.* ~/Library/HTTPStorages/org.* ~/Library/HTTPStorages/net.* ~/Library/HTTPStorages/io.*; do for http_dir in ~/Library/HTTPStorages/com.* ~/Library/HTTPStorages/org.* ~/Library/HTTPStorages/net.* ~/Library/HTTPStorages/io.*; do
[[ -d "$http_dir" ]] || continue [[ -d "$http_dir" ]] || continue
local bundle_id=$(basename "$http_dir") local bundle_id=$(basename "$http_dir")
if is_orphaned "$bundle_id" "$http_dir"; then if is_orphaned "$bundle_id" "$http_dir"; then
local size_kb=$(du -sk "$http_dir" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$http_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$http_dir" "Orphaned HTTP storage: $bundle_id" safe_clean "$http_dir" "Orphaned HTTP storage: $bundle_id"
((http_found++)) ((http_found++))
@@ -1292,12 +1291,12 @@ perform_cleanup() {
# Clean orphaned cookies # Clean orphaned cookies
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned cookies..." MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned cookies..."
local cookies_found=0 local cookies_found=0
if ls ~/Library/Cookies/*.binarycookies >/dev/null 2>&1; then if ls ~/Library/Cookies/*.binarycookies > /dev/null 2>&1; then
for cookie_file in ~/Library/Cookies/*.binarycookies; do for cookie_file in ~/Library/Cookies/*.binarycookies; do
[[ -f "$cookie_file" ]] || continue [[ -f "$cookie_file" ]] || continue
local bundle_id=$(basename "$cookie_file" .binarycookies) local bundle_id=$(basename "$cookie_file" .binarycookies)
if is_orphaned "$bundle_id" "$cookie_file"; then if is_orphaned "$bundle_id" "$cookie_file"; then
local size_kb=$(du -sk "$cookie_file" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$cookie_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$cookie_file" "Orphaned cookies: $bundle_id" safe_clean "$cookie_file" "Orphaned cookies: $bundle_id"
((cookies_found++)) ((cookies_found++))
@@ -1340,9 +1339,9 @@ perform_cleanup() {
start_section "iOS device backups" start_section "iOS device backups"
backup_dir="$HOME/Library/Application Support/MobileSync/Backup" backup_dir="$HOME/Library/Application Support/MobileSync/Backup"
if [[ -d "$backup_dir" ]] && find "$backup_dir" -mindepth 1 -maxdepth 1 | read -r _; then if [[ -d "$backup_dir" ]] && find "$backup_dir" -mindepth 1 -maxdepth 1 | read -r _; then
backup_kb=$(du -sk "$backup_dir" 2>/dev/null | awk '{print $1}') backup_kb=$(du -sk "$backup_dir" 2> /dev/null | awk '{print $1}')
if [[ -n "${backup_kb:-}" && "$backup_kb" -gt 102400 ]]; then if [[ -n "${backup_kb:-}" && "$backup_kb" -gt 102400 ]]; then
backup_human=$(du -sh "$backup_dir" 2>/dev/null | awk '{print $1}') backup_human=$(du -sh "$backup_dir" 2> /dev/null | awk '{print $1}')
note_activity note_activity
echo -e " Found ${GREEN}${backup_human}${NC} iOS backups" echo -e " Found ${GREEN}${backup_human}${NC} iOS backups"
echo -e " You can delete them manually: ${backup_dir}" echo -e " You can delete them manually: ${backup_dir}"
@@ -1361,9 +1360,9 @@ perform_cleanup() {
# Skip system volume and network volumes # Skip system volume and network volumes
[[ "$volume" == "/Volumes/MacintoshHD" || "$volume" == "/" ]] && continue [[ "$volume" == "/Volumes/MacintoshHD" || "$volume" == "/" ]] && continue
local fs_type=$(df -T "$volume" 2>/dev/null | tail -1 | awk '{print $2}') local fs_type=$(df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}')
case "$fs_type" in case "$fs_type" in
nfs|smbfs|afpfs|cifs|webdav) continue ;; nfs | smbfs | afpfs | cifs | webdav) continue ;;
esac esac
# Look for HFS+ style backups (Backups.backupdb) # Look for HFS+ style backups (Backups.backupdb)
@@ -1374,19 +1373,19 @@ perform_cleanup() {
while IFS= read -r inprogress_file; do while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue [[ -d "$inprogress_file" ]] || continue
local size_kb=$(du -sk "$inprogress_file" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$inprogress_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
local backup_name=$(basename "$inprogress_file") local backup_name=$(basename "$inprogress_file")
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
# Use tmutil to safely delete the failed backup # Use tmutil to safely delete the failed backup
if command -v tmutil >/dev/null 2>&1; then if command -v tmutil > /dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2>/dev/null; then if tmutil delete "$inprogress_file" 2> /dev/null; then
local size_human=$(bytes_to_human "$((size_kb * 1024))") local size_human=$(bytes_to_human "$((size_kb * 1024))")
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed backup: $backup_name ${GREEN}($size_human)${NC}" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed backup: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++)) ((tm_cleaned++))
((files_cleaned++)) ((files_cleaned++))
((total_size_cleaned+=size_kb)) ((total_size_cleaned += size_kb))
((total_items++)) ((total_items++))
note_activity note_activity
else else
@@ -1402,7 +1401,7 @@ perform_cleanup() {
note_activity note_activity
fi fi
fi fi
done < <(find "$backupdb_dir" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2>/dev/null || true) done < <(find "$backupdb_dir" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi fi
# Look for APFS style backups (.backupbundle or .sparsebundle) # Look for APFS style backups (.backupbundle or .sparsebundle)
@@ -1414,25 +1413,25 @@ perform_cleanup() {
# Check if bundle is already mounted by looking at hdiutil info # Check if bundle is already mounted by looking at hdiutil info
local bundle_name=$(basename "$bundle") local bundle_name=$(basename "$bundle")
local mounted_path=$(hdiutil info 2>/dev/null | grep -A 5 "image-path.*$bundle_name" | grep "/Volumes/" | awk '{print $1}' | head -1 || echo "") local mounted_path=$(hdiutil info 2> /dev/null | grep -A 5 "image-path.*$bundle_name" | grep "/Volumes/" | awk '{print $1}' | head -1 || echo "")
if [[ -n "$mounted_path" && -d "$mounted_path" ]]; then if [[ -n "$mounted_path" && -d "$mounted_path" ]]; then
# Bundle is already mounted, safe to check # Bundle is already mounted, safe to check
while IFS= read -r inprogress_file; do while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue [[ -d "$inprogress_file" ]] || continue
local size_kb=$(du -sk "$inprogress_file" 2>/dev/null | awk '{print $1}' || echo "0") local size_kb=$(du -sk "$inprogress_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then if [[ "$size_kb" -gt 0 ]]; then
local backup_name=$(basename "$inprogress_file") local backup_name=$(basename "$inprogress_file")
if [[ "$DRY_RUN" != "true" ]]; then if [[ "$DRY_RUN" != "true" ]]; then
if command -v tmutil >/dev/null 2>&1; then if command -v tmutil > /dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2>/dev/null; then if tmutil delete "$inprogress_file" 2> /dev/null; then
local size_human=$(bytes_to_human "$((size_kb * 1024))") local size_human=$(bytes_to_human "$((size_kb * 1024))")
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed APFS backup in $bundle_name: $backup_name ${GREEN}($size_human)${NC}" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed APFS backup in $bundle_name: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++)) ((tm_cleaned++))
((files_cleaned++)) ((files_cleaned++))
((total_size_cleaned+=size_kb)) ((total_size_cleaned += size_kb))
((total_items++)) ((total_items++))
note_activity note_activity
else else
@@ -1446,7 +1445,7 @@ perform_cleanup() {
note_activity note_activity
fi fi
fi fi
done < <(find "$mounted_path" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2>/dev/null || true) done < <(find "$mounted_path" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi fi
done done
done done
@@ -1525,12 +1524,11 @@ perform_cleanup() {
printf '\n' printf '\n'
} }
main() { main() {
# Parse args (only dry-run and help for minimal impact) # Parse args (only dry-run and help for minimal impact)
for arg in "$@"; do for arg in "$@"; do
case "$arg" in case "$arg" in
"--dry-run"|"-n") "--dry-run" | "-n")
DRY_RUN=true DRY_RUN=true
;; ;;
"--whitelist") "--whitelist")
@@ -1538,7 +1536,7 @@ main() {
manage_whitelist manage_whitelist
exit 0 exit 0
;; ;;
"--help"|"-h") "--help" | "-h")
echo "Mole - Deeper system cleanup" echo "Mole - Deeper system cleanup"
echo "Usage: clean.sh [options]" echo "Usage: clean.sh [options]"
echo "" echo ""


@@ -20,14 +20,14 @@ is_touchid_configured() {
if [[ ! -f "$PAM_SUDO_FILE" ]]; then if [[ ! -f "$PAM_SUDO_FILE" ]]; then
return 1 return 1
fi fi
grep -q "pam_tid.so" "$PAM_SUDO_FILE" 2>/dev/null grep -q "pam_tid.so" "$PAM_SUDO_FILE" 2> /dev/null
} }
# Check if system supports Touch ID # Check if system supports Touch ID
supports_touchid() { supports_touchid() {
# Check if bioutil exists and has Touch ID capability # Check if bioutil exists and has Touch ID capability
if command -v bioutil &>/dev/null; then if command -v bioutil &> /dev/null; then
bioutil -r 2>/dev/null | grep -q "Touch ID" && return 0 bioutil -r 2> /dev/null | grep -q "Touch ID" && return 0
fi fi
# Fallback: check if running on Apple Silicon or modern Intel Mac # Fallback: check if running on Apple Silicon or modern Intel Mac
@@ -39,7 +39,7 @@ supports_touchid() {
# For Intel Macs, check if it's 2018 or later (approximation) # For Intel Macs, check if it's 2018 or later (approximation)
local model_year local model_year
model_year=$(system_profiler SPHardwareDataType 2>/dev/null | grep "Model Identifier" | grep -o "[0-9]\{4\}" | head -1) model_year=$(system_profiler SPHardwareDataType 2> /dev/null | grep "Model Identifier" | grep -o "[0-9]\{4\}" | head -1)
if [[ -n "$model_year" ]] && [[ "$model_year" -ge 2018 ]]; then if [[ -n "$model_year" ]] && [[ "$model_year" -ge 2018 ]]; then
return 0 return 0
fi fi
@@ -76,7 +76,7 @@ enable_touchid() {
fi fi
# Create backup and apply changes # Create backup and apply changes
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2>/dev/null; then if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2> /dev/null; then
log_error "Failed to create backup" log_error "Failed to create backup"
return 1 return 1
fi fi
@@ -97,12 +97,12 @@ enable_touchid() {
' "$PAM_SUDO_FILE" > "$temp_file" ' "$PAM_SUDO_FILE" > "$temp_file"
# Apply the changes # Apply the changes
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2>/dev/null; then if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2> /dev/null; then
echo -e "${GREEN}${ICON_SUCCESS} Touch ID enabled${NC} ${GRAY}- try: sudo ls${NC}" echo -e "${GREEN}${ICON_SUCCESS} Touch ID enabled${NC} ${GRAY}- try: sudo ls${NC}"
echo "" echo ""
return 0 return 0
else else
rm -f "$temp_file" 2>/dev/null || true rm -f "$temp_file" 2> /dev/null || true
log_error "Failed to enable Touch ID" log_error "Failed to enable Touch ID"
return 1 return 1
fi fi
@@ -116,7 +116,7 @@ disable_touchid() {
fi fi
# Create backup and remove configuration # Create backup and remove configuration
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2>/dev/null; then if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2> /dev/null; then
log_error "Failed to create backup" log_error "Failed to create backup"
return 1 return 1
fi fi
@@ -126,12 +126,12 @@ disable_touchid() {
temp_file=$(mktemp) temp_file=$(mktemp)
grep -v "pam_tid.so" "$PAM_SUDO_FILE" > "$temp_file" grep -v "pam_tid.so" "$PAM_SUDO_FILE" > "$temp_file"
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2>/dev/null; then if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2> /dev/null; then
echo -e "${GREEN}${ICON_SUCCESS} Touch ID disabled${NC}" echo -e "${GREEN}${ICON_SUCCESS} Touch ID disabled${NC}"
echo "" echo ""
return 0 return 0
else else
rm -f "$temp_file" 2>/dev/null || true rm -f "$temp_file" 2> /dev/null || true
log_error "Failed to disable Touch ID" log_error "Failed to disable Touch ID"
return 1 return 1
fi fi
@@ -174,11 +174,11 @@ show_menu() {
echo "" echo ""
case "$key" in case "$key" in
$'\e') # ESC $'\e') # ESC
return 0 return 0
;; ;;
""|$'\n'|$'\r') # Enter "" | $'\n' | $'\r') # Enter
printf "\r\033[K" # Clear the prompt line printf "\r\033[K" # Clear the prompt line
disable_touchid disable_touchid
;; ;;
*) *)
@@ -191,11 +191,11 @@ show_menu() {
IFS= read -r -s -n1 key || key="" IFS= read -r -s -n1 key || key=""
case "$key" in case "$key" in
$'\e') # ESC $'\e') # ESC
return 0 return 0
;; ;;
""|$'\n'|$'\r') # Enter "" | $'\n' | $'\r') # Enter
printf "\r\033[K" # Clear the prompt line printf "\r\033[K" # Clear the prompt line
enable_touchid enable_touchid
;; ;;
*) *)
@@ -220,7 +220,7 @@ main() {
status) status)
show_status show_status
;; ;;
help|--help|-h) help | --help | -h)
show_help show_help
;; ;;
"") "")


@@ -56,10 +56,9 @@ if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
fi fi
# Initialize global variables # Initialize global variables
selected_apps=() # Global array for app selection selected_apps=() # Global array for app selection
declare -a apps_data=() declare -a apps_data=()
declare -a selection_state=() declare -a selection_state=()
current_line=0
total_items=0 total_items=0
files_cleaned=0 files_cleaned=0
total_size_cleaned=0 total_size_cleaned=0
@@ -68,16 +67,16 @@ total_size_cleaned=0
get_app_last_used() { get_app_last_used() {
local app_path="$1" local app_path="$1"
local last_used local last_used
last_used=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2>/dev/null) last_used=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2> /dev/null)
if [[ "$last_used" == "(null)" || -z "$last_used" ]]; then if [[ "$last_used" == "(null)" || -z "$last_used" ]]; then
echo "Never" echo "Never"
else else
local last_used_epoch local last_used_epoch
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$last_used" "+%s" 2>/dev/null) last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$last_used" "+%s" 2> /dev/null)
local current_epoch local current_epoch
current_epoch=$(date "+%s") current_epoch=$(date "+%s")
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 )) local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -eq 0 ]]; then if [[ $days_ago -eq 0 ]]; then
echo "Today" echo "Today"
@@ -86,10 +85,10 @@ get_app_last_used() {
elif [[ $days_ago -lt 30 ]]; then elif [[ $days_ago -lt 30 ]]; then
echo "${days_ago} days ago" echo "${days_ago} days ago"
elif [[ $days_ago -lt 365 ]]; then elif [[ $days_ago -lt 365 ]]; then
local months_ago=$(( days_ago / 30 )) local months_ago=$((days_ago / 30))
echo "${months_ago} month(s) ago" echo "${months_ago} month(s) ago"
else else
local years_ago=$(( days_ago / 365 )) local years_ago=$((days_ago / 365))
echo "${years_ago} year(s) ago" echo "${years_ago} year(s) ago"
fi fi
fi fi
@@ -101,22 +100,24 @@ scan_applications() {
local cache_dir="$HOME/.cache/mole" local cache_dir="$HOME/.cache/mole"
local cache_file="$cache_dir/app_scan_cache" local cache_file="$cache_dir/app_scan_cache"
local cache_meta="$cache_dir/app_scan_meta" local cache_meta="$cache_dir/app_scan_meta"
local cache_ttl=3600 # 1 hour cache validity local cache_ttl=3600 # 1 hour cache validity
mkdir -p "$cache_dir" 2>/dev/null mkdir -p "$cache_dir" 2> /dev/null
# Quick count of current apps (system + user directories) # Quick count of current apps (system + user directories)
local current_app_count local current_app_count
current_app_count=$( current_app_count=$(
(find /Applications -name "*.app" -maxdepth 1 2>/dev/null; (
find ~/Applications -name "*.app" -maxdepth 1 2>/dev/null) | wc -l | tr -d ' ' find /Applications -name "*.app" -maxdepth 1 2> /dev/null
find ~/Applications -name "*.app" -maxdepth 1 2> /dev/null
) | wc -l | tr -d ' '
) )
# Check if cache is valid unless explicitly disabled # Check if cache is valid unless explicitly disabled
if [[ -f "$cache_file" && -f "$cache_meta" ]]; then if [[ -f "$cache_file" && -f "$cache_meta" ]]; then
local cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2>/dev/null || echo 0))) local cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2> /dev/null || echo 0)))
local cached_app_count local cached_app_count
cached_app_count=$(cat "$cache_meta" 2>/dev/null || echo "0") cached_app_count=$(cat "$cache_meta" 2> /dev/null || echo "0")
# Cache is valid if: age < TTL AND app count matches # Cache is valid if: age < TTL AND app count matches
if [[ $cache_age -lt $cache_ttl && "$cached_app_count" == "$current_app_count" ]]; then if [[ $cache_age -lt $cache_ttl && "$cached_app_count" == "$current_app_count" ]]; then
@@ -149,26 +150,26 @@ scan_applications() {
local bundle_id="unknown" local bundle_id="unknown"
local display_name="$app_name" local display_name="$app_name"
if [[ -f "$app_path/Contents/Info.plist" ]]; then if [[ -f "$app_path/Contents/Info.plist" ]]; then
bundle_id=$(defaults read "$app_path/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "unknown") bundle_id=$(defaults read "$app_path/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "unknown")
# Try to get English name from bundle info # Try to get English name from bundle info
local bundle_executable local bundle_executable
bundle_executable=$(defaults read "$app_path/Contents/Info.plist" CFBundleExecutable 2>/dev/null) bundle_executable=$(defaults read "$app_path/Contents/Info.plist" CFBundleExecutable 2> /dev/null)
# Smart display name selection - prefer descriptive names over generic ones # Smart display name selection - prefer descriptive names over generic ones
local candidates=() local candidates=()
# Get all potential names # Get all potential names
local bundle_display_name local bundle_display_name
bundle_display_name=$(plutil -extract CFBundleDisplayName raw "$app_path/Contents/Info.plist" 2>/dev/null) bundle_display_name=$(plutil -extract CFBundleDisplayName raw "$app_path/Contents/Info.plist" 2> /dev/null)
local bundle_name local bundle_name
bundle_name=$(plutil -extract CFBundleName raw "$app_path/Contents/Info.plist" 2>/dev/null) bundle_name=$(plutil -extract CFBundleName raw "$app_path/Contents/Info.plist" 2> /dev/null)
# Check if executable name is generic/technical (should be avoided) # Check if executable name is generic/technical (should be avoided)
local is_generic_executable=false local is_generic_executable=false
if [[ -n "$bundle_executable" ]]; then if [[ -n "$bundle_executable" ]]; then
case "$bundle_executable" in case "$bundle_executable" in
"pake"|"Electron"|"electron"|"nwjs"|"node"|"helper"|"main"|"app"|"binary") "pake" | "Electron" | "electron" | "nwjs" | "node" | "helper" | "main" | "app" | "binary")
is_generic_executable=true is_generic_executable=true
;; ;;
esac esac
@@ -219,19 +220,19 @@ scan_applications() {
app_data_tuples+=("${app_path}|${app_name}|${bundle_id}|${display_name}") app_data_tuples+=("${app_path}|${app_name}|${bundle_id}|${display_name}")
done < <( done < <(
# Scan both system and user application directories # Scan both system and user application directories
find /Applications -name "*.app" -maxdepth 1 -print0 2>/dev/null find /Applications -name "*.app" -maxdepth 1 -print0 2> /dev/null
find ~/Applications -name "*.app" -maxdepth 1 -print0 2>/dev/null find ~/Applications -name "*.app" -maxdepth 1 -print0 2> /dev/null
) )
# Second pass: process each app with parallel size calculation # Second pass: process each app with parallel size calculation
local app_count=0 local app_count=0
local total_apps=${#app_data_tuples[@]} local total_apps=${#app_data_tuples[@]}
local max_parallel=10 # Process 10 apps in parallel local max_parallel=10 # Process 10 apps in parallel
local pids=() local pids=()
local inline_loading=false local inline_loading=false
if [[ "${MOLE_INLINE_LOADING:-}" == "1" || "${MOLE_INLINE_LOADING:-}" == "true" ]]; then if [[ "${MOLE_INLINE_LOADING:-}" == "1" || "${MOLE_INLINE_LOADING:-}" == "true" ]]; then
inline_loading=true inline_loading=true
printf "\033[H" >&2 # Position cursor at top of screen printf "\033[H" >&2 # Position cursor at top of screen
fi fi
# Process app metadata extraction function # Process app metadata extraction function
@@ -245,7 +246,7 @@ scan_applications() {
# Parallel size calculation # Parallel size calculation
local app_size="N/A" local app_size="N/A"
if [[ -d "$app_path" ]]; then if [[ -d "$app_path" ]]; then
app_size=$(du -sh "$app_path" 2>/dev/null | cut -f1 || echo "N/A") app_size=$(du -sh "$app_path" 2> /dev/null | cut -f1 || echo "N/A")
fi fi
# Get real last used date from macOS metadata # Get real last used date from macOS metadata
@@ -254,13 +255,13 @@ scan_applications() {
if [[ -d "$app_path" ]]; then if [[ -d "$app_path" ]]; then
local metadata_date local metadata_date
metadata_date=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2>/dev/null) metadata_date=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2> /dev/null)
if [[ "$metadata_date" != "(null)" && -n "$metadata_date" ]]; then if [[ "$metadata_date" != "(null)" && -n "$metadata_date" ]]; then
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$metadata_date" "+%s" 2>/dev/null || echo "0") last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$metadata_date" "+%s" 2> /dev/null || echo "0")
if [[ $last_used_epoch -gt 0 ]]; then if [[ $last_used_epoch -gt 0 ]]; then
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 )) local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -eq 0 ]]; then if [[ $days_ago -eq 0 ]]; then
last_used="Today" last_used="Today"
@@ -269,21 +270,21 @@ scan_applications() {
elif [[ $days_ago -lt 7 ]]; then elif [[ $days_ago -lt 7 ]]; then
last_used="${days_ago} days ago" last_used="${days_ago} days ago"
elif [[ $days_ago -lt 30 ]]; then elif [[ $days_ago -lt 30 ]]; then
local weeks_ago=$(( days_ago / 7 )) local weeks_ago=$((days_ago / 7))
[[ $weeks_ago -eq 1 ]] && last_used="1 week ago" || last_used="${weeks_ago} weeks ago" [[ $weeks_ago -eq 1 ]] && last_used="1 week ago" || last_used="${weeks_ago} weeks ago"
elif [[ $days_ago -lt 365 ]]; then elif [[ $days_ago -lt 365 ]]; then
local months_ago=$(( days_ago / 30 )) local months_ago=$((days_ago / 30))
[[ $months_ago -eq 1 ]] && last_used="1 month ago" || last_used="${months_ago} months ago" [[ $months_ago -eq 1 ]] && last_used="1 month ago" || last_used="${months_ago} months ago"
else else
local years_ago=$(( days_ago / 365 )) local years_ago=$((days_ago / 365))
[[ $years_ago -eq 1 ]] && last_used="1 year ago" || last_used="${years_ago} years ago" [[ $years_ago -eq 1 ]] && last_used="1 year ago" || last_used="${years_ago} years ago"
fi fi
fi fi
else else
# Fallback to file modification time # Fallback to file modification time
last_used_epoch=$(stat -f%m "$app_path" 2>/dev/null || echo "0") last_used_epoch=$(stat -f%m "$app_path" 2> /dev/null || echo "0")
if [[ $last_used_epoch -gt 0 ]]; then if [[ $last_used_epoch -gt 0 ]]; then
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 )) local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -lt 30 ]]; then if [[ $days_ago -lt 30 ]]; then
last_used="Recent" last_used="Recent"
elif [[ $days_ago -lt 365 ]]; then elif [[ $days_ago -lt 365 ]]; then
@@ -319,15 +320,15 @@ scan_applications() {
((spinner_idx++)) ((spinner_idx++))
# Wait if we've hit max parallel limit # Wait if we've hit max parallel limit
if (( ${#pids[@]} >= max_parallel )); then if ((${#pids[@]} >= max_parallel)); then
wait "${pids[0]}" 2>/dev/null wait "${pids[0]}" 2> /dev/null
pids=("${pids[@]:1}") # Remove first pid pids=("${pids[@]:1}") # Remove first pid
fi fi
done done
# Wait for remaining background processes # Wait for remaining background processes
for pid in "${pids[@]}"; do for pid in "${pids[@]}"; do
wait "$pid" 2>/dev/null wait "$pid" 2> /dev/null
done done
# Check if we found any applications # Check if we found any applications
@@ -347,12 +348,15 @@ scan_applications() {
fi fi
# Sort by last used (oldest first) and cache the result # Sort by last used (oldest first) and cache the result
sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || { rm -f "$temp_file"; return 1; } sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || {
rm -f "$temp_file"
return 1
}
rm -f "$temp_file" rm -f "$temp_file"
# Update cache with app count metadata # Update cache with app count metadata
cp "${temp_file}.sorted" "$cache_file" 2>/dev/null || true cp "${temp_file}.sorted" "$cache_file" 2> /dev/null || true
echo "$current_app_count" > "$cache_meta" 2>/dev/null || true echo "$current_app_count" > "$cache_meta" 2> /dev/null || true
# Verify sorted file exists before returning # Verify sorted file exists before returning
if [[ -f "${temp_file}.sorted" ]]; then if [[ -f "${temp_file}.sorted" ]]; then
@@ -415,12 +419,12 @@ uninstall_applications() {
echo "" echo ""
# Check if app is running (use app path for precise matching) # Check if app is running (use app path for precise matching)
if pgrep -f "$app_path" >/dev/null 2>&1; then if pgrep -f "$app_path" > /dev/null 2>&1; then
echo -e "${YELLOW}${ICON_ERROR} $app_name is currently running${NC}" echo -e "${YELLOW}${ICON_ERROR} $app_name is currently running${NC}"
read -p " Force quit $app_name? (y/N): " -n 1 -r read -p " Force quit $app_name? (y/N): " -n 1 -r
echo echo
if [[ $REPLY =~ ^[Yy]$ ]]; then if [[ $REPLY =~ ^[Yy]$ ]]; then
pkill -f "$app_path" 2>/dev/null || true pkill -f "$app_path" 2> /dev/null || true
sleep 2 sleep 2
else else
echo -e " ${BLUE}${ICON_EMPTY}${NC} Skipped $app_name" echo -e " ${BLUE}${ICON_EMPTY}${NC} Skipped $app_name"
@@ -438,7 +442,7 @@ uninstall_applications() {
# Calculate total size # Calculate total size
local app_size_kb local app_size_kb
app_size_kb=$(du -sk "$app_path" 2>/dev/null | awk '{print $1}' || echo "0") app_size_kb=$(du -sk "$app_path" 2> /dev/null | awk '{print $1}' || echo "0")
local related_size_kb local related_size_kb
related_size_kb=$(calculate_total_size "$related_files") related_size_kb=$(calculate_total_size "$related_files")
local system_size_kb local system_size_kb
@@ -461,12 +465,13 @@ uninstall_applications() {
done <<< "$system_files" done <<< "$system_files"
fi fi
if [[ $total_kb -gt 1048576 ]]; then # > 1GB local size_display
local size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}') if [[ $total_kb -gt 1048576 ]]; then # > 1GB
elif [[ $total_kb -gt 1024 ]]; then # > 1MB size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}')
local size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}') elif [[ $total_kb -gt 1024 ]]; then # > 1MB
size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}')
else else
local size_display="${total_kb}KB" size_display="${total_kb}KB"
fi fi
echo -e " ${BLUE}Total size: $size_display${NC}" echo -e " ${BLUE}Total size: $size_display${NC}"
@@ -477,7 +482,7 @@ uninstall_applications() {
if [[ $REPLY =~ ^[Yy]$ ]]; then if [[ $REPLY =~ ^[Yy]$ ]]; then
# Remove the application # Remove the application
if rm -rf "$app_path" 2>/dev/null; then if rm -rf "$app_path" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed application" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed application"
else else
echo -e " ${RED}${ICON_ERROR}${NC} Failed to remove $app_path" echo -e " ${RED}${ICON_ERROR}${NC} Failed to remove $app_path"
@@ -487,7 +492,7 @@ uninstall_applications() {
# Remove user-level related files # Remove user-level related files
while IFS= read -r file; do while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then if [[ -n "$file" && -e "$file" ]]; then
if rm -rf "$file" 2>/dev/null; then if rm -rf "$file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(echo "$file" | sed "s|$HOME|~|" | xargs basename)" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(echo "$file" | sed "s|$HOME|~|" | xargs basename)"
fi fi
fi fi
@@ -498,7 +503,7 @@ uninstall_applications() {
echo -e " ${BLUE}${ICON_SOLID}${NC} Admin access required for system files" echo -e " ${BLUE}${ICON_SOLID}${NC} Admin access required for system files"
while IFS= read -r file; do while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then if [[ -n "$file" && -e "$file" ]]; then
if sudo rm -rf "$file" 2>/dev/null; then if sudo rm -rf "$file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(basename "$file")" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(basename "$file")"
else else
echo -e " ${YELLOW}${ICON_ERROR}${NC} Failed to remove: $file" echo -e " ${YELLOW}${ICON_ERROR}${NC} Failed to remove: $file"
@@ -521,12 +526,13 @@ uninstall_applications() {
echo -e "${PURPLE}${ICON_ARROW} Uninstallation Summary${NC}" echo -e "${PURPLE}${ICON_ARROW} Uninstallation Summary${NC}"
if [[ $total_size_freed -gt 0 ]]; then if [[ $total_size_freed -gt 0 ]]; then
if [[ $total_size_freed -gt 1048576 ]]; then # > 1GB local freed_display
local freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}') if [[ $total_size_freed -gt 1048576 ]]; then # > 1GB
elif [[ $total_size_freed -gt 1024 ]]; then # > 1MB freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}')
local freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}') elif [[ $total_size_freed -gt 1024 ]]; then # > 1MB
freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}')
else else
local freed_display="${total_size_freed}KB" freed_display="${total_size_freed}KB"
fi fi
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Freed $freed_display of disk space" echo -e " ${GREEN}${ICON_SUCCESS}${NC} Freed $freed_display of disk space"
@@ -544,8 +550,8 @@ cleanup() {
unset MOLE_ALT_SCREEN_ACTIVE unset MOLE_ALT_SCREEN_ACTIVE
fi fi
if [[ -n "${sudo_keepalive_pid:-}" ]]; then if [[ -n "${sudo_keepalive_pid:-}" ]]; then
kill "$sudo_keepalive_pid" 2>/dev/null || true kill "$sudo_keepalive_pid" 2> /dev/null || true
wait "$sudo_keepalive_pid" 2>/dev/null || true wait "$sudo_keepalive_pid" 2> /dev/null || true
sudo_keepalive_pid="" sudo_keepalive_pid=""
fi fi
show_cursor show_cursor
@@ -634,7 +640,9 @@ main() {
clear clear
local selection_count=${#selected_apps[@]} local selection_count=${#selected_apps[@]}
if [[ $selection_count -eq 0 ]]; then if [[ $selection_count -eq 0 ]]; then
echo "No apps selected"; rm -f "$apps_file"; return 0 echo "No apps selected"
rm -f "$apps_file"
return 0
fi fi
# Show selected apps, max 3 per line # Show selected apps, max 3 per line
echo -e "${BLUE}${ICON_CONFIRM}${NC} Selected ${selection_count} app(s):" echo -e "${BLUE}${ICON_CONFIRM}${NC} Selected ${selection_count} app(s):"
@@ -644,7 +652,7 @@ main() {
IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app" IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app"
local display_item="${app_name}(${size})" local display_item="${app_name}(${size})"
if (( idx % 3 == 0 )); then if ((idx % 3 == 0)); then
# Start new line # Start new line
[[ -n "$line" ]] && echo " $line" [[ -n "$line" ]] && echo " $line"
line="$display_item" line="$display_item"

@@ -13,14 +13,28 @@ NC='\033[0m'
# Simple spinner # Simple spinner
_SPINNER_PID="" _SPINNER_PID=""
start_line_spinner() { start_line_spinner() {
local msg="$1"; [[ ! -t 1 ]] && { echo -e "${BLUE}|${NC} $msg"; return; } local msg="$1"
local chars="${MO_SPINNER_CHARS:-|/-\\}"; [[ -z "$chars" ]] && chars='|/-\\' [[ ! -t 1 ]] && {
echo -e "${BLUE}|${NC} $msg"
return
}
local chars="${MO_SPINNER_CHARS:-|/-\\}"
[[ -z "$chars" ]] && chars='|/-\\'
local i=0 local i=0
( while true; do c="${chars:$((i % ${#chars})):1}"; printf "\r${BLUE}%s${NC} %s" "$c" "$msg"; ((i++)); sleep 0.12; done ) & (while true; do
c="${chars:$((i % ${#chars})):1}"
printf "\r${BLUE}%s${NC} %s" "$c" "$msg"
((i++))
sleep 0.12
done) &
_SPINNER_PID=$! _SPINNER_PID=$!
} }
stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then kill "$_SPINNER_PID" 2>/dev/null || true; wait "$_SPINNER_PID" 2>/dev/null || true; _SPINNER_PID=""; printf "\r\033[K"; fi; } stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then
kill "$_SPINNER_PID" 2> /dev/null || true
wait "$_SPINNER_PID" 2> /dev/null || true
_SPINNER_PID=""
printf "\r\033[K"
fi; }
# Verbosity (0 = quiet, 1 = verbose) # Verbosity (0 = quiet, 1 = verbose)
VERBOSE=1 VERBOSE=1
@@ -105,7 +119,7 @@ resolve_source_dir() {
trap "rm -rf '$tmp'" EXIT trap "rm -rf '$tmp'" EXIT
start_line_spinner "Fetching Mole source..." start_line_spinner "Fetching Mole source..."
if command -v curl >/dev/null 2>&1; then if command -v curl > /dev/null 2>&1; then
if curl -fsSL -o "$tmp/mole.tar.gz" "https://github.com/tw93/mole/archive/refs/heads/main.tar.gz"; then if curl -fsSL -o "$tmp/mole.tar.gz" "https://github.com/tw93/mole/archive/refs/heads/main.tar.gz"; then
stop_line_spinner stop_line_spinner
tar -xzf "$tmp/mole.tar.gz" -C "$tmp" tar -xzf "$tmp/mole.tar.gz" -C "$tmp"
@@ -119,8 +133,8 @@ resolve_source_dir() {
stop_line_spinner stop_line_spinner
start_line_spinner "Cloning Mole source..." start_line_spinner "Cloning Mole source..."
if command -v git >/dev/null 2>&1; then if command -v git > /dev/null 2>&1; then
if git clone --depth=1 https://github.com/tw93/mole.git "$tmp/mole" >/dev/null 2>&1; then if git clone --depth=1 https://github.com/tw93/mole.git "$tmp/mole" > /dev/null 2>&1; then
stop_line_spinner stop_line_spinner
SOURCE_DIR="$tmp/mole" SOURCE_DIR="$tmp/mole"
return 0 return 0
@@ -142,7 +156,7 @@ get_source_version() {
get_installed_version() { get_installed_version() {
local binary="$INSTALL_DIR/mole" local binary="$INSTALL_DIR/mole"
if [[ -x "$binary" ]]; then if [[ -x "$binary" ]]; then
"$binary" --version 2>/dev/null | awk 'NF {print $NF; exit}' "$binary" --version 2> /dev/null | awk 'NF {print $NF; exit}'
fi fi
} }
@@ -166,11 +180,11 @@ parse_args() {
uninstall_mole uninstall_mole
exit 0 exit 0
;; ;;
--verbose|-v) --verbose | -v)
VERBOSE=1 VERBOSE=1
shift 1 shift 1
;; ;;
--help|-h) --help | -h)
show_help show_help
exit 0 exit 0
;; ;;
@@ -192,7 +206,7 @@ check_requirements() {
fi fi
# Check if already installed via Homebrew # Check if already installed via Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
if [[ "$ACTION" == "update" ]]; then if [[ "$ACTION" == "update" ]]; then
return 0 return 0
fi fi
@@ -330,7 +344,7 @@ verify_installation() {
if [[ -x "$INSTALL_DIR/mole" ]] && [[ -f "$CONFIG_DIR/lib/common.sh" ]]; then if [[ -x "$INSTALL_DIR/mole" ]] && [[ -f "$CONFIG_DIR/lib/common.sh" ]]; then
# Test if mole command works # Test if mole command works
if "$INSTALL_DIR/mole" --help >/dev/null 2>&1; then if "$INSTALL_DIR/mole" --help > /dev/null 2>&1; then
return 0 return 0
else else
log_warning "Mole command installed but may not be working properly" log_warning "Mole command installed but may not be working properly"
@@ -369,7 +383,7 @@ print_usage_summary() {
fi fi
echo "" echo ""
local message="Mole ${action} successfully" local message="Mole ${action} successfully"
if [[ "$action" == "updated" && -n "$previous_version" && -n "$new_version" && "$previous_version" != "$new_version" ]]; then if [[ "$action" == "updated" && -n "$previous_version" && -n "$new_version" && "$previous_version" != "$new_version" ]]; then
@@ -433,15 +447,15 @@ uninstall_mole() {
# Additional safety: never delete system critical paths (check first) # Additional safety: never delete system critical paths (check first)
case "$CONFIG_DIR" in case "$CONFIG_DIR" in
/|/usr|/usr/local|/usr/local/bin|/usr/local/lib|/usr/local/share|\ / | /usr | /usr/local | /usr/local/bin | /usr/local/lib | /usr/local/share | \
/Library|/System|/bin|/sbin|/etc|/var|/opt|"$HOME"|"$HOME/Library"|\ /Library | /System | /bin | /sbin | /etc | /var | /opt | "$HOME" | "$HOME/Library" | \
/usr/local/lib/*|/usr/local/share/*|/Library/*|/System/*) /usr/local/lib/* | /usr/local/share/* | /Library/* | /System/*)
is_safe=0 is_safe=0
;; ;;
*) *)
# Safe patterns: must be in user's home and end with 'mole' # Safe patterns: must be in user's home and end with 'mole'
if [[ "$CONFIG_DIR" == "$HOME/.config/mole" ]] || if [[ "$CONFIG_DIR" == "$HOME/.config/mole" ]] ||
[[ "$CONFIG_DIR" == "$HOME"/.*/mole ]]; then [[ "$CONFIG_DIR" == "$HOME"/.*/mole ]]; then
is_safe=1 is_safe=1
fi fi
;; ;;
@@ -457,7 +471,9 @@ uninstall_mole() {
echo " $CONFIG_DIR" echo " $CONFIG_DIR"
else else
echo "" echo ""
read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r; echo ""; if [[ $REPLY =~ ^[Yy]$ ]]; then read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
rm -rf "$CONFIG_DIR" rm -rf "$CONFIG_DIR"
log_success "Removed configuration" log_success "Removed configuration"
else else
@@ -495,9 +511,9 @@ perform_install() {
perform_update() { perform_update() {
check_requirements check_requirements
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
# Try to use shared function if available (when running from installed Mole) # Try to use shared function if available (when running from installed Mole)
resolve_source_dir 2>/dev/null || true resolve_source_dir 2> /dev/null || true
if [[ -f "$SOURCE_DIR/lib/common.sh" ]]; then if [[ -f "$SOURCE_DIR/lib/common.sh" ]]; then
# shellcheck disable=SC1090,SC1091 # shellcheck disable=SC1090,SC1091
source "$SOURCE_DIR/lib/common.sh" source "$SOURCE_DIR/lib/common.sh"
@@ -527,7 +543,7 @@ perform_update() {
if echo "$upgrade_output" | grep -q "already installed"; then if echo "$upgrade_output" | grep -q "already installed"; then
local current_version local current_version
current_version=$(brew list --versions mole 2>/dev/null | awk '{print $2}') current_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo -e "${GREEN}${NC} Already on latest version (${current_version:-$VERSION})" echo -e "${GREEN}${NC} Already on latest version (${current_version:-$VERSION})"
elif echo "$upgrade_output" | grep -q "Error:"; then elif echo "$upgrade_output" | grep -q "Error:"; then
log_error "Homebrew upgrade failed" log_error "Homebrew upgrade failed"
@@ -536,7 +552,7 @@ perform_update() {
else else
echo "$upgrade_output" | grep -Ev "^(==>|Updating Homebrew|Warning:)" || true echo "$upgrade_output" | grep -Ev "^(==>|Updating Homebrew|Warning:)" || true
local new_version local new_version
new_version=$(brew list --versions mole 2>/dev/null | awk '{print $2}') new_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo -e "${GREEN}${NC} Updated to latest version (${new_version:-$VERSION})" echo -e "${GREEN}${NC} Updated to latest version (${new_version:-$VERSION})"
fi fi
@@ -571,9 +587,21 @@ perform_update() {
# Update with minimal output (suppress info/success, show errors only) # Update with minimal output (suppress info/success, show errors only)
local old_verbose=$VERBOSE local old_verbose=$VERBOSE
VERBOSE=0 VERBOSE=0
create_directories || { VERBOSE=$old_verbose; log_error "Failed to create directories"; exit 1; } create_directories || {
install_files || { VERBOSE=$old_verbose; log_error "Failed to install files"; exit 1; } VERBOSE=$old_verbose
verify_installation || { VERBOSE=$old_verbose; log_error "Failed to verify installation"; exit 1; } log_error "Failed to create directories"
exit 1
}
install_files || {
VERBOSE=$old_verbose
log_error "Failed to install files"
exit 1
}
verify_installation || {
VERBOSE=$old_verbose
log_error "Failed to verify installation"
exit 1
}
setup_path setup_path
VERBOSE=$old_verbose VERBOSE=$old_verbose

@@ -33,17 +33,17 @@ batch_uninstall_applications() {
IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app" IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app"
# Check if app is running (use app path for precise matching) # Check if app is running (use app path for precise matching)
if pgrep -f "$app_path" >/dev/null 2>&1; then if pgrep -f "$app_path" > /dev/null 2>&1; then
running_apps+=("$app_name") running_apps+=("$app_name")
fi fi
# Check if app requires sudo to delete # Check if app requires sudo to delete
if [[ ! -w "$(dirname "$app_path")" ]] || [[ "$(stat -f%Su "$app_path" 2>/dev/null)" == "root" ]]; then if [[ ! -w "$(dirname "$app_path")" ]] || [[ "$(stat -f%Su "$app_path" 2> /dev/null)" == "root" ]]; then
sudo_apps+=("$app_name") sudo_apps+=("$app_name")
fi fi
# Calculate size for summary # Calculate size for summary
local app_size_kb=$(du -sk "$app_path" 2>/dev/null | awk '{print $1}' || echo "0") local app_size_kb=$(du -sk "$app_path" 2> /dev/null | awk '{print $1}' || echo "0")
local related_files=$(find_app_files "$bundle_id" "$app_name") local related_files=$(find_app_files "$bundle_id" "$app_name")
local related_size_kb=$(calculate_total_size "$related_files") local related_size_kb=$(calculate_total_size "$related_files")
local total_kb=$((app_size_kb + related_size_kb)) local total_kb=$((app_size_kb + related_size_kb))
@@ -104,13 +104,13 @@ batch_uninstall_applications() {
IFS= read -r -s -n1 key || key="" IFS= read -r -s -n1 key || key=""
case "$key" in case "$key" in
$'\e'|q|Q) $'\e' | q | Q)
echo "" echo ""
echo "" echo ""
return 0 return 0
;; ;;
""|$'\n'|$'\r'|y|Y) "" | $'\n' | $'\r' | y | Y)
printf "\r\033[K" # Clear the prompt line printf "\r\033[K" # Clear the prompt line
;; ;;
*) *)
echo "" echo ""
@@ -122,14 +122,18 @@ batch_uninstall_applications() {
# User confirmed, now request sudo access if needed # User confirmed, now request sudo access if needed
if [[ ${#sudo_apps[@]} -gt 0 ]]; then if [[ ${#sudo_apps[@]} -gt 0 ]]; then
# Check if sudo is already cached # Check if sudo is already cached
if ! sudo -n true 2>/dev/null; then if ! sudo -n true 2> /dev/null; then
if ! request_sudo_access "Admin required for system apps: ${sudo_apps[*]}"; then if ! request_sudo_access "Admin required for system apps: ${sudo_apps[*]}"; then
echo "" echo ""
log_error "Admin access denied" log_error "Admin access denied"
return 1 return 1
fi fi
fi fi
(while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null) & (while true; do
sudo -n true
sleep 60
kill -0 "$$" || exit
done 2> /dev/null) &
sudo_keepalive_pid=$! sudo_keepalive_pid=$!
fi fi
@@ -148,22 +152,22 @@ batch_uninstall_applications() {
local related_files=$(printf '%s' "$encoded_files" | base64 -d) local related_files=$(printf '%s' "$encoded_files" | base64 -d)
local reason="" local reason=""
local needs_sudo=false local needs_sudo=false
[[ ! -w "$(dirname "$app_path")" || "$(stat -f%Su "$app_path" 2>/dev/null)" == "root" ]] && needs_sudo=true [[ ! -w "$(dirname "$app_path")" || "$(stat -f%Su "$app_path" 2> /dev/null)" == "root" ]] && needs_sudo=true
if ! force_kill_app "$app_name" "$app_path"; then if ! force_kill_app "$app_name" "$app_path"; then
reason="still running" reason="still running"
fi fi
if [[ -z "$reason" ]]; then if [[ -z "$reason" ]]; then
if [[ "$needs_sudo" == true ]]; then if [[ "$needs_sudo" == true ]]; then
sudo rm -rf "$app_path" 2>/dev/null || reason="remove failed" sudo rm -rf "$app_path" 2> /dev/null || reason="remove failed"
else else
rm -rf "$app_path" 2>/dev/null || reason="remove failed" rm -rf "$app_path" 2> /dev/null || reason="remove failed"
fi fi
fi fi
if [[ -z "$reason" ]]; then if [[ -z "$reason" ]]; then
local files_removed=0 local files_removed=0
while IFS= read -r file; do while IFS= read -r file; do
[[ -n "$file" && -e "$file" ]] || continue [[ -n "$file" && -e "$file" ]] || continue
rm -rf "$file" 2>/dev/null && ((files_removed++)) || true rm -rf "$file" 2> /dev/null && ((files_removed++)) || true
done <<< "$related_files" done <<< "$related_files"
((total_size_freed += total_kb)) ((total_size_freed += total_kb))
((success_count++)) ((success_count++))
@@ -202,7 +206,7 @@ batch_uninstall_applications() {
for app_name in "${success_items[@]}"; do for app_name in "${success_items[@]}"; do
local display_item="${GREEN}${app_name}${NC}" local display_item="${GREEN}${app_name}${NC}"
if (( idx % 3 == 0 )); then if ((idx % 3 == 0)); then
# Start new line # Start new line
if [[ -n "$current_line" ]]; then if [[ -n "$current_line" ]]; then
summary_details+=("$current_line") summary_details+=("$current_line")
@@ -267,8 +271,8 @@ batch_uninstall_applications() {
# Clean up sudo keepalive if it was started # Clean up sudo keepalive if it was started
if [[ -n "${sudo_keepalive_pid:-}" ]]; then if [[ -n "${sudo_keepalive_pid:-}" ]]; then
kill "$sudo_keepalive_pid" 2>/dev/null || true kill "$sudo_keepalive_pid" 2> /dev/null || true
wait "$sudo_keepalive_pid" 2>/dev/null || true wait "$sudo_keepalive_pid" 2> /dev/null || true
sudo_keepalive_pid="" sudo_keepalive_pid=""
fi fi
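The keepalive loop reformatted in the hunk above is worth calling out: a background subshell re-validates the cached sudo credential every 60 seconds and stops on its own once the parent shell is gone (`kill -0 "$$"` fails), so long batch removals of system apps never stall on a password re-prompt. A minimal standalone sketch of the same idiom, with illustrative names that are not from the codebase:

```bash
#!/usr/bin/env bash
# Sketch of the sudo keepalive pattern (names are illustrative).
sudo -v # prime the credential cache (prompts once)
(while true; do
    sudo -n true         # refresh without prompting
    sleep 60
    kill -0 "$$" || exit # parent shell gone -> stop refreshing
done 2> /dev/null) &
keepalive_pid=$!

# ... privileged work (e.g. sudo rm -rf of selected apps) ...

kill "$keepalive_pid" 2> /dev/null || true
wait "$keepalive_pid" 2> /dev/null || true
```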
File diff suppressed because it is too large

@@ -4,8 +4,8 @@
set -euo pipefail set -euo pipefail
# Terminal control functions # Terminal control functions
enter_alt_screen() { tput smcup 2>/dev/null || true; } enter_alt_screen() { tput smcup 2> /dev/null || true; }
leave_alt_screen() { tput rmcup 2>/dev/null || true; } leave_alt_screen() { tput rmcup 2> /dev/null || true; }
# Main paginated multi-select menu function # Main paginated multi-select menu function
paginated_multi_select() { paginated_multi_select() {
@@ -47,16 +47,16 @@ paginated_multi_select() {
# Preserve original TTY settings so we can restore them reliably # Preserve original TTY settings so we can restore them reliably
local original_stty="" local original_stty=""
if [[ -t 0 ]] && command -v stty >/dev/null 2>&1; then if [[ -t 0 ]] && command -v stty > /dev/null 2>&1; then
original_stty=$(stty -g 2>/dev/null || echo "") original_stty=$(stty -g 2> /dev/null || echo "")
fi fi
restore_terminal() { restore_terminal() {
show_cursor show_cursor
if [[ -n "${original_stty-}" ]]; then if [[ -n "${original_stty-}" ]]; then
stty "${original_stty}" 2>/dev/null || stty sane 2>/dev/null || stty echo icanon 2>/dev/null || true stty "${original_stty}" 2> /dev/null || stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
else else
stty sane 2>/dev/null || stty echo icanon 2>/dev/null || true stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
fi fi
if [[ "${external_alt_screen:-false}" == false ]]; then if [[ "${external_alt_screen:-false}" == false ]]; then
leave_alt_screen leave_alt_screen
@@ -72,14 +72,14 @@ paginated_multi_select() {
# Interrupt handler # Interrupt handler
handle_interrupt() { handle_interrupt() {
cleanup cleanup
exit 130 # Standard exit code for Ctrl+C exit 130 # Standard exit code for Ctrl+C
} }
trap cleanup EXIT trap cleanup EXIT
trap handle_interrupt INT TERM trap handle_interrupt INT TERM
# Setup terminal - preserve interrupt character # Setup terminal - preserve interrupt character
stty -echo -icanon intr ^C 2>/dev/null || true stty -echo -icanon intr ^C 2> /dev/null || true
if [[ $external_alt_screen == false ]]; then if [[ $external_alt_screen == false ]]; then
enter_alt_screen enter_alt_screen
# Clear screen once on entry to alt screen # Clear screen once on entry to alt screen
@@ -108,7 +108,7 @@ paginated_multi_select() {
draw_menu() { draw_menu() {
# Move to home position without clearing (reduces flicker) # Move to home position without clearing (reduces flicker)
printf "\033[H" >&2 printf "\033[H" >&2
# Clear each line as we go instead of clearing entire screen # Clear each line as we go instead of clearing entire screen
local clear_line="\r\033[2K" local clear_line="\r\033[2K"
@@ -169,7 +169,7 @@ paginated_multi_select() {
# Clear any remaining lines at bottom # Clear any remaining lines at bottom
printf "${clear_line}\n" >&2 printf "${clear_line}\n" >&2
printf "${clear_line}${GRAY}${ICON_NAV_UP}/${ICON_NAV_DOWN}${NC} Navigate ${GRAY}|${NC} ${GRAY}Space${NC} Select ${GRAY}|${NC} ${GRAY}Enter${NC} Confirm ${GRAY}|${NC} ${GRAY}Q/ESC${NC} Quit\n" >&2 printf "${clear_line}${GRAY}${ICON_NAV_UP}/${ICON_NAV_DOWN}${NC} Navigate ${GRAY}|${NC} ${GRAY}Space${NC} Select ${GRAY}|${NC} ${GRAY}Enter${NC} Confirm ${GRAY}|${NC} ${GRAY}Q/ESC${NC} Quit\n" >&2
# Clear one more line to ensure no artifacts # Clear one more line to ensure no artifacts
printf "${clear_line}" >&2 printf "${clear_line}" >&2
} }
@@ -177,7 +177,7 @@ paginated_multi_select() {
# Show help screen # Show help screen
show_help() { show_help() {
printf "\033[H\033[J" >&2 printf "\033[H\033[J" >&2
cat >&2 <<EOF cat >&2 << EOF
Help - Navigation Controls Help - Navigation Controls
========================== ==========================
@@ -269,7 +269,7 @@ EOF
local IFS=',' local IFS=','
final_result="${selected_indices[*]}" final_result="${selected_indices[*]}"
fi fi
# Remove the trap to avoid cleanup on normal exit # Remove the trap to avoid cleanup on normal exit
trap - EXIT INT TERM trap - EXIT INT TERM

@@ -114,7 +114,6 @@ patterns_equivalent() {
return 1 return 1
} }
load_whitelist() { load_whitelist() {
local -a patterns=() local -a patterns=()
@@ -163,14 +162,13 @@ is_whitelisted() {
if [[ "$check_pattern" == "$existing_expanded" ]]; then if [[ "$check_pattern" == "$existing_expanded" ]]; then
return 0 return 0
fi fi
if [[ "$check_pattern" == $existing_expanded ]]; then if [[ "$check_pattern" == "$existing_expanded" ]]; then
return 0 return 0
fi fi
done done
return 1 return 1
} }
manage_whitelist() { manage_whitelist() {
manage_whitelist_categories manage_whitelist_categories
} }
@@ -286,7 +284,6 @@ manage_whitelist_categories() {
printf '\n' printf '\n'
} }
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
manage_whitelist manage_whitelist
fi fi
mole

@@ -28,7 +28,7 @@ MOLE_TAGLINE="can dig deep to clean your Mac."
# Get latest version from remote repository # Get latest version from remote repository
get_latest_version() { get_latest_version() {
curl -fsSL --connect-timeout 2 --max-time 3 -H "Cache-Control: no-cache" \ curl -fsSL --connect-timeout 2 --max-time 3 -H "Cache-Control: no-cache" \
"https://raw.githubusercontent.com/tw93/mole/main/mole" 2>/dev/null | \ "https://raw.githubusercontent.com/tw93/mole/main/mole" 2> /dev/null |
grep '^VERSION=' | head -1 | sed 's/VERSION="\(.*\)"/\1/' grep '^VERSION=' | head -1 | sed 's/VERSION="\(.*\)"/\1/'
} }
@@ -37,11 +37,11 @@ check_for_updates() {
local cache="$HOME/.cache/mole/version_check" local cache="$HOME/.cache/mole/version_check"
local msg_cache="$HOME/.cache/mole/update_message" local msg_cache="$HOME/.cache/mole/update_message"
local ttl="${MO_UPDATE_CHECK_TTL:-3600}" local ttl="${MO_UPDATE_CHECK_TTL:-3600}"
mkdir -p "$(dirname "$cache")" 2>/dev/null mkdir -p "$(dirname "$cache")" 2> /dev/null
# Skip if checked recently # Skip if checked recently
if [[ -f "$cache" ]]; then if [[ -f "$cache" ]]; then
local age=$(($(date +%s) - $(stat -f%m "$cache" 2>/dev/null || echo 0))) local age=$(($(date +%s) - $(stat -f%m "$cache" 2> /dev/null || echo 0)))
[[ $age -lt $ttl ]] && return [[ $age -lt $ttl ]] && return
fi fi
@@ -55,9 +55,9 @@ check_for_updates() {
else else
echo -n > "$msg_cache" echo -n > "$msg_cache"
fi fi
touch "$cache" 2>/dev/null touch "$cache" 2> /dev/null
) & ) &
disown 2>/dev/null || true disown 2> /dev/null || true
} }
# Show update notification if available # Show update notification if available
@@ -93,7 +93,7 @@ animate_mole_intro() {
local -a mole_lines=() local -a mole_lines=()
while IFS= read -r line; do while IFS= read -r line; do
mole_lines+=("$line") mole_lines+=("$line")
done <<'EOF' done << 'EOF'
/\_/\ /\_/\
____/ o o \ ____/ o o \
/~____ =o= / /~____ =o= /
@@ -108,7 +108,7 @@ EOF
local body_color="${PURPLE}" local body_color="${PURPLE}"
local ground_color="${GREEN}" local ground_color="${GREEN}"
for idx in "${!mole_lines[@]}"; do for idx in "${!mole_lines[@]}"; do
if (( idx < body_cutoff )); then if ((idx < body_cutoff)); then
printf "%s\n" "${body_color}${mole_lines[$idx]}${NC}" printf "%s\n" "${body_color}${mole_lines[$idx]}${NC}"
else else
printf "%s\n" "${ground_color}${mole_lines[$idx]}${NC}" printf "%s\n" "${ground_color}${mole_lines[$idx]}${NC}"
@@ -150,7 +150,7 @@ show_help() {
# Simple update function # Simple update function
update_mole() { update_mole() {
# Check if installed via Homebrew # Check if installed via Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
update_via_homebrew "$VERSION" update_via_homebrew "$VERSION"
exit 0 exit 0
fi fi
@@ -180,17 +180,20 @@ update_mole() {
local installer_url="https://raw.githubusercontent.com/tw93/mole/main/install.sh" local installer_url="https://raw.githubusercontent.com/tw93/mole/main/install.sh"
local tmp_installer local tmp_installer
tmp_installer="$(mktemp_file)" || { log_error "Update failed"; exit 1; } tmp_installer="$(mktemp_file)" || {
log_error "Update failed"
exit 1
}
# Download installer with progress # Download installer with progress
if command -v curl >/dev/null 2>&1; then if command -v curl > /dev/null 2>&1; then
if ! curl -fsSL --connect-timeout 10 --max-time 60 "$installer_url" -o "$tmp_installer" 2>&1; then if ! curl -fsSL --connect-timeout 10 --max-time 60 "$installer_url" -o "$tmp_installer" 2>&1; then
if [[ -t 1 ]]; then stop_inline_spinner; fi if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer" rm -f "$tmp_installer"
log_error "Update failed. Check network connection." log_error "Update failed. Check network connection."
exit 1 exit 1
fi fi
elif command -v wget >/dev/null 2>&1; then elif command -v wget > /dev/null 2>&1; then
if ! wget --timeout=10 --tries=3 -qO "$tmp_installer" "$installer_url" 2>&1; then if ! wget --timeout=10 --tries=3 -qO "$tmp_installer" "$installer_url" 2>&1; then
if [[ -t 1 ]]; then stop_inline_spinner; fi if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer" rm -f "$tmp_installer"
@@ -209,7 +212,7 @@ update_mole() {
# Determine install directory # Determine install directory
local mole_path local mole_path
mole_path="$(command -v mole 2>/dev/null || echo "$0")" mole_path="$(command -v mole 2> /dev/null || echo "$0")"
local install_dir local install_dir
install_dir="$(cd "$(dirname "$mole_path")" && pwd)" install_dir="$(cd "$(dirname "$mole_path")" && pwd)"
@@ -231,7 +234,7 @@ update_mole() {
# Only show success message if installer didn't already do so # Only show success message if installer didn't already do so
if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then
local new_version local new_version
new_version=$("$mole_path" --version 2>/dev/null | awk 'NF {print $NF}' || echo "") new_version=$("$mole_path" --version 2> /dev/null | awk 'NF {print $NF}' || echo "")
printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})" printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})"
else else
printf '\n' printf '\n'
@@ -247,7 +250,7 @@ update_mole() {
fi fi
if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then
local new_version local new_version
new_version=$("$mole_path" --version 2>/dev/null | awk 'NF {print $NF}' || echo "") new_version=$("$mole_path" --version 2> /dev/null | awk 'NF {print $NF}' || echo "")
printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})" printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})"
else else
printf '\n' printf '\n'
@@ -256,7 +259,7 @@ update_mole() {
if [[ -t 1 ]]; then stop_inline_spinner; fi if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer" rm -f "$tmp_installer"
log_error "Update failed" log_error "Update failed"
echo "$install_output" | tail -10 >&2 # Show last 10 lines of error echo "$install_output" | tail -10 >&2 # Show last 10 lines of error
exit 1 exit 1
fi fi
fi fi
@@ -279,7 +282,7 @@ remove_mole() {
local -a alias_installs=() local -a alias_installs=()
# Check Homebrew # Check Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
is_homebrew=true is_homebrew=true
fi fi
@@ -318,7 +321,9 @@ remove_mole() {
printf '\n' printf '\n'
# Check if anything to remove # Check if anything to remove
if [[ "$is_homebrew" == "false" && ${#manual_installs[@]:-0} -eq 0 && ${#alias_installs[@]:-0} -eq 0 ]]; then local manual_count=${#manual_installs[@]}
local alias_count=${#alias_installs[@]}
if [[ "$is_homebrew" == "false" && ${manual_count:-0} -eq 0 && ${alias_count:-0} -eq 0 ]]; then
printf '%s\n\n' "${YELLOW}No Mole installation detected${NC}" printf '%s\n\n' "${YELLOW}No Mole installation detected${NC}"
exit 0 exit 0
fi fi
@@ -343,8 +348,8 @@ remove_mole() {
echo "" echo ""
exit 0 exit 0
;; ;;
""|$'\n'|$'\r') "" | $'\n' | $'\r')
printf "\r\033[K" # Clear the prompt line printf "\r\033[K" # Clear the prompt line
# Continue with removal # Continue with removal
;; ;;
*) *)
@@ -357,32 +362,32 @@ remove_mole() {
# Remove Homebrew installation (silent) # Remove Homebrew installation (silent)
local has_error=false local has_error=false
if [[ "$is_homebrew" == "true" ]]; then if [[ "$is_homebrew" == "true" ]]; then
if ! brew uninstall mole >/dev/null 2>&1; then if ! brew uninstall mole > /dev/null 2>&1; then
has_error=true has_error=true
fi fi
fi fi
# Remove manual installations (silent) # Remove manual installations (silent)
if [[ ${#manual_installs[@]:-0} -gt 0 ]]; then if [[ ${manual_count:-0} -gt 0 ]]; then
for install in "${manual_installs[@]}"; do for install in "${manual_installs[@]}"; do
if [[ -f "$install" ]]; then if [[ -f "$install" ]]; then
rm -f "$install" 2>/dev/null || has_error=true rm -f "$install" 2> /dev/null || has_error=true
fi fi
done done
fi fi
if [[ ${#alias_installs[@]} -gt 0 ]]; then if [[ ${alias_count:-0} -gt 0 ]]; then
for alias in "${alias_installs[@]}"; do for alias in "${alias_installs[@]}"; do
if [[ -f "$alias" ]]; then if [[ -f "$alias" ]]; then
rm -f "$alias" 2>/dev/null || true rm -f "$alias" 2> /dev/null || true
fi fi
done done
fi fi
# Clean up cache first (silent) # Clean up cache first (silent)
if [[ -d "$HOME/.cache/mole" ]]; then if [[ -d "$HOME/.cache/mole" ]]; then
rm -rf "$HOME/.cache/mole" 2>/dev/null || true rm -rf "$HOME/.cache/mole" 2> /dev/null || true
fi fi
# Clean up configuration last (silent) # Clean up configuration last (silent)
if [[ -d "$HOME/.config/mole" ]]; then if [[ -d "$HOME/.config/mole" ]]; then
rm -rf "$HOME/.config/mole" 2>/dev/null || true rm -rf "$HOME/.config/mole" 2> /dev/null || true
fi fi
# Show final result # Show final result
@@ -400,7 +405,7 @@ remove_mole() {
# Display main menu options with minimal refresh to avoid flicker # Display main menu options with minimal refresh to avoid flicker
show_main_menu() { show_main_menu() {
local selected="${1:-1}" local selected="${1:-1}"
local _full_draw="${2:-true}" # Kept for compatibility (unused) local _full_draw="${2:-true}" # Kept for compatibility (unused)
local banner="${MAIN_MENU_BANNER:-}" local banner="${MAIN_MENU_BANNER:-}"
local update_message="${MAIN_MENU_UPDATE_MESSAGE:-}" local update_message="${MAIN_MENU_UPDATE_MESSAGE:-}"
@@ -410,7 +415,7 @@ show_main_menu() {
MAIN_MENU_BANNER="$banner" MAIN_MENU_BANNER="$banner"
fi fi
printf '\033[H' # Move cursor to home printf '\033[H' # Move cursor to home
local line="" local line=""
# Leading spacer # Leading spacer
@@ -452,13 +457,13 @@ interactive_main_menu() {
# Show intro animation only once per terminal tab # Show intro animation only once per terminal tab
if [[ -t 1 ]]; then if [[ -t 1 ]]; then
local tty_name local tty_name
tty_name=$(tty 2>/dev/null || echo "") tty_name=$(tty 2> /dev/null || echo "")
if [[ -n "$tty_name" ]]; then if [[ -n "$tty_name" ]]; then
local flag_file local flag_file
flag_file="/tmp/mole_intro_$(echo "$tty_name" | tr -c '[:alnum:]_' '_')" flag_file="/tmp/mole_intro_$(echo "$tty_name" | tr -c '[:alnum:]_' '_')"
if [[ ! -f "$flag_file" ]]; then if [[ ! -f "$flag_file" ]]; then
animate_mole_intro animate_mole_intro
touch "$flag_file" 2>/dev/null || true touch "$flag_file" 2> /dev/null || true
fi fi
fi fi
fi fi
@@ -472,7 +477,7 @@ interactive_main_menu() {
MAIN_MENU_BANNER="$brand_banner" MAIN_MENU_BANNER="$brand_banner"
if [[ -f "$msg_cache" && -s "$msg_cache" ]]; then if [[ -f "$msg_cache" && -s "$msg_cache" ]]; then
update_message="$(cat "$msg_cache" 2>/dev/null || echo "")" update_message="$(cat "$msg_cache" 2> /dev/null || echo "")"
fi fi
MAIN_MENU_UPDATE_MESSAGE="$update_message" MAIN_MENU_UPDATE_MESSAGE="$update_message"
@@ -501,7 +506,7 @@ interactive_main_menu() {
case "$key" in case "$key" in
"UP") ((current_option > 1)) && ((current_option--)) ;; "UP") ((current_option > 1)) && ((current_option--)) ;;
"DOWN") ((current_option < 5)) && ((current_option++)) ;; "DOWN") ((current_option < 5)) && ((current_option++)) ;;
"ENTER"|"$current_option") "ENTER" | "$current_option")
show_cursor show_cursor
case $current_option in case $current_option in
1) 1)
@@ -509,7 +514,11 @@ interactive_main_menu() {
;; ;;
2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;; 2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
3) exec "$SCRIPT_DIR/bin/analyze.sh" ;; 3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
4) clear; show_help; exit 0 ;; 4)
clear
show_help
exit 0
;;
5) cleanup_and_exit ;; 5) cleanup_and_exit ;;
esac esac
;; ;;
@@ -522,7 +531,11 @@ interactive_main_menu() {
;; ;;
2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;; 2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
3) exec "$SCRIPT_DIR/bin/analyze.sh" ;; 3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
4) clear; show_help; exit 0 ;; 4)
clear
show_help
exit 0
;;
5) cleanup_and_exit ;; 5) cleanup_and_exit ;;
esac esac
;; ;;
@@ -552,11 +565,11 @@ main() {
remove_mole remove_mole
exit 0 exit 0
;; ;;
"help"|"--help"|"-h") "help" | "--help" | "-h")
show_help show_help
exit 0 exit 0
;; ;;
"version"|"--version"|"-V") "version" | "--version" | "-V")
show_version show_version
exit 0 exit 0
;; ;;
scripts/format.sh Executable file

@@ -0,0 +1,60 @@
#!/bin/bash
# Format all shell scripts in the Mole project
#
# Usage:
# ./scripts/format.sh # Format all scripts
# ./scripts/format.sh --check # Check only, don't modify
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
CHECK_ONLY=false
# Parse arguments
if [[ "${1:-}" == "--check" ]]; then
CHECK_ONLY=true
elif [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
cat << 'EOF'
Usage: ./scripts/format.sh [--check]
Format shell scripts using shfmt.
Options:
--check Check formatting without modifying files
--help Show this help
Install: brew install shfmt
EOF
exit 0
fi
# Check if shfmt is installed
if ! command -v shfmt > /dev/null 2>&1; then
echo "Error: shfmt not installed"
echo "Install: brew install shfmt"
exit 1
fi
# Find all shell scripts
cd "$PROJECT_ROOT"
# shfmt options: -i 4 (4 spaces), -ci (indent switch cases), -sr (space after redirect)
if [[ "$CHECK_ONLY" == "true" ]]; then
echo "Checking formatting..."
if shfmt -i 4 -ci -sr -d . > /dev/null 2>&1; then
echo "✓ All scripts properly formatted"
exit 0
else
echo "✗ Some scripts need formatting:"
shfmt -i 4 -ci -sr -d .
echo ""
echo "Run './scripts/format.sh' to fix"
exit 1
fi
else
echo "Formatting scripts..."
shfmt -i 4 -ci -sr -w .
echo "✓ Done"
fi
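The `-i 4 -ci -sr` flags used here explain most of the mechanical churn elsewhere in this commit: 4-space indentation, indented `case` branches, and a space after redirect operators (`2>/dev/null` becoming `2> /dev/null`). A small illustrative before/after, not taken from the repository:

```bash
# Before
check() {
  case "$1" in
  -v|--verbose) verbose=1 ;;
  esac
  command -v git >/dev/null 2>&1
}

# After: shfmt -i 4 -ci -sr -w
check() {
    case "$1" in
        -v | --verbose) verbose=1 ;;
    esac
    command -v git > /dev/null 2>&1
}
```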
scripts/install-hooks.sh Executable file

@@ -0,0 +1,44 @@
#!/bin/bash
# Install git hooks for Mole project
#
# Usage:
# ./scripts/install-hooks.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'
cd "$PROJECT_ROOT"
# Check if this is a git repository
if [ ! -d ".git" ]; then
echo "Error: Not a git repository"
exit 1
fi
echo -e "${BLUE}Installing git hooks...${NC}"
# Install pre-commit hook
if [ -f ".git/hooks/pre-commit" ]; then
echo "Pre-commit hook already exists, creating backup..."
mv .git/hooks/pre-commit .git/hooks/pre-commit.backup
fi
ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit
echo -e "${GREEN}✓ Pre-commit hook installed${NC}"
echo ""
echo "The hook will:"
echo " • Auto-format shell scripts before commit"
echo " • Run shellcheck on changed files"
echo " • Show warnings but won't block commits"
echo ""
echo "To uninstall:"
echo " rm .git/hooks/pre-commit"
echo ""
scripts/pre-commit.sh Executable file

@@ -0,0 +1,67 @@
#!/bin/bash
# Git pre-commit hook for Mole
# Automatically formats shell scripts before commit
#
# Installation:
# ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
# chmod +x .git/hooks/pre-commit
#
# Or use the install script:
# ./scripts/install-hooks.sh
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Only check shell files that are staged
STAGED_SH_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep -E '\.sh$|^mole$' || true)
if [ -z "$STAGED_SH_FILES" ]; then
exit 0
fi
echo -e "${YELLOW}Running pre-commit checks on shell files...${NC}"
# Check if shfmt is installed
if ! command -v shfmt &> /dev/null; then
echo -e "${RED}shfmt is not installed. Install with: brew install shfmt${NC}"
exit 1
fi
# Check if shellcheck is installed
if ! command -v shellcheck &> /dev/null; then
echo -e "${RED}shellcheck is not installed. Install with: brew install shellcheck${NC}"
exit 1
fi
NEEDS_FORMAT=0
# Check formatting
for file in $STAGED_SH_FILES; do
if ! shfmt -i 4 -ci -sr -d "$file" > /dev/null 2>&1; then
echo -e "${YELLOW}Formatting $file...${NC}"
shfmt -i 4 -ci -sr -w "$file"
git add "$file"
NEEDS_FORMAT=1
fi
done
# Run shellcheck
for file in $STAGED_SH_FILES; do
if ! shellcheck -S warning "$file" > /dev/null 2>&1; then
echo -e "${YELLOW}ShellCheck warnings in $file:${NC}"
shellcheck -S warning "$file"
echo -e "${YELLOW}Continuing with commit (warnings are non-critical)...${NC}"
fi
done
if [ $NEEDS_FORMAT -eq 1 ]; then
echo -e "${GREEN}✓ Files formatted and re-staged${NC}"
fi
echo -e "${GREEN}✓ Pre-commit checks passed${NC}"
exit 0

@@ -1,31 +1,31 @@
#!/usr/bin/env bats #!/usr/bin/env bats
setup_file() { setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)" PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}" ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-analyze-home.XXXXXX")" HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-analyze-home.XXXXXX")"
export HOME export HOME
} }
teardown_file() { teardown_file() {
rm -rf "$HOME" rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME" export HOME="$ORIGINAL_HOME"
fi fi
} }
setup() { setup() {
export TERM="dumb" export TERM="dumb"
rm -rf "${HOME:?}"/* rm -rf "${HOME:?}"/*
mkdir -p "$HOME" mkdir -p "$HOME"
} }
@test "scan_directories lists largest folders first" { @test "scan_directories lists largest folders first" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF' run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail set -euo pipefail
source "$PROJECT_ROOT/bin/analyze.sh" source "$PROJECT_ROOT/bin/analyze.sh"
@@ -40,12 +40,12 @@ scan_directories "$root" "$output_file" 1
head -n1 "$output_file" head -n1 "$output_file"
EOF EOF
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Large"* ]] [[ "$output" == *"Large"* ]]
} }
@test "aggregate_by_directory sums child sizes per parent" { @test "aggregate_by_directory sums child sizes per parent" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF' run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail set -euo pipefail
source "$PROJECT_ROOT/bin/analyze.sh" source "$PROJECT_ROOT/bin/analyze.sh"
@@ -65,7 +65,7 @@ aggregate_by_directory "$input_file" "$output_file"
cat "$output_file" cat "$output_file"
EOF EOF
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"3072|$HOME/group/a/"* ]] [[ "$output" == *"3072|$HOME/group/a/"* ]]
[[ "$output" == *"512|$HOME/group/b/"* ]] [[ "$output" == *"512|$HOME/group/b/"* ]]
} }

@@ -1,60 +1,60 @@
#!/usr/bin/env bats #!/usr/bin/env bats
setup_file() { setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)" PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}" ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-home.XXXXXX")" HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-home.XXXXXX")"
export HOME export HOME
mkdir -p "$HOME" mkdir -p "$HOME"
} }
teardown_file() { teardown_file() {
rm -rf "$HOME" rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME" export HOME="$ORIGINAL_HOME"
fi fi
} }
setup() { setup() {
export TERM="xterm-256color" export TERM="xterm-256color"
rm -rf "${HOME:?}"/* rm -rf "${HOME:?}"/*
rm -rf "$HOME/Library" "$HOME/.config" rm -rf "$HOME/Library" "$HOME/.config"
mkdir -p "$HOME/Library/Caches" "$HOME/.config/mole" mkdir -p "$HOME/Library/Caches" "$HOME/.config/mole"
} }
@test "mo clean --dry-run skips system cleanup in non-interactive mode" { @test "mo clean --dry-run skips system cleanup in non-interactive mode" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Dry Run Mode"* ]] [[ "$output" == *"Dry Run Mode"* ]]
[[ "$output" != *"Deep system-level cleanup"* ]] [[ "$output" != *"Deep system-level cleanup"* ]]
} }
@test "mo clean --dry-run reports user cache without deleting it" { @test "mo clean --dry-run reports user cache without deleting it" {
mkdir -p "$HOME/Library/Caches/TestApp" mkdir -p "$HOME/Library/Caches/TestApp"
echo "cache data" > "$HOME/Library/Caches/TestApp/cache.tmp" echo "cache data" > "$HOME/Library/Caches/TestApp/cache.tmp"
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"User app cache"* ]] [[ "$output" == *"User app cache"* ]]
[[ "$output" == *"Potential space"* ]] [[ "$output" == *"Potential space"* ]]
[ -f "$HOME/Library/Caches/TestApp/cache.tmp" ] [ -f "$HOME/Library/Caches/TestApp/cache.tmp" ]
} }
@test "mo clean honors whitelist entries" { @test "mo clean honors whitelist entries" {
mkdir -p "$HOME/Library/Caches/WhitelistedApp" mkdir -p "$HOME/Library/Caches/WhitelistedApp"
echo "keep me" > "$HOME/Library/Caches/WhitelistedApp/data.tmp" echo "keep me" > "$HOME/Library/Caches/WhitelistedApp/data.tmp"
cat > "$HOME/.config/mole/whitelist" <<EOF cat > "$HOME/.config/mole/whitelist" << EOF
$HOME/Library/Caches/WhitelistedApp* $HOME/Library/Caches/WhitelistedApp*
EOF EOF
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Protected: 1"* ]] [[ "$output" == *"Protected: 1"* ]]
[ -f "$HOME/Library/Caches/WhitelistedApp/data.tmp" ] [ -f "$HOME/Library/Caches/WhitelistedApp/data.tmp" ]
} }

@@ -1,80 +1,80 @@
#!/usr/bin/env bats #!/usr/bin/env bats
setup_file() { setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)" PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}" ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-cli-home.XXXXXX")" HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-cli-home.XXXXXX")"
export HOME export HOME
mkdir -p "$HOME" mkdir -p "$HOME"
} }
teardown_file() { teardown_file() {
rm -rf "$HOME" rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME" export HOME="$ORIGINAL_HOME"
fi fi
} }
setup() { setup() {
rm -rf "$HOME/.config" rm -rf "$HOME/.config"
mkdir -p "$HOME" mkdir -p "$HOME"
} }
@test "mole --help prints command overview" { @test "mole --help prints command overview" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" --help run env HOME="$HOME" "$PROJECT_ROOT/mole" --help
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"mo clean"* ]] [[ "$output" == *"mo clean"* ]]
[[ "$output" == *"mo analyze"* ]] [[ "$output" == *"mo analyze"* ]]
} }
@test "mole --version reports script version" { @test "mole --version reports script version" {
expected_version="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION=\"\(.*\)\"/\1/')" expected_version="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION=\"\(.*\)\"/\1/')"
run env HOME="$HOME" "$PROJECT_ROOT/mole" --version run env HOME="$HOME" "$PROJECT_ROOT/mole" --version
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"$expected_version"* ]] [[ "$output" == *"$expected_version"* ]]
} }
@test "mole unknown command returns error" { @test "mole unknown command returns error" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" unknown-command run env HOME="$HOME" "$PROJECT_ROOT/mole" unknown-command
[ "$status" -ne 0 ] [ "$status" -ne 0 ]
[[ "$output" == *"Unknown command: unknown-command"* ]] [[ "$output" == *"Unknown command: unknown-command"* ]]
} }
@test "clean.sh --help shows usage details" { @test "clean.sh --help shows usage details" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/clean.sh" --help run env HOME="$HOME" "$PROJECT_ROOT/bin/clean.sh" --help
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Mole - Deeper system cleanup"* ]] [[ "$output" == *"Mole - Deeper system cleanup"* ]]
[[ "$output" == *"--dry-run"* ]] [[ "$output" == *"--dry-run"* ]]
} }
@test "uninstall.sh --help highlights controls" { @test "uninstall.sh --help highlights controls" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/uninstall.sh" --help run env HOME="$HOME" "$PROJECT_ROOT/bin/uninstall.sh" --help
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Usage: mole uninstall"* ]] [[ "$output" == *"Usage: mole uninstall"* ]]
[[ "$output" == *"Keyboard Controls"* ]] [[ "$output" == *"Keyboard Controls"* ]]
} }
@test "analyze.sh --help outlines explorer features" { @test "analyze.sh --help outlines explorer features" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/analyze.sh" --help run env HOME="$HOME" "$PROJECT_ROOT/bin/analyze.sh" --help
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Interactive disk space explorer"* ]] [[ "$output" == *"Interactive disk space explorer"* ]]
[[ "$output" == *"mole analyze"* ]] [[ "$output" == *"mole analyze"* ]]
} }
@test "touchid --help describes configuration options" { @test "touchid --help describes configuration options" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid --help run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid --help
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]] [[ "$output" == *"Touch ID"* ]]
[[ "$output" == *"mo touchid enable"* ]] [[ "$output" == *"mo touchid enable"* ]]
} }
@test "touchid status reports current configuration" { @test "touchid status reports current configuration" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status
[ "$status" -eq 0 ] [ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]] [[ "$output" == *"Touch ID"* ]]
} }

@@ -1,123 +1,124 @@
#!/usr/bin/env bats #!/usr/bin/env bats
setup_file() { setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)" PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}" ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-home.XXXXXX")" HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-home.XXXXXX")"
export HOME export HOME
mkdir -p "$HOME" mkdir -p "$HOME"
} }
teardown_file() { teardown_file() {
rm -rf "$HOME" rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME" export HOME="$ORIGINAL_HOME"
fi fi
} }
setup() { setup() {
rm -rf "$HOME/.config" rm -rf "$HOME/.config"
mkdir -p "$HOME" mkdir -p "$HOME"
} }
teardown() { teardown() {
unset MO_SPINNER_CHARS || true unset MO_SPINNER_CHARS || true
} }
@test "mo_spinner_chars returns default sequence when unset" { @test "mo_spinner_chars returns default sequence when unset" {
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")" result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "|/-\\" ] [ "$result" = "|/-\\" ]
} }
@test "mo_spinner_chars respects MO_SPINNER_CHARS override" { @test "mo_spinner_chars respects MO_SPINNER_CHARS override" {
export MO_SPINNER_CHARS="abcd" export MO_SPINNER_CHARS="abcd"
result="$(HOME="$HOME" MO_SPINNER_CHARS="$MO_SPINNER_CHARS" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")" result="$(HOME="$HOME" MO_SPINNER_CHARS="$MO_SPINNER_CHARS" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "abcd" ] [ "$result" = "abcd" ]
} }
@test "detect_architecture maps current CPU to friendly label" { @test "detect_architecture maps current CPU to friendly label" {
expected="Intel" expected="Intel"
if [[ "$(uname -m)" == "arm64" ]]; then if [[ "$(uname -m)" == "arm64" ]]; then
expected="Apple Silicon" expected="Apple Silicon"
fi fi
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; detect_architecture")" result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; detect_architecture")"
[ "$result" = "$expected" ] [ "$result" = "$expected" ]
} }
@test "get_free_space returns a non-empty value" { @test "get_free_space returns a non-empty value" {
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; get_free_space")" result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; get_free_space")"
[[ -n "$result" ]] [[ -n "$result" ]]
} }
@test "log_info prints message and appends to log file" { @test "log_info prints message and appends to log file" {
local message="Informational message from test" local message="Informational message from test"
local stdout_output local stdout_output
stdout_output="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_info '$message'")" stdout_output="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_info '$message'")"
[[ "$stdout_output" == *"$message"* ]] [[ "$stdout_output" == *"$message"* ]]
local log_file="$HOME/.config/mole/mole.log" local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]] [[ -f "$log_file" ]]
grep -q "INFO: $message" "$log_file" grep -q "INFO: $message" "$log_file"
} }
@test "log_error writes to stderr and log file" { @test "log_error writes to stderr and log file" {
local message="Something went wrong" local message="Something went wrong"
local stderr_file="$HOME/log_error_stderr.txt" local stderr_file="$HOME/log_error_stderr.txt"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_error '$message' 1>/dev/null 2>'$stderr_file'" HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_error '$message' 1>/dev/null 2>'$stderr_file'"
[[ -s "$stderr_file" ]] [[ -s "$stderr_file" ]]
grep -q "$message" "$stderr_file" grep -q "$message" "$stderr_file"
local log_file="$HOME/.config/mole/mole.log" local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]] [[ -f "$log_file" ]]
grep -q "ERROR: $message" "$log_file" grep -q "ERROR: $message" "$log_file"
} }
@test "bytes_to_human converts byte counts into readable units" { @test "bytes_to_human converts byte counts into readable units" {
output="$(HOME="$HOME" bash --noprofile --norc <<'EOF' output="$(
HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh" source "$PROJECT_ROOT/lib/common.sh"
bytes_to_human 512 bytes_to_human 512
bytes_to_human 2048 bytes_to_human 2048
bytes_to_human $((5 * 1024 * 1024)) bytes_to_human $((5 * 1024 * 1024))
bytes_to_human $((3 * 1024 * 1024 * 1024)) bytes_to_human $((3 * 1024 * 1024 * 1024))
EOF EOF
)" )"
bytes_lines=() bytes_lines=()
while IFS= read -r line; do while IFS= read -r line; do
bytes_lines+=("$line") bytes_lines+=("$line")
done <<< "$output" done <<< "$output"
[ "${bytes_lines[0]}" = "512B" ] [ "${bytes_lines[0]}" = "512B" ]
[ "${bytes_lines[1]}" = "2KB" ] [ "${bytes_lines[1]}" = "2KB" ]
[ "${bytes_lines[2]}" = "5.0MB" ] [ "${bytes_lines[2]}" = "5.0MB" ]
[ "${bytes_lines[3]}" = "3.00GB" ] [ "${bytes_lines[3]}" = "3.00GB" ]
} }
@test "create_temp_file and create_temp_dir are tracked and cleaned" { @test "create_temp_file and create_temp_dir are tracked and cleaned" {
HOME="$HOME" bash --noprofile --norc <<'EOF' HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh" source "$PROJECT_ROOT/lib/common.sh"
create_temp_file > "$HOME/temp_file_path.txt" create_temp_file > "$HOME/temp_file_path.txt"
create_temp_dir > "$HOME/temp_dir_path.txt" create_temp_dir > "$HOME/temp_dir_path.txt"
cleanup_temp_files cleanup_temp_files
EOF EOF
file_path="$(cat "$HOME/temp_file_path.txt")" file_path="$(cat "$HOME/temp_file_path.txt")"
dir_path="$(cat "$HOME/temp_dir_path.txt")" dir_path="$(cat "$HOME/temp_dir_path.txt")"
[ ! -e "$file_path" ] [ ! -e "$file_path" ]
[ ! -e "$dir_path" ] [ ! -e "$dir_path" ]
rm -f "$HOME/temp_file_path.txt" "$HOME/temp_dir_path.txt" rm -f "$HOME/temp_file_path.txt" "$HOME/temp_dir_path.txt"
} }
@test "parallel_execute runs worker across all items" { @test "parallel_execute runs worker across all items" {
output_file="$HOME/parallel_output.txt" output_file="$HOME/parallel_output.txt"
HOME="$HOME" bash --noprofile --norc <<'EOF' HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh" source "$PROJECT_ROOT/lib/common.sh"
worker() { worker() {
echo "$1" >> "$HOME/parallel_output.txt" echo "$1" >> "$HOME/parallel_output.txt"
@@ -125,16 +126,16 @@ worker() {
parallel_execute 2 worker "first" "second" "third" parallel_execute 2 worker "first" "second" "third"
EOF EOF
sort "$output_file" > "$output_file.sorted" sort "$output_file" > "$output_file.sorted"
results=() results=()
while IFS= read -r line; do while IFS= read -r line; do
results+=("$line") results+=("$line")
done < "$output_file.sorted" done < "$output_file.sorted"
[ "${#results[@]}" -eq 3 ] [ "${#results[@]}" -eq 3 ]
joined=" ${results[*]} " joined=" ${results[*]} "
[[ "$joined" == *" first "* ]] [[ "$joined" == *" first "* ]]
[[ "$joined" == *" second "* ]] [[ "$joined" == *" second "* ]]
[[ "$joined" == *" third "* ]] [[ "$joined" == *" third "* ]]
rm -f "$output_file" "$output_file.sorted" rm -f "$output_file" "$output_file.sorted"
} }

@@ -4,41 +4,41 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if command -v shellcheck >/dev/null 2>&1; then if command -v shellcheck > /dev/null 2>&1; then
SHELLCHECK_TARGETS=() SHELLCHECK_TARGETS=()
while IFS= read -r file; do while IFS= read -r file; do
SHELLCHECK_TARGETS+=("$file") SHELLCHECK_TARGETS+=("$file")
done < <(find "$PROJECT_ROOT/tests" -type f \( -name '*.bats' -o -name '*.sh' \) | sort) done < <(find "$PROJECT_ROOT/tests" -type f \( -name '*.bats' -o -name '*.sh' \) | sort)
if [[ ${#SHELLCHECK_TARGETS[@]} -gt 0 ]]; then if [[ ${#SHELLCHECK_TARGETS[@]} -gt 0 ]]; then
shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${SHELLCHECK_TARGETS[@]}" shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${SHELLCHECK_TARGETS[@]}"
else else
echo "No shell files to lint under tests/." >&2 echo "No shell files to lint under tests/." >&2
fi fi
else else
echo "shellcheck not found; skipping linting." >&2 echo "shellcheck not found; skipping linting." >&2
fi fi
if command -v bats >/dev/null 2>&1; then if command -v bats > /dev/null 2>&1; then
cd "$PROJECT_ROOT" cd "$PROJECT_ROOT"
if [[ -z "${TERM:-}" ]]; then if [[ -z "${TERM:-}" ]]; then
export TERM="xterm-256color" export TERM="xterm-256color"
fi fi
if [[ $# -eq 0 ]]; then if [[ $# -eq 0 ]]; then
set -- tests set -- tests
fi fi
if [[ -t 1 ]]; then if [[ -t 1 ]]; then
bats -p "$@" bats -p "$@"
else else
TERM="${TERM:-xterm-256color}" bats --tap "$@" TERM="${TERM:-xterm-256color}" bats --tap "$@"
fi fi
else else
cat <<'EOF' >&2 cat << 'EOF' >&2
bats is required to run Mole's test suite. bats is required to run Mole's test suite.
Install via Homebrew with 'brew install bats-core' or via npm with 'npm install -g bats'. Install via Homebrew with 'brew install bats-core' or via npm with 'npm install -g bats'.
EOF EOF
exit 1 exit 1
fi fi

@@ -1,79 +1,81 @@
#!/usr/bin/env bats
setup_file() {
    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
    export PROJECT_ROOT
    ORIGINAL_HOME="${HOME:-}"
    export ORIGINAL_HOME
    HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-uninstall-home.XXXXXX")"
    export HOME
}
teardown_file() {
    rm -rf "$HOME"
    if [[ -n "${ORIGINAL_HOME:-}" ]]; then
        export HOME="$ORIGINAL_HOME"
    fi
}
setup() {
    export TERM="dumb"
    rm -rf "${HOME:?}"/*
    mkdir -p "$HOME"
}
create_app_artifacts() {
    mkdir -p "$HOME/Applications/TestApp.app"
    mkdir -p "$HOME/Library/Application Support/TestApp"
    mkdir -p "$HOME/Library/Caches/TestApp"
    mkdir -p "$HOME/Library/Containers/com.example.TestApp"
    mkdir -p "$HOME/Library/Preferences"
    touch "$HOME/Library/Preferences/com.example.TestApp.plist"
    mkdir -p "$HOME/Library/Preferences/ByHost"
    touch "$HOME/Library/Preferences/ByHost/com.example.TestApp.ABC123.plist"
    mkdir -p "$HOME/Library/Saved Application State/com.example.TestApp.savedState"
}
@test "find_app_files discovers user-level leftovers" {
    create_app_artifacts
    result="$(
        HOME="$HOME" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
find_app_files "com.example.TestApp" "TestApp"
EOF
    )"
    [[ "$result" == *"Application Support/TestApp"* ]]
    [[ "$result" == *"Caches/TestApp"* ]]
    [[ "$result" == *"Preferences/com.example.TestApp.plist"* ]]
    [[ "$result" == *"Saved Application State/com.example.TestApp.savedState"* ]]
    [[ "$result" == *"Containers/com.example.TestApp"* ]]
}
@test "calculate_total_size returns aggregate kilobytes" {
    mkdir -p "$HOME/sized"
    dd if=/dev/zero of="$HOME/sized/file1" bs=1024 count=1 > /dev/null 2>&1
    dd if=/dev/zero of="$HOME/sized/file2" bs=1024 count=2 > /dev/null 2>&1
    result="$(
        HOME="$HOME" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
files="$(printf '%s\n%s\n' "$HOME/sized/file1" "$HOME/sized/file2")"
calculate_total_size "$files"
EOF
    )"
    # Result should be >=3 KB (some filesystems allocate slightly more)
    [ "$result" -ge 3 ]
}
@test "batch_uninstall_applications removes selected app data" {
    create_app_artifacts
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
source "$PROJECT_ROOT/lib/batch_uninstall.sh"
@@ -111,5 +113,5 @@ printf '\n' | batch_uninstall_applications >/dev/null
[[ ! -f "$HOME/Library/Preferences/com.example.TestApp.plist" ]] || exit 1
EOF
    [ "$status" -eq 0 ]
}

View File

@@ -1,31 +1,31 @@
#!/usr/bin/env bats
setup_file() {
    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
    export PROJECT_ROOT
    ORIGINAL_HOME="${HOME:-}"
    export ORIGINAL_HOME
    HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-update-home.XXXXXX")"
    export HOME
}
teardown_file() {
    rm -rf "$HOME"
    if [[ -n "${ORIGINAL_HOME:-}" ]]; then
        export HOME="$ORIGINAL_HOME"
    fi
}
setup() {
    export TERM="dumb"
    rm -rf "${HOME:?}"/*
    mkdir -p "$HOME"
}
@test "update_via_homebrew reports already on latest version" {
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
MOLE_TEST_BREW_UPDATE_OUTPUT="Updated 0 formulae"
MOLE_TEST_BREW_UPGRADE_OUTPUT="Warning: mole 1.7.9 already installed"
@@ -44,12 +44,12 @@ source "$PROJECT_ROOT/lib/common.sh"
update_via_homebrew "1.7.9"
EOF
    [ "$status" -eq 0 ]
    [[ "$output" == *"Already on latest version"* ]]
}
@test "update_mole skips download when already latest" {
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="$HOME/fake-bin:/usr/bin:/bin" TERM="dumb" bash --noprofile --norc << 'EOF'
set -euo pipefail
mkdir -p "$HOME/fake-bin"
cat > "$HOME/fake-bin/curl" <<'SCRIPT'
@@ -85,17 +85,17 @@ chmod +x "$HOME/fake-bin/brew"
"$PROJECT_ROOT/mole" update
EOF
    [ "$status" -eq 0 ]
    [[ "$output" == *"Already on latest version"* ]]
}
@test "remove_mole deletes manual binaries and caches" {
    mkdir -p "$HOME/.local/bin"
    touch "$HOME/.local/bin/mole"
    touch "$HOME/.local/bin/mo"
    mkdir -p "$HOME/.config/mole" "$HOME/.cache/mole"
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="/usr/bin:/bin" bash --noprofile --norc << 'EOF'
set -euo pipefail
start_inline_spinner() { :; }
stop_inline_spinner() { :; }
@@ -103,9 +103,9 @@ export -f start_inline_spinner stop_inline_spinner
printf '\n' | "$PROJECT_ROOT/mole" remove
EOF
    [ "$status" -eq 0 ]
    [ ! -f "$HOME/.local/bin/mole" ]
    [ ! -f "$HOME/.local/bin/mo" ]
    [ ! -d "$HOME/.config/mole" ]
    [ ! -d "$HOME/.cache/mole" ]
}

View File

@@ -1,99 +1,99 @@
#!/usr/bin/env bats
setup_file() {
    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
    export PROJECT_ROOT
    ORIGINAL_HOME="${HOME:-}"
    export ORIGINAL_HOME
    HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-whitelist-home.XXXXXX")"
    export HOME
    mkdir -p "$HOME"
}
teardown_file() {
    rm -rf "$HOME"
    if [[ -n "${ORIGINAL_HOME:-}" ]]; then
        export HOME="$ORIGINAL_HOME"
    fi
}
setup() {
    rm -rf "$HOME/.config"
    mkdir -p "$HOME"
    WHITELIST_PATH="$HOME/.config/mole/whitelist"
}
@test "patterns_equivalent treats paths with tilde expansion as equal" {
    local status
    if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/test\""; then
        status=0
    else
        status=$?
    fi
    [ "$status" -eq 0 ]
}
@test "patterns_equivalent distinguishes different paths" {
    local status
    if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/other\""; then
        status=0
    else
        status=$?
    fi
    [ "$status" -ne 0 ]
}
@test "save_whitelist_patterns keeps unique entries and preserves header" {
    HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/foo\" \"\$HOME/.cache/foo\" \"\$HOME/.cache/bar\""
    [[ -f "$WHITELIST_PATH" ]]
    lines=()
    while IFS= read -r line; do
        lines+=("$line")
    done < "$WHITELIST_PATH"
    # Header is at least two lines (comments), plus two unique patterns
    [ "${#lines[@]}" -ge 4 ]
    # Ensure duplicate was not written twice
    occurrences=$(grep -c "$HOME/.cache/foo" "$WHITELIST_PATH")
    [ "$occurrences" -eq 1 ]
}
@test "load_whitelist falls back to defaults when config missing" {
    rm -f "$WHITELIST_PATH"
    HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; rm -f \"\$HOME/.config/mole/whitelist\"; load_whitelist; printf '%s\n' \"\${CURRENT_WHITELIST_PATTERNS[@]}\"" > "$HOME/current_whitelist.txt"
    HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; printf '%s\n' \"\${DEFAULT_WHITELIST_PATTERNS[@]}\"" > "$HOME/default_whitelist.txt"
    current=()
    while IFS= read -r line; do
        current+=("$line")
    done < "$HOME/current_whitelist.txt"
    defaults=()
    while IFS= read -r line; do
        defaults+=("$line")
    done < "$HOME/default_whitelist.txt"
    [ "${#current[@]}" -eq "${#defaults[@]}" ]
    [ "${current[0]}" = "${defaults[0]/\$HOME/$HOME}" ]
}
@test "is_whitelisted matches saved patterns exactly" {
    local status
    if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/unique-pattern\""; then
        status=0
    else
        status=$?
    fi
    [ "$status" -eq 0 ]
    if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/other-pattern\""; then
        status=0
    else
        status=$?
    fi
    [ "$status" -ne 0 ]
}

View File

@@ -1,16 +1,16 @@
#!/usr/bin/env bats
setup_file() {
    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
    export PROJECT_ROOT
}
@test "shellcheck passes for test scripts" {
    if ! command -v shellcheck > /dev/null 2>&1; then
        skip "shellcheck not installed"
    fi
    run env PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
cd "$PROJECT_ROOT"
targets=()
@@ -24,6 +24,6 @@ fi
shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${targets[@]}"
EOF
    printf '%s\n' "$output" >&3
    [ "$status" -eq 0 ]
}