
Code support format detection

Author: Tw93
Date: 2025-10-12 20:49:10 +08:00
Parent: d111f18cea
Commit: cf821cdc4b
25 changed files with 1482 additions and 1152 deletions

.editorconfig (new file, 25 lines)

@@ -0,0 +1,25 @@
# EditorConfig for Mole project
# https://editorconfig.org
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.{sh,bash}]
indent_style = space
indent_size = 4
# shfmt will use these settings automatically
[*.{yml,yaml}]
indent_style = space
indent_size = 2
[*.md]
trim_trailing_whitespace = false
[Makefile]
indent_style = tab
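
Since shfmt reads EditorConfig settings when no indent flags are passed on the command line, a quick local check against the rules above might look like this (a sketch; the file path is illustrative):

```bash
# With no -i/-ci flags, shfmt v3 falls back to the .editorconfig above,
# so -d reports a diff only if the script violates the 4-space rule.
shfmt -d path/to/script.sh
```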


@@ -1,4 +1,4 @@
name: Mole Tests
name: Tests
on:
push:
@@ -13,10 +13,17 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install bats-core
run: |
brew update
brew install bats-core
- name: Install tools
run: brew install bats-core shfmt shellcheck
- name: Run test suite
- name: Check formatting
run: ./scripts/format.sh --check
- name: Run shellcheck
run: |
find . -type f \( -name "*.sh" -o -name "mole" \) \
! -path "./.git/*" \
-exec shellcheck -S warning {} +
- name: Run tests
run: tests/run.sh
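
The same checks can be reproduced locally from the repo root before pushing; a sketch that mirrors the workflow steps above:

```bash
# Mirror the CI steps: formatting check, shellcheck over all shell entry points, tests.
./scripts/format.sh --check
find . -type f \( -name "*.sh" -o -name "mole" \) \
    ! -path "./.git/*" \
    -exec shellcheck -S warning {} +
tests/run.sh
```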

CONTRIBUTING.md (new file, 54 lines)

@@ -0,0 +1,54 @@
# Contributing to Mole
## Setup
```bash
# Install tools
brew install shfmt shellcheck bats-core
# Install git hooks (optional)
./scripts/install-hooks.sh
```
## Development
```bash
# Format code
./scripts/format.sh
# Run tests
./tests/run.sh
# Check quality
shellcheck -S warning mole bin/*.sh lib/*.sh
```
## Git Hooks
The pre-commit hook auto-formats your code. Install it with:
```bash
./scripts/install-hooks.sh
```
Skip it when needed: `git commit --no-verify`
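
The installed hook itself is not part of this diff; a minimal pre-commit sketch consistent with the tooling above (hypothetical, not the actual hook installed by `scripts/install-hooks.sh`):

```bash
#!/usr/bin/env bash
# Hypothetical pre-commit hook: format the tree, then re-stage any shell
# files the formatter rewrote so the commit includes them.
set -euo pipefail
./scripts/format.sh
git diff --name-only -- '*.sh' 'mole' | while IFS= read -r f; do
    git add "$f"
done
```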
## Code Style
- Bash 3.2+ compatible
- 4-space indent
- Use `set -euo pipefail`
- Quote all variables
- Use BSD commands, not GNU
Config: `.editorconfig` and `.shellcheckrc`. A short style sketch follows below.
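
A generic illustration of these conventions (a sketch only, not code from the repo):

```bash
#!/usr/bin/env bash
# Style sketch: strict mode, quoted expansions, BSD (not GNU) command flags.
set -euo pipefail

target="${1:-.}"                            # quote all variable expansions
size_kb=$(du -sk "$target" 2> /dev/null | cut -f1) || size_kb=0
mtime=$(stat -f%m "$target" 2> /dev/null || echo "0") # BSD stat, not GNU `stat -c`
if [[ "$size_kb" -gt 1024 ]]; then          # 4-space indentation inside blocks
    printf '%s uses more than 1MB (last modified %s)\n' "$target" "$mtime"
fi
```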
## Pull Requests
1. Fork and create branch
2. Make changes
3. Format: `./scripts/format.sh`
4. Test: `./tests/run.sh`
5. Commit and push
6. Open PR
CI checks formatting, runs shellcheck, and runs the test suite.


@@ -19,9 +19,9 @@ source "$LIB_DIR/common.sh"
# Constants
readonly CACHE_DIR="${HOME}/.config/mole/cache"
readonly TEMP_PREFIX="/tmp/mole_analyze_$$"
readonly MIN_LARGE_FILE_SIZE="1000000000" # 1GB
readonly MIN_MEDIUM_FILE_SIZE="100000000" # 100MB
readonly MIN_SMALL_FILE_SIZE="10000000" # 10MB
readonly MIN_LARGE_FILE_SIZE="1000000000" # 1GB
readonly MIN_MEDIUM_FILE_SIZE="100000000" # 100MB
readonly MIN_SMALL_FILE_SIZE="10000000" # 10MB
# Emoji badges for list displays only
readonly BADGE_DIR="🍞"
@@ -42,16 +42,16 @@ declare CURRENT_DEPTH=1
# UI State
declare CURSOR_POS=0
declare SORT_MODE="size" # size, name, time
declare VIEW_MODE="overview" # overview, detail, files
declare SORT_MODE="size" # size, name, time
declare VIEW_MODE="overview" # overview, detail, files
# Cleanup on exit
cleanup() {
show_cursor
# Cleanup temp files using glob pattern (analyze uses many temp files)
rm -f "$TEMP_PREFIX"* 2>/dev/null || true
if [[ -n "$SCAN_PID" ]] && kill -0 "$SCAN_PID" 2>/dev/null; then
kill "$SCAN_PID" 2>/dev/null || true
rm -f "$TEMP_PREFIX"* 2> /dev/null || true
if [[ -n "$SCAN_PID" ]] && kill -0 "$SCAN_PID" 2> /dev/null; then
kill "$SCAN_PID" 2> /dev/null || true
fi
}
@@ -66,7 +66,7 @@ scan_large_files() {
local target_path="$1"
local output_file="$2"
if ! command -v mdfind &>/dev/null; then
if ! command -v mdfind &> /dev/null; then
return 1
fi
@@ -75,10 +75,10 @@ scan_large_files() {
while IFS= read -r file; do
if [[ -f "$file" ]]; then
local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0")
size=$(stat -f%z "$file" 2> /dev/null || echo "0")
echo "$size|$file"
fi
done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \
done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
sort -t'|' -k1 -rn > "$output_file"
}
@@ -87,7 +87,7 @@ scan_medium_files() {
local target_path="$1"
local output_file="$2"
if ! command -v mdfind &>/dev/null; then
if ! command -v mdfind &> /dev/null; then
return 1
fi
@@ -95,11 +95,11 @@ scan_medium_files() {
while IFS= read -r file; do
if [[ -f "$file" ]]; then
local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0")
size=$(stat -f%z "$file" 2> /dev/null || echo "0")
echo "$size|$file"
fi
done < <(mdfind -onlyin "$target_path" \
"kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \
"kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
sort -t'|' -k1 -rn > "$output_file"
}
@@ -110,18 +110,18 @@ scan_directories() {
local depth="${3:-1}"
# Check if we can use parallel processing
if command -v xargs &>/dev/null && [[ $depth -eq 1 ]]; then
if command -v xargs &> /dev/null && [[ $depth -eq 1 ]]; then
# Fast parallel scan for depth 1
find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \
xargs -0 -P 4 -I {} du -sk {} 2>/dev/null | \
sort -rn | \
find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
xargs -0 -P 4 -I {} du -sk {} 2> /dev/null |
sort -rn |
while IFS=$'\t' read -r size path; do
echo "$((size * 1024))|$path"
done > "$output_file"
else
# Standard du scan
du -d "$depth" -k "$target_path" 2>/dev/null | \
sort -rn | \
du -d "$depth" -k "$target_path" 2> /dev/null |
sort -rn |
while IFS=$'\t' read -r size path; do
# Skip if path is the target itself at depth > 0
if [[ "$path" != "$target_path" ]]; then
@@ -161,21 +161,21 @@ aggregate_by_directory() {
get_cache_file() {
local target_path="$1"
local path_hash
path_hash=$(echo "$target_path" | md5 2>/dev/null || echo "$target_path" | shasum | cut -d' ' -f1)
path_hash=$(echo "$target_path" | md5 2> /dev/null || echo "$target_path" | shasum | cut -d' ' -f1)
echo "$CACHE_DIR/scan_${path_hash}.cache"
}
# Check if cache is valid (less than 1 hour old)
is_cache_valid() {
local cache_file="$1"
local max_age="${2:-3600}" # Default 1 hour
local max_age="${2:-3600}" # Default 1 hour
if [[ ! -f "$cache_file" ]]; then
return 1
fi
local cache_age
cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2>/dev/null || echo 0)))
cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2> /dev/null || echo 0)))
if [[ $cache_age -lt $max_age ]]; then
return 0
fi
@@ -192,7 +192,7 @@ save_to_cache() {
local temp_agg="$TEMP_PREFIX.agg"
# Create cache directory
mkdir -p "$(dirname "$cache_file")" 2>/dev/null || return 1
mkdir -p "$(dirname "$cache_file")" 2> /dev/null || return 1
# Bundle all scan results into cache file
{
@@ -204,7 +204,7 @@ save_to_cache() {
[[ -f "$temp_dirs" ]] && cat "$temp_dirs"
echo "### AGG ###"
[[ -f "$temp_agg" ]] && cat "$temp_agg"
} > "$cache_file" 2>/dev/null
} > "$cache_file" 2> /dev/null
}
# Load scan results from cache
@@ -283,7 +283,7 @@ perform_scan() {
)
local msg_idx=0
while kill -0 "$SCAN_PID" 2>/dev/null; do
while kill -0 "$SCAN_PID" 2> /dev/null; do
# Show different messages based on elapsed time
local current_msg=""
if [[ $elapsed -lt 5 ]]; then
@@ -299,12 +299,12 @@ perform_scan() {
printf "\r${BLUE}%s${NC} %s" \
"${spinner_chars:$i:1}" "$current_msg"
i=$(( (i + 1) % 10 ))
i=$(((i + 1) % 10))
((elapsed++))
sleep 0.1
done
wait "$SCAN_PID" 2>/dev/null || true
printf "\r%80s\r" "" # Clear spinner line
wait "$SCAN_PID" 2> /dev/null || true
printf "\r%80s\r" "" # Clear spinner line
show_cursor
# Aggregate results
@@ -508,7 +508,7 @@ display_directories_compact() {
# Simple bar (10 chars)
local bar_width=10
local percentage_int=${percentage%.*} # Remove decimal part
local percentage_int=${percentage%.*} # Remove decimal part
local filled
filled=$((percentage_int * bar_width / 100))
[[ $filled -gt $bar_width ]] && filled=$bar_width
@@ -622,8 +622,8 @@ display_cleanup_suggestions_compact() {
if [[ "$CURRENT_PATH" == "$HOME/Library/Caches"* ]] || [[ "$CURRENT_PATH" == "$HOME/Library"* ]]; then
if [[ -d "$HOME/Library/Caches" ]]; then
local cache_size
cache_size=$(du -sk "$HOME/Library/Caches" 2>/dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB
cache_size=$(du -sk "$HOME/Library/Caches" 2> /dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB
local human
human=$(bytes_to_human $((cache_size * 1024)))
top_suggestion="Clear app caches ($human)"
@@ -637,7 +637,7 @@ display_cleanup_suggestions_compact() {
# Check Downloads folder (only if analyzing Downloads)
if [[ "$CURRENT_PATH" == "$HOME/Downloads"* ]]; then
local old_files
old_files=$(find "$CURRENT_PATH" -type f -mtime +90 2>/dev/null | wc -l | tr -d ' ')
old_files=$(find "$CURRENT_PATH" -type f -mtime +90 2> /dev/null | wc -l | tr -d ' ')
if [[ $old_files -gt 0 ]]; then
[[ -z "$top_suggestion" ]] && top_suggestion="$old_files files older than 90 days found"
[[ -z "$action_command" ]] && action_command="manually review old files"
@@ -646,13 +646,13 @@ display_cleanup_suggestions_compact() {
fi
# Check for large disk images in current path
if command -v mdfind &>/dev/null; then
if command -v mdfind &> /dev/null; then
local dmg_count=$(mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | wc -l | tr -d ' ')
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null | wc -l | tr -d ' ')
if [[ $dmg_count -gt 0 ]]; then
local dmg_size=$(mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | \
xargs stat -f%z 2>/dev/null | awk '{sum+=$1} END {print sum}')
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null |
xargs stat -f%z 2> /dev/null | awk '{sum+=$1} END {print sum}')
local dmg_human
dmg_human=$(bytes_to_human "$dmg_size")
[[ -z "$top_suggestion" ]] && top_suggestion="$dmg_count DMG files ($dmg_human) can be removed"
@@ -665,7 +665,7 @@ display_cleanup_suggestions_compact() {
# Check Xcode (only if in developer paths)
if [[ "$CURRENT_PATH" == "$HOME/Library/Developer"* ]] && [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then
local xcode_size
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2>/dev/null | cut -f1)
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2> /dev/null | cut -f1)
if [[ $xcode_size -gt 10485760 ]]; then
local xcode_human
xcode_human=$(bytes_to_human $((xcode_size * 1024)))
@@ -677,9 +677,9 @@ display_cleanup_suggestions_compact() {
fi
# Check for duplicates in current path
if command -v mdfind &>/dev/null; then
local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \
xargs -I {} stat -f "%z" {} 2>/dev/null | sort | uniq -d | wc -l | tr -d ' ')
if command -v mdfind &> /dev/null; then
local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
xargs -I {} stat -f "%z" {} 2> /dev/null | sort | uniq -d | wc -l | tr -d ' ')
if [[ $dup_count -gt 5 ]]; then
[[ -z "$top_suggestion" ]] && top_suggestion="$dup_count potential duplicate files detected"
((suggestions_count++))
@@ -720,8 +720,8 @@ display_cleanup_suggestions() {
# Check common cache locations
if [[ -d "$HOME/Library/Caches" ]]; then
local cache_size
cache_size=$(du -sk "$HOME/Library/Caches" 2>/dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB
cache_size=$(du -sk "$HOME/Library/Caches" 2> /dev/null | cut -f1)
if [[ $cache_size -gt 1048576 ]]; then # > 1GB
local human
human=$(bytes_to_human $((cache_size * 1024)))
suggestions+=(" Clear application caches: $human")
@@ -731,16 +731,16 @@ display_cleanup_suggestions() {
# Check Downloads folder
if [[ -d "$HOME/Downloads" ]]; then
local old_files
old_files=$(find "$HOME/Downloads" -type f -mtime +90 2>/dev/null | wc -l | tr -d ' ')
old_files=$(find "$HOME/Downloads" -type f -mtime +90 2> /dev/null | wc -l | tr -d ' ')
if [[ $old_files -gt 0 ]]; then
suggestions+=(" Clean old downloads: $old_files files older than 90 days")
fi
fi
# Check for large disk images
if command -v mdfind &>/dev/null; then
if command -v mdfind &> /dev/null; then
local dmg_count=$(mdfind -onlyin "$HOME" \
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | wc -l | tr -d ' ')
"kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null | wc -l | tr -d ' ')
if [[ $dmg_count -gt 0 ]]; then
suggestions+=(" Remove disk images: $dmg_count DMG files >500MB")
fi
@@ -749,8 +749,8 @@ display_cleanup_suggestions() {
# Check Xcode derived data
if [[ -d "$HOME/Library/Developer/Xcode/DerivedData" ]]; then
local xcode_size
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2>/dev/null | cut -f1)
if [[ $xcode_size -gt 10485760 ]]; then # > 10GB
xcode_size=$(du -sk "$HOME/Library/Developer/Xcode/DerivedData" 2> /dev/null | cut -f1)
if [[ $xcode_size -gt 10485760 ]]; then # > 10GB
local human
human=$(bytes_to_human $((xcode_size * 1024)))
suggestions+=(" Clear Xcode cache: $human")
@@ -760,8 +760,8 @@ display_cleanup_suggestions() {
# Check iOS device backups
if [[ -d "$HOME/Library/Application Support/MobileSync/Backup" ]]; then
local backup_size
backup_size=$(du -sk "$HOME/Library/Application Support/MobileSync/Backup" 2>/dev/null | cut -f1)
if [[ $backup_size -gt 5242880 ]]; then # > 5GB
backup_size=$(du -sk "$HOME/Library/Application Support/MobileSync/Backup" 2> /dev/null | cut -f1)
if [[ $backup_size -gt 5242880 ]]; then # > 5GB
local human
human=$(bytes_to_human $((backup_size * 1024)))
suggestions+=(" 📱 Review iOS backups: $human")
@@ -769,13 +769,13 @@ display_cleanup_suggestions() {
fi
# Check for duplicate files (by size, quick heuristic)
if command -v mdfind &>/dev/null; then
if command -v mdfind &> /dev/null; then
local temp_dup="$TEMP_PREFIX.dup_check"
mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \
xargs -I {} stat -f "%z" {} 2>/dev/null | \
sort | uniq -d | wc -l | tr -d ' ' > "$temp_dup" 2>/dev/null || echo "0" > "$temp_dup"
mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
xargs -I {} stat -f "%z" {} 2> /dev/null |
sort | uniq -d | wc -l | tr -d ' ' > "$temp_dup" 2> /dev/null || echo "0" > "$temp_dup"
local dup_count
dup_count=$(cat "$temp_dup" 2>/dev/null || echo "0")
dup_count=$(cat "$temp_dup" 2> /dev/null || echo "0")
if [[ $dup_count -gt 5 ]]; then
suggestions+=(" ♻️ Possible duplicates: $dup_count size matches in large files (>10MB)")
fi
@@ -804,14 +804,14 @@ display_disk_summary() {
local total_dirs_count=0
if [[ -f "$temp_large" ]]; then
total_large_count=$(wc -l < "$temp_large" 2>/dev/null | tr -d ' ')
total_large_count=$(wc -l < "$temp_large" 2> /dev/null | tr -d ' ')
while IFS='|' read -r size path; do
((total_large_size += size))
done < "$temp_large"
fi
if [[ -f "$temp_dirs" ]]; then
total_dirs_count=$(wc -l < "$temp_dirs" 2>/dev/null | tr -d ' ')
total_dirs_count=$(wc -l < "$temp_dirs" 2> /dev/null | tr -d ' ')
while IFS='|' read -r size path; do
((total_dirs_size += size))
done < "$temp_dirs"
@@ -841,20 +841,24 @@ get_file_info() {
local type="File"
case "$ext" in
dmg|iso|pkg|zip|tar|gz|rar|7z)
badge="$BADGE_BUNDLE" ; type="Bundle"
dmg | iso | pkg | zip | tar | gz | rar | 7z)
badge="$BADGE_BUNDLE"
type="Bundle"
;;
mov|mp4|avi|mkv|webm|jpg|jpeg|png|gif|heic)
badge="$BADGE_MEDIA" ; type="Media"
mov | mp4 | avi | mkv | webm | jpg | jpeg | png | gif | heic)
badge="$BADGE_MEDIA"
type="Media"
;;
pdf|key|ppt|pptx)
pdf | key | ppt | pptx)
type="Document"
;;
log)
badge="$BADGE_LOG" ; type="Log"
badge="$BADGE_LOG"
type="Log"
;;
app)
badge="$BADGE_APP" ; type="App"
badge="$BADGE_APP"
type="App"
;;
esac
@@ -870,7 +874,7 @@ get_file_age() {
fi
local mtime
mtime=$(stat -f%m "$path" 2>/dev/null || echo "0")
mtime=$(stat -f%m "$path" 2> /dev/null || echo "0")
local now
now=$(date +%s)
local diff
@@ -936,8 +940,8 @@ display_large_files_table() {
# Color based on file type
local color=""
case "$ext" in
dmg|iso|pkg) color="${RED}" ;;
mov|mp4|avi|mkv|webm|zip|tar|gz|rar|7z) color="${YELLOW}" ;;
dmg | iso | pkg) color="${RED}" ;;
mov | mp4 | avi | mkv | webm | zip | tar | gz | rar | 7z) color="${YELLOW}" ;;
log) color="${GRAY}" ;;
*) color="${NC}" ;;
esac
@@ -1104,7 +1108,7 @@ display_recent_large_files() {
log_header "Recent Large Files (Last 30 Days)"
echo ""
if ! command -v mdfind &>/dev/null; then
if ! command -v mdfind &> /dev/null; then
echo " ${YELLOW}Note: mdfind not available${NC}"
echo ""
return
@@ -1114,13 +1118,13 @@ display_recent_large_files() {
# Find files created in last 30 days, larger than 100MB
mdfind -onlyin "$CURRENT_PATH" \
"kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2>/dev/null | \
"kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2> /dev/null |
while IFS= read -r file; do
if [[ -f "$file" ]]; then
local size
size=$(stat -f%z "$file" 2>/dev/null || echo "0")
size=$(stat -f%z "$file" 2> /dev/null || echo "0")
local mtime
mtime=$(stat -f%m "$file" 2>/dev/null || echo "0")
mtime=$(stat -f%m "$file" 2> /dev/null || echo "0")
echo "$size|$mtime|$file"
fi
done | sort -t'|' -k1 -rn | head -10 > "$temp_recent"
@@ -1140,7 +1144,7 @@ display_recent_large_files() {
local dirname
dirname=$(dirname "$path" | sed "s|^$HOME|~|")
local days_ago
days_ago=$(( ($(date +%s) - mtime) / 86400 ))
days_ago=$((($(date +%s) - mtime) / 86400))
local info
info=$(get_file_info "$path")
@@ -1162,10 +1166,10 @@ get_subdirectories() {
local target="$1"
local temp_file="$2"
find "$target" -mindepth 1 -maxdepth 1 -type d 2>/dev/null | \
find "$target" -mindepth 1 -maxdepth 1 -type d 2> /dev/null |
while IFS= read -r dir; do
local size
size=$(du -sk "$dir" 2>/dev/null | cut -f1)
size=$(du -sk "$dir" 2> /dev/null | cut -f1)
echo "$((size * 1024))|$dir"
done | sort -t'|' -k1 -rn > "$temp_file"
}
@@ -1298,7 +1302,7 @@ display_file_types() {
log_header "File Types Analysis"
echo ""
if ! command -v mdfind &>/dev/null; then
if ! command -v mdfind &> /dev/null; then
echo " ${YELLOW}Note: mdfind not available, limited analysis${NC}"
return
fi
@@ -1336,7 +1340,7 @@ display_file_types() {
esac
local files
files=$(mdfind -onlyin "$CURRENT_PATH" "$query" 2>/dev/null)
files=$(mdfind -onlyin "$CURRENT_PATH" "$query" 2> /dev/null)
local count
count=$(echo "$files" | grep -c . || echo "0")
local total_size=0
@@ -1345,7 +1349,7 @@ display_file_types() {
while IFS= read -r file; do
if [[ -f "$file" ]]; then
local fsize
fsize=$(stat -f%z "$file" 2>/dev/null || echo "0")
fsize=$(stat -f%z "$file" 2> /dev/null || echo "0")
((total_size += fsize))
fi
done <<< "$files"
@@ -1364,7 +1368,7 @@ display_file_types() {
read_single_key() {
local key=""
# Read single character without waiting for Enter
if read -rsn1 key 2>/dev/null; then
if read -rsn1 key 2> /dev/null; then
echo "$key"
else
echo "q"
@@ -1396,13 +1400,13 @@ scan_directory_contents_fast() {
fi
# Ultra-fast file scanning - batch stat for maximum speed
find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2>/dev/null | \
xargs -0 -n 20 -P "$num_jobs" stat -f "%z|file|%N" 2>/dev/null > "$temp_files" &
find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2> /dev/null |
xargs -0 -n 20 -P "$num_jobs" stat -f "%z|file|%N" 2> /dev/null > "$temp_files" &
local file_pid=$!
# Smart directory scanning with aggressive optimization
# Strategy: Fast estimation first, accurate on-demand
find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \
find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
xargs -0 -n 1 -P "$num_jobs" sh -c '
dir="$1"
size=""
@@ -1436,7 +1440,7 @@ scan_directory_contents_fast() {
[[ -z "$size" ]] || [[ "$size" -eq 0 ]] && size=1
fi
echo "$((size * 1024))|dir|$dir"
' _ > "$temp_dirs" 2>/dev/null &
' _ > "$temp_dirs" 2> /dev/null &
local dir_pid=$!
# Show progress while waiting
@@ -1448,22 +1452,22 @@ scan_directory_contents_fast() {
local spinner_chars
spinner_chars="$(mo_spinner_chars)"
local chars_len=${#spinner_chars}
for ((idx=0; idx<chars_len; idx++)); do
for ((idx = 0; idx < chars_len; idx++)); do
spinner+=("${spinner_chars:idx:1}")
done
fi
[[ ${#spinner[@]} -eq 0 ]] && spinner=('|' '/' '-' '\\')
local i=0
local max_wait=30 # Reduced to 30 seconds (fast fail)
local max_wait=30 # Reduced to 30 seconds (fast fail)
local elapsed=0
local tick=0
local spin_len=${#spinner[@]}
(( spin_len == 0 )) && spinner=('|' '/' '-' '\\') && spin_len=${#spinner[@]}
((spin_len == 0)) && spinner=('|' '/' '-' '\\') && spin_len=${#spinner[@]}
while ( kill -0 "$dir_pid" 2>/dev/null || kill -0 "$file_pid" 2>/dev/null ); do
while (kill -0 "$dir_pid" 2> /dev/null || kill -0 "$file_pid" 2> /dev/null); do
printf "\r ${BLUE}Scanning${NC} ${spinner[$((i % spin_len))]} (%ds)" "$elapsed" >&2
((i++))
sleep 0.1 # Faster animation (100ms per frame)
sleep 0.1 # Faster animation (100ms per frame)
((tick++))
# Update elapsed seconds every 10 ticks (1 second)
@@ -1473,10 +1477,10 @@ scan_directory_contents_fast() {
# Force kill if taking too long (30 seconds for fast response)
if [[ $elapsed -ge $max_wait ]]; then
kill -9 "$dir_pid" 2>/dev/null || true
kill -9 "$file_pid" 2>/dev/null || true
wait "$dir_pid" 2>/dev/null || true
wait "$file_pid" 2>/dev/null || true
kill -9 "$dir_pid" 2> /dev/null || true
kill -9 "$file_pid" 2> /dev/null || true
wait "$dir_pid" 2> /dev/null || true
wait "$file_pid" 2> /dev/null || true
printf "\r ${YELLOW}Large directory - showing estimated sizes${NC}\n" >&2
sleep 0.3
break
@@ -1488,8 +1492,8 @@ scan_directory_contents_fast() {
fi
# Wait for completion (non-blocking if already killed)
wait "$file_pid" 2>/dev/null || true
wait "$dir_pid" 2>/dev/null || true
wait "$file_pid" 2> /dev/null || true
wait "$dir_pid" 2> /dev/null || true
# Small delay only if scan was very fast (let user see the spinner briefly)
if [[ "$show_progress" == "true" ]] && [[ ${elapsed:-0} -lt 1 ]]; then
@@ -1498,19 +1502,19 @@ scan_directory_contents_fast() {
# Combine and sort - only keep top items
# Ensure we handle empty files gracefully
> "$output_file"
true > "$output_file"
if [[ -f "$temp_dirs" ]] || [[ -f "$temp_files" ]]; then
cat "$temp_dirs" "$temp_files" 2>/dev/null | sort -t'|' -k1 -rn | head -"$max_items" > "$output_file" || true
cat "$temp_dirs" "$temp_files" 2> /dev/null | sort -t'|' -k1 -rn | head -"$max_items" > "$output_file" || true
fi
# Cleanup
rm -f "$temp_dirs" "$temp_files" 2>/dev/null
rm -f "$temp_dirs" "$temp_files" 2> /dev/null
}
# Calculate directory sizes and update (now only used for deep refresh)
calculate_dir_sizes() {
local items_file="$1"
local max_items="${2:-15}" # Only recalculate first 15 by default
local max_items="${2:-15}" # Only recalculate first 15 by default
local temp_file="${items_file}.calc"
# Since we now scan with actual sizes, this function is mainly for refresh
@@ -1519,9 +1523,9 @@ calculate_dir_sizes() {
# Only update if source file still exists (might have been deleted if user quit)
if [[ -f "$items_file" ]]; then
mv "$temp_file" "$items_file" 2>/dev/null || true
mv "$temp_file" "$items_file" 2> /dev/null || true
else
rm -f "$temp_file" 2>/dev/null || true
rm -f "$temp_file" 2> /dev/null || true
fi
}
@@ -1531,7 +1535,7 @@ combine_initial_scan_results() {
local temp_large="$TEMP_PREFIX.large"
local temp_dirs="$TEMP_PREFIX.dirs"
> "$output_file"
true > "$output_file"
# Add directories
if [[ -f "$temp_dirs" ]]; then
@@ -1572,7 +1576,7 @@ show_volumes_overview() {
# External volumes (if any)
if [[ -d "/Volumes" ]]; then
local vol_priority=500
find /Volumes -mindepth 1 -maxdepth 1 -type d 2>/dev/null | while IFS= read -r vol; do
find /Volumes -mindepth 1 -maxdepth 1 -type d 2> /dev/null | while IFS= read -r vol; do
local vol_name
vol_name=$(basename "$vol")
echo "$((vol_priority))|$vol|Volume: $vol_name"
@@ -1582,17 +1586,17 @@ show_volumes_overview() {
} | sort -t'|' -k1 -rn > "$temp_volumes"
# Setup alternate screen and hide cursor (keep hidden throughout)
tput smcup 2>/dev/null || true
printf "\033[?25l" >&2 # Hide cursor
tput smcup 2> /dev/null || true
printf "\033[?25l" >&2 # Hide cursor
cleanup_volumes() {
printf "\033[?25h" >&2 # Show cursor
tput rmcup 2>/dev/null || true
printf "\033[?25h" >&2 # Show cursor
tput rmcup 2> /dev/null || true
}
trap cleanup_volumes EXIT INT TERM
# Force cursor hidden at the start
stty -echo 2>/dev/null || true
stty -echo 2> /dev/null || true
local cursor=0
local total_items
@@ -1603,10 +1607,10 @@ show_volumes_overview() {
printf "\033[?25l" >&2
# Drain burst input (trackpad scroll -> many arrows)
type drain_pending_input >/dev/null 2>&1 && drain_pending_input
type drain_pending_input > /dev/null 2>&1 && drain_pending_input
# Build output buffer to reduce flicker
local output=""
output+="\033[?25l" # Hide cursor
output+="\033[?25l" # Hide cursor
output+="\033[H\033[J"
output+=$'\n'
output+="\033[0;35mSelect a location to explore\033[0m"$'\n'
@@ -1633,7 +1637,7 @@ show_volumes_overview() {
# Read key (suppress any escape sequences that might leak)
local key
key=$(read_key 2>/dev/null || echo "OTHER")
key=$(read_key 2> /dev/null || echo "OTHER")
case "$key" in
"UP")
@@ -1642,7 +1646,7 @@ show_volumes_overview() {
"DOWN")
((cursor < total_items - 1)) && ((cursor++))
;;
"ENTER"|"RIGHT")
"ENTER" | "RIGHT")
# Get selected path and enter it
local selected_path=""
idx=0
@@ -1679,7 +1683,7 @@ show_volumes_overview() {
# In volumes view, LEFT does nothing (already at top level)
# User must press q/ESC to quit
;;
"QUIT"|"q")
"QUIT" | "q")
# Quit the volumes view
break
;;
@@ -1693,13 +1697,13 @@ show_volumes_overview() {
# Interactive drill-down mode
interactive_drill_down() {
local start_path="$1"
local initial_items="${2:-}" # Pre-scanned items for first level
local initial_items="${2:-}" # Pre-scanned items for first level
local current_path="$start_path"
local path_stack=()
local cursor=0
local scroll_offset=0 # New: for scrolling
local scroll_offset=0 # New: for scrolling
local need_scan=true
local wait_for_calc=false # Don't wait on first load, let user press 'r'
local wait_for_calc=false # Don't wait on first load, let user press 'r'
local temp_items="$TEMP_PREFIX.items"
local status_message=""
@@ -1711,33 +1715,33 @@ interactive_drill_down() {
# Directory cache: store scan results for each visited directory
# Use temp files because bash 3.2 doesn't have associative arrays
local cache_dir="$TEMP_PREFIX.cache.$$"
mkdir -p "$cache_dir" 2>/dev/null || true
mkdir -p "$cache_dir" 2> /dev/null || true
# Note: We're already in alternate screen from show_volumes_overview
# Just hide cursor, don't re-enter alternate screen
printf "\033[?25l" # Hide cursor
printf "\033[?25l" # Hide cursor
# Save terminal settings and disable echo
local old_tty_settings=""
if [[ -t 0 ]]; then
old_tty_settings=$(stty -g 2>/dev/null || echo "")
stty -echo 2>/dev/null || true
old_tty_settings=$(stty -g 2> /dev/null || echo "")
stty -echo 2> /dev/null || true
fi
# Cleanup on exit (but don't exit alternate screen - may return to menu)
cleanup_drill_down() {
# Restore terminal settings
if [[ -n "${old_tty_settings:-}" ]]; then
stty "$old_tty_settings" 2>/dev/null || true
stty "$old_tty_settings" 2> /dev/null || true
fi
printf "\033[?25h" # Show cursor
printf "\033[?25h" # Show cursor
# Don't call tput rmcup - we may be returning to volumes menu
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2>/dev/null || true # Clean up cache
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2> /dev/null || true # Clean up cache
}
trap cleanup_drill_down EXIT INT TERM
# Drain any input that accumulated before entering interactive mode
type drain_pending_input >/dev/null 2>&1 && drain_pending_input
type drain_pending_input > /dev/null 2>&1 && drain_pending_input
while true; do
# Ensure cursor is always hidden during navigation
@@ -1747,7 +1751,7 @@ interactive_drill_down() {
if [[ "$need_scan" == "true" ]]; then
# Generate cache key (use md5 hash of path)
local cache_key
cache_key=$(echo "$current_path" | md5 2>/dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
cache_key=$(echo "$current_path" | md5 2> /dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
local cache_file="$cache_dir/$cache_key"
# Check if we have cached results for this directory
@@ -1760,12 +1764,12 @@ interactive_drill_down() {
# Use || true to prevent exit on scan failure
scan_directory_contents_fast "$current_path" "$temp_items" 50 true || {
# Scan failed - create empty result file
> "$temp_items"
true > "$temp_items"
}
# Save to cache for next time (only if not empty)
if [[ -s "$temp_items" ]]; then
cp "$temp_items" "$cache_file" 2>/dev/null || true
cp "$temp_items" "$cache_file" 2> /dev/null || true
fi
fi
@@ -1787,7 +1791,7 @@ interactive_drill_down() {
scroll_offset=0
# Drain any input accumulated during scanning
type drain_pending_input >/dev/null 2>&1 && drain_pending_input
type drain_pending_input > /dev/null 2>&1 && drain_pending_input
# Check if empty or scan failed
if [[ $total_items -eq 0 ]]; then
@@ -1800,7 +1804,7 @@ interactive_drill_down() {
echo " ${GRAY}Path: $current_path${NC}" >&2
echo "" >&2
echo " ${GRAY}Press any key to go back...${NC}" >&2
read_key >/dev/null 2>&1
read_key > /dev/null 2>&1
else
# Directory exists but scan returned nothing (timeout or empty)
printf "\033[H\033[J" >&2
@@ -1811,7 +1815,7 @@ interactive_drill_down() {
echo " ${GRAY}Press ${NC}${GREEN}R${NC}${GRAY} to retry, any other key to go back${NC}" >&2
local retry_key
retry_key=$(read_key 2>/dev/null || echo "OTHER")
retry_key=$(read_key 2> /dev/null || echo "OTHER")
if [[ "$retry_key" == "RETRY" ]]; then
# Retry scan
@@ -1842,13 +1846,13 @@ interactive_drill_down() {
# Build output buffer once for smooth rendering
local output=""
output+="\033[?25l" # Hide cursor
output+="\033[H\033[J" # Clear screen
output+="\033[?25l" # Hide cursor
output+="\033[H\033[J" # Clear screen
output+=$'\n'
output+="\033[0;35mDisk space explorer > $(echo "$current_path" | sed "s|^$HOME|~|")\033[0m"$'\n'
output+=$'\n'
local max_show=15 # Show 15 items per page
local max_show=15 # Show 15 items per page
local page_start=$scroll_offset
local page_end
page_end=$((scroll_offset + max_show))
@@ -1886,8 +1890,10 @@ interactive_drill_down() {
local badge="$BADGE_FILE" color="${NC}"
if [[ "$type" == "dir" ]]; then
badge="$BADGE_DIR" color="${BLUE}"
if [[ $size -gt 10737418240 ]]; then color="${RED}"
elif [[ $size -gt 1073741824 ]]; then color="${YELLOW}"
if [[ $size -gt 10737418240 ]]; then
color="${RED}"
elif [[ $size -gt 1073741824 ]]; then
color="${YELLOW}"
fi
else
local ext="${name##*.}"
@@ -1895,10 +1901,10 @@ interactive_drill_down() {
info=$(get_file_info "$path")
badge="${info%|*}"
case "$ext" in
dmg|iso|pkg|zip|tar|gz|rar|7z)
dmg | iso | pkg | zip | tar | gz | rar | 7z)
color="${YELLOW}"
;;
mov|mp4|avi|mkv|webm|jpg|jpeg|png|gif|heic)
mov | mp4 | avi | mkv | webm | jpg | jpeg | png | gif | heic)
color="${YELLOW}"
;;
log)
@@ -1945,7 +1951,7 @@ interactive_drill_down() {
# Read key directly without draining (to preserve all user input)
local key
key=$(read_key 2>/dev/null || echo "OTHER")
key=$(read_key 2> /dev/null || echo "OTHER")
# Debug: uncomment to see what keys are being received
# printf "\rDEBUG: Received key=[%s] " "$key" >&2
@@ -1974,7 +1980,7 @@ interactive_drill_down() {
fi
fi
;;
"ENTER"|"RIGHT")
"ENTER" | "RIGHT")
# Enter selected item - directory or file
if [[ $cursor -lt ${#items[@]} ]]; then
local selected="${items[$cursor]}"
@@ -1998,7 +2004,7 @@ interactive_drill_down() {
# For text-like files, use less or fallback to open
case "$file_ext" in
txt|log|md|json|xml|yaml|yml|conf|cfg|ini|sh|bash|zsh|py|js|ts|go|rs|c|cpp|h|java|rb|php|html|css|sql)
txt | log | md | json | xml | yaml | yml | conf | cfg | ini | sh | bash | zsh | py | js | ts | go | rs | c | cpp | h | java | rb | php | html | css | sql)
# Clear screen and show loading message
printf "\033[H\033[J"
echo ""
@@ -2006,21 +2012,21 @@ interactive_drill_down() {
echo ""
# Try less first (best for text viewing)
if command -v less &>/dev/null; then
if command -v less &> /dev/null; then
# Exit alternate screen only for less
printf "\033[?25h" # Show cursor
tput rmcup 2>/dev/null || true
printf "\033[?25h" # Show cursor
tput rmcup 2> /dev/null || true
less -F "$selected_path" 2>/dev/null && open_success=true
less -F "$selected_path" 2> /dev/null && open_success=true
# Return to alternate screen
tput smcup 2>/dev/null || true
printf "\033[?25l" # Hide cursor
tput smcup 2> /dev/null || true
printf "\033[?25l" # Hide cursor
else
# Fallback to system open if less is not available
echo " ${GRAY}Launching default application...${NC}"
if command -v open &>/dev/null; then
open "$selected_path" 2>/dev/null && open_success=true
if command -v open &> /dev/null; then
open "$selected_path" 2> /dev/null && open_success=true
if [[ "$open_success" == "true" ]]; then
echo ""
echo " ${GREEN}${ICON_SUCCESS}${NC} File opened in external app"
@@ -2038,8 +2044,8 @@ interactive_drill_down() {
echo ""
echo " ${GRAY}Launching default application...${NC}"
if command -v open &>/dev/null; then
open "$selected_path" 2>/dev/null && open_success=true
if command -v open &> /dev/null; then
open "$selected_path" 2> /dev/null && open_success=true
# Show brief success message
if [[ "$open_success" == "true" ]]; then
@@ -2059,7 +2065,7 @@ interactive_drill_down() {
echo ""
echo " ${GRAY}File: $selected_path${NC}"
echo " ${GRAY}Press any key to return...${NC}"
read -n 1 -s 2>/dev/null
read -n 1 -s 2> /dev/null
fi
fi
fi
@@ -2081,16 +2087,16 @@ interactive_drill_down() {
# Already at start path - return to volumes menu
# Don't show cursor or exit screen - menu will handle it
if [[ -n "${old_tty_settings:-}" ]]; then
stty "$old_tty_settings" 2>/dev/null || true
stty "$old_tty_settings" 2> /dev/null || true
fi
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2>/dev/null || true
[[ -d "${cache_dir:-}" ]] && rm -rf "$cache_dir" 2> /dev/null || true
trap - EXIT INT TERM
return 1 # Return to menu
return 1 # Return to menu
fi
;;
"OPEN")
if command -v open >/dev/null 2>&1; then
if open "$current_path" >/dev/null 2>&1; then
if command -v open > /dev/null 2>&1; then
if open "$current_path" > /dev/null 2>&1; then
status_message="${GREEN}${ICON_SUCCESS}${NC} Finder opened: ${GRAY}$current_path${NC}"
else
status_message="${YELLOW}Warning:${NC} Could not open ${GRAY}$current_path${NC}"
@@ -2155,7 +2161,7 @@ interactive_drill_down() {
# Read confirmation
local confirm
confirm=$(read_key 2>/dev/null || echo "QUIT")
confirm=$(read_key 2> /dev/null || echo "QUIT")
if [[ "$confirm" == "ENTER" ]]; then
# Request sudo if needed before deletion
@@ -2180,11 +2186,11 @@ interactive_drill_down() {
# Try to delete with sudo if needed
local delete_success=false
if [[ "$needs_sudo" == "true" ]]; then
if sudo rm -rf "$selected_path" 2>/dev/null; then
if sudo rm -rf "$selected_path" 2> /dev/null; then
delete_success=true
fi
else
if rm -rf "$selected_path" 2>/dev/null; then
if rm -rf "$selected_path" 2> /dev/null; then
delete_success=true
fi
fi
@@ -2195,9 +2201,9 @@ interactive_drill_down() {
# Clear cache to force rescan
local cache_key
cache_key=$(echo "$current_path" | md5 2>/dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
cache_key=$(echo "$current_path" | md5 2> /dev/null || echo "$current_path" | shasum | cut -d' ' -f1)
local cache_file="$cache_dir/$cache_key"
rm -f "$cache_file" 2>/dev/null || true
rm -f "$cache_file" 2> /dev/null || true
# Refresh the view
need_scan=true
@@ -2215,16 +2221,16 @@ interactive_drill_down() {
echo " ${ICON_LIST} System protection (SIP) prevents deletion"
echo ""
echo " ${GRAY}Press any key to continue...${NC}"
read_key >/dev/null 2>&1
read_key > /dev/null 2>&1
fi
fi
fi
;;
"QUIT"|"q")
"QUIT" | "q")
# Quit the explorer
cleanup_drill_down
trap - EXIT INT TERM
return 0 # Return true to indicate normal exit
return 0 # Return true to indicate normal exit
;;
*)
# Unknown key - ignore it
@@ -2233,7 +2239,7 @@ interactive_drill_down() {
done
# Cleanup is handled by trap
return 0 # Normal exit if loop ends
return 0 # Normal exit if loop ends
}
# Main interactive loop
@@ -2242,7 +2248,7 @@ interactive_mode() {
VIEW_MODE="overview"
while true; do
type drain_pending_input >/dev/null 2>&1 && drain_pending_input
type drain_pending_input > /dev/null 2>&1 && drain_pending_input
display_interactive_menu
local key
@@ -2291,10 +2297,10 @@ interactive_mode() {
VIEW_MODE="overview"
fi
;;
"f"|"F")
"f" | "F")
VIEW_MODE="files"
;;
"t"|"T")
"t" | "T")
VIEW_MODE="types"
;;
"ENTER")
@@ -2402,7 +2408,7 @@ main() {
# Parse arguments - only support --help
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
-h | --help)
echo "Usage: mole analyze"
echo ""
echo "Interactive disk space explorer - Navigate folders sorted by size"
@@ -2446,7 +2452,7 @@ main() {
CURRENT_PATH="$target_path"
# Create cache directory
mkdir -p "$CACHE_DIR" 2>/dev/null || true
mkdir -p "$CACHE_DIR" 2> /dev/null || true
# Start with volumes overview to let user choose location
show_volumes_overview
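
Most of the hunks in this file are mechanical shfmt rewrites rather than behavior changes; the recurring patterns, pulled out into a standalone illustration (not repo code):

```bash
#!/usr/bin/env bash
# Recurring shfmt rewrites seen in the hunks above, shown in isolation.
dir="/tmp" ext="dmg" BADGE_BUNDLE="B" output_file="/tmp/mole_example.out"

# Redirections gain a space after the operator:
du -sk "$dir" 2> /dev/null          # was: du -sk "$dir" 2>/dev/null

# Case patterns get spaces around "|", one command per line:
case "$ext" in
    dmg | iso | pkg)                # was: dmg|iso|pkg)
        badge="$BADGE_BUNDLE"       # was: badge="$BADGE_BUNDLE" ; type="Bundle"
        type="Bundle"
        ;;
esac

# Bare truncation gets an explicit command:
true > "$output_file"               # was: > "$output_file"
echo "$badge $type"
```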


@@ -18,11 +18,11 @@ DRY_RUN=false
IS_M_SERIES=$([ "$(uname -m)" = "arm64" ] && echo "true" || echo "false")
# Constants
readonly MAX_PARALLEL_JOBS=15 # Maximum parallel background jobs
readonly TEMP_FILE_AGE_DAYS=7 # Age threshold for temp file cleanup
readonly ORPHAN_AGE_DAYS=60 # Age threshold for orphaned data
readonly SIZE_1GB_KB=1048576 # 1GB in kilobytes
readonly SIZE_1MB_KB=1024 # 1MB in kilobytes
readonly MAX_PARALLEL_JOBS=15 # Maximum parallel background jobs
readonly TEMP_FILE_AGE_DAYS=7 # Age threshold for temp file cleanup
readonly ORPHAN_AGE_DAYS=60 # Age threshold for orphaned data
readonly SIZE_1GB_KB=1048576 # 1GB in kilobytes
readonly SIZE_1MB_KB=1024 # 1MB in kilobytes
# Default whitelist patterns (preselected, user can disable)
declare -a DEFAULT_WHITELIST_PATTERNS=(
"$HOME/Library/Caches/ms-playwright*"
@@ -52,7 +52,7 @@ if [[ -f "$HOME/.config/mole/whitelist" ]]; then
# Prevent absolute path to critical system directories
case "$line" in
/System/*|/bin/*|/sbin/*|/usr/bin/*|/usr/sbin/*)
/System/* | /bin/* | /sbin/* | /usr/bin/* | /usr/sbin/*)
WHITELIST_WARNINGS+=("System path: $line")
continue
;;
@@ -104,14 +104,14 @@ cleanup() {
# Stop all spinners and clear the line
if [[ -n "$SPINNER_PID" ]]; then
kill "$SPINNER_PID" 2>/dev/null || true
wait "$SPINNER_PID" 2>/dev/null || true
kill "$SPINNER_PID" 2> /dev/null || true
wait "$SPINNER_PID" 2> /dev/null || true
SPINNER_PID=""
fi
if [[ -n "$INLINE_SPINNER_PID" ]]; then
kill "$INLINE_SPINNER_PID" 2>/dev/null || true
wait "$INLINE_SPINNER_PID" 2>/dev/null || true
kill "$INLINE_SPINNER_PID" 2> /dev/null || true
wait "$INLINE_SPINNER_PID" 2> /dev/null || true
INLINE_SPINNER_PID=""
fi
@@ -122,8 +122,8 @@ cleanup() {
# Stop sudo keepalive
if [[ -n "$SUDO_KEEPALIVE_PID" ]]; then
kill "$SUDO_KEEPALIVE_PID" 2>/dev/null || true
wait "$SUDO_KEEPALIVE_PID" 2>/dev/null || true
kill "$SUDO_KEEPALIVE_PID" 2> /dev/null || true
wait "$SUDO_KEEPALIVE_PID" 2> /dev/null || true
SUDO_KEEPALIVE_PID=""
fi
@@ -176,8 +176,8 @@ stop_spinner() {
fi
if [[ -n "$SPINNER_PID" ]]; then
kill "$SPINNER_PID" 2>/dev/null
wait "$SPINNER_PID" 2>/dev/null
kill "$SPINNER_PID" 2> /dev/null
wait "$SPINNER_PID" 2> /dev/null
SPINNER_PID=""
printf "\r ${GREEN}${ICON_SUCCESS}${NC} %s\n" "$result_message"
else
@@ -229,7 +229,7 @@ safe_clean() {
if [[ ${#WHITELIST_PATTERNS[@]} -gt 0 ]]; then
for w in "${WHITELIST_PATTERNS[@]}"; do
# Match both exact path and glob pattern
if [[ "$path" == "$w" ]] || [[ "$path" == $w ]]; then
if [[ "$path" == "$w" ]] || [[ "$path" == "$w" ]]; then
skip=true
((skipped_count++))
break
@@ -239,7 +239,7 @@ safe_clean() {
[[ "$skip" == "true" ]] && continue
[[ -e "$path" ]] && existing_paths+=("$path")
done
# Update global whitelist skip counter
if [[ $skipped_count -gt 0 ]]; then
((whitelist_skipped_count += skipped_count))
@@ -253,31 +253,34 @@ safe_clean() {
# Show progress indicator for potentially slow operations
if [[ ${#existing_paths[@]} -gt 3 ]]; then
if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi
local temp_dir=$(create_temp_dir)
local temp_dir
temp_dir=$(create_temp_dir)
# Parallel processing (bash 3.2 compatible)
local -a pids=()
local idx=0
for path in "${existing_paths[@]}"; do
(
local size=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0")
local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ')
local size
size=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
local count
count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
# Use index + PID for unique filename
local tmp_file="$temp_dir/result_${idx}.$$"
echo "$size $count" > "$tmp_file"
mv "$tmp_file" "$temp_dir/result_${idx}" 2>/dev/null || true
mv "$tmp_file" "$temp_dir/result_${idx}" 2> /dev/null || true
) &
pids+=($!)
((idx++))
if (( ${#pids[@]} >= MAX_PARALLEL_JOBS )); then
wait "${pids[0]}" 2>/dev/null || true
if ((${#pids[@]} >= MAX_PARALLEL_JOBS)); then
wait "${pids[0]}" 2> /dev/null || true
pids=("${pids[@]:1}")
fi
done
for pid in "${pids[@]}"; do
wait "$pid" 2>/dev/null || true
wait "$pid" 2> /dev/null || true
done
# Read results using same index
@@ -285,10 +288,10 @@ safe_clean() {
for path in "${existing_paths[@]}"; do
local result_file="$temp_dir/result_${idx}"
if [[ -f "$result_file" ]]; then
read -r size count < "$result_file" 2>/dev/null || true
read -r size count < "$result_file" 2> /dev/null || true
if [[ "$count" -gt 0 && "$size" -gt 0 ]]; then
if [[ "$DRY_RUN" != "true" ]]; then
rm -rf "$path" 2>/dev/null || true
rm -rf "$path" 2> /dev/null || true
fi
((total_size_bytes += size))
((total_count += count))
@@ -304,12 +307,14 @@ safe_clean() {
if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi
for path in "${existing_paths[@]}"; do
local size_bytes=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0")
local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ')
local size_bytes
size_bytes=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
local count
count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
if [[ "$count" -gt 0 && "$size_bytes" -gt 0 ]]; then
if [[ "$DRY_RUN" != "true" ]]; then
rm -rf "$path" 2>/dev/null || true
rm -rf "$path" 2> /dev/null || true
fi
((total_size_bytes += size_bytes))
((total_count += count))
@@ -319,7 +324,10 @@ safe_clean() {
fi
# Clear progress / stop spinner before showing result
if [[ -t 1 ]]; then stop_inline_spinner; echo -ne "\r\033[K"; fi
if [[ -t 1 ]]; then
stop_inline_spinner
echo -ne "\r\033[K"
fi
if [[ $removed_any -eq 1 ]]; then
# Convert KB to bytes for bytes_to_human()
@@ -335,8 +343,8 @@ safe_clean() {
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $label ${GREEN}($size_human)${NC}"
fi
((files_cleaned+=total_count))
((total_size_cleaned+=total_size_bytes))
((files_cleaned += total_count))
((total_size_cleaned += total_size_bytes))
((total_items++))
note_activity
fi
@@ -349,7 +357,7 @@ start_cleanup() {
clear
printf '\n'
echo -e "${PURPLE}Clean Your Mac${NC}"
if [[ "$DRY_RUN" != "true" && -t 0 ]]; then
echo ""
echo -e "${YELLOW}Tip:${NC} Safety first—run 'mo clean --dry-run'. Important Macs should stop."
@@ -384,7 +392,7 @@ start_cleanup() {
# Enter = yes, do system cleanup
if [[ -z "$choice" ]] || [[ "$choice" == $'\n' ]]; then
printf "\r\033[K" # Clear the prompt line
printf "\r\033[K" # Clear the prompt line
if request_sudo_access "System cleanup requires admin access"; then
SYSTEM_CLEAN=true
echo -e "${GREEN}${ICON_SUCCESS}${NC} Admin access granted"
@@ -393,7 +401,7 @@ start_cleanup() {
(
local retry_count=0
while true; do
if ! sudo -n true 2>/dev/null; then
if ! sudo -n true 2> /dev/null; then
((retry_count++))
if [[ $retry_count -ge 3 ]]; then
exit 1
@@ -403,9 +411,9 @@ start_cleanup() {
fi
retry_count=0
sleep 30
kill -0 "$$" 2>/dev/null || exit
kill -0 "$$" 2> /dev/null || exit
done
) 2>/dev/null &
) 2> /dev/null &
SUDO_KEEPALIVE_PID=$!
else
SYSTEM_CLEAN=false
@@ -430,7 +438,7 @@ start_cleanup() {
perform_cleanup() {
echo -e "${BLUE}${ICON_ADMIN}${NC} $(detect_architecture) | Free space: $(get_free_space)"
# Show whitelist info if patterns are active
local active_count=${#WHITELIST_PATTERNS[@]}
if [[ $active_count -gt 2 ]]; then
@@ -453,25 +461,25 @@ perform_cleanup() {
start_section "Deep system-level cleanup"
# Clean system caches more safely
sudo find /Library/Caches -name "*.cache" -delete 2>/dev/null || true
sudo find /Library/Caches -name "*.tmp" -delete 2>/dev/null || true
sudo find /Library/Caches -type f -name "*.log" -delete 2>/dev/null || true
sudo find /Library/Caches -name "*.cache" -delete 2> /dev/null || true
sudo find /Library/Caches -name "*.tmp" -delete 2> /dev/null || true
sudo find /Library/Caches -type f -name "*.log" -delete 2> /dev/null || true
# Clean old temp files only (avoid breaking running processes)
local tmp_cleaned=0
local tmp_count=$(sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2>/dev/null | wc -l | tr -d ' ')
local tmp_count=$(sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2> /dev/null | wc -l | tr -d ' ')
if [[ "$tmp_count" -gt 0 ]]; then
sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2>/dev/null || true
sudo find /tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2> /dev/null || true
tmp_cleaned=1
fi
local var_tmp_count=$(sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2>/dev/null | wc -l | tr -d ' ')
local var_tmp_count=$(sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} 2> /dev/null | wc -l | tr -d ' ')
if [[ "$var_tmp_count" -gt 0 ]]; then
sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2>/dev/null || true
sudo find /var/tmp -type f -mtime +${TEMP_FILE_AGE_DAYS} -delete 2> /dev/null || true
tmp_cleaned=1
fi
[[ $tmp_cleaned -eq 1 ]] && log_success "Old system temp files (${TEMP_FILE_AGE_DAYS}+ days)"
sudo rm -rf /Library/Updates/* 2>/dev/null || true
sudo rm -rf /Library/Updates/* 2> /dev/null || true
log_success "System library caches and updates"
end_section
@@ -497,15 +505,15 @@ perform_cleanup() {
[[ -d "$volume" && -d "$volume/.Trashes" && -w "$volume" ]] || continue
# Skip network volumes
local fs_type=$(df -T "$volume" 2>/dev/null | tail -1 | awk '{print $2}')
local fs_type=$(df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}')
case "$fs_type" in
nfs|smbfs|afpfs|cifs|webdav) continue ;;
nfs | smbfs | afpfs | cifs | webdav) continue ;;
esac
# Verify volume is mounted
if mount | grep -q "on $volume "; then
if [[ "$DRY_RUN" != "true" ]]; then
find "$volume/.Trashes" -mindepth 1 -maxdepth 1 -exec rm -rf {} \; 2>/dev/null || true
find "$volume/.Trashes" -mindepth 1 -maxdepth 1 -exec rm -rf {} \; 2> /dev/null || true
fi
fi
done
@@ -526,7 +534,6 @@ perform_cleanup() {
safe_clean ~/Downloads/*.part "Incomplete downloads (partial)"
end_section
# ===== 3. macOS System Caches =====
start_section "macOS system caches"
safe_clean ~/Library/Saved\ Application\ State/* "Saved application states"
@@ -542,7 +549,6 @@ perform_cleanup() {
safe_clean ~/Library/Application\ Support/CloudDocs/session/db/* "iCloud session cache"
end_section
# ===== 4. Sandboxed App Caches =====
start_section "Sandboxed app caches"
# Clean specific high-usage apps first for better user feedback
@@ -553,7 +559,6 @@ perform_cleanup() {
safe_clean ~/Library/Containers/*/Data/Library/Caches/* "Sandboxed app caches"
end_section
# ===== 5. Browsers =====
start_section "Browser cleanup"
# Safari (cache only, NOT local storage or databases to preserve login states)
@@ -577,7 +582,6 @@ perform_cleanup() {
safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache"
end_section
# ===== 6. Cloud Storage =====
start_section "Cloud storage caches"
# Only cache files, not sync state or login credentials
@@ -590,7 +594,6 @@ perform_cleanup() {
safe_clean ~/Library/Caches/com.microsoft.OneDrive "OneDrive cache"
end_section
# ===== 7. Office Applications =====
start_section "Office applications"
safe_clean ~/Library/Caches/com.microsoft.Word "Microsoft Word cache"
@@ -603,11 +606,10 @@ perform_cleanup() {
safe_clean ~/Library/Caches/com.apple.mail/* "Apple Mail cache"
end_section
# ===== 8. Developer tools =====
start_section "Developer tools"
# Node.js ecosystem
if command -v npm >/dev/null 2>&1; then
if command -v npm > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "npm cache" npm cache clean --force
else
@@ -622,7 +624,7 @@ perform_cleanup() {
safe_clean ~/.bun/install/cache/* "Bun cache"
# Python ecosystem
if command -v pip3 >/dev/null 2>&1; then
if command -v pip3 > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "pip cache" pip3 cache purge
else
@@ -636,7 +638,7 @@ perform_cleanup() {
safe_clean ~/.pyenv/cache/* "pyenv cache"
# Go ecosystem
if command -v go >/dev/null 2>&1; then
if command -v go > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Go cache" bash -c 'go clean -modcache >/dev/null 2>&1 || true; go clean -cache >/dev/null 2>&1 || true'
else
@@ -652,7 +654,7 @@ perform_cleanup() {
safe_clean ~/.cargo/registry/cache/* "Rust cargo cache"
# Docker (only clean build cache, preserve images and volumes)
if command -v docker >/dev/null 2>&1; then
if command -v docker > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Docker build cache" docker builder prune -af
else
@@ -674,9 +676,10 @@ perform_cleanup() {
safe_clean ~/Library/Caches/Homebrew/* "Homebrew cache"
safe_clean /opt/homebrew/var/homebrew/locks/* "Homebrew lock files (M series)"
safe_clean /usr/local/var/homebrew/locks/* "Homebrew lock files (Intel)"
if command -v brew >/dev/null 2>&1; then
if command -v brew > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Homebrew cleanup" brew cleanup
# Use -s (scrub cache) for faster cleanup, --prune=all removes old versions
MOLE_CMD_TIMEOUT=300 clean_tool_cache "Homebrew cleanup" brew cleanup -s --prune=all
else
echo -e " ${YELLOW}${NC} Homebrew (would cleanup)"
fi
@@ -818,7 +821,6 @@ perform_cleanup() {
end_section
# ===== 10. Applications =====
start_section "Applications"
@@ -983,7 +985,6 @@ perform_cleanup() {
end_section
# ===== 11. Virtualization Tools =====
start_section "Virtualization tools"
safe_clean ~/Library/Caches/com.vmware.fusion "VMware Fusion cache"
@@ -992,7 +993,6 @@ perform_cleanup() {
safe_clean ~/.vagrant.d/tmp/* "Vagrant temporary files"
end_section
# ===== 12. Application Support logs cleanup =====
start_section "Application Support logs"
@@ -1003,7 +1003,7 @@ perform_cleanup() {
# Skip system and protected apps
case "$app_name" in
com.apple.*|Adobe*|1Password|Claude)
com.apple.* | Adobe* | 1Password | Claude)
continue
;;
esac
@@ -1022,7 +1022,6 @@ perform_cleanup() {
end_section
# ===== 13. Orphaned app data cleanup =====
# Deep cleanup of leftover files from uninstalled apps
#
@@ -1074,32 +1073,32 @@ perform_cleanup() {
if [[ -d "$search_path" ]]; then
while IFS= read -r app; do
[[ -f "$app/Contents/Info.plist" ]] || continue
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "")
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
[[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles"
done < <(find "$search_path" -maxdepth 3 -type d -name "*.app" 2>/dev/null || true)
done < <(find "$search_path" -maxdepth 3 -type d -name "*.app" 2> /dev/null || true)
fi
done
# Use Spotlight as fallback to catch apps in unusual locations
# This significantly reduces false positives
if command -v mdfind >/dev/null 2>&1; then
if command -v mdfind > /dev/null 2>&1; then
while IFS= read -r app; do
[[ -f "$app/Contents/Info.plist" ]] || continue
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "")
bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
[[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles"
done < <(mdfind "kMDItemKind == 'Application'" 2>/dev/null | grep "\.app$" || true)
done < <(mdfind "kMDItemKind == 'Application'" 2> /dev/null | grep "\.app$" || true)
fi
# Get running applications (if an app is running, it's definitely not orphaned)
local running_apps=$(osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2>/dev/null || echo "")
local running_apps=$(osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2> /dev/null || echo "")
echo "$running_apps" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | grep -v '^$' > "$running_bundles"
# Check LaunchAgents and LaunchDaemons (if app has launch items, it likely exists)
find ~/Library/LaunchAgents /Library/LaunchAgents /Library/LaunchDaemons \
-name "*.plist" -type f 2>/dev/null | while IFS= read -r plist; do
-name "*.plist" -type f 2> /dev/null | while IFS= read -r plist; do
bundle_id=$(basename "$plist" .plist)
echo "$bundle_id" >> "$launch_agents"
done 2>/dev/null || true
done 2> /dev/null || true
# Combine and deduplicate all bundle IDs
sort -u "$installed_bundles" "$running_bundles" "$launch_agents" > "${installed_bundles}.final"
@@ -1117,7 +1116,7 @@ perform_cleanup() {
# Returns 0 (true) only if we are VERY CONFIDENT the app is uninstalled
is_orphaned() {
local bundle_id="$1"
local directory_path="$2" # The actual directory we're considering deleting
local directory_path="$2" # The actual directory we're considering deleting
# SAFETY CHECK 1: Skip system-critical and protected apps (MOST IMPORTANT)
if should_protect_data "$bundle_id"; then
@@ -1125,13 +1124,13 @@ perform_cleanup() {
fi
# SAFETY CHECK 2: Check if app bundle exists in our comprehensive scan
if grep -q "^$bundle_id$" "$installed_bundles" 2>/dev/null; then
if grep -q "^$bundle_id$" "$installed_bundles" 2> /dev/null; then
return 1
fi
# SAFETY CHECK 3: Extra check for system bundles (belt and suspenders)
case "$bundle_id" in
com.apple.*|loginwindow|dock|systempreferences|finder|safari)
com.apple.* | loginwindow | dock | systempreferences | finder | safari)
return 1
;;
esac
@@ -1139,7 +1138,7 @@ perform_cleanup() {
# SAFETY CHECK 4: If it's a very common/important prefix, be extra careful
# For major vendors, we NEVER auto-clean (too risky)
case "$bundle_id" in
com.adobe.*|com.microsoft.*|com.google.*|org.mozilla.*|com.jetbrains.*|com.docker.*)
com.adobe.* | com.microsoft.* | com.google.* | org.mozilla.* | com.jetbrains.* | com.docker.*)
return 1
;;
esac
@@ -1149,9 +1148,9 @@ perform_cleanup() {
# This protects against apps in unusual locations we didn't scan
if [[ -e "$directory_path" ]]; then
# Get last access time (days ago)
local last_access_epoch=$(stat -f%a "$directory_path" 2>/dev/null || echo "0")
local last_access_epoch=$(stat -f%a "$directory_path" 2> /dev/null || echo "0")
local current_epoch=$(date +%s)
local days_since_access=$(( (current_epoch - last_access_epoch) / 86400 ))
local days_since_access=$(((current_epoch - last_access_epoch) / 86400))
# If accessed in the last 60 days, DO NOT DELETE
# This means app is likely still installed somewhere
@@ -1167,12 +1166,12 @@ perform_cleanup() {
# Clean orphaned caches
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned caches..."
local cache_found=0
if ls ~/Library/Caches/com.* >/dev/null 2>&1; then
if ls ~/Library/Caches/com.* > /dev/null 2>&1; then
for cache_dir in ~/Library/Caches/com.* ~/Library/Caches/org.* ~/Library/Caches/net.* ~/Library/Caches/io.*; do
[[ -d "$cache_dir" ]] || continue
local bundle_id=$(basename "$cache_dir")
if is_orphaned "$bundle_id" "$cache_dir"; then
local size_kb=$(du -sk "$cache_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$cache_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$cache_dir" "Orphaned cache: $bundle_id"
((cache_found++))
@@ -1187,12 +1186,12 @@ perform_cleanup() {
# Clean orphaned logs
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned logs..."
local logs_found=0
if ls ~/Library/Logs/com.* >/dev/null 2>&1; then
if ls ~/Library/Logs/com.* > /dev/null 2>&1; then
for log_dir in ~/Library/Logs/com.* ~/Library/Logs/org.* ~/Library/Logs/net.* ~/Library/Logs/io.*; do
[[ -d "$log_dir" ]] || continue
local bundle_id=$(basename "$log_dir")
if is_orphaned "$bundle_id" "$log_dir"; then
local size_kb=$(du -sk "$log_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$log_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$log_dir" "Orphaned logs: $bundle_id"
((logs_found++))
@@ -1207,12 +1206,12 @@ perform_cleanup() {
# Clean orphaned saved states
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned saved states..."
local states_found=0
if ls ~/Library/Saved\ Application\ State/*.savedState >/dev/null 2>&1; then
if ls ~/Library/Saved\ Application\ State/*.savedState > /dev/null 2>&1; then
for state_dir in ~/Library/Saved\ Application\ State/*.savedState; do
[[ -d "$state_dir" ]] || continue
local bundle_id=$(basename "$state_dir" .savedState)
if is_orphaned "$bundle_id" "$state_dir"; then
local size_kb=$(du -sk "$state_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$state_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$state_dir" "Orphaned state: $bundle_id"
((states_found++))
@@ -1231,13 +1230,13 @@ perform_cleanup() {
# To avoid deleting data from installed apps, we skip container cleanup.
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned containers..."
local containers_found=0
if ls ~/Library/Containers/com.* >/dev/null 2>&1; then
if ls ~/Library/Containers/com.* > /dev/null 2>&1; then
# Count potential orphaned containers but don't delete them
for container_dir in ~/Library/Containers/com.* ~/Library/Containers/org.* ~/Library/Containers/net.* ~/Library/Containers/io.*; do
[[ -d "$container_dir" ]] || continue
local bundle_id=$(basename "$container_dir")
if is_orphaned "$bundle_id" "$container_dir"; then
local size_kb=$(du -sk "$container_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$container_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
# DISABLED: safe_clean "$container_dir" "Orphaned container: $bundle_id"
((containers_found++))
@@ -1252,12 +1251,12 @@ perform_cleanup() {
# Clean orphaned WebKit data
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned WebKit data..."
local webkit_found=0
if ls ~/Library/WebKit/com.* >/dev/null 2>&1; then
if ls ~/Library/WebKit/com.* > /dev/null 2>&1; then
for webkit_dir in ~/Library/WebKit/com.* ~/Library/WebKit/org.* ~/Library/WebKit/net.* ~/Library/WebKit/io.*; do
[[ -d "$webkit_dir" ]] || continue
local bundle_id=$(basename "$webkit_dir")
if is_orphaned "$bundle_id" "$webkit_dir"; then
local size_kb=$(du -sk "$webkit_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$webkit_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$webkit_dir" "Orphaned WebKit: $bundle_id"
((webkit_found++))
@@ -1272,12 +1271,12 @@ perform_cleanup() {
# Clean orphaned HTTP storages
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned HTTP storages..."
local http_found=0
if ls ~/Library/HTTPStorages/com.* >/dev/null 2>&1; then
if ls ~/Library/HTTPStorages/com.* > /dev/null 2>&1; then
for http_dir in ~/Library/HTTPStorages/com.* ~/Library/HTTPStorages/org.* ~/Library/HTTPStorages/net.* ~/Library/HTTPStorages/io.*; do
[[ -d "$http_dir" ]] || continue
local bundle_id=$(basename "$http_dir")
if is_orphaned "$bundle_id" "$http_dir"; then
local size_kb=$(du -sk "$http_dir" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$http_dir" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$http_dir" "Orphaned HTTP storage: $bundle_id"
((http_found++))
@@ -1292,12 +1291,12 @@ perform_cleanup() {
# Clean orphaned cookies
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning orphaned cookies..."
local cookies_found=0
if ls ~/Library/Cookies/*.binarycookies >/dev/null 2>&1; then
if ls ~/Library/Cookies/*.binarycookies > /dev/null 2>&1; then
for cookie_file in ~/Library/Cookies/*.binarycookies; do
[[ -f "$cookie_file" ]] || continue
local bundle_id=$(basename "$cookie_file" .binarycookies)
if is_orphaned "$bundle_id" "$cookie_file"; then
local size_kb=$(du -sk "$cookie_file" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$cookie_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
safe_clean "$cookie_file" "Orphaned cookies: $bundle_id"
((cookies_found++))
@@ -1340,9 +1339,9 @@ perform_cleanup() {
start_section "iOS device backups"
backup_dir="$HOME/Library/Application Support/MobileSync/Backup"
if [[ -d "$backup_dir" ]] && find "$backup_dir" -mindepth 1 -maxdepth 1 | read -r _; then
backup_kb=$(du -sk "$backup_dir" 2>/dev/null | awk '{print $1}')
backup_kb=$(du -sk "$backup_dir" 2> /dev/null | awk '{print $1}')
if [[ -n "${backup_kb:-}" && "$backup_kb" -gt 102400 ]]; then
backup_human=$(du -sh "$backup_dir" 2>/dev/null | awk '{print $1}')
backup_human=$(du -sh "$backup_dir" 2> /dev/null | awk '{print $1}')
note_activity
echo -e " Found ${GREEN}${backup_human}${NC} iOS backups"
echo -e " You can delete them manually: ${backup_dir}"
@@ -1361,9 +1360,9 @@ perform_cleanup() {
# Skip system volume and network volumes
[[ "$volume" == "/Volumes/MacintoshHD" || "$volume" == "/" ]] && continue
local fs_type=$(df -T "$volume" 2>/dev/null | tail -1 | awk '{print $2}')
local fs_type=$(df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}')
case "$fs_type" in
nfs|smbfs|afpfs|cifs|webdav) continue ;;
nfs | smbfs | afpfs | cifs | webdav) continue ;;
esac
# Look for HFS+ style backups (Backups.backupdb)
@@ -1374,19 +1373,19 @@ perform_cleanup() {
while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue
local size_kb=$(du -sk "$inprogress_file" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$inprogress_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
local backup_name=$(basename "$inprogress_file")
if [[ "$DRY_RUN" != "true" ]]; then
# Use tmutil to safely delete the failed backup
if command -v tmutil >/dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2>/dev/null; then
if command -v tmutil > /dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2> /dev/null; then
local size_human=$(bytes_to_human "$((size_kb * 1024))")
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed backup: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++))
((files_cleaned++))
((total_size_cleaned+=size_kb))
((total_size_cleaned += size_kb))
((total_items++))
note_activity
else
@@ -1402,7 +1401,7 @@ perform_cleanup() {
note_activity
fi
fi
done < <(find "$backupdb_dir" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2>/dev/null || true)
done < <(find "$backupdb_dir" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi
# Look for APFS style backups (.backupbundle or .sparsebundle)
@@ -1414,25 +1413,25 @@ perform_cleanup() {
# Check if bundle is already mounted by looking at hdiutil info
local bundle_name=$(basename "$bundle")
local mounted_path=$(hdiutil info 2>/dev/null | grep -A 5 "image-path.*$bundle_name" | grep "/Volumes/" | awk '{print $1}' | head -1 || echo "")
local mounted_path=$(hdiutil info 2> /dev/null | grep -A 5 "image-path.*$bundle_name" | grep "/Volumes/" | awk '{print $1}' | head -1 || echo "")
if [[ -n "$mounted_path" && -d "$mounted_path" ]]; then
# Bundle is already mounted, safe to check
while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue
local size_kb=$(du -sk "$inprogress_file" 2>/dev/null | awk '{print $1}' || echo "0")
local size_kb=$(du -sk "$inprogress_file" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
local backup_name=$(basename "$inprogress_file")
if [[ "$DRY_RUN" != "true" ]]; then
if command -v tmutil >/dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2>/dev/null; then
if command -v tmutil > /dev/null 2>&1; then
if tmutil delete "$inprogress_file" 2> /dev/null; then
local size_human=$(bytes_to_human "$((size_kb * 1024))")
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Failed APFS backup in $bundle_name: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++))
((files_cleaned++))
((total_size_cleaned+=size_kb))
((total_size_cleaned += size_kb))
((total_items++))
note_activity
else
@@ -1446,7 +1445,7 @@ perform_cleanup() {
note_activity
fi
fi
done < <(find "$mounted_path" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2>/dev/null || true)
done < <(find "$mounted_path" -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi
done
done
@@ -1525,12 +1524,11 @@ perform_cleanup() {
printf '\n'
}
main() {
# Parse args (only dry-run and help for minimal impact)
for arg in "$@"; do
case "$arg" in
"--dry-run"|"-n")
"--dry-run" | "-n")
DRY_RUN=true
;;
"--whitelist")
@@ -1538,7 +1536,7 @@ main() {
manage_whitelist
exit 0
;;
"--help"|"-h")
"--help" | "-h")
echo "Mole - Deeper system cleanup"
echo "Usage: clean.sh [options]"
echo ""

View File

@@ -20,14 +20,14 @@ is_touchid_configured() {
if [[ ! -f "$PAM_SUDO_FILE" ]]; then
return 1
fi
grep -q "pam_tid.so" "$PAM_SUDO_FILE" 2>/dev/null
grep -q "pam_tid.so" "$PAM_SUDO_FILE" 2> /dev/null
}
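For context, the marker this function greps for is the standard Touch ID PAM entry; on a configured system `/etc/pam.d/sudo` typically contains a line like the following (exact spacing may vary):

```
auth       sufficient     pam_tid.so
```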
# Check if system supports Touch ID
supports_touchid() {
# Check if bioutil exists and has Touch ID capability
if command -v bioutil &>/dev/null; then
bioutil -r 2>/dev/null | grep -q "Touch ID" && return 0
if command -v bioutil &> /dev/null; then
bioutil -r 2> /dev/null | grep -q "Touch ID" && return 0
fi
# Fallback: check if running on Apple Silicon or modern Intel Mac
@@ -39,7 +39,7 @@ supports_touchid() {
# For Intel Macs, check if it's 2018 or later (approximation)
local model_year
model_year=$(system_profiler SPHardwareDataType 2>/dev/null | grep "Model Identifier" | grep -o "[0-9]\{4\}" | head -1)
model_year=$(system_profiler SPHardwareDataType 2> /dev/null | grep "Model Identifier" | grep -o "[0-9]\{4\}" | head -1)
if [[ -n "$model_year" ]] && [[ "$model_year" -ge 2018 ]]; then
return 0
fi
@@ -76,7 +76,7 @@ enable_touchid() {
fi
# Create backup and apply changes
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2>/dev/null; then
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2> /dev/null; then
log_error "Failed to create backup"
return 1
fi
@@ -97,12 +97,12 @@ enable_touchid() {
' "$PAM_SUDO_FILE" > "$temp_file"
# Apply the changes
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2>/dev/null; then
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2> /dev/null; then
echo -e "${GREEN}${ICON_SUCCESS} Touch ID enabled${NC} ${GRAY}- try: sudo ls${NC}"
echo ""
return 0
else
rm -f "$temp_file" 2>/dev/null || true
rm -f "$temp_file" 2> /dev/null || true
log_error "Failed to enable Touch ID"
return 1
fi
@@ -116,7 +116,7 @@ disable_touchid() {
fi
# Create backup and remove configuration
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2>/dev/null; then
if ! sudo cp "$PAM_SUDO_FILE" "${PAM_SUDO_FILE}.mole-backup" 2> /dev/null; then
log_error "Failed to create backup"
return 1
fi
@@ -126,12 +126,12 @@ disable_touchid() {
temp_file=$(mktemp)
grep -v "pam_tid.so" "$PAM_SUDO_FILE" > "$temp_file"
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2>/dev/null; then
if sudo mv "$temp_file" "$PAM_SUDO_FILE" 2> /dev/null; then
echo -e "${GREEN}${ICON_SUCCESS} Touch ID disabled${NC}"
echo ""
return 0
else
rm -f "$temp_file" 2>/dev/null || true
rm -f "$temp_file" 2> /dev/null || true
log_error "Failed to disable Touch ID"
return 1
fi
@@ -174,11 +174,11 @@ show_menu() {
echo ""
case "$key" in
$'\e') # ESC
$'\e') # ESC
return 0
;;
""|$'\n'|$'\r') # Enter
printf "\r\033[K" # Clear the prompt line
"" | $'\n' | $'\r') # Enter
printf "\r\033[K" # Clear the prompt line
disable_touchid
;;
*)
@@ -191,11 +191,11 @@ show_menu() {
IFS= read -r -s -n1 key || key=""
case "$key" in
$'\e') # ESC
$'\e') # ESC
return 0
;;
""|$'\n'|$'\r') # Enter
printf "\r\033[K" # Clear the prompt line
"" | $'\n' | $'\r') # Enter
printf "\r\033[K" # Clear the prompt line
enable_touchid
;;
*)
@@ -220,7 +220,7 @@ main() {
status)
show_status
;;
help|--help|-h)
help | --help | -h)
show_help
;;
"")

View File

@@ -56,10 +56,9 @@ if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
fi
# Initialize global variables
selected_apps=() # Global array for app selection
selected_apps=() # Global array for app selection
declare -a apps_data=()
declare -a selection_state=()
current_line=0
total_items=0
files_cleaned=0
total_size_cleaned=0
@@ -68,16 +67,16 @@ total_size_cleaned=0
get_app_last_used() {
local app_path="$1"
local last_used
last_used=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2>/dev/null)
last_used=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2> /dev/null)
if [[ "$last_used" == "(null)" || -z "$last_used" ]]; then
echo "Never"
else
local last_used_epoch
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$last_used" "+%s" 2>/dev/null)
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$last_used" "+%s" 2> /dev/null)
local current_epoch
current_epoch=$(date "+%s")
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 ))
local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -eq 0 ]]; then
echo "Today"
@@ -86,10 +85,10 @@ get_app_last_used() {
elif [[ $days_ago -lt 30 ]]; then
echo "${days_ago} days ago"
elif [[ $days_ago -lt 365 ]]; then
local months_ago=$(( days_ago / 30 ))
local months_ago=$((days_ago / 30))
echo "${months_ago} month(s) ago"
else
local years_ago=$(( days_ago / 365 ))
local years_ago=$((days_ago / 365))
echo "${years_ago} year(s) ago"
fi
fi
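The conversion above combines Spotlight metadata with BSD `date` parsing; condensed into a standalone sketch (the app path is only an example):

```bash
# Read Spotlight's last-used date for an app and report its age in days.
app="/Applications/Safari.app" # example app
raw=$(mdls -name kMDItemLastUsedDate -raw "$app" 2> /dev/null)
if [[ -n "$raw" && "$raw" != "(null)" ]]; then
    epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$raw" "+%s" 2> /dev/null || echo 0)
    echo "Last used $((($(date +%s) - epoch) / 86400)) day(s) ago"
fi
```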
@@ -101,22 +100,24 @@ scan_applications() {
local cache_dir="$HOME/.cache/mole"
local cache_file="$cache_dir/app_scan_cache"
local cache_meta="$cache_dir/app_scan_meta"
local cache_ttl=3600 # 1 hour cache validity
local cache_ttl=3600 # 1 hour cache validity
mkdir -p "$cache_dir" 2>/dev/null
mkdir -p "$cache_dir" 2> /dev/null
# Quick count of current apps (system + user directories)
local current_app_count
current_app_count=$(
(find /Applications -name "*.app" -maxdepth 1 2>/dev/null;
find ~/Applications -name "*.app" -maxdepth 1 2>/dev/null) | wc -l | tr -d ' '
(
find /Applications -name "*.app" -maxdepth 1 2> /dev/null
find ~/Applications -name "*.app" -maxdepth 1 2> /dev/null
) | wc -l | tr -d ' '
)
# Check if cache is valid unless explicitly disabled
if [[ -f "$cache_file" && -f "$cache_meta" ]]; then
local cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2>/dev/null || echo 0)))
local cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2> /dev/null || echo 0)))
local cached_app_count
cached_app_count=$(cat "$cache_meta" 2>/dev/null || echo "0")
cached_app_count=$(cat "$cache_meta" 2> /dev/null || echo "0")
# Cache is valid if: age < TTL AND app count matches
if [[ $cache_age -lt $cache_ttl && "$cached_app_count" == "$current_app_count" ]]; then
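That validity rule, written out as a self-contained check (paths shortened for illustration):

```bash
# Reuse the cache only when it is fresh and the app count has not changed.
cache_file="$HOME/.cache/mole/app_scan_cache"
cache_meta="$HOME/.cache/mole/app_scan_meta"
cache_ttl=3600
current_app_count=$(find /Applications -maxdepth 1 -name "*.app" 2> /dev/null | wc -l | tr -d ' ')
cache_age=$(($(date +%s) - $(stat -f%m "$cache_file" 2> /dev/null || echo 0)))
cached_app_count=$(cat "$cache_meta" 2> /dev/null || echo 0)
if ((cache_age < cache_ttl)) && [[ "$cached_app_count" == "$current_app_count" ]]; then
    echo "cache hit"
fi
```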
@@ -149,26 +150,26 @@ scan_applications() {
local bundle_id="unknown"
local display_name="$app_name"
if [[ -f "$app_path/Contents/Info.plist" ]]; then
bundle_id=$(defaults read "$app_path/Contents/Info.plist" CFBundleIdentifier 2>/dev/null || echo "unknown")
bundle_id=$(defaults read "$app_path/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "unknown")
# Try to get English name from bundle info
local bundle_executable
bundle_executable=$(defaults read "$app_path/Contents/Info.plist" CFBundleExecutable 2>/dev/null)
bundle_executable=$(defaults read "$app_path/Contents/Info.plist" CFBundleExecutable 2> /dev/null)
# Smart display name selection - prefer descriptive names over generic ones
local candidates=()
# Get all potential names
local bundle_display_name
bundle_display_name=$(plutil -extract CFBundleDisplayName raw "$app_path/Contents/Info.plist" 2>/dev/null)
bundle_display_name=$(plutil -extract CFBundleDisplayName raw "$app_path/Contents/Info.plist" 2> /dev/null)
local bundle_name
bundle_name=$(plutil -extract CFBundleName raw "$app_path/Contents/Info.plist" 2>/dev/null)
bundle_name=$(plutil -extract CFBundleName raw "$app_path/Contents/Info.plist" 2> /dev/null)
# Check if executable name is generic/technical (should be avoided)
local is_generic_executable=false
if [[ -n "$bundle_executable" ]]; then
case "$bundle_executable" in
"pake"|"Electron"|"electron"|"nwjs"|"node"|"helper"|"main"|"app"|"binary")
"pake" | "Electron" | "electron" | "nwjs" | "node" | "helper" | "main" | "app" | "binary")
is_generic_executable=true
;;
esac
@@ -219,19 +220,19 @@ scan_applications() {
app_data_tuples+=("${app_path}|${app_name}|${bundle_id}|${display_name}")
done < <(
# Scan both system and user application directories
find /Applications -name "*.app" -maxdepth 1 -print0 2>/dev/null
find ~/Applications -name "*.app" -maxdepth 1 -print0 2>/dev/null
find /Applications -name "*.app" -maxdepth 1 -print0 2> /dev/null
find ~/Applications -name "*.app" -maxdepth 1 -print0 2> /dev/null
)
# Second pass: process each app with parallel size calculation
local app_count=0
local total_apps=${#app_data_tuples[@]}
local max_parallel=10 # Process 10 apps in parallel
local max_parallel=10 # Process 10 apps in parallel
local pids=()
local inline_loading=false
if [[ "${MOLE_INLINE_LOADING:-}" == "1" || "${MOLE_INLINE_LOADING:-}" == "true" ]]; then
inline_loading=true
printf "\033[H" >&2 # Position cursor at top of screen
printf "\033[H" >&2 # Position cursor at top of screen
fi
# Process app metadata extraction function
@@ -245,7 +246,7 @@ scan_applications() {
# Parallel size calculation
local app_size="N/A"
if [[ -d "$app_path" ]]; then
app_size=$(du -sh "$app_path" 2>/dev/null | cut -f1 || echo "N/A")
app_size=$(du -sh "$app_path" 2> /dev/null | cut -f1 || echo "N/A")
fi
# Get real last used date from macOS metadata
@@ -254,13 +255,13 @@ scan_applications() {
if [[ -d "$app_path" ]]; then
local metadata_date
metadata_date=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2>/dev/null)
metadata_date=$(mdls -name kMDItemLastUsedDate -raw "$app_path" 2> /dev/null)
if [[ "$metadata_date" != "(null)" && -n "$metadata_date" ]]; then
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$metadata_date" "+%s" 2>/dev/null || echo "0")
last_used_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S %z" "$metadata_date" "+%s" 2> /dev/null || echo "0")
if [[ $last_used_epoch -gt 0 ]]; then
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 ))
local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -eq 0 ]]; then
last_used="Today"
@@ -269,21 +270,21 @@ scan_applications() {
elif [[ $days_ago -lt 7 ]]; then
last_used="${days_ago} days ago"
elif [[ $days_ago -lt 30 ]]; then
local weeks_ago=$(( days_ago / 7 ))
local weeks_ago=$((days_ago / 7))
[[ $weeks_ago -eq 1 ]] && last_used="1 week ago" || last_used="${weeks_ago} weeks ago"
elif [[ $days_ago -lt 365 ]]; then
local months_ago=$(( days_ago / 30 ))
local months_ago=$((days_ago / 30))
[[ $months_ago -eq 1 ]] && last_used="1 month ago" || last_used="${months_ago} months ago"
else
local years_ago=$(( days_ago / 365 ))
local years_ago=$((days_ago / 365))
[[ $years_ago -eq 1 ]] && last_used="1 year ago" || last_used="${years_ago} years ago"
fi
fi
else
# Fallback to file modification time
last_used_epoch=$(stat -f%m "$app_path" 2>/dev/null || echo "0")
last_used_epoch=$(stat -f%m "$app_path" 2> /dev/null || echo "0")
if [[ $last_used_epoch -gt 0 ]]; then
local days_ago=$(( (current_epoch - last_used_epoch) / 86400 ))
local days_ago=$(((current_epoch - last_used_epoch) / 86400))
if [[ $days_ago -lt 30 ]]; then
last_used="Recent"
elif [[ $days_ago -lt 365 ]]; then
@@ -319,15 +320,15 @@ scan_applications() {
((spinner_idx++))
# Wait if we've hit max parallel limit
if (( ${#pids[@]} >= max_parallel )); then
wait "${pids[0]}" 2>/dev/null
pids=("${pids[@]:1}") # Remove first pid
if ((${#pids[@]} >= max_parallel)); then
wait "${pids[0]}" 2> /dev/null
pids=("${pids[@]:1}") # Remove first pid
fi
done
# Wait for remaining background processes
for pid in "${pids[@]}"; do
wait "$pid" 2>/dev/null
wait "$pid" 2> /dev/null
done
# Check if we found any applications
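The scan throttles its background size jobs with a simple pid queue; the pattern, reduced to a self-contained sketch (`slow_task` is a placeholder function):

```bash
# Run jobs in the background, but never more than max_parallel at once.
slow_task() { sleep 1; } # placeholder for the real work (e.g. du -sk)
max_parallel=10
pids=()
for item in a b c d; do
    slow_task "$item" &
    pids+=("$!")
    if ((${#pids[@]} >= max_parallel)); then
        wait "${pids[0]}" 2> /dev/null # block on the oldest job
        pids=("${pids[@]:1}")          # drop it from the queue
    fi
done
for pid in "${pids[@]}"; do wait "$pid" 2> /dev/null; done
```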
@@ -347,12 +348,15 @@ scan_applications() {
fi
# Sort by last used (oldest first) and cache the result
sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || { rm -f "$temp_file"; return 1; }
sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || {
rm -f "$temp_file"
return 1
}
rm -f "$temp_file"
# Update cache with app count metadata
cp "${temp_file}.sorted" "$cache_file" 2>/dev/null || true
echo "$current_app_count" > "$cache_meta" 2>/dev/null || true
cp "${temp_file}.sorted" "$cache_file" 2> /dev/null || true
echo "$current_app_count" > "$cache_meta" 2> /dev/null || true
# Verify sorted file exists before returning
if [[ -f "${temp_file}.sorted" ]]; then
@@ -415,12 +419,12 @@ uninstall_applications() {
echo ""
# Check if app is running (use app path for precise matching)
if pgrep -f "$app_path" >/dev/null 2>&1; then
if pgrep -f "$app_path" > /dev/null 2>&1; then
echo -e "${YELLOW}${ICON_ERROR} $app_name is currently running${NC}"
read -p " Force quit $app_name? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
pkill -f "$app_path" 2>/dev/null || true
pkill -f "$app_path" 2> /dev/null || true
sleep 2
else
echo -e " ${BLUE}${ICON_EMPTY}${NC} Skipped $app_name"
@@ -438,7 +442,7 @@ uninstall_applications() {
# Calculate total size
local app_size_kb
app_size_kb=$(du -sk "$app_path" 2>/dev/null | awk '{print $1}' || echo "0")
app_size_kb=$(du -sk "$app_path" 2> /dev/null | awk '{print $1}' || echo "0")
local related_size_kb
related_size_kb=$(calculate_total_size "$related_files")
local system_size_kb
@@ -461,12 +465,13 @@ uninstall_applications() {
done <<< "$system_files"
fi
if [[ $total_kb -gt 1048576 ]]; then # > 1GB
local size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}')
elif [[ $total_kb -gt 1024 ]]; then # > 1MB
local size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}')
local size_display
if [[ $total_kb -gt 1048576 ]]; then # > 1GB
size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}')
elif [[ $total_kb -gt 1024 ]]; then # > 1MB
size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}')
else
local size_display="${total_kb}KB"
size_display="${total_kb}KB"
fi
echo -e " ${BLUE}Total size: $size_display${NC}"
@@ -477,7 +482,7 @@ uninstall_applications() {
if [[ $REPLY =~ ^[Yy]$ ]]; then
# Remove the application
if rm -rf "$app_path" 2>/dev/null; then
if rm -rf "$app_path" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed application"
else
echo -e " ${RED}${ICON_ERROR}${NC} Failed to remove $app_path"
@@ -487,7 +492,7 @@ uninstall_applications() {
# Remove user-level related files
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if rm -rf "$file" 2>/dev/null; then
if rm -rf "$file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(echo "$file" | sed "s|$HOME|~|" | xargs basename)"
fi
fi
@@ -498,7 +503,7 @@ uninstall_applications() {
echo -e " ${BLUE}${ICON_SOLID}${NC} Admin access required for system files"
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if sudo rm -rf "$file" 2>/dev/null; then
if sudo rm -rf "$file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed $(basename "$file")"
else
echo -e " ${YELLOW}${ICON_ERROR}${NC} Failed to remove: $file"
@@ -521,12 +526,13 @@ uninstall_applications() {
echo -e "${PURPLE}${ICON_ARROW} Uninstallation Summary${NC}"
if [[ $total_size_freed -gt 0 ]]; then
if [[ $total_size_freed -gt 1048576 ]]; then # > 1GB
local freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}')
elif [[ $total_size_freed -gt 1024 ]]; then # > 1MB
local freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}')
local freed_display
if [[ $total_size_freed -gt 1048576 ]]; then # > 1GB
freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}')
elif [[ $total_size_freed -gt 1024 ]]; then # > 1MB
freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}')
else
local freed_display="${total_size_freed}KB"
freed_display="${total_size_freed}KB"
fi
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Freed $freed_display of disk space"
@@ -544,8 +550,8 @@ cleanup() {
unset MOLE_ALT_SCREEN_ACTIVE
fi
if [[ -n "${sudo_keepalive_pid:-}" ]]; then
kill "$sudo_keepalive_pid" 2>/dev/null || true
wait "$sudo_keepalive_pid" 2>/dev/null || true
kill "$sudo_keepalive_pid" 2> /dev/null || true
wait "$sudo_keepalive_pid" 2> /dev/null || true
sudo_keepalive_pid=""
fi
show_cursor
@@ -634,7 +640,9 @@ main() {
clear
local selection_count=${#selected_apps[@]}
if [[ $selection_count -eq 0 ]]; then
echo "No apps selected"; rm -f "$apps_file"; return 0
echo "No apps selected"
rm -f "$apps_file"
return 0
fi
# Show selected apps, max 3 per line
echo -e "${BLUE}${ICON_CONFIRM}${NC} Selected ${selection_count} app(s):"
@@ -644,7 +652,7 @@ main() {
IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app"
local display_item="${app_name}(${size})"
if (( idx % 3 == 0 )); then
if ((idx % 3 == 0)); then
# Start new line
[[ -n "$line" ]] && echo " $line"
line="$display_item"

View File

@@ -13,14 +13,28 @@ NC='\033[0m'
# Simple spinner
_SPINNER_PID=""
start_line_spinner() {
local msg="$1"; [[ ! -t 1 ]] && { echo -e "${BLUE}|${NC} $msg"; return; }
local chars="${MO_SPINNER_CHARS:-|/-\\}"; [[ -z "$chars" ]] && chars='|/-\\'
local msg="$1"
[[ ! -t 1 ]] && {
echo -e "${BLUE}|${NC} $msg"
return
}
local chars="${MO_SPINNER_CHARS:-|/-\\}"
[[ -z "$chars" ]] && chars='|/-\\'
local i=0
( while true; do c="${chars:$((i % ${#chars})):1}"; printf "\r${BLUE}%s${NC} %s" "$c" "$msg"; ((i++)); sleep 0.12; done ) &
(while true; do
c="${chars:$((i % ${#chars})):1}"
printf "\r${BLUE}%s${NC} %s" "$c" "$msg"
((i++))
sleep 0.12
done) &
_SPINNER_PID=$!
}
stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then kill "$_SPINNER_PID" 2>/dev/null || true; wait "$_SPINNER_PID" 2>/dev/null || true; _SPINNER_PID=""; printf "\r\033[K"; fi; }
stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then
kill "$_SPINNER_PID" 2> /dev/null || true
wait "$_SPINNER_PID" 2> /dev/null || true
_SPINNER_PID=""
printf "\r\033[K"
fi; }
# Verbosity (0 = quiet, 1 = verbose)
VERBOSE=1
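A typical call site for these spinner helpers (the `sleep` stands in for real work):

```bash
start_line_spinner "Fetching Mole source..."
sleep 2 # placeholder for the actual download/clone
stop_line_spinner
echo "done"
```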
@@ -105,7 +119,7 @@ resolve_source_dir() {
trap "rm -rf '$tmp'" EXIT
start_line_spinner "Fetching Mole source..."
if command -v curl >/dev/null 2>&1; then
if command -v curl > /dev/null 2>&1; then
if curl -fsSL -o "$tmp/mole.tar.gz" "https://github.com/tw93/mole/archive/refs/heads/main.tar.gz"; then
stop_line_spinner
tar -xzf "$tmp/mole.tar.gz" -C "$tmp"
@@ -119,8 +133,8 @@ resolve_source_dir() {
stop_line_spinner
start_line_spinner "Cloning Mole source..."
if command -v git >/dev/null 2>&1; then
if git clone --depth=1 https://github.com/tw93/mole.git "$tmp/mole" >/dev/null 2>&1; then
if command -v git > /dev/null 2>&1; then
if git clone --depth=1 https://github.com/tw93/mole.git "$tmp/mole" > /dev/null 2>&1; then
stop_line_spinner
SOURCE_DIR="$tmp/mole"
return 0
@@ -142,7 +156,7 @@ get_source_version() {
get_installed_version() {
local binary="$INSTALL_DIR/mole"
if [[ -x "$binary" ]]; then
"$binary" --version 2>/dev/null | awk 'NF {print $NF; exit}'
"$binary" --version 2> /dev/null | awk 'NF {print $NF; exit}'
fi
}
@@ -166,11 +180,11 @@ parse_args() {
uninstall_mole
exit 0
;;
--verbose|-v)
--verbose | -v)
VERBOSE=1
shift 1
;;
--help|-h)
--help | -h)
show_help
exit 0
;;
@@ -192,7 +206,7 @@ check_requirements() {
fi
# Check if already installed via Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then
if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
if [[ "$ACTION" == "update" ]]; then
return 0
fi
@@ -330,7 +344,7 @@ verify_installation() {
if [[ -x "$INSTALL_DIR/mole" ]] && [[ -f "$CONFIG_DIR/lib/common.sh" ]]; then
# Test if mole command works
if "$INSTALL_DIR/mole" --help >/dev/null 2>&1; then
if "$INSTALL_DIR/mole" --help > /dev/null 2>&1; then
return 0
else
log_warning "Mole command installed but may not be working properly"
@@ -369,7 +383,7 @@ print_usage_summary() {
fi
echo ""
local message="Mole ${action} successfully"
if [[ "$action" == "updated" && -n "$previous_version" && -n "$new_version" && "$previous_version" != "$new_version" ]]; then
@@ -433,15 +447,15 @@ uninstall_mole() {
# Additional safety: never delete system critical paths (check first)
case "$CONFIG_DIR" in
/|/usr|/usr/local|/usr/local/bin|/usr/local/lib|/usr/local/share|\
/Library|/System|/bin|/sbin|/etc|/var|/opt|"$HOME"|"$HOME/Library"|\
/usr/local/lib/*|/usr/local/share/*|/Library/*|/System/*)
/ | /usr | /usr/local | /usr/local/bin | /usr/local/lib | /usr/local/share | \
/Library | /System | /bin | /sbin | /etc | /var | /opt | "$HOME" | "$HOME/Library" | \
/usr/local/lib/* | /usr/local/share/* | /Library/* | /System/*)
is_safe=0
;;
*)
# Safe patterns: must be in user's home and end with 'mole'
if [[ "$CONFIG_DIR" == "$HOME/.config/mole" ]] ||
[[ "$CONFIG_DIR" == "$HOME"/.*/mole ]]; then
[[ "$CONFIG_DIR" == "$HOME"/.*/mole ]]; then
is_safe=1
fi
;;
@@ -457,7 +471,9 @@ uninstall_mole() {
echo " $CONFIG_DIR"
else
echo ""
read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r; echo ""; if [[ $REPLY =~ ^[Yy]$ ]]; then
read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
rm -rf "$CONFIG_DIR"
log_success "Removed configuration"
else
@@ -495,9 +511,9 @@ perform_install() {
perform_update() {
check_requirements
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then
if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
# Try to use shared function if available (when running from installed Mole)
resolve_source_dir 2>/dev/null || true
resolve_source_dir 2> /dev/null || true
if [[ -f "$SOURCE_DIR/lib/common.sh" ]]; then
# shellcheck disable=SC1090,SC1091
source "$SOURCE_DIR/lib/common.sh"
@@ -527,7 +543,7 @@ perform_update() {
if echo "$upgrade_output" | grep -q "already installed"; then
local current_version
current_version=$(brew list --versions mole 2>/dev/null | awk '{print $2}')
current_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo -e "${GREEN}${NC} Already on latest version (${current_version:-$VERSION})"
elif echo "$upgrade_output" | grep -q "Error:"; then
log_error "Homebrew upgrade failed"
@@ -536,7 +552,7 @@ perform_update() {
else
echo "$upgrade_output" | grep -Ev "^(==>|Updating Homebrew|Warning:)" || true
local new_version
new_version=$(brew list --versions mole 2>/dev/null | awk '{print $2}')
new_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo -e "${GREEN}${NC} Updated to latest version (${new_version:-$VERSION})"
fi
@@ -571,9 +587,21 @@ perform_update() {
# Update with minimal output (suppress info/success, show errors only)
local old_verbose=$VERBOSE
VERBOSE=0
create_directories || { VERBOSE=$old_verbose; log_error "Failed to create directories"; exit 1; }
install_files || { VERBOSE=$old_verbose; log_error "Failed to install files"; exit 1; }
verify_installation || { VERBOSE=$old_verbose; log_error "Failed to verify installation"; exit 1; }
create_directories || {
VERBOSE=$old_verbose
log_error "Failed to create directories"
exit 1
}
install_files || {
VERBOSE=$old_verbose
log_error "Failed to install files"
exit 1
}
verify_installation || {
VERBOSE=$old_verbose
log_error "Failed to verify installation"
exit 1
}
setup_path
VERBOSE=$old_verbose

View File

@@ -33,17 +33,17 @@ batch_uninstall_applications() {
IFS='|' read -r epoch app_path app_name bundle_id size last_used <<< "$selected_app"
# Check if app is running (use app path for precise matching)
if pgrep -f "$app_path" >/dev/null 2>&1; then
if pgrep -f "$app_path" > /dev/null 2>&1; then
running_apps+=("$app_name")
fi
# Check if app requires sudo to delete
if [[ ! -w "$(dirname "$app_path")" ]] || [[ "$(stat -f%Su "$app_path" 2>/dev/null)" == "root" ]]; then
if [[ ! -w "$(dirname "$app_path")" ]] || [[ "$(stat -f%Su "$app_path" 2> /dev/null)" == "root" ]]; then
sudo_apps+=("$app_name")
fi
# Calculate size for summary
local app_size_kb=$(du -sk "$app_path" 2>/dev/null | awk '{print $1}' || echo "0")
local app_size_kb=$(du -sk "$app_path" 2> /dev/null | awk '{print $1}' || echo "0")
local related_files=$(find_app_files "$bundle_id" "$app_name")
local related_size_kb=$(calculate_total_size "$related_files")
local total_kb=$((app_size_kb + related_size_kb))
@@ -104,13 +104,13 @@ batch_uninstall_applications() {
IFS= read -r -s -n1 key || key=""
case "$key" in
$'\e'|q|Q)
$'\e' | q | Q)
echo ""
echo ""
return 0
;;
""|$'\n'|$'\r'|y|Y)
printf "\r\033[K" # Clear the prompt line
"" | $'\n' | $'\r' | y | Y)
printf "\r\033[K" # Clear the prompt line
;;
*)
echo ""
@@ -122,14 +122,18 @@ batch_uninstall_applications() {
# User confirmed, now request sudo access if needed
if [[ ${#sudo_apps[@]} -gt 0 ]]; then
# Check if sudo is already cached
if ! sudo -n true 2>/dev/null; then
if ! sudo -n true 2> /dev/null; then
if ! request_sudo_access "Admin required for system apps: ${sudo_apps[*]}"; then
echo ""
log_error "Admin access denied"
return 1
fi
fi
(while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null) &
(while true; do
sudo -n true
sleep 60
kill -0 "$$" || exit
done 2> /dev/null) &
sudo_keepalive_pid=$!
fi
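The keep-alive started here follows a common sudo pattern; a minimal sketch, including the kind of cleanup that the later trap performs:

```bash
# Refresh sudo credentials in the background until the parent process exits.
sudo -v # prime credentials once up front
(while true; do
    sudo -n true
    sleep 60
    kill -0 "$$" || exit # stop once the parent shell is gone
done 2> /dev/null) &
sudo_keepalive_pid=$!
trap 'kill "$sudo_keepalive_pid" 2> /dev/null || true' EXIT
```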
@@ -148,22 +152,22 @@ batch_uninstall_applications() {
local related_files=$(printf '%s' "$encoded_files" | base64 -d)
local reason=""
local needs_sudo=false
[[ ! -w "$(dirname "$app_path")" || "$(stat -f%Su "$app_path" 2>/dev/null)" == "root" ]] && needs_sudo=true
[[ ! -w "$(dirname "$app_path")" || "$(stat -f%Su "$app_path" 2> /dev/null)" == "root" ]] && needs_sudo=true
if ! force_kill_app "$app_name" "$app_path"; then
reason="still running"
fi
if [[ -z "$reason" ]]; then
if [[ "$needs_sudo" == true ]]; then
sudo rm -rf "$app_path" 2>/dev/null || reason="remove failed"
sudo rm -rf "$app_path" 2> /dev/null || reason="remove failed"
else
rm -rf "$app_path" 2>/dev/null || reason="remove failed"
rm -rf "$app_path" 2> /dev/null || reason="remove failed"
fi
fi
if [[ -z "$reason" ]]; then
local files_removed=0
while IFS= read -r file; do
[[ -n "$file" && -e "$file" ]] || continue
rm -rf "$file" 2>/dev/null && ((files_removed++)) || true
rm -rf "$file" 2> /dev/null && ((files_removed++)) || true
done <<< "$related_files"
((total_size_freed += total_kb))
((success_count++))
@@ -202,7 +206,7 @@ batch_uninstall_applications() {
for app_name in "${success_items[@]}"; do
local display_item="${GREEN}${app_name}${NC}"
if (( idx % 3 == 0 )); then
if ((idx % 3 == 0)); then
# Start new line
if [[ -n "$current_line" ]]; then
summary_details+=("$current_line")
@@ -267,8 +271,8 @@ batch_uninstall_applications() {
# Clean up sudo keepalive if it was started
if [[ -n "${sudo_keepalive_pid:-}" ]]; then
kill "$sudo_keepalive_pid" 2>/dev/null || true
wait "$sudo_keepalive_pid" 2>/dev/null || true
kill "$sudo_keepalive_pid" 2> /dev/null || true
wait "$sudo_keepalive_pid" 2> /dev/null || true
sudo_keepalive_pid=""
fi

File diff suppressed because it is too large

View File

@@ -4,8 +4,8 @@
set -euo pipefail
# Terminal control functions
enter_alt_screen() { tput smcup 2>/dev/null || true; }
leave_alt_screen() { tput rmcup 2>/dev/null || true; }
enter_alt_screen() { tput smcup 2> /dev/null || true; }
leave_alt_screen() { tput rmcup 2> /dev/null || true; }
# Main paginated multi-select menu function
paginated_multi_select() {
@@ -47,16 +47,16 @@ paginated_multi_select() {
# Preserve original TTY settings so we can restore them reliably
local original_stty=""
if [[ -t 0 ]] && command -v stty >/dev/null 2>&1; then
original_stty=$(stty -g 2>/dev/null || echo "")
if [[ -t 0 ]] && command -v stty > /dev/null 2>&1; then
original_stty=$(stty -g 2> /dev/null || echo "")
fi
restore_terminal() {
show_cursor
if [[ -n "${original_stty-}" ]]; then
stty "${original_stty}" 2>/dev/null || stty sane 2>/dev/null || stty echo icanon 2>/dev/null || true
stty "${original_stty}" 2> /dev/null || stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
else
stty sane 2>/dev/null || stty echo icanon 2>/dev/null || true
stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
fi
if [[ "${external_alt_screen:-false}" == false ]]; then
leave_alt_screen
@@ -72,14 +72,14 @@ paginated_multi_select() {
# Interrupt handler
handle_interrupt() {
cleanup
exit 130 # Standard exit code for Ctrl+C
exit 130 # Standard exit code for Ctrl+C
}
trap cleanup EXIT
trap handle_interrupt INT TERM
# Setup terminal - preserve interrupt character
stty -echo -icanon intr ^C 2>/dev/null || true
stty -echo -icanon intr ^C 2> /dev/null || true
if [[ $external_alt_screen == false ]]; then
enter_alt_screen
# Clear screen once on entry to alt screen
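The terminal handling in this menu boils down to saving the TTY state, switching to raw-ish input, and restoring it on exit; a compact sketch of that round trip:

```bash
# Save current TTY settings, take over input, and restore cleanly afterwards.
original_stty=""
[[ -t 0 ]] && original_stty=$(stty -g 2> /dev/null || echo "")
stty -echo -icanon intr ^C 2> /dev/null || true # no echo, per-key reads, Ctrl+C still works
# ... read keys and drive the menu here ...
if [[ -n "$original_stty" ]]; then
    stty "$original_stty" 2> /dev/null || stty sane 2> /dev/null || true
else
    stty sane 2> /dev/null || true
fi
```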
@@ -108,7 +108,7 @@ paginated_multi_select() {
draw_menu() {
# Move to home position without clearing (reduces flicker)
printf "\033[H" >&2
# Clear each line as we go instead of clearing entire screen
local clear_line="\r\033[2K"
@@ -169,7 +169,7 @@ paginated_multi_select() {
# Clear any remaining lines at bottom
printf "${clear_line}\n" >&2
printf "${clear_line}${GRAY}${ICON_NAV_UP}/${ICON_NAV_DOWN}${NC} Navigate ${GRAY}|${NC} ${GRAY}Space${NC} Select ${GRAY}|${NC} ${GRAY}Enter${NC} Confirm ${GRAY}|${NC} ${GRAY}Q/ESC${NC} Quit\n" >&2
# Clear one more line to ensure no artifacts
printf "${clear_line}" >&2
}
@@ -177,7 +177,7 @@ paginated_multi_select() {
# Show help screen
show_help() {
printf "\033[H\033[J" >&2
cat >&2 <<EOF
cat >&2 << EOF
Help - Navigation Controls
==========================
@@ -269,7 +269,7 @@ EOF
local IFS=','
final_result="${selected_indices[*]}"
fi
# Remove the trap to avoid cleanup on normal exit
trap - EXIT INT TERM

View File

@@ -114,7 +114,6 @@ patterns_equivalent() {
return 1
}
load_whitelist() {
local -a patterns=()
@@ -163,14 +162,13 @@ is_whitelisted() {
if [[ "$check_pattern" == "$existing_expanded" ]]; then
return 0
fi
if [[ "$check_pattern" == $existing_expanded ]]; then
if [[ "$check_pattern" == "$existing_expanded" ]]; then
return 0
fi
done
return 1
}
manage_whitelist() {
manage_whitelist_categories
}
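The quoting change in `is_whitelisted` hinges on how `[[ ... == ... ]]` treats its right-hand side: unquoted it is a glob pattern, quoted it is a literal string. A quick illustration:

```bash
pattern="$HOME/Library/Caches/WhitelistedApp*"
path="$HOME/Library/Caches/WhitelistedApp/data.tmp"
[[ "$path" == $pattern ]] && echo "glob match"            # unquoted: pattern matching
[[ "$path" == "$pattern" ]] || echo "not a literal match" # quoted: exact comparison
```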
@@ -286,7 +284,6 @@ manage_whitelist_categories() {
printf '\n'
}
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
manage_whitelist
fi

85
mole
View File

@@ -28,7 +28,7 @@ MOLE_TAGLINE="can dig deep to clean your Mac."
# Get latest version from remote repository
get_latest_version() {
curl -fsSL --connect-timeout 2 --max-time 3 -H "Cache-Control: no-cache" \
"https://raw.githubusercontent.com/tw93/mole/main/mole" 2>/dev/null | \
"https://raw.githubusercontent.com/tw93/mole/main/mole" 2> /dev/null |
grep '^VERSION=' | head -1 | sed 's/VERSION="\(.*\)"/\1/'
}
@@ -37,11 +37,11 @@ check_for_updates() {
local cache="$HOME/.cache/mole/version_check"
local msg_cache="$HOME/.cache/mole/update_message"
local ttl="${MO_UPDATE_CHECK_TTL:-3600}"
mkdir -p "$(dirname "$cache")" 2>/dev/null
mkdir -p "$(dirname "$cache")" 2> /dev/null
# Skip if checked recently
if [[ -f "$cache" ]]; then
local age=$(($(date +%s) - $(stat -f%m "$cache" 2>/dev/null || echo 0)))
local age=$(($(date +%s) - $(stat -f%m "$cache" 2> /dev/null || echo 0)))
[[ $age -lt $ttl ]] && return
fi
@@ -55,9 +55,9 @@ check_for_updates() {
else
echo -n > "$msg_cache"
fi
touch "$cache" 2>/dev/null
touch "$cache" 2> /dev/null
) &
disown 2>/dev/null || true
disown 2> /dev/null || true
}
# Show update notification if available
@@ -93,7 +93,7 @@ animate_mole_intro() {
local -a mole_lines=()
while IFS= read -r line; do
mole_lines+=("$line")
done <<'EOF'
done << 'EOF'
/\_/\
____/ o o \
/~____ =o= /
@@ -108,7 +108,7 @@ EOF
local body_color="${PURPLE}"
local ground_color="${GREEN}"
for idx in "${!mole_lines[@]}"; do
if (( idx < body_cutoff )); then
if ((idx < body_cutoff)); then
printf "%s\n" "${body_color}${mole_lines[$idx]}${NC}"
else
printf "%s\n" "${ground_color}${mole_lines[$idx]}${NC}"
@@ -150,7 +150,7 @@ show_help() {
# Simple update function
update_mole() {
# Check if installed via Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then
if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
update_via_homebrew "$VERSION"
exit 0
fi
@@ -180,17 +180,20 @@ update_mole() {
local installer_url="https://raw.githubusercontent.com/tw93/mole/main/install.sh"
local tmp_installer
tmp_installer="$(mktemp_file)" || { log_error "Update failed"; exit 1; }
tmp_installer="$(mktemp_file)" || {
log_error "Update failed"
exit 1
}
# Download installer with progress
if command -v curl >/dev/null 2>&1; then
if command -v curl > /dev/null 2>&1; then
if ! curl -fsSL --connect-timeout 10 --max-time 60 "$installer_url" -o "$tmp_installer" 2>&1; then
if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer"
log_error "Update failed. Check network connection."
exit 1
fi
elif command -v wget >/dev/null 2>&1; then
elif command -v wget > /dev/null 2>&1; then
if ! wget --timeout=10 --tries=3 -qO "$tmp_installer" "$installer_url" 2>&1; then
if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer"
@@ -209,7 +212,7 @@ update_mole() {
# Determine install directory
local mole_path
mole_path="$(command -v mole 2>/dev/null || echo "$0")"
mole_path="$(command -v mole 2> /dev/null || echo "$0")"
local install_dir
install_dir="$(cd "$(dirname "$mole_path")" && pwd)"
@@ -231,7 +234,7 @@ update_mole() {
# Only show success message if installer didn't already do so
if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then
local new_version
new_version=$("$mole_path" --version 2>/dev/null | awk 'NF {print $NF}' || echo "")
new_version=$("$mole_path" --version 2> /dev/null | awk 'NF {print $NF}' || echo "")
printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})"
else
printf '\n'
@@ -247,7 +250,7 @@ update_mole() {
fi
if ! printf '%s\n' "$install_output" | grep -Eq "Updated to latest version|Already on latest version"; then
local new_version
new_version=$("$mole_path" --version 2>/dev/null | awk 'NF {print $NF}' || echo "")
new_version=$("$mole_path" --version 2> /dev/null | awk 'NF {print $NF}' || echo "")
printf '\n%s\n\n' "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-unknown})"
else
printf '\n'
@@ -256,7 +259,7 @@ update_mole() {
if [[ -t 1 ]]; then stop_inline_spinner; fi
rm -f "$tmp_installer"
log_error "Update failed"
echo "$install_output" | tail -10 >&2 # Show last 10 lines of error
echo "$install_output" | tail -10 >&2 # Show last 10 lines of error
exit 1
fi
fi
@@ -279,7 +282,7 @@ remove_mole() {
local -a alias_installs=()
# Check Homebrew
if command -v brew >/dev/null 2>&1 && brew list mole >/dev/null 2>&1; then
if command -v brew > /dev/null 2>&1 && brew list mole > /dev/null 2>&1; then
is_homebrew=true
fi
@@ -318,7 +321,9 @@ remove_mole() {
printf '\n'
# Check if anything to remove
if [[ "$is_homebrew" == "false" && ${#manual_installs[@]:-0} -eq 0 && ${#alias_installs[@]:-0} -eq 0 ]]; then
local manual_count=${#manual_installs[@]}
local alias_count=${#alias_installs[@]}
if [[ "$is_homebrew" == "false" && ${manual_count:-0} -eq 0 && ${alias_count:-0} -eq 0 ]]; then
printf '%s\n\n' "${YELLOW}No Mole installation detected${NC}"
exit 0
fi
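The count-first rewrite above avoids expanding possibly-empty arrays under `set -u`; a sketch of the safer shape (assuming Bash 3.2 behaviour, where `"${arr[@]}"` on an empty array can trip `set -u`):

```bash
set -u
manual_installs=()
manual_count=${#manual_installs[@]} # the length of an empty array is simply 0
if [[ ${manual_count:-0} -gt 0 ]]; then
    printf '%s\n' "${manual_installs[@]}" # only expanded when elements exist
fi
```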
@@ -343,8 +348,8 @@ remove_mole() {
echo ""
exit 0
;;
""|$'\n'|$'\r')
printf "\r\033[K" # Clear the prompt line
"" | $'\n' | $'\r')
printf "\r\033[K" # Clear the prompt line
# Continue with removal
;;
*)
@@ -357,32 +362,32 @@ remove_mole() {
# Remove Homebrew installation (silent)
local has_error=false
if [[ "$is_homebrew" == "true" ]]; then
if ! brew uninstall mole >/dev/null 2>&1; then
if ! brew uninstall mole > /dev/null 2>&1; then
has_error=true
fi
fi
# Remove manual installations (silent)
if [[ ${#manual_installs[@]:-0} -gt 0 ]]; then
if [[ ${manual_count:-0} -gt 0 ]]; then
for install in "${manual_installs[@]}"; do
if [[ -f "$install" ]]; then
rm -f "$install" 2>/dev/null || has_error=true
rm -f "$install" 2> /dev/null || has_error=true
fi
done
fi
if [[ ${#alias_installs[@]} -gt 0 ]]; then
if [[ ${alias_count:-0} -gt 0 ]]; then
for alias in "${alias_installs[@]}"; do
if [[ -f "$alias" ]]; then
rm -f "$alias" 2>/dev/null || true
rm -f "$alias" 2> /dev/null || true
fi
done
fi
# Clean up cache first (silent)
if [[ -d "$HOME/.cache/mole" ]]; then
rm -rf "$HOME/.cache/mole" 2>/dev/null || true
rm -rf "$HOME/.cache/mole" 2> /dev/null || true
fi
# Clean up configuration last (silent)
if [[ -d "$HOME/.config/mole" ]]; then
rm -rf "$HOME/.config/mole" 2>/dev/null || true
rm -rf "$HOME/.config/mole" 2> /dev/null || true
fi
# Show final result
@@ -400,7 +405,7 @@ remove_mole() {
# Display main menu options with minimal refresh to avoid flicker
show_main_menu() {
local selected="${1:-1}"
local _full_draw="${2:-true}" # Kept for compatibility (unused)
local _full_draw="${2:-true}" # Kept for compatibility (unused)
local banner="${MAIN_MENU_BANNER:-}"
local update_message="${MAIN_MENU_UPDATE_MESSAGE:-}"
@@ -410,7 +415,7 @@ show_main_menu() {
MAIN_MENU_BANNER="$banner"
fi
printf '\033[H' # Move cursor to home
printf '\033[H' # Move cursor to home
local line=""
# Leading spacer
@@ -452,13 +457,13 @@ interactive_main_menu() {
# Show intro animation only once per terminal tab
if [[ -t 1 ]]; then
local tty_name
tty_name=$(tty 2>/dev/null || echo "")
tty_name=$(tty 2> /dev/null || echo "")
if [[ -n "$tty_name" ]]; then
local flag_file
flag_file="/tmp/mole_intro_$(echo "$tty_name" | tr -c '[:alnum:]_' '_')"
if [[ ! -f "$flag_file" ]]; then
animate_mole_intro
touch "$flag_file" 2>/dev/null || true
touch "$flag_file" 2> /dev/null || true
fi
fi
fi
@@ -472,7 +477,7 @@ interactive_main_menu() {
MAIN_MENU_BANNER="$brand_banner"
if [[ -f "$msg_cache" && -s "$msg_cache" ]]; then
update_message="$(cat "$msg_cache" 2>/dev/null || echo "")"
update_message="$(cat "$msg_cache" 2> /dev/null || echo "")"
fi
MAIN_MENU_UPDATE_MESSAGE="$update_message"
@@ -501,7 +506,7 @@ interactive_main_menu() {
case "$key" in
"UP") ((current_option > 1)) && ((current_option--)) ;;
"DOWN") ((current_option < 5)) && ((current_option++)) ;;
"ENTER"|"$current_option")
"ENTER" | "$current_option")
show_cursor
case $current_option in
1)
@@ -509,7 +514,11 @@ interactive_main_menu() {
;;
2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
4) clear; show_help; exit 0 ;;
4)
clear
show_help
exit 0
;;
5) cleanup_and_exit ;;
esac
;;
@@ -522,7 +531,11 @@ interactive_main_menu() {
;;
2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
4) clear; show_help; exit 0 ;;
4)
clear
show_help
exit 0
;;
5) cleanup_and_exit ;;
esac
;;
@@ -552,11 +565,11 @@ main() {
remove_mole
exit 0
;;
"help"|"--help"|"-h")
"help" | "--help" | "-h")
show_help
exit 0
;;
"version"|"--version"|"-V")
"version" | "--version" | "-V")
show_version
exit 0
;;

60
scripts/format.sh Executable file
View File

@@ -0,0 +1,60 @@
#!/bin/bash
# Format all shell scripts in the Mole project
#
# Usage:
# ./scripts/format.sh # Format all scripts
# ./scripts/format.sh --check # Check only, don't modify
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
CHECK_ONLY=false
# Parse arguments
if [[ "${1:-}" == "--check" ]]; then
CHECK_ONLY=true
elif [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
cat << 'EOF'
Usage: ./scripts/format.sh [--check]
Format shell scripts using shfmt.
Options:
--check Check formatting without modifying files
--help Show this help
Install: brew install shfmt
EOF
exit 0
fi
# Check if shfmt is installed
if ! command -v shfmt > /dev/null 2>&1; then
echo "Error: shfmt not installed"
echo "Install: brew install shfmt"
exit 1
fi
# Find all shell scripts
cd "$PROJECT_ROOT"
# shfmt options: -i 4 (4 spaces), -ci (indent switch cases), -sr (space after redirect)
if [[ "$CHECK_ONLY" == "true" ]]; then
echo "Checking formatting..."
if shfmt -i 4 -ci -sr -d . > /dev/null 2>&1; then
echo "✓ All scripts properly formatted"
exit 0
else
echo "✗ Some scripts need formatting:"
shfmt -i 4 -ci -sr -d .
echo ""
echo "Run './scripts/format.sh' to fix"
exit 1
fi
else
echo "Formatting scripts..."
shfmt -i 4 -ci -sr -w .
echo "✓ Done"
fi
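To make the flag comment above concrete, here is roughly how a small snippet changes under these settings (the snippet itself is made up for illustration):

```bash
# Before formatting
case "$x" in
a|b) command -v git >/dev/null 2>&1 ;;
esac

# After shfmt -i 4 -ci -sr
case "$x" in
    a | b) command -v git > /dev/null 2>&1 ;;
esac
```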

44
scripts/install-hooks.sh Executable file
View File

@@ -0,0 +1,44 @@
#!/bin/bash
# Install git hooks for Mole project
#
# Usage:
# ./scripts/install-hooks.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'
cd "$PROJECT_ROOT"
# Check if this is a git repository
if [ ! -d ".git" ]; then
echo "Error: Not a git repository"
exit 1
fi
echo -e "${BLUE}Installing git hooks...${NC}"
# Install pre-commit hook
if [ -f ".git/hooks/pre-commit" ]; then
echo "Pre-commit hook already exists, creating backup..."
mv .git/hooks/pre-commit .git/hooks/pre-commit.backup
fi
ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit
echo -e "${GREEN}✓ Pre-commit hook installed${NC}"
echo ""
echo "The hook will:"
echo " • Auto-format shell scripts before commit"
echo " • Run shellcheck on changed files"
echo " • Show warnings but won't block commits"
echo ""
echo "To uninstall:"
echo " rm .git/hooks/pre-commit"
echo ""

67
scripts/pre-commit.sh Executable file
View File

@@ -0,0 +1,67 @@
#!/bin/bash
# Git pre-commit hook for Mole
# Automatically formats shell scripts before commit
#
# Installation:
# ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
# chmod +x .git/hooks/pre-commit
#
# Or use the install script:
# ./scripts/install-hooks.sh
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Only check shell files that are staged
STAGED_SH_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep -E '\.sh$|^mole$' || true)
if [ -z "$STAGED_SH_FILES" ]; then
exit 0
fi
echo -e "${YELLOW}Running pre-commit checks on shell files...${NC}"
# Check if shfmt is installed
if ! command -v shfmt &> /dev/null; then
echo -e "${RED}shfmt is not installed. Install with: brew install shfmt${NC}"
exit 1
fi
# Check if shellcheck is installed
if ! command -v shellcheck &> /dev/null; then
echo -e "${RED}shellcheck is not installed. Install with: brew install shellcheck${NC}"
exit 1
fi
NEEDS_FORMAT=0
# Check formatting
for file in $STAGED_SH_FILES; do
if ! shfmt -i 4 -ci -sr -d "$file" > /dev/null 2>&1; then
echo -e "${YELLOW}Formatting $file...${NC}"
shfmt -i 4 -ci -sr -w "$file"
git add "$file"
NEEDS_FORMAT=1
fi
done
# Run shellcheck
for file in $STAGED_SH_FILES; do
if ! shellcheck -S warning "$file" > /dev/null 2>&1; then
echo -e "${YELLOW}ShellCheck warnings in $file:${NC}"
shellcheck -S warning "$file"
echo -e "${YELLOW}Continuing with commit (warnings are non-critical)...${NC}"
fi
done
if [ $NEEDS_FORMAT -eq 1 ]; then
echo -e "${GREEN}✓ Files formatted and re-staged${NC}"
fi
echo -e "${GREEN}✓ Pre-commit checks passed${NC}"
exit 0

View File

@@ -1,31 +1,31 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-analyze-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-analyze-home.XXXXXX")"
export HOME
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
}
@test "scan_directories lists largest folders first" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/bin/analyze.sh"
@@ -40,12 +40,12 @@ scan_directories "$root" "$output_file" 1
head -n1 "$output_file"
EOF
[ "$status" -eq 0 ]
[[ "$output" == *"Large"* ]]
[ "$status" -eq 0 ]
[[ "$output" == *"Large"* ]]
}
@test "aggregate_by_directory sums child sizes per parent" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/bin/analyze.sh"
@@ -65,7 +65,7 @@ aggregate_by_directory "$input_file" "$output_file"
cat "$output_file"
EOF
[ "$status" -eq 0 ]
[[ "$output" == *"3072|$HOME/group/a/"* ]]
[[ "$output" == *"512|$HOME/group/b/"* ]]
[ "$status" -eq 0 ]
[[ "$output" == *"3072|$HOME/group/a/"* ]]
[[ "$output" == *"512|$HOME/group/b/"* ]]
}

View File

@@ -1,60 +1,60 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-home.XXXXXX")"
export HOME
mkdir -p "$HOME"
mkdir -p "$HOME"
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
export TERM="xterm-256color"
rm -rf "${HOME:?}"/*
rm -rf "$HOME/Library" "$HOME/.config"
mkdir -p "$HOME/Library/Caches" "$HOME/.config/mole"
export TERM="xterm-256color"
rm -rf "${HOME:?}"/*
rm -rf "$HOME/Library" "$HOME/.config"
mkdir -p "$HOME/Library/Caches" "$HOME/.config/mole"
}
@test "mo clean --dry-run skips system cleanup in non-interactive mode" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"Dry Run Mode"* ]]
[[ "$output" != *"Deep system-level cleanup"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"Dry Run Mode"* ]]
[[ "$output" != *"Deep system-level cleanup"* ]]
}
@test "mo clean --dry-run reports user cache without deleting it" {
mkdir -p "$HOME/Library/Caches/TestApp"
echo "cache data" > "$HOME/Library/Caches/TestApp/cache.tmp"
mkdir -p "$HOME/Library/Caches/TestApp"
echo "cache data" > "$HOME/Library/Caches/TestApp/cache.tmp"
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"User app cache"* ]]
[[ "$output" == *"Potential space"* ]]
[ -f "$HOME/Library/Caches/TestApp/cache.tmp" ]
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"User app cache"* ]]
[[ "$output" == *"Potential space"* ]]
[ -f "$HOME/Library/Caches/TestApp/cache.tmp" ]
}
@test "mo clean honors whitelist entries" {
mkdir -p "$HOME/Library/Caches/WhitelistedApp"
echo "keep me" > "$HOME/Library/Caches/WhitelistedApp/data.tmp"
mkdir -p "$HOME/Library/Caches/WhitelistedApp"
echo "keep me" > "$HOME/Library/Caches/WhitelistedApp/data.tmp"
cat > "$HOME/.config/mole/whitelist" <<EOF
cat > "$HOME/.config/mole/whitelist" << EOF
$HOME/Library/Caches/WhitelistedApp*
EOF
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"Protected: 1"* ]]
[ -f "$HOME/Library/Caches/WhitelistedApp/data.tmp" ]
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
[ "$status" -eq 0 ]
[[ "$output" == *"Protected: 1"* ]]
[ -f "$HOME/Library/Caches/WhitelistedApp/data.tmp" ]
}

View File

@@ -1,80 +1,80 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-cli-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-cli-home.XXXXXX")"
export HOME
mkdir -p "$HOME"
mkdir -p "$HOME"
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
rm -rf "$HOME/.config"
mkdir -p "$HOME"
rm -rf "$HOME/.config"
mkdir -p "$HOME"
}
@test "mole --help prints command overview" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" --help
[ "$status" -eq 0 ]
[[ "$output" == *"mo clean"* ]]
[[ "$output" == *"mo analyze"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/mole" --help
[ "$status" -eq 0 ]
[[ "$output" == *"mo clean"* ]]
[[ "$output" == *"mo analyze"* ]]
}
@test "mole --version reports script version" {
expected_version="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION=\"\(.*\)\"/\1/')"
run env HOME="$HOME" "$PROJECT_ROOT/mole" --version
[ "$status" -eq 0 ]
[[ "$output" == *"$expected_version"* ]]
expected_version="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION=\"\(.*\)\"/\1/')"
run env HOME="$HOME" "$PROJECT_ROOT/mole" --version
[ "$status" -eq 0 ]
[[ "$output" == *"$expected_version"* ]]
}
@test "mole unknown command returns error" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" unknown-command
[ "$status" -ne 0 ]
[[ "$output" == *"Unknown command: unknown-command"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/mole" unknown-command
[ "$status" -ne 0 ]
[[ "$output" == *"Unknown command: unknown-command"* ]]
}
@test "clean.sh --help shows usage details" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/clean.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Mole - Deeper system cleanup"* ]]
[[ "$output" == *"--dry-run"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/bin/clean.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Mole - Deeper system cleanup"* ]]
[[ "$output" == *"--dry-run"* ]]
}
@test "uninstall.sh --help highlights controls" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/uninstall.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Usage: mole uninstall"* ]]
[[ "$output" == *"Keyboard Controls"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/bin/uninstall.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Usage: mole uninstall"* ]]
[[ "$output" == *"Keyboard Controls"* ]]
}
@test "analyze.sh --help outlines explorer features" {
run env HOME="$HOME" "$PROJECT_ROOT/bin/analyze.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Interactive disk space explorer"* ]]
[[ "$output" == *"mole analyze"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/bin/analyze.sh" --help
[ "$status" -eq 0 ]
[[ "$output" == *"Interactive disk space explorer"* ]]
[[ "$output" == *"mole analyze"* ]]
}
@test "touchid --help describes configuration options" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid --help
[ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]]
[[ "$output" == *"mo touchid enable"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid --help
[ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]]
[[ "$output" == *"mo touchid enable"* ]]
}
@test "touchid status reports current configuration" {
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status
[ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]]
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status
[ "$status" -eq 0 ]
[[ "$output" == *"Touch ID"* ]]
}

View File

@@ -1,123 +1,124 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-home.XXXXXX")"
export HOME
mkdir -p "$HOME"
mkdir -p "$HOME"
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
rm -rf "$HOME/.config"
mkdir -p "$HOME"
rm -rf "$HOME/.config"
mkdir -p "$HOME"
}
teardown() {
unset MO_SPINNER_CHARS || true
unset MO_SPINNER_CHARS || true
}
@test "mo_spinner_chars returns default sequence when unset" {
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "|/-\\" ]
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "|/-\\" ]
}
@test "mo_spinner_chars respects MO_SPINNER_CHARS override" {
export MO_SPINNER_CHARS="abcd"
result="$(HOME="$HOME" MO_SPINNER_CHARS="$MO_SPINNER_CHARS" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "abcd" ]
export MO_SPINNER_CHARS="abcd"
result="$(HOME="$HOME" MO_SPINNER_CHARS="$MO_SPINNER_CHARS" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; mo_spinner_chars")"
[ "$result" = "abcd" ]
}
@test "detect_architecture maps current CPU to friendly label" {
expected="Intel"
if [[ "$(uname -m)" == "arm64" ]]; then
expected="Apple Silicon"
fi
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; detect_architecture")"
[ "$result" = "$expected" ]
expected="Intel"
if [[ "$(uname -m)" == "arm64" ]]; then
expected="Apple Silicon"
fi
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; detect_architecture")"
[ "$result" = "$expected" ]
}
@test "get_free_space returns a non-empty value" {
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; get_free_space")"
[[ -n "$result" ]]
result="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; get_free_space")"
[[ -n "$result" ]]
}
@test "log_info prints message and appends to log file" {
local message="Informational message from test"
local stdout_output
stdout_output="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_info '$message'")"
[[ "$stdout_output" == *"$message"* ]]
local message="Informational message from test"
local stdout_output
stdout_output="$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_info '$message'")"
[[ "$stdout_output" == *"$message"* ]]
local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]]
grep -q "INFO: $message" "$log_file"
local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]]
grep -q "INFO: $message" "$log_file"
}
@test "log_error writes to stderr and log file" {
local message="Something went wrong"
local stderr_file="$HOME/log_error_stderr.txt"
local message="Something went wrong"
local stderr_file="$HOME/log_error_stderr.txt"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_error '$message' 1>/dev/null 2>'$stderr_file'"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/common.sh'; log_error '$message' 1>/dev/null 2>'$stderr_file'"
[[ -s "$stderr_file" ]]
grep -q "$message" "$stderr_file"
[[ -s "$stderr_file" ]]
grep -q "$message" "$stderr_file"
local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]]
grep -q "ERROR: $message" "$log_file"
local log_file="$HOME/.config/mole/mole.log"
[[ -f "$log_file" ]]
grep -q "ERROR: $message" "$log_file"
}
@test "bytes_to_human converts byte counts into readable units" {
output="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
output="$(
HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh"
bytes_to_human 512
bytes_to_human 2048
bytes_to_human $((5 * 1024 * 1024))
bytes_to_human $((3 * 1024 * 1024 * 1024))
EOF
)"
)"
bytes_lines=()
while IFS= read -r line; do
bytes_lines+=("$line")
done <<< "$output"
bytes_lines=()
while IFS= read -r line; do
bytes_lines+=("$line")
done <<< "$output"
[ "${bytes_lines[0]}" = "512B" ]
[ "${bytes_lines[1]}" = "2KB" ]
[ "${bytes_lines[2]}" = "5.0MB" ]
[ "${bytes_lines[3]}" = "3.00GB" ]
[ "${bytes_lines[0]}" = "512B" ]
[ "${bytes_lines[1]}" = "2KB" ]
[ "${bytes_lines[2]}" = "5.0MB" ]
[ "${bytes_lines[3]}" = "3.00GB" ]
}
@test "create_temp_file and create_temp_dir are tracked and cleaned" {
HOME="$HOME" bash --noprofile --norc <<'EOF'
HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh"
create_temp_file > "$HOME/temp_file_path.txt"
create_temp_dir > "$HOME/temp_dir_path.txt"
cleanup_temp_files
EOF
file_path="$(cat "$HOME/temp_file_path.txt")"
dir_path="$(cat "$HOME/temp_dir_path.txt")"
[ ! -e "$file_path" ]
[ ! -e "$dir_path" ]
rm -f "$HOME/temp_file_path.txt" "$HOME/temp_dir_path.txt"
file_path="$(cat "$HOME/temp_file_path.txt")"
dir_path="$(cat "$HOME/temp_dir_path.txt")"
[ ! -e "$file_path" ]
[ ! -e "$dir_path" ]
rm -f "$HOME/temp_file_path.txt" "$HOME/temp_dir_path.txt"
}
@test "parallel_execute runs worker across all items" {
output_file="$HOME/parallel_output.txt"
HOME="$HOME" bash --noprofile --norc <<'EOF'
output_file="$HOME/parallel_output.txt"
HOME="$HOME" bash --noprofile --norc << 'EOF'
source "$PROJECT_ROOT/lib/common.sh"
worker() {
echo "$1" >> "$HOME/parallel_output.txt"
@@ -125,16 +126,16 @@ worker() {
parallel_execute 2 worker "first" "second" "third"
EOF
sort "$output_file" > "$output_file.sorted"
results=()
while IFS= read -r line; do
results+=("$line")
done < "$output_file.sorted"
sort "$output_file" > "$output_file.sorted"
results=()
while IFS= read -r line; do
results+=("$line")
done < "$output_file.sorted"
[ "${#results[@]}" -eq 3 ]
joined=" ${results[*]} "
[[ "$joined" == *" first "* ]]
[[ "$joined" == *" second "* ]]
[[ "$joined" == *" third "* ]]
rm -f "$output_file" "$output_file.sorted"
[ "${#results[@]}" -eq 3 ]
joined=" ${results[*]} "
[[ "$joined" == *" first "* ]]
[[ "$joined" == *" second "* ]]
[[ "$joined" == *" third "* ]]
rm -f "$output_file" "$output_file.sorted"
}

View File

@@ -4,41 +4,41 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if command -v shellcheck > /dev/null 2>&1; then
    SHELLCHECK_TARGETS=()
    while IFS= read -r file; do
        SHELLCHECK_TARGETS+=("$file")
    done < <(find "$PROJECT_ROOT/tests" -type f \( -name '*.bats' -o -name '*.sh' \) | sort)

    if [[ ${#SHELLCHECK_TARGETS[@]} -gt 0 ]]; then
        shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${SHELLCHECK_TARGETS[@]}"
    else
        echo "No shell files to lint under tests/." >&2
    fi
else
    echo "shellcheck not found; skipping linting." >&2
fi

if command -v bats > /dev/null 2>&1; then
    cd "$PROJECT_ROOT"

    if [[ -z "${TERM:-}" ]]; then
        export TERM="xterm-256color"
    fi

    if [[ $# -eq 0 ]]; then
        set -- tests
    fi

    if [[ -t 1 ]]; then
        bats -p "$@"
    else
        TERM="${TERM:-xterm-256color}" bats --tap "$@"
    fi
else
    cat << 'EOF' >&2
bats is required to run Mole's test suite.
Install via Homebrew with 'brew install bats-core' or via npm with 'npm install -g bats'.
EOF
    exit 1
fi

View File

@@ -1,79 +1,81 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-uninstall-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-uninstall-home.XXXXXX")"
export HOME
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
}
create_app_artifacts() {
mkdir -p "$HOME/Applications/TestApp.app"
mkdir -p "$HOME/Library/Application Support/TestApp"
mkdir -p "$HOME/Library/Caches/TestApp"
mkdir -p "$HOME/Library/Containers/com.example.TestApp"
mkdir -p "$HOME/Library/Preferences"
touch "$HOME/Library/Preferences/com.example.TestApp.plist"
mkdir -p "$HOME/Library/Preferences/ByHost"
touch "$HOME/Library/Preferences/ByHost/com.example.TestApp.ABC123.plist"
mkdir -p "$HOME/Library/Saved Application State/com.example.TestApp.savedState"
mkdir -p "$HOME/Applications/TestApp.app"
mkdir -p "$HOME/Library/Application Support/TestApp"
mkdir -p "$HOME/Library/Caches/TestApp"
mkdir -p "$HOME/Library/Containers/com.example.TestApp"
mkdir -p "$HOME/Library/Preferences"
touch "$HOME/Library/Preferences/com.example.TestApp.plist"
mkdir -p "$HOME/Library/Preferences/ByHost"
touch "$HOME/Library/Preferences/ByHost/com.example.TestApp.ABC123.plist"
mkdir -p "$HOME/Library/Saved Application State/com.example.TestApp.savedState"
}
@test "find_app_files discovers user-level leftovers" {
create_app_artifacts
create_app_artifacts
result="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
result="$(
HOME="$HOME" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
find_app_files "com.example.TestApp" "TestApp"
EOF
)"
)"
[[ "$result" == *"Application Support/TestApp"* ]]
[[ "$result" == *"Caches/TestApp"* ]]
[[ "$result" == *"Preferences/com.example.TestApp.plist"* ]]
[[ "$result" == *"Saved Application State/com.example.TestApp.savedState"* ]]
[[ "$result" == *"Containers/com.example.TestApp"* ]]
[[ "$result" == *"Application Support/TestApp"* ]]
[[ "$result" == *"Caches/TestApp"* ]]
[[ "$result" == *"Preferences/com.example.TestApp.plist"* ]]
[[ "$result" == *"Saved Application State/com.example.TestApp.savedState"* ]]
[[ "$result" == *"Containers/com.example.TestApp"* ]]
}
@test "calculate_total_size returns aggregate kilobytes" {
mkdir -p "$HOME/sized"
dd if=/dev/zero of="$HOME/sized/file1" bs=1024 count=1 >/dev/null 2>&1
dd if=/dev/zero of="$HOME/sized/file2" bs=1024 count=2 >/dev/null 2>&1
mkdir -p "$HOME/sized"
dd if=/dev/zero of="$HOME/sized/file1" bs=1024 count=1 > /dev/null 2>&1
dd if=/dev/zero of="$HOME/sized/file2" bs=1024 count=2 > /dev/null 2>&1
result="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
result="$(
HOME="$HOME" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
files="$(printf '%s\n%s\n' "$HOME/sized/file1" "$HOME/sized/file2")"
calculate_total_size "$files"
EOF
)"
)"
# Result should be >=3 KB (some filesystems allocate slightly more)
[ "$result" -ge 3 ]
# Result should be >=3 KB (some filesystems allocate slightly more)
[ "$result" -ge 3 ]
}
@test "batch_uninstall_applications removes selected app data" {
create_app_artifacts
create_app_artifacts
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
source "$PROJECT_ROOT/lib/common.sh"
source "$PROJECT_ROOT/lib/batch_uninstall.sh"
@@ -111,5 +113,5 @@ printf '\n' | batch_uninstall_applications >/dev/null
[[ ! -f "$HOME/Library/Preferences/com.example.TestApp.plist" ]] || exit 1
EOF
[ "$status" -eq 0 ]
[ "$status" -eq 0 ]
}

View File

@@ -1,31 +1,31 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-update-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-update-home.XXXXXX")"
export HOME
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
export TERM="dumb"
rm -rf "${HOME:?}"/*
mkdir -p "$HOME"
}
@test "update_via_homebrew reports already on latest version" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
MOLE_TEST_BREW_UPDATE_OUTPUT="Updated 0 formulae"
MOLE_TEST_BREW_UPGRADE_OUTPUT="Warning: mole 1.7.9 already installed"
@@ -44,12 +44,12 @@ source "$PROJECT_ROOT/lib/common.sh"
update_via_homebrew "1.7.9"
EOF
[ "$status" -eq 0 ]
[[ "$output" == *"Already on latest version"* ]]
[ "$status" -eq 0 ]
[[ "$output" == *"Already on latest version"* ]]
}
@test "update_mole skips download when already latest" {
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="$HOME/fake-bin:/usr/bin:/bin" TERM="dumb" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="$HOME/fake-bin:/usr/bin:/bin" TERM="dumb" bash --noprofile --norc << 'EOF'
set -euo pipefail
mkdir -p "$HOME/fake-bin"
cat > "$HOME/fake-bin/curl" <<'SCRIPT'
@@ -85,17 +85,17 @@ chmod +x "$HOME/fake-bin/brew"
"$PROJECT_ROOT/mole" update
EOF
[ "$status" -eq 0 ]
[[ "$output" == *"Already on latest version"* ]]
[ "$status" -eq 0 ]
[[ "$output" == *"Already on latest version"* ]]
}
@test "remove_mole deletes manual binaries and caches" {
mkdir -p "$HOME/.local/bin"
touch "$HOME/.local/bin/mole"
touch "$HOME/.local/bin/mo"
mkdir -p "$HOME/.config/mole" "$HOME/.cache/mole"
mkdir -p "$HOME/.local/bin"
touch "$HOME/.local/bin/mole"
touch "$HOME/.local/bin/mo"
mkdir -p "$HOME/.config/mole" "$HOME/.cache/mole"
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="/usr/bin:/bin" bash --noprofile --norc <<'EOF'
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PATH="/usr/bin:/bin" bash --noprofile --norc << 'EOF'
set -euo pipefail
start_inline_spinner() { :; }
stop_inline_spinner() { :; }
@@ -103,9 +103,9 @@ export -f start_inline_spinner stop_inline_spinner
printf '\n' | "$PROJECT_ROOT/mole" remove
EOF
[ "$status" -eq 0 ]
[ ! -f "$HOME/.local/bin/mole" ]
[ ! -f "$HOME/.local/bin/mo" ]
[ ! -d "$HOME/.config/mole" ]
[ ! -d "$HOME/.cache/mole" ]
[ "$status" -eq 0 ]
[ ! -f "$HOME/.local/bin/mole" ]
[ ! -f "$HOME/.local/bin/mo" ]
[ ! -d "$HOME/.config/mole" ]
[ ! -d "$HOME/.cache/mole" ]
}

View File

@@ -1,99 +1,99 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
ORIGINAL_HOME="${HOME:-}"
export ORIGINAL_HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-whitelist-home.XXXXXX")"
export HOME
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-whitelist-home.XXXXXX")"
export HOME
mkdir -p "$HOME"
mkdir -p "$HOME"
}
teardown_file() {
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
rm -rf "$HOME"
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
export HOME="$ORIGINAL_HOME"
fi
}
setup() {
rm -rf "$HOME/.config"
mkdir -p "$HOME"
WHITELIST_PATH="$HOME/.config/mole/whitelist"
rm -rf "$HOME/.config"
mkdir -p "$HOME"
WHITELIST_PATH="$HOME/.config/mole/whitelist"
}
@test "patterns_equivalent treats paths with tilde expansion as equal" {
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/test\""; then
status=0
else
status=$?
fi
[ "$status" -eq 0 ]
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/test\""; then
status=0
else
status=$?
fi
[ "$status" -eq 0 ]
}
@test "patterns_equivalent distinguishes different paths" {
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/other\""; then
status=0
else
status=$?
fi
[ "$status" -ne 0 ]
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; patterns_equivalent '~/.cache/test' \"\$HOME/.cache/other\""; then
status=0
else
status=$?
fi
[ "$status" -ne 0 ]
}
@test "save_whitelist_patterns keeps unique entries and preserves header" {
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/foo\" \"\$HOME/.cache/foo\" \"\$HOME/.cache/bar\""
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/foo\" \"\$HOME/.cache/foo\" \"\$HOME/.cache/bar\""
[[ -f "$WHITELIST_PATH" ]]
[[ -f "$WHITELIST_PATH" ]]
lines=()
while IFS= read -r line; do
lines+=("$line")
done < "$WHITELIST_PATH"
# Header is at least two lines (comments), plus two unique patterns
[ "${#lines[@]}" -ge 4 ]
# Ensure duplicate was not written twice
occurrences=$(grep -c "$HOME/.cache/foo" "$WHITELIST_PATH")
[ "$occurrences" -eq 1 ]
lines=()
while IFS= read -r line; do
lines+=("$line")
done < "$WHITELIST_PATH"
# Header is at least two lines (comments), plus two unique patterns
[ "${#lines[@]}" -ge 4 ]
# Ensure duplicate was not written twice
occurrences=$(grep -c "$HOME/.cache/foo" "$WHITELIST_PATH")
[ "$occurrences" -eq 1 ]
}
@test "load_whitelist falls back to defaults when config missing" {
rm -f "$WHITELIST_PATH"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; rm -f \"\$HOME/.config/mole/whitelist\"; load_whitelist; printf '%s\n' \"\${CURRENT_WHITELIST_PATTERNS[@]}\"" > "$HOME/current_whitelist.txt"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; printf '%s\n' \"\${DEFAULT_WHITELIST_PATTERNS[@]}\"" > "$HOME/default_whitelist.txt"
rm -f "$WHITELIST_PATH"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; rm -f \"\$HOME/.config/mole/whitelist\"; load_whitelist; printf '%s\n' \"\${CURRENT_WHITELIST_PATTERNS[@]}\"" > "$HOME/current_whitelist.txt"
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; printf '%s\n' \"\${DEFAULT_WHITELIST_PATTERNS[@]}\"" > "$HOME/default_whitelist.txt"
current=()
while IFS= read -r line; do
current+=("$line")
done < "$HOME/current_whitelist.txt"
current=()
while IFS= read -r line; do
current+=("$line")
done < "$HOME/current_whitelist.txt"
defaults=()
while IFS= read -r line; do
defaults+=("$line")
done < "$HOME/default_whitelist.txt"
defaults=()
while IFS= read -r line; do
defaults+=("$line")
done < "$HOME/default_whitelist.txt"
[ "${#current[@]}" -eq "${#defaults[@]}" ]
[ "${current[0]}" = "${defaults[0]/\$HOME/$HOME}" ]
[ "${#current[@]}" -eq "${#defaults[@]}" ]
[ "${current[0]}" = "${defaults[0]/\$HOME/$HOME}" ]
}
@test "is_whitelisted matches saved patterns exactly" {
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/unique-pattern\""; then
status=0
else
status=$?
fi
[ "$status" -eq 0 ]
local status
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/unique-pattern\""; then
status=0
else
status=$?
fi
[ "$status" -eq 0 ]
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/other-pattern\""; then
status=0
else
status=$?
fi
[ "$status" -ne 0 ]
if HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/whitelist_manager.sh'; save_whitelist_patterns \"\$HOME/.cache/unique-pattern\"; load_whitelist; is_whitelisted \"\$HOME/.cache/other-pattern\""; then
status=0
else
status=$?
fi
[ "$status" -ne 0 ]
}

View File

@@ -1,16 +1,16 @@
#!/usr/bin/env bats
setup_file() {
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
export PROJECT_ROOT
}
@test "shellcheck passes for test scripts" {
if ! command -v shellcheck >/dev/null 2>&1; then
skip "shellcheck not installed"
fi
if ! command -v shellcheck > /dev/null 2>&1; then
skip "shellcheck not installed"
fi
run env PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
run env PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
set -euo pipefail
cd "$PROJECT_ROOT"
targets=()
@@ -24,6 +24,6 @@ fi
shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${targets[@]}"
EOF
printf '%s\n' "$output" >&3
[ "$status" -eq 0 ]
printf '%s\n' "$output" >&3
[ "$status" -eq 0 ]
}