mirror of https://github.com/tw93/Mole.git synced 2026-02-06 11:22:57 +00:00

chore: restructure windows branch (move windows/ content to root, remove macos files)

Tw93
2026-01-10 13:23:29 +08:00
parent e84a457c2f
commit edf5ed09a9
140 changed files with 1472 additions and 34059 deletions

@@ -1,595 +0,0 @@
#!/bin/bash
# System Checks Module
# Combines configuration, security, updates, and health checks
set -euo pipefail
# ============================================================================
# Helper Functions
# ============================================================================
list_login_items() {
if ! command -v osascript > /dev/null 2>&1; then
return
fi
local raw_items
raw_items=$(osascript -e 'tell application "System Events" to get the name of every login item' 2> /dev/null || echo "")
[[ -z "$raw_items" || "$raw_items" == "missing value" ]] && return
IFS=',' read -ra login_items_array <<< "$raw_items"
for entry in "${login_items_array[@]}"; do
local trimmed
trimmed=$(echo "$entry" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//')
[[ -n "$trimmed" ]] && printf "%s\n" "$trimmed"
done
}
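# Illustrative usage (not called in this module): count login items without printing them.
#   list_login_items | wc -l | tr -d ' '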
# ============================================================================
# Configuration Checks
# ============================================================================
check_touchid_sudo() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_touchid"; then return; fi
# Check if Touch ID is configured for sudo
local pam_file="/etc/pam.d/sudo"
if [[ -f "$pam_file" ]] && grep -q "pam_tid.so" "$pam_file" 2> /dev/null; then
echo -e " ${GREEN}${NC} Touch ID Biometric authentication enabled"
else
# Check if Touch ID is supported
local is_supported=false
if command -v bioutil > /dev/null 2>&1; then
if bioutil -r 2> /dev/null | grep -q "Touch ID"; then
is_supported=true
fi
elif [[ "$(uname -m)" == "arm64" ]]; then
is_supported=true
fi
if [[ "$is_supported" == "true" ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Touch ID ${YELLOW}Not configured for sudo${NC}"
export TOUCHID_NOT_CONFIGURED=true
fi
fi
}
check_rosetta() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_rosetta"; then return; fi
# Check Rosetta 2 (for Apple Silicon Macs)
if [[ "$(uname -m)" == "arm64" ]]; then
if [[ -f "/Library/Apple/usr/share/rosetta/rosetta" ]]; then
echo -e " ${GREEN}${NC} Rosetta 2 Intel app translation ready"
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} Rosetta 2 ${YELLOW}Intel app support missing${NC}"
export ROSETTA_NOT_INSTALLED=true
fi
fi
}
check_git_config() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_git_config"; then return; fi
# Check basic Git configuration
if command -v git > /dev/null 2>&1; then
local git_name=$(git config --global user.name 2> /dev/null || echo "")
local git_email=$(git config --global user.email 2> /dev/null || echo "")
if [[ -n "$git_name" && -n "$git_email" ]]; then
echo -e " ${GREEN}${NC} Git Global identity configured"
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} Git ${YELLOW}User identity not set${NC}"
fi
fi
}
check_all_config() {
echo -e "${BLUE}${ICON_ARROW}${NC} System Configuration"
check_touchid_sudo
check_rosetta
check_git_config
}
# ============================================================================
# Security Checks
# ============================================================================
check_filevault() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_filevault"; then return; fi
# Check FileVault encryption status
if command -v fdesetup > /dev/null 2>&1; then
local fv_status=$(fdesetup status 2> /dev/null || echo "")
if echo "$fv_status" | grep -q "FileVault is On"; then
echo -e " ${GREEN}${NC} FileVault Disk encryption active"
else
echo -e " ${RED}${NC} FileVault ${RED}Disk encryption disabled${NC}"
export FILEVAULT_DISABLED=true
fi
fi
}
check_firewall() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "firewall"; then return; fi
# Check firewall status using socketfilterfw (more reliable than defaults on modern macOS)
unset FIREWALL_DISABLED
local firewall_output=$(sudo /usr/libexec/ApplicationFirewall/socketfilterfw --getglobalstate 2> /dev/null || echo "")
if [[ "$firewall_output" == *"State = 1"* ]] || [[ "$firewall_output" == *"State = 2"* ]]; then
echo -e " ${GREEN}${NC} Firewall Network protection enabled"
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} Firewall ${YELLOW}Network protection disabled${NC}"
export FIREWALL_DISABLED=true
fi
}
check_gatekeeper() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "gatekeeper"; then return; fi
# Check Gatekeeper status
if command -v spctl > /dev/null 2>&1; then
local gk_status=$(spctl --status 2> /dev/null || echo "")
if echo "$gk_status" | grep -q "enabled"; then
echo -e " ${GREEN}${NC} Gatekeeper App download protection active"
unset GATEKEEPER_DISABLED
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} Gatekeeper ${YELLOW}App security disabled${NC}"
export GATEKEEPER_DISABLED=true
fi
fi
}
check_sip() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_sip"; then return; fi
# Check System Integrity Protection
if command -v csrutil > /dev/null 2>&1; then
local sip_status=$(csrutil status 2> /dev/null || echo "")
if echo "$sip_status" | grep -q "enabled"; then
echo -e " ${GREEN}${NC} SIP System integrity protected"
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} SIP ${YELLOW}System protection disabled${NC}"
fi
fi
}
check_all_security() {
echo -e "${BLUE}${ICON_ARROW}${NC} Security Status"
check_filevault
check_firewall
check_gatekeeper
check_sip
}
# ============================================================================
# Software Update Checks
# ============================================================================
# Cache configuration
CACHE_DIR="${HOME}/.cache/mole"
CACHE_TTL=600 # 10 minutes in seconds
# Ensure cache directory exists
ensure_user_dir "$CACHE_DIR"
clear_cache_file() {
local file="$1"
rm -f "$file" 2> /dev/null || true
}
reset_brew_cache() {
clear_cache_file "$CACHE_DIR/brew_updates"
}
reset_softwareupdate_cache() {
clear_cache_file "$CACHE_DIR/softwareupdate_list"
SOFTWARE_UPDATE_LIST=""
}
reset_mole_cache() {
clear_cache_file "$CACHE_DIR/mole_version"
}
# Check if cache is still valid
is_cache_valid() {
local cache_file="$1"
local ttl="${2:-$CACHE_TTL}"
if [[ ! -f "$cache_file" ]]; then
return 1
fi
local cache_age=$(($(get_epoch_seconds) - $(get_file_mtime "$cache_file")))
[[ $cache_age -lt $ttl ]]
}
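# Illustrative sketch (hypothetical helper, not wired into any check): wrap a slow
# command with the cache helpers above so repeated runs inside the TTL reuse the file.
example_cached_value() {
local cache_file="$CACHE_DIR/example_value"
if ! is_cache_valid "$cache_file" 300; then
ensure_user_file "$cache_file"
date +%s > "$cache_file" 2> /dev/null || true # stand-in for the expensive command
fi
cat "$cache_file" 2> /dev/null || echo ""
}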
# Fast software-update availability flag: returns "Updates Available" or "".
# Note: this no longer emits the `* Label:` listing that
# get_appstore_update_labels / get_macos_update_labels below expect.
SOFTWARE_UPDATE_LIST=""
get_software_updates() {
# Optimized: Use defaults to check if updates are pending (much faster)
local pending_updates
pending_updates=$(defaults read /Library/Preferences/com.apple.SoftwareUpdate LastRecommendedUpdatesAvailable 2> /dev/null || echo "0")
if [[ "$pending_updates" -gt 0 ]]; then
echo "Updates Available"
else
echo ""
fi
}
check_appstore_updates() {
# Skipped for speed optimization - consolidated into check_macos_update
# We can't easily distinguish app store vs macos updates without the slow softwareupdate -l call
export APPSTORE_UPDATE_COUNT=0
}
check_macos_update() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_macos_updates"; then return; fi
# Fast check using system preferences
local updates_available="false"
if [[ $(get_software_updates) == "Updates Available" ]]; then
updates_available="true"
# Verify with softwareupdate using --no-scan to avoid triggering a fresh scan
# which can timeout. We prioritize avoiding false negatives (missing actual updates)
# over false positives, so we only clear the update flag when softwareupdate
# explicitly reports "No new software available"
local sw_output=""
local sw_status=0
local spinner_started=false
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking macOS updates..."
spinner_started=true
fi
local softwareupdate_timeout=10
if sw_output=$(run_with_timeout "$softwareupdate_timeout" softwareupdate -l --no-scan 2> /dev/null); then
:
else
sw_status=$?
fi
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
# Debug logging for troubleshooting
if [[ -n "${MO_DEBUG:-}" ]]; then
echo "[DEBUG] softwareupdate exit status: $sw_status, output lines: $(echo "$sw_output" | wc -l | tr -d ' ')" >&2
fi
# Prefer avoiding false negatives: if the system indicates updates are pending,
# only clear the flag when softwareupdate returns a list without any update entries.
if [[ $sw_status -eq 0 && -n "$sw_output" ]]; then
if ! echo "$sw_output" | grep -qE '^[[:space:]]*\*'; then
updates_available="false"
fi
fi
fi
export MACOS_UPDATE_AVAILABLE="$updates_available"
if [[ "$updates_available" == "true" ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} macOS ${YELLOW}Update available${NC}"
else
echo -e " ${GREEN}${NC} macOS System up to date"
fi
}
check_mole_update() {
if command -v is_whitelisted > /dev/null && is_whitelisted "check_mole_update"; then return; fi
# Check if Mole has updates
# Auto-detect version from mole main script
local current_version
if [[ -f "${SCRIPT_DIR:-/usr/local/bin}/mole" ]]; then
current_version=$(grep '^VERSION=' "${SCRIPT_DIR:-/usr/local/bin}/mole" 2> /dev/null | head -1 | sed 's/VERSION="\(.*\)"/\1/' || echo "unknown")
else
current_version="${VERSION:-unknown}"
fi
local latest_version=""
local cache_file="$CACHE_DIR/mole_version"
export MOLE_UPDATE_AVAILABLE="false"
# Check cache first
if is_cache_valid "$cache_file"; then
latest_version=$(cat "$cache_file" 2> /dev/null || echo "")
else
# Show spinner while checking
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking Mole version..."
fi
# Try to get latest version from GitHub
if command -v curl > /dev/null 2>&1; then
# Run in background to allow Ctrl+C to interrupt
local temp_version
temp_version=$(mktemp_file "mole_version_check")
curl -fsSL --connect-timeout 3 --max-time 5 https://api.github.com/repos/tw93/mole/releases/latest 2> /dev/null | grep '"tag_name"' | sed -E 's/.*"v?([^"]+)".*/\1/' > "$temp_version" &
local curl_pid=$!
# Wait for curl to complete (allows Ctrl+C to interrupt)
if wait "$curl_pid" 2> /dev/null; then
latest_version=$(cat "$temp_version" 2> /dev/null || echo "")
# Save to cache
if [[ -n "$latest_version" ]]; then
ensure_user_file "$cache_file"
echo "$latest_version" > "$cache_file" 2> /dev/null || true
fi
fi
rm -f "$temp_version" 2> /dev/null || true
fi
# Stop spinner
if [[ -t 1 ]]; then
stop_inline_spinner
fi
fi
# Normalize version strings (remove leading 'v' or 'V')
current_version="${current_version#v}"
current_version="${current_version#V}"
latest_version="${latest_version#v}"
latest_version="${latest_version#V}"
if [[ -n "$latest_version" && "$current_version" != "$latest_version" ]]; then
# Compare versions
if [[ "$(printf '%s\n' "$current_version" "$latest_version" | sort -V | head -1)" == "$current_version" ]]; then
export MOLE_UPDATE_AVAILABLE="true"
echo -e " ${YELLOW}${ICON_WARNING}${NC} Mole ${YELLOW}${latest_version} available${NC} (running ${current_version})"
else
echo -e " ${GREEN}${NC} Mole Latest version ${current_version}"
fi
else
echo -e " ${GREEN}${NC} Mole Latest version ${current_version}"
fi
}
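# Illustrative sketch of the sort -V comparison used above (hypothetical helper,
# not called anywhere): succeeds when $1 is strictly older than $2.
version_lt() {
[[ "$1" != "$2" ]] && [[ "$(printf '%s\n' "$1" "$2" | sort -V | head -1)" == "$1" ]]
}
# e.g. version_lt "1.9.2" "1.10.0" is true because sort -V orders 1.9.2 first.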
check_all_updates() {
# Reset spinner flag for softwareupdate
unset SOFTWAREUPDATE_SPINNER_SHOWN
# Preload software update data to avoid delays between subsequent checks
# Only redirect stdout, keep stderr for spinner display
get_software_updates > /dev/null
echo -e "${BLUE}${ICON_ARROW}${NC} System Updates"
check_appstore_updates
check_macos_update
check_mole_update
}
get_appstore_update_labels() {
get_software_updates | awk '
/^\*/ {
label=$0
sub(/^[[:space:]]*\* Label: */, "", label)
sub(/,.*/, "", label)
lower=tolower(label)
if (index(lower, "macos") == 0) {
print label
}
}
'
}
get_macos_update_labels() {
get_software_updates | awk '
/^\*/ {
label=$0
sub(/^[[:space:]]*\* Label: */, "", label)
sub(/,.*/, "", label)
lower=tolower(label)
if (index(lower, "macos") != 0) {
print label
}
}
'
}
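# Illustrative input for the two label filters above (hypothetical entries in the
# `softwareupdate -l` format they expect; note get_software_updates currently returns
# only an availability flag, not this listing):
#   * Label: macOS Sonoma 14.5-23F79, ...
#   * Label: Safari17.5-17.5.1, ...
# get_macos_update_labels keeps the first label, get_appstore_update_labels the second.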
# ============================================================================
# System Health Checks
# ============================================================================
check_disk_space() {
local free_gb=$(command df -H / | awk 'NR==2 {print $4}' | sed 's/G//')
local free_num=$(echo "$free_gb" | tr -d 'G' | cut -d'.' -f1)
export DISK_FREE_GB=$free_num
if [[ $free_num -lt 20 ]]; then
echo -e " ${RED}${NC} Disk Space ${RED}${free_gb}GB free${NC} (Critical)"
elif [[ $free_num -lt 50 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Disk Space ${YELLOW}${free_gb}GB free${NC} (Low)"
else
echo -e " ${GREEN}${NC} Disk Space ${free_gb}GB free"
fi
}
check_memory_usage() {
local mem_total
mem_total=$(sysctl -n hw.memsize 2> /dev/null || echo "0")
if [[ -z "$mem_total" || "$mem_total" -le 0 ]]; then
echo -e " ${GRAY}-${NC} Memory Unable to determine"
return
fi
local vm_output
vm_output=$(vm_stat 2> /dev/null || echo "")
local page_size
page_size=$(echo "$vm_output" | awk '/page size of/ {print $8}')
[[ -z "$page_size" ]] && page_size=4096
local free_pages inactive_pages spec_pages
free_pages=$(echo "$vm_output" | awk '/Pages free/ {gsub(/\./,"",$3); print $3}')
inactive_pages=$(echo "$vm_output" | awk '/Pages inactive/ {gsub(/\./,"",$3); print $3}')
spec_pages=$(echo "$vm_output" | awk '/Pages speculative/ {gsub(/\./,"",$3); print $3}')
free_pages=${free_pages:-0}
inactive_pages=${inactive_pages:-0}
spec_pages=${spec_pages:-0}
# Estimate used percent: (total - free - inactive - speculative) / total
local total_pages=$((mem_total / page_size))
local free_total=$((free_pages + inactive_pages + spec_pages))
local used_pages=$((total_pages - free_total))
if ((used_pages < 0)); then
used_pages=0
fi
local used_percent
used_percent=$(awk "BEGIN {printf \"%.0f\", ($used_pages / $total_pages) * 100}")
((used_percent > 100)) && used_percent=100
((used_percent < 0)) && used_percent=0
if [[ $used_percent -gt 90 ]]; then
echo -e " ${RED}${NC} Memory ${RED}${used_percent}% used${NC} (Critical)"
elif [[ $used_percent -gt 80 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Memory ${YELLOW}${used_percent}% used${NC} (High)"
else
echo -e " ${GREEN}${NC} Memory ${used_percent}% used"
fi
}
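# Worked example of the estimate above (illustrative numbers): 16 GiB of RAM with a
# 16 KiB page size gives total_pages=1048576; with 200000 free, 150000 inactive and
# 50000 speculative pages, used_pages=648576 and used_percent rounds to 62.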
check_login_items() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_login_items"; then return; fi
local login_items_count=0
local -a login_items_list=()
if [[ -t 0 ]]; then
# Show spinner while getting login items
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking login items..."
fi
while IFS= read -r login_item; do
[[ -n "$login_item" ]] && login_items_list+=("$login_item")
done < <(list_login_items || true)
login_items_count=${#login_items_list[@]}
# Stop spinner before output
if [[ -t 1 ]]; then
stop_inline_spinner
fi
fi
if [[ $login_items_count -gt 15 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Login Items ${YELLOW}${login_items_count} apps${NC}"
elif [[ $login_items_count -gt 0 ]]; then
echo -e " ${GREEN}${NC} Login Items ${login_items_count} apps"
else
echo -e " ${GREEN}${NC} Login Items None"
return
fi
# Show items in a single line (compact)
local preview_limit=3
((preview_limit > login_items_count)) && preview_limit=$login_items_count
local items_display=""
for ((i = 0; i < preview_limit; i++)); do
if [[ $i -eq 0 ]]; then
items_display="${login_items_list[$i]}"
else
items_display="${items_display}, ${login_items_list[$i]}"
fi
done
if ((login_items_count > preview_limit)); then
local remaining=$((login_items_count - preview_limit))
items_display="${items_display} +${remaining}"
fi
echo -e " ${GRAY}${items_display}${NC}"
}
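# Illustrative output with 5 login items (app names hypothetical):
#   ✓ Login Items 5 apps
#     Alfred, Raycast, Stats +2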
check_cache_size() {
local cache_size_kb=0
# Check common cache locations
local -a cache_paths=(
"$HOME/Library/Caches"
"$HOME/Library/Logs"
)
# Show spinner while calculating cache size
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning cache..."
fi
for cache_path in "${cache_paths[@]}"; do
if [[ -d "$cache_path" ]]; then
local size_output
size_output=$(get_path_size_kb "$cache_path")
[[ "$size_output" =~ ^[0-9]+$ ]] || size_output=0
cache_size_kb=$((cache_size_kb + size_output))
fi
done
local cache_size_gb=$(echo "scale=1; $cache_size_kb / 1024 / 1024" | bc)
export CACHE_SIZE_GB=$cache_size_gb
# Stop spinner before output
if [[ -t 1 ]]; then
stop_inline_spinner
fi
# Convert to integer for comparison
local cache_size_int=$(echo "$cache_size_gb" | cut -d'.' -f1)
if [[ $cache_size_int -gt 5 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Cache Size ${YELLOW}${cache_size_gb}GB${NC} cleanable"
else
echo -e " ${GREEN}✓${NC} Cache Size ${cache_size_gb}GB"
fi
}
check_swap_usage() {
# Check swap usage
if command -v sysctl > /dev/null 2>&1; then
local swap_info=$(sysctl vm.swapusage 2> /dev/null || echo "")
if [[ -n "$swap_info" ]]; then
local swap_used=$(echo "$swap_info" | grep -o "used = [0-9.]*[GM]" | awk 'NR==1{print $3}')
swap_used=${swap_used:-0M}
local swap_num="${swap_used//[GM]/}"
if [[ "$swap_used" == *"G"* ]]; then
local swap_gb=${swap_num%.*}
if [[ $swap_gb -gt 2 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Swap Usage ${YELLOW}${swap_used}${NC} (High)"
else
echo -e " ${GREEN}${NC} Swap Usage ${swap_used}"
fi
else
echo -e " ${GREEN}${NC} Swap Usage ${swap_used}"
fi
fi
fi
}
check_brew_health() {
# Check whitelist
if command -v is_whitelisted > /dev/null && is_whitelisted "check_brew_health"; then return; fi
}
check_system_health() {
echo -e "${BLUE}${ICON_ARROW}${NC} System Health"
check_disk_space
check_memory_usage
check_swap_usage
check_login_items
check_cache_size
# Time Machine check is optional; skip by default to avoid noise on systems without backups
}

@@ -1,184 +0,0 @@
#!/bin/bash
# System Health Check - JSON Generator
# Extracted from tasks.sh
set -euo pipefail
# Ensure dependencies are loaded (only if running standalone)
if [[ -z "${MOLE_FILE_OPS_LOADED:-}" ]]; then
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
source "$SCRIPT_DIR/lib/core/file_ops.sh"
fi
# Get memory info in GB
get_memory_info() {
local total_bytes used_gb total_gb
# Total memory
total_bytes=$(sysctl -n hw.memsize 2> /dev/null || echo "0")
total_gb=$(LC_ALL=C awk "BEGIN {printf \"%.2f\", $total_bytes / (1024*1024*1024)}" 2> /dev/null || echo "0")
[[ -z "$total_gb" || "$total_gb" == "" ]] && total_gb="0"
# Used memory from vm_stat
local vm_output active wired compressed page_size
vm_output=$(vm_stat 2> /dev/null || echo "")
page_size=4096
active=$(echo "$vm_output" | LC_ALL=C awk '/Pages active:/ {print $NF}' | tr -d '.\n' 2> /dev/null)
wired=$(echo "$vm_output" | LC_ALL=C awk '/Pages wired down:/ {print $NF}' | tr -d '.\n' 2> /dev/null)
compressed=$(echo "$vm_output" | LC_ALL=C awk '/Pages occupied by compressor:/ {print $NF}' | tr -d '.\n' 2> /dev/null)
active=${active:-0}
wired=${wired:-0}
compressed=${compressed:-0}
local used_bytes=$(((active + wired + compressed) * page_size))
used_gb=$(LC_ALL=C awk "BEGIN {printf \"%.2f\", $used_bytes / (1024*1024*1024)}" 2> /dev/null || echo "0")
[[ -z "$used_gb" || "$used_gb" == "" ]] && used_gb="0"
echo "$used_gb $total_gb"
}
# Get disk info
get_disk_info() {
local home="${HOME:-/}"
local df_output total_gb used_gb used_percent
df_output=$(command df -k "$home" 2> /dev/null | tail -1)
local total_kb used_kb
total_kb=$(echo "$df_output" | LC_ALL=C awk 'NR==1{print $2}' 2> /dev/null)
used_kb=$(echo "$df_output" | LC_ALL=C awk 'NR==1{print $3}' 2> /dev/null)
total_kb=${total_kb:-0}
used_kb=${used_kb:-0}
[[ "$total_kb" == "0" ]] && total_kb=1 # Avoid division by zero
total_gb=$(LC_ALL=C awk "BEGIN {printf \"%.2f\", $total_kb / (1024*1024)}" 2> /dev/null || echo "0")
used_gb=$(LC_ALL=C awk "BEGIN {printf \"%.2f\", $used_kb / (1024*1024)}" 2> /dev/null || echo "0")
used_percent=$(LC_ALL=C awk "BEGIN {printf \"%.1f\", ($used_kb / $total_kb) * 100}" 2> /dev/null || echo "0")
[[ -z "$total_gb" || "$total_gb" == "" ]] && total_gb="0"
[[ -z "$used_gb" || "$used_gb" == "" ]] && used_gb="0"
[[ -z "$used_percent" || "$used_percent" == "" ]] && used_percent="0"
echo "$used_gb $total_gb $used_percent"
}
# Get uptime in days
get_uptime_days() {
local boot_output boot_time uptime_days
boot_output=$(sysctl -n kern.boottime 2> /dev/null || echo "")
boot_time=$(echo "$boot_output" | awk -F 'sec = |, usec' '{print $2}' 2> /dev/null || echo "")
if [[ -n "$boot_time" && "$boot_time" =~ ^[0-9]+$ ]]; then
local now
now=$(get_epoch_seconds)
local uptime_sec=$((now - boot_time))
uptime_days=$(LC_ALL=C awk "BEGIN {printf \"%.1f\", $uptime_sec / 86400}" 2> /dev/null || echo "0")
else
uptime_days="0"
fi
[[ -z "$uptime_days" || "$uptime_days" == "" ]] && uptime_days="0"
echo "$uptime_days"
}
# JSON escape helper
json_escape() {
# Escape backslash, double quote, tab, and newline
local tab_char=$'\t'
local escaped
escaped=$(echo -n "$1" | sed 's/\\/\\\\/g; s/"/\\"/g; s/'"$tab_char"'/\\t/g' | tr '\n' ' ')
echo -n "${escaped% }"
}
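# Illustrative behaviour (hypothetical input): json_escape 'say "hi" \now' yields
# say \"hi\" \\now; quotes and backslashes are escaped, literal tabs become \t,
# and newlines collapse to spaces.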
# Generate JSON output
generate_health_json() {
# System info
read -r mem_used mem_total <<< "$(get_memory_info)"
read -r disk_used disk_total disk_percent <<< "$(get_disk_info)"
local uptime=$(get_uptime_days)
# Ensure all values are valid numbers (fallback to 0)
mem_used=${mem_used:-0}
mem_total=${mem_total:-0}
disk_used=${disk_used:-0}
disk_total=${disk_total:-0}
disk_percent=${disk_percent:-0}
uptime=${uptime:-0}
# Start JSON
cat << EOF
{
"memory_used_gb": $mem_used,
"memory_total_gb": $mem_total,
"disk_used_gb": $disk_used,
"disk_total_gb": $disk_total,
"disk_used_percent": $disk_percent,
"uptime_days": $uptime,
"optimizations": [
EOF
# Collect all optimization items
local -a items=()
# Core optimizations (safe and valuable)
items+=('system_maintenance|DNS & Spotlight Check|Refresh DNS cache & verify Spotlight status|true')
items+=('cache_refresh|Finder Cache Refresh|Refresh QuickLook thumbnails & icon services cache|true')
items+=('saved_state_cleanup|App State Cleanup|Remove old saved application states (30+ days)|true')
items+=('fix_broken_configs|Broken Config Repair|Fix corrupted preferences files|true')
items+=('network_optimization|Network Cache Refresh|Optimize DNS cache & restart mDNSResponder|true')
# Advanced optimizations (high value, auto-run with safety checks)
items+=('sqlite_vacuum|Database Optimization|Compress SQLite databases for Mail, Safari & Messages (skips if apps are running)|true')
items+=('launch_services_rebuild|LaunchServices Repair|Repair "Open with" menu & file associations|true')
items+=('font_cache_rebuild|Font Cache Rebuild|Rebuild font database to fix rendering issues|true')
items+=('dock_refresh|Dock Refresh|Fix broken icons and visual glitches in the Dock|true')
# System performance optimizations (new)
items+=('memory_pressure_relief|Memory Optimization|Release inactive memory to improve system responsiveness|true')
items+=('network_stack_optimize|Network Stack Refresh|Flush routing table and ARP cache to resolve network issues|true')
items+=('disk_permissions_repair|Permission Repair|Fix user directory permission issues|true')
items+=('bluetooth_reset|Bluetooth Refresh|Restart Bluetooth module to fix connectivity (skips if in use)|true')
items+=('spotlight_index_optimize|Spotlight Optimization|Rebuild index if search is slow (smart detection)|true')
# Removed high-risk optimizations:
# - startup_items_cleanup: Risk of deleting legitimate app helpers
# - system_services_refresh: Risk of data loss when killing system services
# - dyld_cache_update: Low benefit, time-consuming, auto-managed by macOS
# Output items as JSON
local first=true
for item in "${items[@]}"; do
IFS='|' read -r action name desc safe <<< "$item"
# Escape strings
action=$(json_escape "$action")
name=$(json_escape "$name")
desc=$(json_escape "$desc")
[[ "$first" == "true" ]] && first=false || echo ","
cat << EOF
{
"category": "system",
"name": "$name",
"description": "$desc",
"action": "$action",
"safe": $safe
}
EOF
done
# Close JSON
cat << 'EOF'
]
}
EOF
}
# Main execution (for testing)
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
generate_health_json
fi
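# Illustrative usage once this module is sourced or executed directly (assumes jq
# is installed; not part of the script):
#   generate_health_json | jq -r '.optimizations[].name'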

@@ -1,235 +0,0 @@
#!/bin/bash
# User GUI Applications Cleanup Module (desktop apps, media, utilities).
set -euo pipefail
# Xcode and iOS tooling.
clean_xcode_tools() {
# Skip DerivedData/Archives while Xcode is running.
local xcode_running=false
if pgrep -x "Xcode" > /dev/null 2>&1; then
xcode_running=true
fi
safe_clean ~/Library/Developer/CoreSimulator/Caches/* "Simulator cache"
safe_clean ~/Library/Developer/CoreSimulator/Devices/*/data/tmp/* "Simulator temp files"
safe_clean ~/Library/Caches/com.apple.dt.Xcode/* "Xcode cache"
safe_clean ~/Library/Developer/Xcode/iOS\ Device\ Logs/* "iOS device logs"
safe_clean ~/Library/Developer/Xcode/watchOS\ Device\ Logs/* "watchOS device logs"
safe_clean ~/Library/Developer/Xcode/Products/* "Xcode build products"
if [[ "$xcode_running" == "false" ]]; then
safe_clean ~/Library/Developer/Xcode/DerivedData/* "Xcode derived data"
safe_clean ~/Library/Developer/Xcode/Archives/* "Xcode archives"
else
echo -e " ${YELLOW}${ICON_WARNING}${NC} Xcode is running, skipping DerivedData and Archives cleanup"
fi
}
# Code editors.
clean_code_editors() {
safe_clean ~/Library/Application\ Support/Code/logs/* "VS Code logs"
safe_clean ~/Library/Application\ Support/Code/Cache/* "VS Code cache"
safe_clean ~/Library/Application\ Support/Code/CachedExtensions/* "VS Code extension cache"
safe_clean ~/Library/Application\ Support/Code/CachedData/* "VS Code data cache"
safe_clean ~/Library/Caches/com.sublimetext.*/* "Sublime Text cache"
}
# Communication apps.
clean_communication_apps() {
safe_clean ~/Library/Application\ Support/discord/Cache/* "Discord cache"
safe_clean ~/Library/Application\ Support/legcord/Cache/* "Legcord cache"
safe_clean ~/Library/Application\ Support/Slack/Cache/* "Slack cache"
safe_clean ~/Library/Caches/us.zoom.xos/* "Zoom cache"
safe_clean ~/Library/Caches/com.tencent.xinWeChat/* "WeChat cache"
safe_clean ~/Library/Caches/ru.keepcoder.Telegram/* "Telegram cache"
safe_clean ~/Library/Caches/com.microsoft.teams2/* "Microsoft Teams cache"
safe_clean ~/Library/Caches/net.whatsapp.WhatsApp/* "WhatsApp cache"
safe_clean ~/Library/Caches/com.skype.skype/* "Skype cache"
safe_clean ~/Library/Caches/com.tencent.meeting/* "Tencent Meeting cache"
safe_clean ~/Library/Caches/com.tencent.WeWorkMac/* "WeCom cache"
safe_clean ~/Library/Caches/com.feishu.*/* "Feishu cache"
}
# DingTalk.
clean_dingtalk() {
safe_clean ~/Library/Caches/dd.work.exclusive4aliding/* "DingTalk iDingTalk cache"
safe_clean ~/Library/Caches/com.alibaba.AliLang.osx/* "AliLang security component"
safe_clean ~/Library/Application\ Support/iDingTalk/log/* "DingTalk logs"
safe_clean ~/Library/Application\ Support/iDingTalk/holmeslogs/* "DingTalk holmes logs"
}
# AI assistants.
clean_ai_apps() {
safe_clean ~/Library/Caches/com.openai.chat/* "ChatGPT cache"
safe_clean ~/Library/Caches/com.anthropic.claudefordesktop/* "Claude desktop cache"
safe_clean ~/Library/Logs/Claude/* "Claude logs"
}
# Design and creative tools.
clean_design_tools() {
safe_clean ~/Library/Caches/com.bohemiancoding.sketch3/* "Sketch cache"
safe_clean ~/Library/Application\ Support/com.bohemiancoding.sketch3/cache/* "Sketch app cache"
safe_clean ~/Library/Caches/Adobe/* "Adobe cache"
safe_clean ~/Library/Caches/com.adobe.*/* "Adobe app caches"
safe_clean ~/Library/Caches/com.figma.Desktop/* "Figma cache"
# Raycast cache is protected (clipboard history, images).
}
# Video editing tools.
clean_video_tools() {
safe_clean ~/Library/Caches/net.telestream.screenflow10/* "ScreenFlow cache"
safe_clean ~/Library/Caches/com.apple.FinalCut/* "Final Cut Pro cache"
safe_clean ~/Library/Caches/com.blackmagic-design.DaVinciResolve/* "DaVinci Resolve cache"
safe_clean ~/Library/Caches/com.adobe.PremierePro.*/* "Premiere Pro cache"
}
# 3D and CAD tools.
clean_3d_tools() {
safe_clean ~/Library/Caches/org.blenderfoundation.blender/* "Blender cache"
safe_clean ~/Library/Caches/com.maxon.cinema4d/* "Cinema 4D cache"
safe_clean ~/Library/Caches/com.autodesk.*/* "Autodesk cache"
safe_clean ~/Library/Caches/com.sketchup.*/* "SketchUp cache"
}
# Productivity apps.
clean_productivity_apps() {
safe_clean ~/Library/Caches/com.tw93.MiaoYan/* "MiaoYan cache"
safe_clean ~/Library/Caches/com.klee.desktop/* "Klee cache"
safe_clean ~/Library/Caches/klee_desktop/* "Klee desktop cache"
safe_clean ~/Library/Caches/com.orabrowser.app/* "Ora browser cache"
safe_clean ~/Library/Caches/com.filo.client/* "Filo cache"
safe_clean ~/Library/Caches/com.flomoapp.mac/* "Flomo cache"
safe_clean ~/Library/Application\ Support/Quark/Cache/videoCache/* "Quark video cache"
}
# Music/media players (protect Spotify offline music).
clean_media_players() {
local spotify_cache="$HOME/Library/Caches/com.spotify.client"
local spotify_data="$HOME/Library/Application Support/Spotify"
local has_offline_music=false
# Heuristics: offline DB or large cache.
if [[ -f "$spotify_data/PersistentCache/Storage/offline.bnk" ]] ||
[[ -d "$spotify_data/PersistentCache/Storage" && -n "$(find "$spotify_data/PersistentCache/Storage" -type f -name "*.file" 2> /dev/null | head -1)" ]]; then
has_offline_music=true
elif [[ -d "$spotify_cache" ]]; then
local cache_size_kb
cache_size_kb=$(get_path_size_kb "$spotify_cache")
if [[ $cache_size_kb -ge 512000 ]]; then
has_offline_music=true
fi
fi
if [[ "$has_offline_music" == "true" ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Spotify cache protected · offline music detected"
note_activity
else
safe_clean ~/Library/Caches/com.spotify.client/* "Spotify cache"
fi
safe_clean ~/Library/Caches/com.apple.Music "Apple Music cache"
safe_clean ~/Library/Caches/com.apple.podcasts "Apple Podcasts cache"
safe_clean ~/Library/Caches/com.apple.TV/* "Apple TV cache"
safe_clean ~/Library/Caches/tv.plex.player.desktop "Plex cache"
safe_clean ~/Library/Caches/com.netease.163music "NetEase Music cache"
safe_clean ~/Library/Caches/com.tencent.QQMusic/* "QQ Music cache"
safe_clean ~/Library/Caches/com.kugou.mac/* "Kugou Music cache"
safe_clean ~/Library/Caches/com.kuwo.mac/* "Kuwo Music cache"
}
# Video players.
clean_video_players() {
safe_clean ~/Library/Caches/com.colliderli.iina "IINA cache"
safe_clean ~/Library/Caches/org.videolan.vlc "VLC cache"
safe_clean ~/Library/Caches/io.mpv "MPV cache"
safe_clean ~/Library/Caches/com.iqiyi.player "iQIYI cache"
safe_clean ~/Library/Caches/com.tencent.tenvideo "Tencent Video cache"
safe_clean ~/Library/Caches/tv.danmaku.bili/* "Bilibili cache"
safe_clean ~/Library/Caches/com.douyu.*/* "Douyu cache"
safe_clean ~/Library/Caches/com.huya.*/* "Huya cache"
}
# Download managers.
clean_download_managers() {
safe_clean ~/Library/Caches/net.xmac.aria2gui "Aria2 cache"
safe_clean ~/Library/Caches/org.m0k.transmission "Transmission cache"
safe_clean ~/Library/Caches/com.qbittorrent.qBittorrent "qBittorrent cache"
safe_clean ~/Library/Caches/com.downie.Downie-* "Downie cache"
safe_clean ~/Library/Caches/com.folx.*/* "Folx cache"
safe_clean ~/Library/Caches/com.charlessoft.pacifist/* "Pacifist cache"
}
# Gaming platforms.
clean_gaming_platforms() {
safe_clean ~/Library/Caches/com.valvesoftware.steam/* "Steam cache"
safe_clean ~/Library/Application\ Support/Steam/htmlcache/* "Steam web cache"
safe_clean ~/Library/Caches/com.epicgames.EpicGamesLauncher/* "Epic Games cache"
safe_clean ~/Library/Caches/com.blizzard.Battle.net/* "Battle.net cache"
safe_clean ~/Library/Application\ Support/Battle.net/Cache/* "Battle.net app cache"
safe_clean ~/Library/Caches/com.ea.*/* "EA Origin cache"
safe_clean ~/Library/Caches/com.gog.galaxy/* "GOG Galaxy cache"
safe_clean ~/Library/Caches/com.riotgames.*/* "Riot Games cache"
}
# Translation/dictionary apps.
clean_translation_apps() {
safe_clean ~/Library/Caches/com.youdao.YoudaoDict "Youdao Dictionary cache"
safe_clean ~/Library/Caches/com.eudic.* "Eudict cache"
safe_clean ~/Library/Caches/com.bob-build.Bob "Bob Translation cache"
}
# Screenshot/recording tools.
clean_screenshot_tools() {
safe_clean ~/Library/Caches/com.cleanshot.* "CleanShot cache"
safe_clean ~/Library/Caches/com.reincubate.camo "Camo cache"
safe_clean ~/Library/Caches/com.xnipapp.xnip "Xnip cache"
}
# Email clients.
clean_email_clients() {
safe_clean ~/Library/Caches/com.readdle.smartemail-Mac "Spark cache"
safe_clean ~/Library/Caches/com.airmail.* "Airmail cache"
}
# Task management apps.
clean_task_apps() {
safe_clean ~/Library/Caches/com.todoist.mac.Todoist "Todoist cache"
safe_clean ~/Library/Caches/com.any.do.* "Any.do cache"
}
# Shell/terminal utilities.
clean_shell_utils() {
safe_clean ~/.zcompdump* "Zsh completion cache"
safe_clean ~/.lesshst "less history"
safe_clean ~/.viminfo.tmp "Vim temporary files"
safe_clean ~/.wget-hsts "wget HSTS cache"
}
# Input methods and system utilities.
clean_system_utils() {
safe_clean ~/Library/Caches/com.runjuu.Input-Source-Pro/* "Input Source Pro cache"
safe_clean ~/Library/Caches/macos-wakatime.WakaTime/* "WakaTime cache"
}
# Note-taking apps.
clean_note_apps() {
safe_clean ~/Library/Caches/notion.id/* "Notion cache"
safe_clean ~/Library/Caches/md.obsidian/* "Obsidian cache"
safe_clean ~/Library/Caches/com.logseq.*/* "Logseq cache"
safe_clean ~/Library/Caches/com.bear-writer.*/* "Bear cache"
safe_clean ~/Library/Caches/com.evernote.*/* "Evernote cache"
safe_clean ~/Library/Caches/com.yinxiang.*/* "Yinxiang Note cache"
}
# Launchers and automation tools.
clean_launcher_apps() {
safe_clean ~/Library/Caches/com.runningwithcrayons.Alfred/* "Alfred cache"
safe_clean ~/Library/Caches/cx.c3.theunarchiver/* "The Unarchiver cache"
}
# Remote desktop tools.
clean_remote_desktop() {
safe_clean ~/Library/Caches/com.teamviewer.*/* "TeamViewer cache"
safe_clean ~/Library/Caches/com.anydesk.*/* "AnyDesk cache"
safe_clean ~/Library/Caches/com.todesk.*/* "ToDesk cache"
safe_clean ~/Library/Caches/com.sunlogin.*/* "Sunlogin cache"
}
# Main entry for GUI app cleanup.
clean_user_gui_applications() {
stop_section_spinner
clean_xcode_tools
clean_code_editors
clean_communication_apps
clean_dingtalk
clean_ai_apps
clean_design_tools
clean_video_tools
clean_3d_tools
clean_productivity_apps
clean_media_players
clean_video_players
clean_download_managers
clean_gaming_platforms
clean_translation_apps
clean_screenshot_tools
clean_email_clients
clean_task_apps
clean_shell_utils
clean_system_utils
clean_note_apps
clean_launcher_apps
clean_remote_desktop
}

lib/clean/apps.ps1 (new file, 442 lines)
@@ -0,0 +1,442 @@
# Mole - Application-Specific Cleanup Module
# Cleans leftover data from uninstalled apps and app-specific caches
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_CLEAN_APPS_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_CLEAN_APPS_LOADED) { return }
$script:MOLE_CLEAN_APPS_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\..\core\base.ps1"
. "$scriptDir\..\core\log.ps1"
. "$scriptDir\..\core\file_ops.ps1"
# ============================================================================
# Orphaned App Data Detection
# ============================================================================
function Get-InstalledPrograms {
<#
.SYNOPSIS
Get list of installed programs from registry
#>
$programs = @()
$registryPaths = @(
"HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\*"
"HKLM:\SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall\*"
"HKCU:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\*"
)
foreach ($path in $registryPaths) {
$items = Get-ItemProperty -Path $path -ErrorAction SilentlyContinue |
Where-Object { $_.DisplayName } |
Select-Object DisplayName, InstallLocation, Publisher
if ($items) {
$programs += $items
}
}
# Also check UWP apps
try {
$uwpApps = Get-AppxPackage -ErrorAction SilentlyContinue |
Select-Object @{N='DisplayName';E={$_.Name}}, @{N='InstallLocation';E={$_.InstallLocation}}, Publisher
if ($uwpApps) {
$programs += $uwpApps
}
}
catch {
Write-Debug "Could not enumerate UWP apps: $_"
}
return $programs
}
function Find-OrphanedAppData {
<#
.SYNOPSIS
Find app data folders for apps that are no longer installed
#>
param([int]$DaysOld = 60)
$installedPrograms = Get-InstalledPrograms
$installedNames = $installedPrograms | ForEach-Object { $_.DisplayName.ToLower() }
$orphanedPaths = @()
$cutoffDate = (Get-Date).AddDays(-$DaysOld)
# Check common app data locations
$appDataPaths = @(
@{ Path = $env:APPDATA; Type = "Roaming" }
@{ Path = $env:LOCALAPPDATA; Type = "Local" }
)
foreach ($location in $appDataPaths) {
if (-not (Test-Path $location.Path)) { continue }
$folders = Get-ChildItem -Path $location.Path -Directory -ErrorAction SilentlyContinue
foreach ($folder in $folders) {
# Skip system folders
$skipFolders = @('Microsoft', 'Windows', 'Packages', 'Programs', 'Temp', 'Roaming')
if ($folder.Name -in $skipFolders) { continue }
# Skip if recently modified
if ($folder.LastWriteTime -gt $cutoffDate) { continue }
# Check if app is installed using stricter matching
# Require exact match or that folder name is a clear prefix/suffix of app name
$isInstalled = $false
$folderLower = $folder.Name.ToLower()
foreach ($name in $installedNames) {
# Exact match
if ($name -eq $folderLower) {
$isInstalled = $true
break
}
# Folder is prefix of app name (e.g., "chrome" matches "chrome browser")
if ($name.StartsWith($folderLower) -and $folderLower.Length -ge 4) {
$isInstalled = $true
break
}
# App name is prefix of folder (e.g., "vscode" matches "vscode-data")
if ($folderLower.StartsWith($name) -and $name.Length -ge 4) {
$isInstalled = $true
break
}
}
if (-not $isInstalled) {
$orphanedPaths += @{
Path = $folder.FullName
Name = $folder.Name
Type = $location.Type
Size = (Get-PathSize -Path $folder.FullName)
LastModified = $folder.LastWriteTime
}
}
}
}
return $orphanedPaths
}
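# Illustrative, read-only usage (folder names are whatever happens to qualify):
#   Find-OrphanedAppData -DaysOld 90 |
#       Sort-Object { $_.Size } -Descending |
#       ForEach-Object { '{0}  {1:N1} MB' -f $_.Name, ($_.Size / 1MB) }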
function Clear-OrphanedAppData {
<#
.SYNOPSIS
Clean orphaned application data
#>
param([int]$DaysOld = 60)
Start-Section "Orphaned app data"
$orphaned = Find-OrphanedAppData -DaysOld $DaysOld
if ($orphaned.Count -eq 0) {
Write-Info "No orphaned app data found"
Stop-Section
return
}
# Filter by size (only clean if > 10MB to avoid noise)
$significantOrphans = $orphaned | Where-Object { $_.Size -gt 10MB }
if ($significantOrphans.Count -gt 0) {
$totalSize = ($significantOrphans | Measure-Object -Property Size -Sum).Sum
$sizeHuman = Format-ByteSize -Bytes $totalSize
Write-Info "Found $($significantOrphans.Count) orphaned folders ($sizeHuman)"
foreach ($orphan in $significantOrphans) {
$orphanSize = Format-ByteSize -Bytes $orphan.Size
Remove-SafeItem -Path $orphan.Path -Description "$($orphan.Name) ($orphanSize)" -Recurse
}
}
Stop-Section
}
# ============================================================================
# Specific Application Cleanup
# ============================================================================
function Clear-OfficeCache {
<#
.SYNOPSIS
Clean Microsoft Office caches and temp files
#>
$officeCachePaths = @(
# Office 365 / 2019 / 2021
"$env:LOCALAPPDATA\Microsoft\Office\16.0\OfficeFileCache"
"$env:LOCALAPPDATA\Microsoft\Office\16.0\Wef"
"$env:LOCALAPPDATA\Microsoft\Outlook\RoamCache"
"$env:LOCALAPPDATA\Microsoft\Outlook\Offline Address Books"
# Older Office versions
"$env:LOCALAPPDATA\Microsoft\Office\15.0\OfficeFileCache"
# Office temp files
"$env:APPDATA\Microsoft\Templates\*.tmp"
"$env:APPDATA\Microsoft\Word\*.tmp"
"$env:APPDATA\Microsoft\Excel\*.tmp"
"$env:APPDATA\Microsoft\PowerPoint\*.tmp"
)
foreach ($path in $officeCachePaths) {
if ($path -like "*.tmp") {
$parent = Split-Path -Parent $path
if (Test-Path $parent) {
$tmpFiles = Get-ChildItem -Path $parent -Filter "*.tmp" -File -ErrorAction SilentlyContinue
if ($tmpFiles) {
$paths = $tmpFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Office temp files"
}
}
}
elseif (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Office $(Split-Path -Leaf $path)"
}
}
}
function Clear-OneDriveCache {
<#
.SYNOPSIS
Clean OneDrive cache
#>
$oneDriveCachePaths = @(
"$env:LOCALAPPDATA\Microsoft\OneDrive\logs"
"$env:LOCALAPPDATA\Microsoft\OneDrive\setup\logs"
)
foreach ($path in $oneDriveCachePaths) {
if (Test-Path $path) {
Remove-OldFiles -Path $path -DaysOld 7 -Description "OneDrive logs"
}
}
}
function Clear-DropboxCache {
<#
.SYNOPSIS
Clean Dropbox cache
#>
# Dropbox cache is typically in the Dropbox folder itself
$dropboxInfoPath = "$env:LOCALAPPDATA\Dropbox\info.json"
if (Test-Path $dropboxInfoPath) {
try {
$dropboxInfo = Get-Content $dropboxInfoPath | ConvertFrom-Json
$dropboxPath = $dropboxInfo.personal.path
if ($dropboxPath) {
$dropboxCachePath = "$dropboxPath\.dropbox.cache"
if (Test-Path $dropboxCachePath) {
Clear-DirectoryContents -Path $dropboxCachePath -Description "Dropbox cache"
}
}
}
catch {
Write-Debug "Could not read Dropbox config: $_"
}
}
}
function Clear-GoogleDriveCache {
<#
.SYNOPSIS
Clean Google Drive cache
#>
$googleDriveCachePaths = @(
"$env:LOCALAPPDATA\Google\DriveFS\Logs"
"$env:LOCALAPPDATA\Google\DriveFS\*.tmp"
)
foreach ($path in $googleDriveCachePaths) {
if ($path -like "*.tmp") {
$parent = Split-Path -Parent $path
if (Test-Path $parent) {
$tmpFiles = Get-ChildItem -Path $parent -Filter "*.tmp" -ErrorAction SilentlyContinue
if ($tmpFiles) {
$paths = $tmpFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Google Drive temp"
}
}
}
elseif (Test-Path $path) {
Remove-OldFiles -Path $path -DaysOld 7 -Description "Google Drive logs"
}
}
}
function Clear-AdobeData {
<#
.SYNOPSIS
Clean Adobe application caches and temp files
#>
$adobeCachePaths = @(
"$env:APPDATA\Adobe\Common\Media Cache Files"
"$env:APPDATA\Adobe\Common\Peak Files"
"$env:APPDATA\Adobe\Common\Team Projects Cache"
"$env:LOCALAPPDATA\Adobe\*\Cache"
"$env:LOCALAPPDATA\Adobe\*\CameraRaw\Cache"
"$env:LOCALAPPDATA\Temp\Adobe"
)
foreach ($pattern in $adobeCachePaths) {
$paths = Resolve-Path $pattern -ErrorAction SilentlyContinue
foreach ($path in $paths) {
if (Test-Path $path.Path) {
Clear-DirectoryContents -Path $path.Path -Description "Adobe cache"
}
}
}
}
function Clear-AutodeskData {
<#
.SYNOPSIS
Clean Autodesk application caches
#>
$autodeskCachePaths = @(
"$env:LOCALAPPDATA\Autodesk\*\Cache"
"$env:APPDATA\Autodesk\*\cache"
)
foreach ($pattern in $autodeskCachePaths) {
$paths = Resolve-Path $pattern -ErrorAction SilentlyContinue
foreach ($path in $paths) {
if (Test-Path $path.Path) {
Clear-DirectoryContents -Path $path.Path -Description "Autodesk cache"
}
}
}
}
# ============================================================================
# Gaming Platform Cleanup
# ============================================================================
function Clear-GamingPlatformCaches {
<#
.SYNOPSIS
Clean gaming platform caches (Steam, Epic, Origin, etc.)
#>
# Steam
$steamPaths = @(
"${env:ProgramFiles(x86)}\Steam\appcache\httpcache"
"${env:ProgramFiles(x86)}\Steam\appcache\librarycache"
"${env:ProgramFiles(x86)}\Steam\logs"
)
foreach ($path in $steamPaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Steam $(Split-Path -Leaf $path)"
}
}
# Epic Games Launcher
$epicPaths = @(
"$env:LOCALAPPDATA\EpicGamesLauncher\Saved\webcache"
"$env:LOCALAPPDATA\EpicGamesLauncher\Saved\Logs"
)
foreach ($path in $epicPaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Epic Games $(Split-Path -Leaf $path)"
}
}
# EA App (Origin replacement)
$eaPaths = @(
"$env:LOCALAPPDATA\Electronic Arts\EA Desktop\cache"
"$env:APPDATA\Origin\*\cache"
)
foreach ($pattern in $eaPaths) {
$paths = Resolve-Path $pattern -ErrorAction SilentlyContinue
foreach ($path in $paths) {
if (Test-Path $path.Path) {
Clear-DirectoryContents -Path $path.Path -Description "EA/Origin cache"
}
}
}
# GOG Galaxy
$gogPaths = @(
"$env:LOCALAPPDATA\GOG.com\Galaxy\webcache"
"$env:PROGRAMDATA\GOG.com\Galaxy\logs"
)
foreach ($path in $gogPaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "GOG Galaxy $(Split-Path -Leaf $path)"
}
}
# Ubisoft Connect
$ubiPaths = @(
"$env:LOCALAPPDATA\Ubisoft Game Launcher\cache"
"$env:LOCALAPPDATA\Ubisoft Game Launcher\logs"
)
foreach ($path in $ubiPaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Ubisoft $(Split-Path -Leaf $path)"
}
}
# Battle.net
$battlenetPaths = @(
"$env:APPDATA\Battle.net\Cache"
"$env:APPDATA\Battle.net\Logs"
)
foreach ($path in $battlenetPaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Battle.net $(Split-Path -Leaf $path)"
}
}
}
# ============================================================================
# Main Application Cleanup Function
# ============================================================================
function Invoke-AppCleanup {
<#
.SYNOPSIS
Run all application-specific cleanup tasks
#>
param([switch]$IncludeOrphaned)
Start-Section "Applications"
# Productivity apps
Clear-OfficeCache
Clear-OneDriveCache
Clear-DropboxCache
Clear-GoogleDriveCache
# Creative apps
Clear-AdobeData
Clear-AutodeskData
# Gaming platforms
Clear-GamingPlatformCaches
Stop-Section
# Orphaned app data (separate section)
if ($IncludeOrphaned) {
Clear-OrphanedAppData -DaysOld 60
}
}
# ============================================================================
# Exports
# ============================================================================
# Functions: Get-InstalledPrograms, Find-OrphanedAppData, Clear-OfficeCache, etc.
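# Illustrative usage after dot-sourcing the module (path relative to the repo root):
#   . .\lib\clean\apps.ps1
#   Invoke-AppCleanup -IncludeOrphaned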

@@ -1,313 +0,0 @@
#!/bin/bash
# Application Data Cleanup Module
set -euo pipefail
# Args: $1=target_dir, $2=label
clean_ds_store_tree() {
local target="$1"
local label="$2"
[[ -d "$target" ]] || return 0
local file_count=0
local total_bytes=0
local spinner_active="false"
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" "
start_inline_spinner "Cleaning Finder metadata..."
spinner_active="true"
fi
local -a exclude_paths=(
-path "*/Library/Application Support/MobileSync" -prune -o
-path "*/Library/Developer" -prune -o
-path "*/.Trash" -prune -o
-path "*/node_modules" -prune -o
-path "*/.git" -prune -o
-path "*/Library/Caches" -prune -o
)
local -a find_cmd=("command" "find" "$target")
if [[ "$target" == "$HOME" ]]; then
find_cmd+=("-maxdepth" "5")
fi
find_cmd+=("${exclude_paths[@]}" "-type" "f" "-name" ".DS_Store" "-print0")
while IFS= read -r -d '' ds_file; do
local size
size=$(get_file_size "$ds_file")
total_bytes=$((total_bytes + size))
file_count=$((file_count + 1)) # plain assignment: ((var++)) returns non-zero when var is 0, tripping set -e
if [[ "$DRY_RUN" != "true" ]]; then
rm -f "$ds_file" 2> /dev/null || true
fi
if [[ $file_count -ge $MOLE_MAX_DS_STORE_FILES ]]; then
break
fi
done < <("${find_cmd[@]}" 2> /dev/null || true)
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
if [[ $file_count -gt 0 ]]; then
local size_human
size_human=$(bytes_to_human "$total_bytes")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} $label ${YELLOW}($file_count files, $size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $label ${GREEN}($file_count files, $size_human)${NC}"
fi
local size_kb=$(((total_bytes + 1023) / 1024))
files_cleaned=$((files_cleaned + file_count))
total_size_cleaned=$((total_size_cleaned + size_kb))
total_items=$((total_items + 1))
note_activity
fi
}
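# Illustrative call (label text is free-form): sweep Finder metadata under the home
# directory; the depth limit and exclusions above keep the scan bounded.
#   clean_ds_store_tree "$HOME" "Finder metadata"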
# Orphaned app data (60+ days inactive). Env: ORPHAN_AGE_THRESHOLD, DRY_RUN
# Usage: scan_installed_apps "output_file"
scan_installed_apps() {
local installed_bundles="$1"
# Cache installed app scan briefly to speed repeated runs.
local cache_file="$HOME/.cache/mole/installed_apps_cache"
local cache_age_seconds=300 # 5 minutes
if [[ -f "$cache_file" ]]; then
local cache_mtime=$(get_file_mtime "$cache_file")
local current_time
current_time=$(get_epoch_seconds)
local age=$((current_time - cache_mtime))
if [[ $age -lt $cache_age_seconds ]]; then
debug_log "Using cached app list (age: ${age}s)"
if [[ -r "$cache_file" ]] && [[ -s "$cache_file" ]]; then
if cat "$cache_file" > "$installed_bundles" 2> /dev/null; then
return 0
else
debug_log "Warning: Failed to read cache, rebuilding"
fi
else
debug_log "Warning: Cache file empty or unreadable, rebuilding"
fi
fi
fi
debug_log "Scanning installed applications (cache expired or missing)"
local -a app_dirs=(
"/Applications"
"/System/Applications"
"$HOME/Applications"
# Homebrew Cask locations
"/opt/homebrew/Caskroom"
"/usr/local/Caskroom"
# Setapp applications
"$HOME/Library/Application Support/Setapp/Applications"
)
# Temp dir avoids write contention across parallel scans.
local scan_tmp_dir=$(create_temp_dir)
local pids=()
local dir_idx=0
for app_dir in "${app_dirs[@]}"; do
[[ -d "$app_dir" ]] || continue
(
local -a app_paths=()
while IFS= read -r app_path; do
[[ -n "$app_path" ]] && app_paths+=("$app_path")
done < <(find "$app_dir" -name '*.app' -maxdepth 3 -type d 2> /dev/null)
local count=0
for app_path in "${app_paths[@]:-}"; do
local plist_path="$app_path/Contents/Info.plist"
[[ ! -f "$plist_path" ]] && continue
local bundle_id=$(/usr/libexec/PlistBuddy -c "Print :CFBundleIdentifier" "$plist_path" 2> /dev/null || echo "")
if [[ -n "$bundle_id" ]]; then
echo "$bundle_id"
count=$((count + 1))
fi
done
) > "$scan_tmp_dir/apps_${dir_idx}.txt" &
pids+=($!)
dir_idx=$((dir_idx + 1))
done
# Collect running apps and LaunchAgents to avoid false orphan cleanup.
(
local running_apps=$(run_with_timeout 5 osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2> /dev/null || echo "")
echo "$running_apps" | tr ',' '\n' | sed -e 's/^ *//;s/ *$//' -e '/^$/d' > "$scan_tmp_dir/running.txt"
# Fallback: lsappinfo is more reliable than osascript
if command -v lsappinfo > /dev/null 2>&1; then
run_with_timeout 3 lsappinfo list 2> /dev/null | grep -o '"CFBundleIdentifier"="[^"]*"' | cut -d'"' -f4 >> "$scan_tmp_dir/running.txt" 2> /dev/null || true
fi
) &
pids+=($!)
(
run_with_timeout 5 find ~/Library/LaunchAgents /Library/LaunchAgents \
-name "*.plist" -type f 2> /dev/null |
xargs -I {} basename {} .plist > "$scan_tmp_dir/agents.txt" 2> /dev/null || true
) &
pids+=($!)
debug_log "Waiting for ${#pids[@]} background processes: ${pids[*]}"
for pid in "${pids[@]}"; do
wait "$pid" 2> /dev/null || true
done
debug_log "All background processes completed"
cat "$scan_tmp_dir"/*.txt >> "$installed_bundles" 2> /dev/null || true
safe_remove "$scan_tmp_dir" true
sort -u "$installed_bundles" -o "$installed_bundles"
ensure_user_dir "$(dirname "$cache_file")"
cp "$installed_bundles" "$cache_file" 2> /dev/null || true
local app_count=$(wc -l < "$installed_bundles" 2> /dev/null | tr -d ' ')
debug_log "Scanned $app_count unique applications"
}
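# Illustrative use (hypothetical temp file): build the sorted, de-duplicated
# bundle-id list consumed by the orphan checks below.
#   tmp=$(create_temp_file); scan_installed_apps "$tmp"; wc -l < "$tmp"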
# Sensitive data patterns that should never be treated as orphaned
# These patterns protect security-critical application data
readonly ORPHAN_NEVER_DELETE_PATTERNS=(
"*1password*" "*1Password*"
"*keychain*" "*Keychain*"
"*bitwarden*" "*Bitwarden*"
"*lastpass*" "*LastPass*"
"*keepass*" "*KeePass*"
"*dashlane*" "*Dashlane*"
"*enpass*" "*Enpass*"
"*ssh*" "*gpg*" "*gnupg*"
"com.apple.keychain*"
)
# Cache file for mdfind results (Bash 3.2 compatible, no associative arrays)
ORPHAN_MDFIND_CACHE_FILE=""
# Usage: is_bundle_orphaned "bundle_id" "directory_path" "installed_bundles_file"
is_bundle_orphaned() {
local bundle_id="$1"
local directory_path="$2"
local installed_bundles="$3"
# 1. Fast path: check protection list (in-memory, instant)
if should_protect_data "$bundle_id"; then
return 1
fi
# 2. Fast path: check sensitive data patterns (in-memory, instant)
local bundle_lower
bundle_lower=$(echo "$bundle_id" | LC_ALL=C tr '[:upper:]' '[:lower:]')
for pattern in "${ORPHAN_NEVER_DELETE_PATTERNS[@]}"; do
# shellcheck disable=SC2053
if [[ "$bundle_lower" == $pattern ]]; then
return 1
fi
done
# 3. Fast path: check installed bundles file (file read, fast)
if grep -Fxq "$bundle_id" "$installed_bundles" 2> /dev/null; then
return 1
fi
# 4. Fast path: hardcoded system components
case "$bundle_id" in
loginwindow | dock | systempreferences | systemsettings | settings | controlcenter | finder | safari)
return 1
;;
esac
# 5. Fast path: 60-day modification check (stat call, fast)
if [[ -e "$directory_path" ]]; then
local last_modified_epoch=$(get_file_mtime "$directory_path")
local current_epoch
current_epoch=$(get_epoch_seconds)
local days_since_modified=$(((current_epoch - last_modified_epoch) / 86400))
if [[ $days_since_modified -lt ${ORPHAN_AGE_THRESHOLD:-60} ]]; then
return 1
fi
fi
# 6. Slow path: mdfind fallback with file-based caching (Bash 3.2 compatible)
# This catches apps installed in non-standard locations
if [[ -n "$bundle_id" ]] && [[ "$bundle_id" =~ ^[a-zA-Z0-9._-]+$ ]] && [[ ${#bundle_id} -ge 5 ]]; then
# Initialize cache file if needed
if [[ -z "$ORPHAN_MDFIND_CACHE_FILE" ]]; then
ORPHAN_MDFIND_CACHE_FILE=$(mktemp "${TMPDIR:-/tmp}/mole_mdfind_cache.XXXXXX")
register_temp_file "$ORPHAN_MDFIND_CACHE_FILE"
fi
# Check cache first (grep is fast for small files)
if grep -Fxq "FOUND:$bundle_id" "$ORPHAN_MDFIND_CACHE_FILE" 2> /dev/null; then
return 1
fi
if grep -Fxq "NOTFOUND:$bundle_id" "$ORPHAN_MDFIND_CACHE_FILE" 2> /dev/null; then
# Already checked, not found - continue to return 0
:
else
# Query mdfind with strict timeout (2 seconds max)
local app_exists
app_exists=$(run_with_timeout 2 mdfind "kMDItemCFBundleIdentifier == '$bundle_id'" 2> /dev/null | head -1 || echo "")
if [[ -n "$app_exists" ]]; then
echo "FOUND:$bundle_id" >> "$ORPHAN_MDFIND_CACHE_FILE"
return 1
else
echo "NOTFOUND:$bundle_id" >> "$ORPHAN_MDFIND_CACHE_FILE"
fi
fi
fi
# All checks passed - this is an orphan
return 0
}
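# Illustrative call (hypothetical bundle id): only sweep a stale cache directory
# when every check above says the app is gone.
#   if is_bundle_orphaned "com.example.oldapp" \
#       "$HOME/Library/Caches/com.example.oldapp" "$installed_bundles"; then
#       safe_clean "$HOME/Library/Caches/com.example.oldapp" "Orphaned Caches: com.example.oldapp"
#   fi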
# Orphaned app data sweep.
clean_orphaned_app_data() {
if ! ls "$HOME/Library/Caches" > /dev/null 2>&1; then
stop_section_spinner
echo -e " ${YELLOW}${ICON_WARNING}${NC} Skipped: No permission to access Library folders"
return 0
fi
start_section_spinner "Scanning installed apps..."
local installed_bundles=$(create_temp_file)
scan_installed_apps "$installed_bundles"
stop_section_spinner
local app_count=$(wc -l < "$installed_bundles" 2> /dev/null | tr -d ' ')
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Found $app_count active/installed apps"
local orphaned_count=0
local total_orphaned_kb=0
start_section_spinner "Scanning orphaned app resources..."
# CRITICAL: NEVER add LaunchAgents or LaunchDaemons (breaks login items/startup apps).
local -a resource_types=(
"$HOME/Library/Caches|Caches|com.*:org.*:net.*:io.*"
"$HOME/Library/Logs|Logs|com.*:org.*:net.*:io.*"
"$HOME/Library/Saved Application State|States|*.savedState"
"$HOME/Library/WebKit|WebKit|com.*:org.*:net.*:io.*"
"$HOME/Library/HTTPStorages|HTTP|com.*:org.*:net.*:io.*"
"$HOME/Library/Cookies|Cookies|*.binarycookies"
)
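# Each entry above is "base_path|display label|colon-separated glob patterns"; the
# first row, for example, expands to globs such as $HOME/Library/Caches/com.* and
# $HOME/Library/Caches/org.*.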
orphaned_count=0
for resource_type in "${resource_types[@]}"; do
IFS='|' read -r base_path label patterns <<< "$resource_type"
if [[ ! -d "$base_path" ]]; then
continue
fi
if ! ls "$base_path" > /dev/null 2>&1; then
continue
fi
local -a file_patterns=()
IFS=':' read -ra pattern_arr <<< "$patterns"
for pat in "${pattern_arr[@]}"; do
file_patterns+=("$base_path/$pat")
done
for item_path in "${file_patterns[@]}"; do
local iteration_count=0
for match in $item_path; do
[[ -e "$match" ]] || continue
iteration_count=$((iteration_count + 1))
if [[ $iteration_count -gt $MOLE_MAX_ORPHAN_ITERATIONS ]]; then
break
fi
local bundle_id=$(basename "$match")
bundle_id="${bundle_id%.savedState}"
bundle_id="${bundle_id%.binarycookies}"
if is_bundle_orphaned "$bundle_id" "$match" "$installed_bundles"; then
local size_kb
size_kb=$(get_path_size_kb "$match")
if [[ -z "$size_kb" || "$size_kb" == "0" ]]; then
continue
fi
safe_clean "$match" "Orphaned $label: $bundle_id"
orphaned_count=$((orphaned_count + 1))
total_orphaned_kb=$((total_orphaned_kb + size_kb))
fi
done
done
done
stop_section_spinner
if [[ $orphaned_count -gt 0 ]]; then
local orphaned_mb=$(echo "$total_orphaned_kb" | awk '{printf "%.1f", $1/1024}')
echo " ${GREEN}${ICON_SUCCESS}${NC} Cleaned $orphaned_count items (~${orphaned_mb}MB)"
note_activity
fi
rm -f "$installed_bundles"
}

@@ -1,117 +0,0 @@
#!/bin/bash
# Clean Homebrew caches and remove orphaned dependencies
# Env: DRY_RUN
# Skips if run within 7 days, runs cleanup/autoremove in parallel with 120s timeout
clean_homebrew() {
command -v brew > /dev/null 2>&1 || return 0
if [[ "${DRY_RUN:-false}" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Homebrew · would cleanup and autoremove"
return 0
fi
# Skip if cleaned recently to avoid repeated heavy operations.
local brew_cache_file="${HOME}/.cache/mole/brew_last_cleanup"
local cache_valid_days=7
local should_skip=false
if [[ -f "$brew_cache_file" ]]; then
local last_cleanup
last_cleanup=$(cat "$brew_cache_file" 2> /dev/null || echo "0")
local current_time
current_time=$(get_epoch_seconds)
local time_diff=$((current_time - last_cleanup))
local days_diff=$((time_diff / 86400))
if [[ $days_diff -lt $cache_valid_days ]]; then
should_skip=true
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Homebrew · cleaned ${days_diff}d ago, skipped"
fi
fi
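# Worked example of the skip math (hypothetical epochs): last_cleanup=1700000000,
# current_time=1700260000 -> time_diff=260000s -> days_diff=3 -> 3 < 7, so skip.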
[[ "$should_skip" == "true" ]] && return 0
# Skip cleanup if cache is small; still run autoremove.
local skip_cleanup=false
local brew_cache_size=0
if [[ -d ~/Library/Caches/Homebrew ]]; then
brew_cache_size=$(run_with_timeout 3 du -sk ~/Library/Caches/Homebrew 2> /dev/null | awk '{print $1}')
local du_exit=$?
if [[ $du_exit -eq 0 && -n "$brew_cache_size" && "$brew_cache_size" -lt 51200 ]]; then
skip_cleanup=true
fi
fi
# Spinner reflects whether cleanup is skipped.
if [[ -t 1 ]]; then
if [[ "$skip_cleanup" == "true" ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Homebrew autoremove (cleanup skipped)..."
else
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Homebrew cleanup and autoremove..."
fi
fi
# Run cleanup/autoremove in parallel with timeout guard per command.
local timeout_seconds=120
local brew_tmp_file="" autoremove_tmp_file=""
local brew_pid="" autoremove_pid=""
local brew_exit=0
local autoremove_exit=0
if [[ "$skip_cleanup" == "false" ]]; then
brew_tmp_file=$(create_temp_file)
run_with_timeout "$timeout_seconds" brew cleanup > "$brew_tmp_file" 2>&1 &
brew_pid=$!
fi
autoremove_tmp_file=$(create_temp_file)
run_with_timeout "$timeout_seconds" brew autoremove > "$autoremove_tmp_file" 2>&1 &
autoremove_pid=$!
if [[ -n "$brew_pid" ]]; then
wait "$brew_pid" 2> /dev/null || brew_exit=$?
fi
wait "$autoremove_pid" 2> /dev/null || autoremove_exit=$?
local brew_success=false
if [[ "$skip_cleanup" == "false" && $brew_exit -eq 0 ]]; then
brew_success=true
fi
local autoremove_success=false
if [[ $autoremove_exit -eq 0 ]]; then
autoremove_success=true
fi
if [[ -t 1 ]]; then stop_inline_spinner; fi
# Summarize cleanup results and extract freed-space metrics.
if [[ "$skip_cleanup" == "true" ]]; then
# Cleanup was skipped due to small cache size
local size_mb=$((brew_cache_size / 1024))
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Homebrew cleanup · cache ${size_mb}MB, skipped"
elif [[ "$brew_success" == "true" && -f "$brew_tmp_file" ]]; then
local brew_output
brew_output=$(cat "$brew_tmp_file" 2> /dev/null || echo "")
local removed_count freed_space
removed_count=$(printf '%s\n' "$brew_output" | grep -c "Removing:" 2> /dev/null || true)
freed_space=$(printf '%s\n' "$brew_output" | grep -o "[0-9.]*[KMGT]B freed" 2> /dev/null | tail -1 || true)
if [[ $removed_count -gt 0 ]] || [[ -n "$freed_space" ]]; then
if [[ -n "$freed_space" ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Homebrew cleanup ${GREEN}($freed_space)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Homebrew cleanup (${removed_count} items)"
fi
fi
elif [[ $brew_exit -eq 124 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Homebrew cleanup timed out · run ${GRAY}brew cleanup${NC} manually"
fi
# Only surface autoremove output when packages were removed.
if [[ "$autoremove_success" == "true" && -f "$autoremove_tmp_file" ]]; then
local autoremove_output
autoremove_output=$(cat "$autoremove_tmp_file" 2> /dev/null || echo "")
local removed_packages
removed_packages=$(printf '%s\n' "$autoremove_output" | grep -c "^Uninstalling" 2> /dev/null || true)
if [[ $removed_packages -gt 0 ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed orphaned dependencies (${removed_packages} packages)"
fi
elif [[ $autoremove_exit -eq 124 ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Autoremove timed out · run ${GRAY}brew autoremove${NC} manually"
fi
# Update cache timestamp when any work succeeded or cleanup was intentionally
# skipped; this prevents repeated cache-size checks within the 7-day window.
if [[ "$skip_cleanup" == "true" ]] || [[ "$brew_success" == "true" ]] || [[ "$autoremove_success" == "true" ]]; then
ensure_user_file "$brew_cache_file"
get_epoch_seconds > "$brew_cache_file"
fi
}

385
lib/clean/caches.ps1 Normal file

@@ -0,0 +1,385 @@
# Mole - Cache Cleanup Module
# Cleans Windows and application caches
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_CLEAN_CACHES_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_CLEAN_CACHES_LOADED) { return }
$script:MOLE_CLEAN_CACHES_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\..\core\base.ps1"
. "$scriptDir\..\core\log.ps1"
. "$scriptDir\..\core\file_ops.ps1"
# ============================================================================
# Windows System Caches
# ============================================================================
function Clear-WindowsUpdateCache {
<#
.SYNOPSIS
Clean Windows Update cache (requires admin)
#>
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Windows Update cache - requires admin"
return
}
$wuPath = "$env:WINDIR\SoftwareDistribution\Download"
if (Test-Path $wuPath) {
# Stop Windows Update service first
if (Test-DryRunMode) {
Write-DryRun "Windows Update cache"
Set-SectionActivity
return
}
try {
Stop-Service -Name wuauserv -Force -ErrorAction SilentlyContinue
Clear-DirectoryContents -Path $wuPath -Description "Windows Update cache"
Start-Service -Name wuauserv -ErrorAction SilentlyContinue
}
catch {
Write-Debug "Could not clear Windows Update cache: $_"
Start-Service -Name wuauserv -ErrorAction SilentlyContinue
}
}
}
function Clear-DeliveryOptimizationCache {
<#
.SYNOPSIS
Clean Windows Delivery Optimization cache (requires admin)
#>
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Delivery Optimization cache - requires admin"
return
}
$doPath = "$env:WINDIR\ServiceProfiles\NetworkService\AppData\Local\Microsoft\Windows\DeliveryOptimization"
if (Test-Path $doPath) {
if (Test-DryRunMode) {
Write-DryRun "Delivery Optimization cache"
Set-SectionActivity
return
}
try {
Stop-Service -Name DoSvc -Force -ErrorAction SilentlyContinue
Clear-DirectoryContents -Path "$doPath\Cache" -Description "Delivery Optimization cache"
Start-Service -Name DoSvc -ErrorAction SilentlyContinue
}
catch {
Write-Debug "Could not clear Delivery Optimization cache: $_"
Start-Service -Name DoSvc -ErrorAction SilentlyContinue
}
}
}
function Clear-FontCache {
<#
.SYNOPSIS
Clean Windows font cache (requires admin)
#>
if (-not (Test-IsAdmin)) {
return
}
$fontCachePath = "$env:LOCALAPPDATA\Microsoft\Windows\Fonts\FontCache"
if (Test-Path $fontCachePath) {
Remove-SafeItem -Path $fontCachePath -Description "Font cache"
}
}
# ============================================================================
# Browser Caches
# ============================================================================
function Clear-BrowserCaches {
<#
.SYNOPSIS
Clean browser cache directories
#>
Start-Section "Browser caches"
# Chrome
$chromeCachePaths = @(
"$env:LOCALAPPDATA\Google\Chrome\User Data\Default\Cache"
"$env:LOCALAPPDATA\Google\Chrome\User Data\Default\Code Cache"
"$env:LOCALAPPDATA\Google\Chrome\User Data\Default\GPUCache"
"$env:LOCALAPPDATA\Google\Chrome\User Data\Default\Service Worker\CacheStorage"
"$env:LOCALAPPDATA\Google\Chrome\User Data\ShaderCache"
"$env:LOCALAPPDATA\Google\Chrome\User Data\GrShaderCache"
)
foreach ($path in $chromeCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Chrome $(Split-Path -Leaf $path)"
}
}
# Edge
$edgeCachePaths = @(
"$env:LOCALAPPDATA\Microsoft\Edge\User Data\Default\Cache"
"$env:LOCALAPPDATA\Microsoft\Edge\User Data\Default\Code Cache"
"$env:LOCALAPPDATA\Microsoft\Edge\User Data\Default\GPUCache"
"$env:LOCALAPPDATA\Microsoft\Edge\User Data\Default\Service Worker\CacheStorage"
"$env:LOCALAPPDATA\Microsoft\Edge\User Data\ShaderCache"
)
foreach ($path in $edgeCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Edge $(Split-Path -Leaf $path)"
}
}
# Firefox
$firefoxProfiles = "$env:APPDATA\Mozilla\Firefox\Profiles"
if (Test-Path $firefoxProfiles) {
$profiles = Get-ChildItem -Path $firefoxProfiles -Directory -ErrorAction SilentlyContinue
foreach ($ffProfile in $profiles) {
# Loop variable renamed to avoid shadowing the automatic $profile variable
$firefoxCachePaths = @(
"$($ffProfile.FullName)\cache2"
"$($ffProfile.FullName)\startupCache"
"$($ffProfile.FullName)\shader-cache"
)
foreach ($path in $firefoxCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Firefox cache"
}
}
}
}
# Brave
$braveCachePath = "$env:LOCALAPPDATA\BraveSoftware\Brave-Browser\User Data\Default\Cache"
if (Test-Path $braveCachePath) {
Clear-DirectoryContents -Path $braveCachePath -Description "Brave cache"
}
# Opera
$operaCachePath = "$env:APPDATA\Opera Software\Opera Stable\Cache"
if (Test-Path $operaCachePath) {
Clear-DirectoryContents -Path $operaCachePath -Description "Opera cache"
}
Stop-Section
}
# ============================================================================
# Application Caches
# ============================================================================
function Clear-AppCaches {
<#
.SYNOPSIS
Clean common application caches
#>
Start-Section "Application caches"
# Spotify
$spotifyCachePaths = @(
"$env:LOCALAPPDATA\Spotify\Data"
"$env:LOCALAPPDATA\Spotify\Storage"
)
foreach ($path in $spotifyCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Spotify cache"
}
}
# Discord
$discordCachePaths = @(
"$env:APPDATA\discord\Cache"
"$env:APPDATA\discord\Code Cache"
"$env:APPDATA\discord\GPUCache"
)
foreach ($path in $discordCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Discord cache"
}
}
# Slack
$slackCachePaths = @(
"$env:APPDATA\Slack\Cache"
"$env:APPDATA\Slack\Code Cache"
"$env:APPDATA\Slack\GPUCache"
"$env:APPDATA\Slack\Service Worker\CacheStorage"
)
foreach ($path in $slackCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Slack cache"
}
}
# Teams
$teamsCachePaths = @(
"$env:APPDATA\Microsoft\Teams\Cache"
"$env:APPDATA\Microsoft\Teams\blob_storage"
"$env:APPDATA\Microsoft\Teams\databases"
"$env:APPDATA\Microsoft\Teams\GPUCache"
"$env:APPDATA\Microsoft\Teams\IndexedDB"
"$env:APPDATA\Microsoft\Teams\Local Storage"
"$env:APPDATA\Microsoft\Teams\tmp"
)
foreach ($path in $teamsCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Teams cache"
}
}
# VS Code
$vscodeCachePaths = @(
"$env:APPDATA\Code\Cache"
"$env:APPDATA\Code\CachedData"
"$env:APPDATA\Code\CachedExtensions"
"$env:APPDATA\Code\CachedExtensionVSIXs"
"$env:APPDATA\Code\Code Cache"
"$env:APPDATA\Code\GPUCache"
)
foreach ($path in $vscodeCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "VS Code cache"
}
}
# Zoom
$zoomCachePath = "$env:APPDATA\Zoom\data"
if (Test-Path $zoomCachePath) {
Clear-DirectoryContents -Path $zoomCachePath -Description "Zoom cache"
}
# Adobe Creative Cloud
$adobeCachePaths = @(
"$env:LOCALAPPDATA\Adobe\*\Cache"
"$env:APPDATA\Adobe\Common\Media Cache Files"
"$env:APPDATA\Adobe\Common\Peak Files"
)
foreach ($pattern in $adobeCachePaths) {
$paths = Resolve-Path $pattern -ErrorAction SilentlyContinue
foreach ($path in $paths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path.Path -Description "Adobe cache"
}
}
}
# Steam (download cache, not games)
$steamCachePath = "${env:ProgramFiles(x86)}\Steam\appcache"
if (Test-Path $steamCachePath) {
Clear-DirectoryContents -Path $steamCachePath -Description "Steam app cache"
}
# Epic Games Launcher
$epicCachePath = "$env:LOCALAPPDATA\EpicGamesLauncher\Saved\webcache"
if (Test-Path $epicCachePath) {
Clear-DirectoryContents -Path $epicCachePath -Description "Epic Games cache"
}
Stop-Section
}
# ============================================================================
# Windows Store / UWP App Caches
# ============================================================================
function Clear-StoreAppCaches {
<#
.SYNOPSIS
Clean Windows Store and UWP app caches
#>
# Microsoft Store cache
$storeCache = "$env:LOCALAPPDATA\Microsoft\Windows\WCN"
if (Test-Path $storeCache) {
Clear-DirectoryContents -Path $storeCache -Description "Windows Store cache"
}
# Store app temp files
$storeTemp = "$env:LOCALAPPDATA\Packages\Microsoft.WindowsStore_*\LocalCache"
$storePaths = Resolve-Path $storeTemp -ErrorAction SilentlyContinue
foreach ($path in $storePaths) {
if (Test-Path $path.Path) {
Clear-DirectoryContents -Path $path.Path -Description "Store LocalCache"
}
}
}
# ============================================================================
# .NET / Runtime Caches
# ============================================================================
function Clear-DotNetCaches {
<#
.SYNOPSIS
Clean .NET runtime caches
#>
# .NET temp files
$dotnetTemp = "$env:LOCALAPPDATA\Temp\Microsoft.NET"
if (Test-Path $dotnetTemp) {
Clear-DirectoryContents -Path $dotnetTemp -Description ".NET temp files"
}
# NGen cache (don't touch - managed by Windows)
# Assembly cache (don't touch - managed by CLR)
}
# ============================================================================
# Main Cache Cleanup Function
# ============================================================================
function Invoke-CacheCleanup {
<#
.SYNOPSIS
Run all cache cleanup tasks
#>
param(
[switch]$IncludeWindowsUpdate,
[switch]$IncludeBrowsers,
[switch]$IncludeApps
)
Start-Section "System caches"
# Windows system caches (if admin)
if (Test-IsAdmin) {
if ($IncludeWindowsUpdate) {
Clear-WindowsUpdateCache
Clear-DeliveryOptimizationCache
}
Clear-FontCache
}
Clear-StoreAppCaches
Clear-DotNetCaches
Stop-Section
# Browser caches
if ($IncludeBrowsers) {
Clear-BrowserCaches
}
# Application caches
if ($IncludeApps) {
Clear-AppCaches
}
}
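# Usage sketch (assumes this module is dot-sourced by the Mole entry script;
# the switches are the parameters declared above):
#   . "$PSScriptRoot\lib\clean\caches.ps1"
#   Invoke-CacheCleanup -IncludeWindowsUpdate -IncludeBrowsers -IncludeApps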
# ============================================================================
# Exports
# ============================================================================
# Functions: Clear-WindowsUpdateCache, Clear-BrowserCaches, Clear-AppCaches, etc.


@@ -1,217 +0,0 @@
#!/bin/bash
# Cache Cleanup Module
set -euo pipefail
# Preflight TCC prompts once to avoid mid-run interruptions.
check_tcc_permissions() {
[[ -t 1 ]] || return 0
local permission_flag="$HOME/.cache/mole/permissions_granted"
[[ -f "$permission_flag" ]] && return 0
local -a tcc_dirs=(
"$HOME/Library/Caches"
"$HOME/Library/Logs"
"$HOME/Library/Application Support"
"$HOME/Library/Containers"
"$HOME/.cache"
)
# Quick permission probe (avoid deep scans).
local needs_permission_check=false
if ! ls "$HOME/Library/Caches" > /dev/null 2>&1; then
needs_permission_check=true
fi
if [[ "$needs_permission_check" == "true" ]]; then
echo ""
echo -e "${BLUE}First-time setup${NC}"
echo -e "${GRAY}macOS will request permissions to access Library folders.${NC}"
echo -e "${GRAY}You may see ${GREEN}${#tcc_dirs[@]} permission dialogs${NC}${GRAY} - please approve them all.${NC}"
echo ""
echo -ne "${PURPLE}${ICON_ARROW}${NC} Press ${GREEN}Enter${NC} to continue: "
read -r
MOLE_SPINNER_PREFIX="" start_inline_spinner "Requesting permissions..."
# Touch each directory to trigger prompts without deep scanning.
for dir in "${tcc_dirs[@]}"; do
[[ -d "$dir" ]] && command find "$dir" -maxdepth 1 -type d > /dev/null 2>&1
done
stop_inline_spinner
echo ""
fi
# Mark as granted to avoid repeat prompts.
ensure_user_file "$permission_flag"
return 0
}
# Args: $1=browser_name, $2=cache_path
# Clean Service Worker cache while protecting critical web editors.
clean_service_worker_cache() {
local browser_name="$1"
local cache_path="$2"
[[ ! -d "$cache_path" ]] && return 0
local cleaned_size=0
local protected_count=0
while IFS= read -r cache_dir; do
[[ ! -d "$cache_dir" ]] && continue
# Extract a best-effort domain name from cache folder.
local domain=$(basename "$cache_dir" | grep -oE '[a-zA-Z0-9][-a-zA-Z0-9]*\.[a-zA-Z]{2,}' | head -1 || echo "")
local size=$(run_with_timeout 5 get_path_size_kb "$cache_dir")
local is_protected=false
for protected_domain in "${PROTECTED_SW_DOMAINS[@]}"; do
if [[ "$domain" == *"$protected_domain"* ]]; then
is_protected=true
protected_count=$((protected_count + 1))
break
fi
done
if [[ "$is_protected" == "false" ]]; then
if [[ "$DRY_RUN" != "true" ]]; then
safe_remove "$cache_dir" true || true
fi
cleaned_size=$((cleaned_size + size))
fi
done < <(run_with_timeout 10 sh -c "find '$cache_path' -type d -depth 2 2> /dev/null || true")
if [[ $cleaned_size -gt 0 ]]; then
local spinner_was_running=false
if [[ -t 1 && -n "${INLINE_SPINNER_PID:-}" ]]; then
stop_inline_spinner
spinner_was_running=true
fi
local cleaned_mb=$((cleaned_size / 1024))
if [[ "$DRY_RUN" != "true" ]]; then
if [[ $protected_count -gt 0 ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $browser_name Service Worker (${cleaned_mb}MB, ${protected_count} protected)"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $browser_name Service Worker (${cleaned_mb}MB)"
fi
else
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} $browser_name Service Worker (would clean ${cleaned_mb}MB, ${protected_count} protected)"
fi
note_activity
if [[ "$spinner_was_running" == "true" ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning browser Service Worker caches..."
fi
fi
}
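# Example call (illustrative only; the real cache path comes from the caller):
#   clean_service_worker_cache "Chrome" \
#     "$HOME/Library/Application Support/Google/Chrome/Default/Service Worker/CacheStorage"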
# Next.js/Python project caches with tight scan bounds and timeouts.
clean_project_caches() {
stop_inline_spinner 2> /dev/null || true
# Fast pre-check before scanning the whole home dir.
local has_dev_projects=false
local -a common_dev_dirs=(
"$HOME/Code"
"$HOME/Projects"
"$HOME/workspace"
"$HOME/github"
"$HOME/dev"
"$HOME/work"
"$HOME/src"
"$HOME/repos"
"$HOME/Development"
"$HOME/www"
"$HOME/golang"
"$HOME/go"
"$HOME/rust"
"$HOME/python"
"$HOME/ruby"
"$HOME/java"
"$HOME/dotnet"
"$HOME/node"
)
for dir in "${common_dev_dirs[@]}"; do
if [[ -d "$dir" ]]; then
has_dev_projects=true
break
fi
done
# Fallback: look for project markers near $HOME.
if [[ "$has_dev_projects" == "false" ]]; then
local -a project_markers=(
"node_modules"
".git"
"target"
"go.mod"
"Cargo.toml"
"package.json"
"pom.xml"
"build.gradle"
)
local spinner_active=false
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" "
start_inline_spinner "Detecting dev projects..."
spinner_active=true
fi
for marker in "${project_markers[@]}"; do
if run_with_timeout 3 sh -c "find '$HOME' -maxdepth 2 -name '$marker' -not -path '*/Library/*' -not -path '*/.Trash/*' 2>/dev/null | head -1" | grep -q .; then
has_dev_projects=true
break
fi
done
if [[ "$spinner_active" == "true" ]]; then
stop_inline_spinner 2> /dev/null || true
fi
[[ "$has_dev_projects" == "false" ]] && return 0
fi
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" "
start_inline_spinner "Searching project caches..."
fi
local nextjs_tmp_file
nextjs_tmp_file=$(create_temp_file)
local pycache_tmp_file
pycache_tmp_file=$(create_temp_file)
local find_timeout=10
# Parallel scans (Next.js and __pycache__).
(
command find "$HOME" -P -mount -type d -name ".next" -maxdepth 3 \
-not -path "*/Library/*" \
-not -path "*/.Trash/*" \
-not -path "*/node_modules/*" \
-not -path "*/.*" \
2> /dev/null || true
) > "$nextjs_tmp_file" 2>&1 &
local next_pid=$!
(
command find "$HOME" -P -mount -type d -name "__pycache__" -maxdepth 3 \
-not -path "*/Library/*" \
-not -path "*/.Trash/*" \
-not -path "*/node_modules/*" \
-not -path "*/.*" \
2> /dev/null || true
) > "$pycache_tmp_file" 2>&1 &
local py_pid=$!
local elapsed=0
local check_interval=0.2 # Check every 200ms instead of 1s for smoother experience
while [[ $(echo "$elapsed < $find_timeout" | awk '{print ($1 < $2)}') -eq 1 ]]; do
if ! kill -0 $next_pid 2> /dev/null && ! kill -0 $py_pid 2> /dev/null; then
break
fi
sleep $check_interval
elapsed=$(echo "$elapsed + $check_interval" | awk '{print $1 + $2}')
done
# Kill stuck scans after timeout.
for pid in $next_pid $py_pid; do
if kill -0 "$pid" 2> /dev/null; then
kill -TERM "$pid" 2> /dev/null || true
local grace_period=0
while [[ $grace_period -lt 20 ]]; do
if ! kill -0 "$pid" 2> /dev/null; then
break
fi
sleep 0.1
grace_period=$((grace_period + 1))
done
if kill -0 "$pid" 2> /dev/null; then
kill -KILL "$pid" 2> /dev/null || true
fi
wait "$pid" 2> /dev/null || true
else
wait "$pid" 2> /dev/null || true
fi
done
if [[ -t 1 ]]; then
stop_inline_spinner
fi
while IFS= read -r next_dir; do
[[ -d "$next_dir/cache" ]] && safe_clean "$next_dir/cache"/* "Next.js build cache" || true
done < "$nextjs_tmp_file"
while IFS= read -r pycache; do
[[ -d "$pycache" ]] && safe_clean "$pycache"/* "Python bytecode cache" || true
done < "$pycache_tmp_file"
}

537
lib/clean/dev.ps1 Normal file

@@ -0,0 +1,537 @@
# Mole - Developer Tools Cleanup Module
# Cleans development tool caches and build artifacts
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_CLEAN_DEV_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_CLEAN_DEV_LOADED) { return }
$script:MOLE_CLEAN_DEV_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\..\core\base.ps1"
. "$scriptDir\..\core\log.ps1"
. "$scriptDir\..\core\file_ops.ps1"
# ============================================================================
# Node.js / JavaScript Ecosystem
# ============================================================================
function Clear-NpmCache {
<#
.SYNOPSIS
Clean npm, pnpm, yarn, and bun caches
#>
# npm cache
if (Get-Command npm -ErrorAction SilentlyContinue) {
if (Test-DryRunMode) {
Write-DryRun "npm cache"
}
else {
try {
$null = npm cache clean --force 2>&1
Write-Success "npm cache"
Set-SectionActivity
}
catch {
Write-Debug "npm cache clean failed: $_"
}
}
}
# npm cache directory (fallback)
$npmCachePath = "$env:APPDATA\npm-cache"
if (Test-Path $npmCachePath) {
Clear-DirectoryContents -Path $npmCachePath -Description "npm cache directory"
}
# pnpm store
$pnpmStorePath = "$env:LOCALAPPDATA\pnpm\store"
if (Test-Path $pnpmStorePath) {
if (Get-Command pnpm -ErrorAction SilentlyContinue) {
if (Test-DryRunMode) {
Write-DryRun "pnpm store"
}
else {
try {
$null = pnpm store prune 2>&1
Write-Success "pnpm store pruned"
Set-SectionActivity
}
catch {
Write-Debug "pnpm store prune failed: $_"
}
}
}
}
# Yarn cache
$yarnCachePaths = @(
"$env:LOCALAPPDATA\Yarn\Cache"
"$env:USERPROFILE\.yarn\cache"
)
foreach ($path in $yarnCachePaths) {
if (Test-Path $path) {
Clear-DirectoryContents -Path $path -Description "Yarn cache"
}
}
# Bun cache
$bunCachePath = "$env:USERPROFILE\.bun\install\cache"
if (Test-Path $bunCachePath) {
Clear-DirectoryContents -Path $bunCachePath -Description "Bun cache"
}
}
function Clear-NodeBuildCaches {
<#
.SYNOPSIS
Clean Node.js build-related caches
#>
# node-gyp
$nodeGypPath = "$env:LOCALAPPDATA\node-gyp\Cache"
if (Test-Path $nodeGypPath) {
Clear-DirectoryContents -Path $nodeGypPath -Description "node-gyp cache"
}
# Electron cache
$electronCachePath = "$env:LOCALAPPDATA\electron\Cache"
if (Test-Path $electronCachePath) {
Clear-DirectoryContents -Path $electronCachePath -Description "Electron cache"
}
# TypeScript cache
$tsCachePath = "$env:LOCALAPPDATA\TypeScript"
if (Test-Path $tsCachePath) {
Clear-DirectoryContents -Path $tsCachePath -Description "TypeScript cache"
}
}
# ============================================================================
# Python Ecosystem
# ============================================================================
function Clear-PythonCaches {
<#
.SYNOPSIS
Clean Python and pip caches
#>
# pip cache
if (Get-Command pip -ErrorAction SilentlyContinue) {
if (Test-DryRunMode) {
Write-DryRun "pip cache"
}
else {
try {
$null = pip cache purge 2>&1
Write-Success "pip cache"
Set-SectionActivity
}
catch {
Write-Debug "pip cache purge failed: $_"
}
}
}
# pip cache directory
$pipCachePath = "$env:LOCALAPPDATA\pip\Cache"
if (Test-Path $pipCachePath) {
Clear-DirectoryContents -Path $pipCachePath -Description "pip cache directory"
}
# Python bytecode caches (__pycache__)
# Note: __pycache__ directories typically live in project trees and are handled by the purge command
# pyenv cache
$pyenvCachePath = "$env:USERPROFILE\.pyenv\cache"
if (Test-Path $pyenvCachePath) {
Clear-DirectoryContents -Path $pyenvCachePath -Description "pyenv cache"
}
# Poetry cache
$poetryCachePath = "$env:LOCALAPPDATA\pypoetry\Cache"
if (Test-Path $poetryCachePath) {
Clear-DirectoryContents -Path $poetryCachePath -Description "Poetry cache"
}
# conda packages
$condaCachePaths = @(
"$env:USERPROFILE\.conda\pkgs"
"$env:USERPROFILE\anaconda3\pkgs"
"$env:USERPROFILE\miniconda3\pkgs"
)
foreach ($path in $condaCachePaths) {
if (Test-Path $path) {
# Only clean index and temp files, not actual packages
$tempFiles = Get-ChildItem -Path $path -Filter "*.tmp" -ErrorAction SilentlyContinue
if ($tempFiles) {
$paths = $tempFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Conda temp files"
}
}
}
# Jupyter runtime
$jupyterRuntimePath = "$env:APPDATA\jupyter\runtime"
if (Test-Path $jupyterRuntimePath) {
Clear-DirectoryContents -Path $jupyterRuntimePath -Description "Jupyter runtime"
}
# pytest cache
$pytestCachePath = "$env:USERPROFILE\.pytest_cache"
if (Test-Path $pytestCachePath) {
Remove-SafeItem -Path $pytestCachePath -Description "pytest cache" -Recurse
}
}
# ============================================================================
# .NET / C# Ecosystem
# ============================================================================
function Clear-DotNetDevCaches {
<#
.SYNOPSIS
Clean .NET development caches
#>
# NuGet packages ($env:USERPROFILE\.nuget\packages) are intentionally left alone,
# since builds need them. Only the HTTP and plugins caches are cleaned below.
$nugetHttpCache = "$env:LOCALAPPDATA\NuGet\v3-cache"
if (Test-Path $nugetHttpCache) {
Clear-DirectoryContents -Path $nugetHttpCache -Description "NuGet HTTP cache"
}
$nugetTempPath = "$env:LOCALAPPDATA\NuGet\plugins-cache"
if (Test-Path $nugetTempPath) {
Clear-DirectoryContents -Path $nugetTempPath -Description "NuGet plugins cache"
}
# MSBuild temp files
$msbuildTemp = "$env:LOCALAPPDATA\Microsoft\MSBuild"
if (Test-Path $msbuildTemp) {
$tempDirs = Get-ChildItem -Path $msbuildTemp -Directory -Filter "*temp*" -ErrorAction SilentlyContinue
foreach ($dir in $tempDirs) {
Clear-DirectoryContents -Path $dir.FullName -Description "MSBuild temp"
}
}
}
# ============================================================================
# Go Ecosystem
# ============================================================================
function Clear-GoCaches {
<#
.SYNOPSIS
Clean Go build and module caches
#>
if (Get-Command go -ErrorAction SilentlyContinue) {
if (Test-DryRunMode) {
Write-DryRun "Go cache"
}
else {
try {
$null = go clean -cache 2>&1
Write-Success "Go build cache"
Set-SectionActivity
}
catch {
Write-Debug "go clean -cache failed: $_"
}
}
}
# Go module cache
$goModCachePath = "$env:GOPATH\pkg\mod\cache"
if (-not $env:GOPATH) {
$goModCachePath = "$env:USERPROFILE\go\pkg\mod\cache"
}
if (Test-Path $goModCachePath) {
Clear-DirectoryContents -Path $goModCachePath -Description "Go module cache"
}
}
# ============================================================================
# Rust Ecosystem
# ============================================================================
function Clear-RustCaches {
<#
.SYNOPSIS
Clean Rust/Cargo caches
#>
# Cargo registry cache
$cargoRegistryCache = "$env:USERPROFILE\.cargo\registry\cache"
if (Test-Path $cargoRegistryCache) {
Clear-DirectoryContents -Path $cargoRegistryCache -Description "Cargo registry cache"
}
# Cargo git cache
$cargoGitCache = "$env:USERPROFILE\.cargo\git\checkouts"
if (Test-Path $cargoGitCache) {
Clear-DirectoryContents -Path $cargoGitCache -Description "Cargo git cache"
}
# Rustup downloads
$rustupDownloads = "$env:USERPROFILE\.rustup\downloads"
if (Test-Path $rustupDownloads) {
Clear-DirectoryContents -Path $rustupDownloads -Description "Rustup downloads"
}
}
# ============================================================================
# Java / JVM Ecosystem
# ============================================================================
function Clear-JvmCaches {
<#
.SYNOPSIS
Clean JVM ecosystem caches (Gradle, Maven, etc.)
#>
# Gradle caches
$gradleCachePaths = @(
"$env:USERPROFILE\.gradle\caches"
"$env:USERPROFILE\.gradle\daemon"
"$env:USERPROFILE\.gradle\wrapper\dists"
)
foreach ($path in $gradleCachePaths) {
if (Test-Path $path) {
# Only remove stale lock files; leave the Gradle caches themselves intact
$tempFiles = Get-ChildItem -Path $path -Recurse -Filter "*.lock" -ErrorAction SilentlyContinue
if ($tempFiles) {
$paths = $tempFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Gradle lock files"
}
}
}
# Maven repository (only clean temp files)
$mavenRepoPath = "$env:USERPROFILE\.m2\repository"
if (Test-Path $mavenRepoPath) {
$tempFiles = Get-ChildItem -Path $mavenRepoPath -Recurse -Filter "*.lastUpdated" -ErrorAction SilentlyContinue
if ($tempFiles) {
$paths = $tempFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Maven update markers"
}
}
}
# ============================================================================
# Docker / Containers
# ============================================================================
function Clear-DockerCaches {
<#
.SYNOPSIS
Clean Docker build caches and unused data
#>
if (-not (Get-Command docker -ErrorAction SilentlyContinue)) {
return
}
# Check if Docker daemon is running
$dockerRunning = $false
try {
$null = docker info 2>&1
$dockerRunning = $true
}
catch {
Write-Debug "Docker daemon not running"
}
if ($dockerRunning) {
if (Test-DryRunMode) {
Write-DryRun "Docker build cache"
}
else {
try {
$null = docker builder prune -af 2>&1
Write-Success "Docker build cache"
Set-SectionActivity
}
catch {
Write-Debug "docker builder prune failed: $_"
}
}
}
# Docker Desktop data ($env:LOCALAPPDATA\Docker\wsl\data) is the WSL2 virtual
# disk; never clean it.
}
# ============================================================================
# Cloud CLI Tools
# ============================================================================
function Clear-CloudCliCaches {
<#
.SYNOPSIS
Clean cloud CLI tool caches (AWS, Azure, GCP)
#>
# AWS CLI cache
$awsCachePath = "$env:USERPROFILE\.aws\cli\cache"
if (Test-Path $awsCachePath) {
Clear-DirectoryContents -Path $awsCachePath -Description "AWS CLI cache"
}
# Azure CLI logs
$azureLogsPath = "$env:USERPROFILE\.azure\logs"
if (Test-Path $azureLogsPath) {
Clear-DirectoryContents -Path $azureLogsPath -Description "Azure CLI logs"
}
# Google Cloud logs
$gcloudLogsPath = "$env:APPDATA\gcloud\logs"
if (Test-Path $gcloudLogsPath) {
Clear-DirectoryContents -Path $gcloudLogsPath -Description "gcloud logs"
}
# Kubernetes cache
$kubeCachePath = "$env:USERPROFILE\.kube\cache"
if (Test-Path $kubeCachePath) {
Clear-DirectoryContents -Path $kubeCachePath -Description "Kubernetes cache"
}
# Terraform plugin cache
$terraformCachePath = "$env:APPDATA\terraform.d\plugin-cache"
if (Test-Path $terraformCachePath) {
Clear-DirectoryContents -Path $terraformCachePath -Description "Terraform plugin cache"
}
}
# ============================================================================
# IDE Caches
# ============================================================================
function Clear-IdeCaches {
<#
.SYNOPSIS
Clean IDE caches (VS, VSCode, JetBrains, etc.)
#>
# Visual Studio cache
$vsCachePaths = @(
"$env:LOCALAPPDATA\Microsoft\VisualStudio\*\ComponentModelCache"
"$env:LOCALAPPDATA\Microsoft\VisualStudio\*\ImageCache"
)
foreach ($pattern in $vsCachePaths) {
$paths = Resolve-Path $pattern -ErrorAction SilentlyContinue
foreach ($path in $paths) {
if (Test-Path $path.Path) {
Clear-DirectoryContents -Path $path.Path -Description "Visual Studio cache"
}
}
}
# JetBrains IDEs caches
$jetbrainsBasePaths = @(
"$env:LOCALAPPDATA\JetBrains"
"$env:APPDATA\JetBrains"
)
foreach ($basePath in $jetbrainsBasePaths) {
if (Test-Path $basePath) {
$ideFolders = Get-ChildItem -Path $basePath -Directory -ErrorAction SilentlyContinue
foreach ($ideFolder in $ideFolders) {
$cacheFolders = @("caches", "index", "tmp")
foreach ($cacheFolder in $cacheFolders) {
$cachePath = Join-Path $ideFolder.FullName $cacheFolder
if (Test-Path $cachePath) {
Clear-DirectoryContents -Path $cachePath -Description "$($ideFolder.Name) $cacheFolder"
}
}
}
}
}
}
# ============================================================================
# Git Caches
# ============================================================================
function Clear-GitCaches {
<#
.SYNOPSIS
Clean Git temporary files and lock files
#>
# Git config locks (stale)
$gitConfigLock = "$env:USERPROFILE\.gitconfig.lock"
if (Test-Path $gitConfigLock) {
Remove-SafeItem -Path $gitConfigLock -Description "Git config lock"
}
# GitHub CLI cache
$ghCachePath = "$env:APPDATA\GitHub CLI"
if (Test-Path $ghCachePath) {
$cacheFiles = Get-ChildItem -Path $ghCachePath -Filter "*.json" -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-7) }
if ($cacheFiles) {
$paths = $cacheFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "GitHub CLI cache"
}
}
}
# ============================================================================
# Main Developer Tools Cleanup Function
# ============================================================================
function Invoke-DevToolsCleanup {
<#
.SYNOPSIS
Run all developer tools cleanup tasks
#>
Start-Section "Developer tools"
# JavaScript ecosystem
Clear-NpmCache
Clear-NodeBuildCaches
# Python ecosystem
Clear-PythonCaches
# .NET ecosystem
Clear-DotNetDevCaches
# Go ecosystem
Clear-GoCaches
# Rust ecosystem
Clear-RustCaches
# JVM ecosystem
Clear-JvmCaches
# Containers
Clear-DockerCaches
# Cloud CLI tools
Clear-CloudCliCaches
# IDEs
Clear-IdeCaches
# Git
Clear-GitCaches
Stop-Section
}
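# Usage sketch (assumes this module is dot-sourced by the Mole entry script):
#   . "$PSScriptRoot\lib\clean\dev.ps1"
#   Invoke-DevToolsCleanup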
# ============================================================================
# Exports
# ============================================================================
# Functions: Clear-NpmCache, Clear-PythonCaches, Clear-DockerCaches, etc.


@@ -1,296 +0,0 @@
#!/bin/bash
# Developer Tools Cleanup Module
set -euo pipefail
# Tool cache helper (respects DRY_RUN).
clean_tool_cache() {
local description="$1"
shift
if [[ "$DRY_RUN" != "true" ]]; then
if "$@" > /dev/null 2>&1; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $description"
fi
else
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} $description · would clean"
fi
return 0
}
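# Usage: the first argument is the label printed on success, everything after it
# is the command to run, e.g. clean_tool_cache "Go cache" go clean -cache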
# npm/pnpm/yarn/bun caches.
clean_dev_npm() {
if command -v npm > /dev/null 2>&1; then
clean_tool_cache "npm cache" npm cache clean --force
note_activity
fi
# Clean pnpm store cache
local pnpm_default_store=~/Library/pnpm/store
# Check if pnpm is actually usable (not just Corepack shim)
if command -v pnpm > /dev/null 2>&1 && COREPACK_ENABLE_DOWNLOAD_PROMPT=0 pnpm --version > /dev/null 2>&1; then
COREPACK_ENABLE_DOWNLOAD_PROMPT=0 clean_tool_cache "pnpm cache" pnpm store prune
local pnpm_store_path
start_section_spinner "Checking store path..."
pnpm_store_path=$(COREPACK_ENABLE_DOWNLOAD_PROMPT=0 run_with_timeout 2 pnpm store path 2> /dev/null) || pnpm_store_path=""
stop_section_spinner
if [[ -n "$pnpm_store_path" && "$pnpm_store_path" != "$pnpm_default_store" ]]; then
safe_clean "$pnpm_default_store"/* "Orphaned pnpm store"
fi
else
# pnpm not installed or not usable, just clean the default store directory
safe_clean "$pnpm_default_store"/* "pnpm store"
fi
note_activity
safe_clean ~/.tnpm/_cacache/* "tnpm cache directory"
safe_clean ~/.tnpm/_logs/* "tnpm logs"
safe_clean ~/.yarn/cache/* "Yarn cache"
safe_clean ~/.bun/install/cache/* "Bun cache"
}
# Python/pip ecosystem caches.
clean_dev_python() {
if command -v pip3 > /dev/null 2>&1; then
clean_tool_cache "pip cache" bash -c 'pip3 cache purge >/dev/null 2>&1 || true'
note_activity
fi
safe_clean ~/.pyenv/cache/* "pyenv cache"
safe_clean ~/.cache/poetry/* "Poetry cache"
safe_clean ~/.cache/uv/* "uv cache"
safe_clean ~/.cache/ruff/* "Ruff cache"
safe_clean ~/.cache/mypy/* "MyPy cache"
safe_clean ~/.pytest_cache/* "Pytest cache"
safe_clean ~/.jupyter/runtime/* "Jupyter runtime cache"
safe_clean ~/.cache/huggingface/* "Hugging Face cache"
safe_clean ~/.cache/torch/* "PyTorch cache"
safe_clean ~/.cache/tensorflow/* "TensorFlow cache"
safe_clean ~/.conda/pkgs/* "Conda packages cache"
safe_clean ~/anaconda3/pkgs/* "Anaconda packages cache"
safe_clean ~/.cache/wandb/* "Weights & Biases cache"
}
# Go build/module caches.
clean_dev_go() {
if command -v go > /dev/null 2>&1; then
clean_tool_cache "Go cache" bash -c 'go clean -modcache >/dev/null 2>&1 || true; go clean -cache >/dev/null 2>&1 || true'
note_activity
fi
}
# Rust/cargo caches.
clean_dev_rust() {
safe_clean ~/.cargo/registry/cache/* "Rust cargo cache"
safe_clean ~/.cargo/git/* "Cargo git cache"
safe_clean ~/.rustup/downloads/* "Rust downloads cache"
}
# Docker caches (guarded by daemon check).
clean_dev_docker() {
if command -v docker > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
start_section_spinner "Checking Docker daemon..."
local docker_running=false
if run_with_timeout 3 docker info > /dev/null 2>&1; then
docker_running=true
fi
stop_section_spinner
if [[ "$docker_running" == "true" ]]; then
clean_tool_cache "Docker build cache" docker builder prune -af
else
debug_log "Docker daemon not running, skipping Docker cache cleanup"
fi
else
note_activity
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Docker build cache · would clean"
fi
fi
safe_clean ~/.docker/buildx/cache/* "Docker BuildX cache"
}
# Nix garbage collection.
clean_dev_nix() {
if command -v nix-collect-garbage > /dev/null 2>&1; then
if [[ "$DRY_RUN" != "true" ]]; then
clean_tool_cache "Nix garbage collection" nix-collect-garbage --delete-older-than 30d
else
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Nix garbage collection · would clean"
fi
note_activity
fi
}
# Cloud CLI caches.
clean_dev_cloud() {
safe_clean ~/.kube/cache/* "Kubernetes cache"
safe_clean ~/.local/share/containers/storage/tmp/* "Container storage temp"
safe_clean ~/.aws/cli/cache/* "AWS CLI cache"
safe_clean ~/.config/gcloud/logs/* "Google Cloud logs"
safe_clean ~/.azure/logs/* "Azure CLI logs"
}
# Frontend build caches.
clean_dev_frontend() {
safe_clean ~/.cache/typescript/* "TypeScript cache"
safe_clean ~/.cache/electron/* "Electron cache"
safe_clean ~/.cache/node-gyp/* "node-gyp cache"
safe_clean ~/.node-gyp/* "node-gyp build cache"
safe_clean ~/.turbo/cache/* "Turbo cache"
safe_clean ~/.vite/cache/* "Vite cache"
safe_clean ~/.cache/vite/* "Vite global cache"
safe_clean ~/.cache/webpack/* "Webpack cache"
safe_clean ~/.parcel-cache/* "Parcel cache"
safe_clean ~/.cache/eslint/* "ESLint cache"
safe_clean ~/.cache/prettier/* "Prettier cache"
}
# Mobile dev caches (can be large).
# Check for multiple Android NDK versions.
check_android_ndk() {
local ndk_dir="$HOME/Library/Android/sdk/ndk"
if [[ -d "$ndk_dir" ]]; then
local count
count=$(find "$ndk_dir" -mindepth 1 -maxdepth 1 -type d 2> /dev/null | wc -l | tr -d ' ')
if [[ "$count" -gt 1 ]]; then
note_activity
echo -e " Found ${GREEN}${count}${NC} Android NDK versions"
echo -e " You can delete unused versions manually: ${ndk_dir}"
fi
fi
}
clean_dev_mobile() {
check_android_ndk
if command -v xcrun > /dev/null 2>&1; then
debug_log "Checking for unavailable Xcode simulators"
if [[ "$DRY_RUN" == "true" ]]; then
clean_tool_cache "Xcode unavailable simulators" xcrun simctl delete unavailable
else
start_section_spinner "Checking unavailable simulators..."
if xcrun simctl delete unavailable > /dev/null 2>&1; then
stop_section_spinner
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Xcode unavailable simulators"
else
stop_section_spinner
fi
fi
note_activity
fi
# DeviceSupport caches/logs (preserve core support files).
safe_clean ~/Library/Developer/Xcode/iOS\ DeviceSupport/*/Symbols/System/Library/Caches/* "iOS device symbol cache"
safe_clean ~/Library/Developer/Xcode/iOS\ DeviceSupport/*.log "iOS device support logs"
safe_clean ~/Library/Developer/Xcode/watchOS\ DeviceSupport/*/Symbols/System/Library/Caches/* "watchOS device symbol cache"
safe_clean ~/Library/Developer/Xcode/tvOS\ DeviceSupport/*/Symbols/System/Library/Caches/* "tvOS device symbol cache"
# Simulator runtime caches.
safe_clean ~/Library/Developer/CoreSimulator/Profiles/Runtimes/*/Contents/Resources/RuntimeRoot/System/Library/Caches/* "Simulator runtime cache"
safe_clean ~/Library/Caches/Google/AndroidStudio*/* "Android Studio cache"
safe_clean ~/Library/Caches/CocoaPods/* "CocoaPods cache"
safe_clean ~/.cache/flutter/* "Flutter cache"
safe_clean ~/.android/build-cache/* "Android build cache"
safe_clean ~/.android/cache/* "Android SDK cache"
safe_clean ~/Library/Developer/Xcode/UserData/IB\ Support/* "Xcode Interface Builder cache"
safe_clean ~/.cache/swift-package-manager/* "Swift package manager cache"
}
# JVM ecosystem caches.
clean_dev_jvm() {
safe_clean ~/.gradle/caches/* "Gradle caches"
safe_clean ~/.gradle/daemon/* "Gradle daemon logs"
safe_clean ~/.sbt/* "SBT cache"
safe_clean ~/.ivy2/cache/* "Ivy cache"
}
# Other language tool caches.
clean_dev_other_langs() {
safe_clean ~/.bundle/cache/* "Ruby Bundler cache"
safe_clean ~/.composer/cache/* "PHP Composer cache"
safe_clean ~/.nuget/packages/* "NuGet packages cache"
safe_clean ~/.pub-cache/* "Dart Pub cache"
safe_clean ~/.cache/bazel/* "Bazel cache"
safe_clean ~/.cache/zig/* "Zig cache"
safe_clean ~/Library/Caches/deno/* "Deno cache"
}
# CI/CD and DevOps caches.
clean_dev_cicd() {
safe_clean ~/.cache/terraform/* "Terraform cache"
safe_clean ~/.grafana/cache/* "Grafana cache"
safe_clean ~/.prometheus/data/wal/* "Prometheus WAL cache"
safe_clean ~/.jenkins/workspace/*/target/* "Jenkins workspace cache"
safe_clean ~/.cache/gitlab-runner/* "GitLab Runner cache"
safe_clean ~/.github/cache/* "GitHub Actions cache"
safe_clean ~/.circleci/cache/* "CircleCI cache"
safe_clean ~/.sonar/* "SonarQube cache"
}
# Database tool caches.
clean_dev_database() {
safe_clean ~/Library/Caches/com.sequel-ace.sequel-ace/* "Sequel Ace cache"
safe_clean ~/Library/Caches/com.eggerapps.Sequel-Pro/* "Sequel Pro cache"
safe_clean ~/Library/Caches/redis-desktop-manager/* "Redis Desktop Manager cache"
safe_clean ~/Library/Caches/com.navicat.* "Navicat cache"
safe_clean ~/Library/Caches/com.dbeaver.* "DBeaver cache"
safe_clean ~/Library/Caches/com.redis.RedisInsight "Redis Insight cache"
}
# API/debugging tool caches.
clean_dev_api_tools() {
safe_clean ~/Library/Caches/com.postmanlabs.mac/* "Postman cache"
safe_clean ~/Library/Caches/com.konghq.insomnia/* "Insomnia cache"
safe_clean ~/Library/Caches/com.tinyapp.TablePlus/* "TablePlus cache"
safe_clean ~/Library/Caches/com.getpaw.Paw/* "Paw API cache"
safe_clean ~/Library/Caches/com.charlesproxy.charles/* "Charles Proxy cache"
safe_clean ~/Library/Caches/com.proxyman.NSProxy/* "Proxyman cache"
}
# Misc dev tool caches.
clean_dev_misc() {
safe_clean ~/Library/Caches/com.unity3d.*/* "Unity cache"
safe_clean ~/Library/Caches/com.mongodb.compass/* "MongoDB Compass cache"
safe_clean ~/Library/Caches/com.figma.Desktop/* "Figma cache"
safe_clean ~/Library/Caches/com.github.GitHubDesktop/* "GitHub Desktop cache"
safe_clean ~/Library/Caches/SentryCrash/* "Sentry crash reports"
safe_clean ~/Library/Caches/KSCrash/* "KSCrash reports"
safe_clean ~/Library/Caches/com.crashlytics.data/* "Crashlytics data"
}
# Shell and VCS leftovers.
clean_dev_shell() {
safe_clean ~/.gitconfig.lock "Git config lock"
safe_clean ~/.gitconfig.bak* "Git config backup"
safe_clean ~/.oh-my-zsh/cache/* "Oh My Zsh cache"
safe_clean ~/.config/fish/fish_history.bak* "Fish shell backup"
safe_clean ~/.bash_history.bak* "Bash history backup"
safe_clean ~/.zsh_history.bak* "Zsh history backup"
safe_clean ~/.cache/pre-commit/* "pre-commit cache"
}
# Network tool caches.
clean_dev_network() {
safe_clean ~/.cache/curl/* "curl cache"
safe_clean ~/.cache/wget/* "wget cache"
safe_clean ~/Library/Caches/curl/* "macOS curl cache"
safe_clean ~/Library/Caches/wget/* "macOS wget cache"
}
# Orphaned SQLite temp files (-shm/-wal). Disabled due to low ROI.
clean_sqlite_temp_files() {
return 0
}
# Main developer tools cleanup sequence.
clean_developer_tools() {
stop_section_spinner
clean_sqlite_temp_files
clean_dev_npm
clean_dev_python
clean_dev_go
clean_dev_rust
clean_dev_docker
clean_dev_cloud
clean_dev_nix
clean_dev_shell
clean_dev_frontend
clean_project_caches
clean_dev_mobile
clean_dev_jvm
clean_dev_other_langs
clean_dev_cicd
clean_dev_database
clean_dev_api_tools
clean_dev_network
clean_dev_misc
safe_clean ~/Library/Caches/Homebrew/* "Homebrew cache"
# Clean Homebrew locks without repeated sudo prompts.
local brew_lock_dirs=(
"/opt/homebrew/var/homebrew/locks"
"/usr/local/var/homebrew/locks"
)
for lock_dir in "${brew_lock_dirs[@]}"; do
if [[ -d "$lock_dir" && -w "$lock_dir" ]]; then
safe_clean "$lock_dir"/* "Homebrew lock files"
elif [[ -d "$lock_dir" ]]; then
if find "$lock_dir" -mindepth 1 -maxdepth 1 -print -quit 2> /dev/null | grep -q .; then
debug_log "Skipping read-only Homebrew locks in $lock_dir"
fi
fi
done
clean_homebrew
}


@@ -1,925 +0,0 @@
#!/bin/bash
# Project Purge Module (mo purge).
# Removes heavy project build artifacts and dependencies.
set -euo pipefail
PROJECT_LIB_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CORE_LIB_DIR="$(cd "$PROJECT_LIB_DIR/../core" && pwd)"
if ! command -v ensure_user_dir > /dev/null 2>&1; then
# shellcheck disable=SC1090
source "$CORE_LIB_DIR/common.sh"
fi
# Targets to look for (heavy build artifacts).
readonly PURGE_TARGETS=(
"node_modules"
"target" # Rust, Maven
"build" # Gradle, various
"dist" # JS builds
"venv" # Python
".venv" # Python
".pytest_cache" # Python (pytest)
".mypy_cache" # Python (mypy)
".tox" # Python (tox virtualenvs)
".nox" # Python (nox virtualenvs)
".ruff_cache" # Python (ruff)
".gradle" # Gradle local
"__pycache__" # Python
".next" # Next.js
".nuxt" # Nuxt.js
".output" # Nuxt.js
"vendor" # PHP Composer
"bin" # .NET build output (guarded; see is_protected_purge_artifact)
"obj" # C# / Unity
".turbo" # Turborepo cache
".parcel-cache" # Parcel bundler
".dart_tool" # Flutter/Dart build cache
".zig-cache" # Zig
"zig-out" # Zig
".angular" # Angular
".svelte-kit" # SvelteKit
".astro" # Astro
"coverage" # Code coverage reports
)
# Minimum age in days before considering for cleanup.
readonly MIN_AGE_DAYS=7
# Scan depth defaults (relative to search root).
readonly PURGE_MIN_DEPTH_DEFAULT=2
readonly PURGE_MAX_DEPTH_DEFAULT=8
# Search paths (default, can be overridden via config file).
readonly DEFAULT_PURGE_SEARCH_PATHS=(
"$HOME/www"
"$HOME/dev"
"$HOME/Projects"
"$HOME/GitHub"
"$HOME/Code"
"$HOME/Workspace"
"$HOME/Repos"
"$HOME/Development"
)
# Config file for custom purge paths.
readonly PURGE_CONFIG_FILE="$HOME/.config/mole/purge_paths"
# Resolved search paths.
PURGE_SEARCH_PATHS=()
# Project indicators for container detection.
readonly PROJECT_INDICATORS=(
"package.json"
"Cargo.toml"
"go.mod"
"pyproject.toml"
"requirements.txt"
"pom.xml"
"build.gradle"
"Gemfile"
"composer.json"
"pubspec.yaml"
"Makefile"
"build.zig"
"build.zig.zon"
".git"
)
# Check if a directory contains projects (directly or in subdirectories).
is_project_container() {
local dir="$1"
local max_depth="${2:-2}"
# Skip hidden/system directories.
local basename
basename=$(basename "$dir")
[[ "$basename" == .* ]] && return 1
[[ "$basename" == "Library" ]] && return 1
[[ "$basename" == "Applications" ]] && return 1
[[ "$basename" == "Movies" ]] && return 1
[[ "$basename" == "Music" ]] && return 1
[[ "$basename" == "Pictures" ]] && return 1
[[ "$basename" == "Public" ]] && return 1
# Single find expression for indicators.
local -a find_args=("$dir" "-maxdepth" "$max_depth" "(")
local first=true
for indicator in "${PROJECT_INDICATORS[@]}"; do
if [[ "$first" == "true" ]]; then
first=false
else
find_args+=("-o")
fi
find_args+=("-name" "$indicator")
done
find_args+=(")" "-print" "-quit")
if find "${find_args[@]}" 2> /dev/null | grep -q .; then
return 0
fi
return 1
}
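# Sketch of what the constructed expression amounts to for a hypothetical call
# is_project_container "$HOME/Code" 2:
#   find "$HOME/Code" -maxdepth 2 \
#     \( -name package.json -o -name Cargo.toml -o ... -o -name .git \) -print -quit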
# Discover project directories in $HOME.
discover_project_dirs() {
local -a discovered=()
for path in "${DEFAULT_PURGE_SEARCH_PATHS[@]}"; do
if [[ -d "$path" ]]; then
discovered+=("$path")
fi
done
# Scan $HOME for other containers (depth 1).
local dir
for dir in "$HOME"/*/; do
[[ ! -d "$dir" ]] && continue
dir="${dir%/}" # Remove trailing slash
local already_found=false
for existing in "${DEFAULT_PURGE_SEARCH_PATHS[@]}"; do
if [[ "$dir" == "$existing" ]]; then
already_found=true
break
fi
done
[[ "$already_found" == "true" ]] && continue
if is_project_container "$dir" 2; then
discovered+=("$dir")
fi
done
printf '%s\n' "${discovered[@]}" | sort -u
}
# Save discovered paths to config.
save_discovered_paths() {
local -a paths=("$@")
ensure_user_dir "$(dirname "$PURGE_CONFIG_FILE")"
cat > "$PURGE_CONFIG_FILE" << 'EOF'
# Mole Purge Paths - Auto-discovered project directories
# Edit this file to customize, or run: mo purge --paths
# Add one path per line (supports ~ for home directory)
EOF
printf '\n' >> "$PURGE_CONFIG_FILE"
for path in "${paths[@]}"; do
# Convert $HOME to ~ for portability
path="${path/#$HOME/~}"
echo "$path" >> "$PURGE_CONFIG_FILE"
done
}
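# Resulting config file (illustrative contents; the paths are whatever was discovered):
#   # Mole Purge Paths - Auto-discovered project directories
#   # Edit this file to customize, or run: mo purge --paths
#   # Add one path per line (supports ~ for home directory)
#
#   ~/Code
#   ~/Projects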
# Load purge paths from config or auto-discover
load_purge_config() {
PURGE_SEARCH_PATHS=()
if [[ -f "$PURGE_CONFIG_FILE" ]]; then
while IFS= read -r line; do
line="${line#"${line%%[![:space:]]*}"}"
line="${line%"${line##*[![:space:]]}"}"
[[ -z "$line" || "$line" =~ ^# ]] && continue
line="${line/#\~/$HOME}"
PURGE_SEARCH_PATHS+=("$line")
done < "$PURGE_CONFIG_FILE"
fi
if [[ ${#PURGE_SEARCH_PATHS[@]} -eq 0 ]]; then
if [[ -t 1 ]] && [[ -z "${_PURGE_DISCOVERY_SILENT:-}" ]]; then
echo -e "${GRAY}First run: discovering project directories...${NC}" >&2
fi
local -a discovered=()
while IFS= read -r path; do
[[ -n "$path" ]] && discovered+=("$path")
done < <(discover_project_dirs)
if [[ ${#discovered[@]} -gt 0 ]]; then
PURGE_SEARCH_PATHS=("${discovered[@]}")
save_discovered_paths "${discovered[@]}"
if [[ -t 1 ]] && [[ -z "${_PURGE_DISCOVERY_SILENT:-}" ]]; then
echo -e "${GRAY}Found ${#discovered[@]} project directories, saved to config${NC}" >&2
fi
else
PURGE_SEARCH_PATHS=("${DEFAULT_PURGE_SEARCH_PATHS[@]}")
fi
fi
}
# Initialize paths on script load.
load_purge_config
# Args: $1 - path to check
# Safe cleanup requires the path be inside a project directory.
is_safe_project_artifact() {
local path="$1"
local search_path="$2"
if [[ "$path" != /* ]]; then
return 1
fi
# Must not be a direct child of the search root.
local relative_path="${path#"$search_path"/}"
local depth=$(echo "$relative_path" | LC_ALL=C tr -cd '/' | wc -c)
if [[ $depth -lt 1 ]]; then
return 1
fi
return 0
}
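# Worked example with search_path="$HOME/Code":
#   "$HOME/Code/myapp/node_modules" -> relative "myapp/node_modules", depth 1 -> safe
#   "$HOME/Code/node_modules"       -> relative "node_modules", depth 0 -> rejected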
# Detect if directory is a Rails project root
is_rails_project_root() {
local dir="$1"
[[ -f "$dir/config/application.rb" ]] || return 1
[[ -f "$dir/Gemfile" ]] || return 1
[[ -f "$dir/bin/rails" || -f "$dir/config/environment.rb" ]]
}
# Detect if directory is a Go project root
is_go_project_root() {
local dir="$1"
[[ -f "$dir/go.mod" ]]
}
# Detect if directory is a PHP Composer project root
is_php_project_root() {
local dir="$1"
[[ -f "$dir/composer.json" ]]
}
# Decide whether a "bin" directory is a .NET directory
is_dotnet_bin_dir() {
local path="$1"
[[ "$(basename "$path")" == "bin" ]] || return 1
# Check if parent directory has a .csproj/.fsproj/.vbproj file
local parent_dir
parent_dir="$(dirname "$path")"
find "$parent_dir" -maxdepth 1 \( -name "*.csproj" -o -name "*.fsproj" -o -name "*.vbproj" \) 2> /dev/null | grep -q . || return 1
# Check if bin directory contains Debug/ or Release/ subdirectories
[[ -d "$path/Debug" || -d "$path/Release" ]] || return 1
return 0
}
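# Accepts e.g. a hypothetical ~/Code/MyApp/bin when ~/Code/MyApp/MyApp.csproj exists
# and bin/ contains Debug/ or Release/; any other bin/ is treated as non-.NET.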
# Check if a vendor directory should be protected from purge
# Expects path to be a vendor directory (basename == vendor)
# Strategy: Only clean PHP Composer vendor, protect all others
is_protected_vendor_dir() {
local path="$1"
local base
base=$(basename "$path")
[[ "$base" == "vendor" ]] || return 1
local parent_dir
parent_dir=$(dirname "$path")
# PHP Composer vendor can be safely regenerated with 'composer install'
# Do NOT protect it (return 1 = not protected = can be cleaned)
if is_php_project_root "$parent_dir"; then
return 1
fi
# Rails vendor (importmap dependencies) - should be protected
if is_rails_project_root "$parent_dir"; then
return 0
fi
# Go vendor (optional vendoring) - protect to avoid accidental deletion
if is_go_project_root "$parent_dir"; then
return 0
fi
# Unknown vendor type - protect by default (conservative approach)
return 0
}
# Check if an artifact should be protected from purge
is_protected_purge_artifact() {
local path="$1"
local base
base=$(basename "$path")
case "$base" in
bin)
# Only allow purging bin/ when we can detect .NET context.
if is_dotnet_bin_dir "$path"; then
return 1
fi
return 0
;;
vendor)
is_protected_vendor_dir "$path"
return $?
;;
esac
return 1
}
# Scan purge targets using fd (fast) or pruned find.
scan_purge_targets() {
local search_path="$1"
local output_file="$2"
local min_depth="$PURGE_MIN_DEPTH_DEFAULT"
local max_depth="$PURGE_MAX_DEPTH_DEFAULT"
if [[ ! "$min_depth" =~ ^[0-9]+$ ]]; then
min_depth="$PURGE_MIN_DEPTH_DEFAULT"
fi
if [[ ! "$max_depth" =~ ^[0-9]+$ ]]; then
max_depth="$PURGE_MAX_DEPTH_DEFAULT"
fi
if [[ "$max_depth" -lt "$min_depth" ]]; then
max_depth="$min_depth"
fi
if [[ ! -d "$search_path" ]]; then
return
fi
if command -v fd > /dev/null 2>&1; then
# Escape regex special characters in target names for fd patterns
local escaped_targets=()
for target in "${PURGE_TARGETS[@]}"; do
escaped_targets+=("$(printf '%s' "$target" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g')")
done
local pattern="($(
IFS='|'
echo "${escaped_targets[*]}"
))"
local fd_args=(
"--absolute-path"
"--hidden"
"--no-ignore"
"--type" "d"
"--min-depth" "$min_depth"
"--max-depth" "$max_depth"
"--threads" "4"
"--exclude" ".git"
"--exclude" "Library"
"--exclude" ".Trash"
"--exclude" "Applications"
)
fd "${fd_args[@]}" "$pattern" "$search_path" 2> /dev/null | while IFS= read -r item; do
if is_safe_project_artifact "$item" "$search_path"; then
echo "$item"
fi
done | filter_nested_artifacts | filter_protected_artifacts > "$output_file"
else
# Pruned find avoids descending into heavy directories.
local prune_dirs=(".git" "Library" ".Trash" "Applications")
local find_expr=()
for dir in "${prune_dirs[@]}"; do
find_expr+=("-name" "$dir" "-prune" "-o")
done
local i=0
for target in "${PURGE_TARGETS[@]}"; do
find_expr+=("-name" "$target" "-print" "-prune")
if [[ $i -lt $((${#PURGE_TARGETS[@]} - 1)) ]]; then
find_expr+=("-o")
fi
i=$((i + 1))
done
command find "$search_path" -mindepth "$min_depth" -maxdepth "$max_depth" -type d \
\( "${find_expr[@]}" \) 2> /dev/null | while IFS= read -r item; do
if is_safe_project_artifact "$item" "$search_path"; then
echo "$item"
fi
done | filter_nested_artifacts | filter_protected_artifacts > "$output_file"
fi
}
# Filter out nested artifacts (e.g. node_modules inside node_modules).
filter_nested_artifacts() {
while IFS= read -r item; do
local parent_dir=$(dirname "$item")
local is_nested=false
for target in "${PURGE_TARGETS[@]}"; do
if [[ "$parent_dir" == *"/$target/"* || "$parent_dir" == *"/$target" ]]; then
is_nested=true
break
fi
done
if [[ "$is_nested" == "false" ]]; then
echo "$item"
fi
done
}
filter_protected_artifacts() {
while IFS= read -r item; do
if ! is_protected_purge_artifact "$item"; then
echo "$item"
fi
done
}
# Args: $1 - path
# Check if a path was modified recently (safety check).
is_recently_modified() {
local path="$1"
local age_days=$MIN_AGE_DAYS
if [[ ! -e "$path" ]]; then
return 1
fi
local mod_time
mod_time=$(get_file_mtime "$path")
local current_time
current_time=$(get_epoch_seconds)
local age_seconds=$((current_time - mod_time))
local age_in_days=$((age_seconds / 86400))
if [[ $age_in_days -lt $age_days ]]; then
return 0 # Recently modified
else
return 1 # Old enough to clean
fi
}
# Args: $1 - path
# Get directory size in KB.
get_dir_size_kb() {
local path="$1"
if [[ -d "$path" ]]; then
du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0"
else
echo "0"
fi
}
# Purge category selector.
select_purge_categories() {
local -a categories=("$@")
local total_items=${#categories[@]}
local clear_line=$'\r\033[2K'
if [[ $total_items -eq 0 ]]; then
return 1
fi
# Calculate items per page based on terminal height.
_get_items_per_page() {
local term_height=24
if [[ -t 0 ]] || [[ -t 2 ]]; then
term_height=$(stty size < /dev/tty 2> /dev/null | awk '{print $1}')
fi
if [[ -z "$term_height" || $term_height -le 0 ]]; then
if command -v tput > /dev/null 2>&1; then
term_height=$(tput lines 2> /dev/null || echo "24")
else
term_height=24
fi
fi
local reserved=6
local available=$((term_height - reserved))
if [[ $available -lt 3 ]]; then
echo 3
elif [[ $available -gt 50 ]]; then
echo 50
else
echo "$available"
fi
}
local items_per_page=$(_get_items_per_page)
local cursor_pos=0
local top_index=0
# Initialize selection (all selected by default, except recent ones)
local -a selected=()
IFS=',' read -r -a recent_flags <<< "${PURGE_RECENT_CATEGORIES:-}"
for ((i = 0; i < total_items; i++)); do
# Default unselected if category has recent items
if [[ ${recent_flags[i]:-false} == "true" ]]; then
selected[i]=false
else
selected[i]=true
fi
done
local original_stty=""
if [[ -t 0 ]] && command -v stty > /dev/null 2>&1; then
original_stty=$(stty -g 2> /dev/null || echo "")
fi
# Terminal control functions
restore_terminal() {
trap - EXIT INT TERM
show_cursor
if [[ -n "${original_stty:-}" ]]; then
stty "${original_stty}" 2> /dev/null || stty sane 2> /dev/null || true
fi
}
# shellcheck disable=SC2329
handle_interrupt() {
restore_terminal
exit 130
}
draw_menu() {
# Recalculate items_per_page dynamically to handle window resize
items_per_page=$(_get_items_per_page)
# Clamp pagination state to avoid cursor drifting out of view
local max_top_index=0
if [[ $total_items -gt $items_per_page ]]; then
max_top_index=$((total_items - items_per_page))
fi
if [[ $top_index -gt $max_top_index ]]; then
top_index=$max_top_index
fi
if [[ $top_index -lt 0 ]]; then
top_index=0
fi
local visible_count=$((total_items - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -gt $((visible_count - 1)) ]]; then
cursor_pos=$((visible_count - 1))
fi
if [[ $cursor_pos -lt 0 ]]; then
cursor_pos=0
fi
printf "\033[H"
# Calculate total size of selected items for header
local selected_size=0
local selected_count=0
IFS=',' read -r -a sizes <<< "${PURGE_CATEGORY_SIZES:-}"
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
selected_size=$((selected_size + ${sizes[i]:-0}))
((selected_count++))
fi
done
local selected_gb
selected_gb=$(printf "%.1f" "$(echo "scale=2; $selected_size/1024/1024" | bc)")
# Show position indicator if scrolling is needed
local scroll_indicator=""
if [[ $total_items -gt $items_per_page ]]; then
local current_pos=$((top_index + cursor_pos + 1))
scroll_indicator=" ${GRAY}[${current_pos}/${total_items}]${NC}"
fi
printf "%s\n" "$clear_line"
printf "%s${PURPLE_BOLD}Select Categories to Clean${NC}%s ${GRAY}- ${selected_gb}GB ($selected_count selected)${NC}\n" "$clear_line" "$scroll_indicator"
printf "%s\n" "$clear_line"
IFS=',' read -r -a recent_flags <<< "${PURGE_RECENT_CATEGORIES:-}"
# Calculate visible range
local end_index=$((top_index + visible_count))
# Draw only visible items
for ((i = top_index; i < end_index; i++)); do
local checkbox="$ICON_EMPTY"
[[ ${selected[i]} == true ]] && checkbox="$ICON_SOLID"
local recent_marker=""
[[ ${recent_flags[i]:-false} == "true" ]] && recent_marker=" ${GRAY}| Recent${NC}"
local rel_pos=$((i - top_index))
if [[ $rel_pos -eq $cursor_pos ]]; then
printf "%s${CYAN}${ICON_ARROW} %s %s%s${NC}\n" "$clear_line" "$checkbox" "${categories[i]}" "$recent_marker"
else
printf "%s %s %s%s\n" "$clear_line" "$checkbox" "${categories[i]}" "$recent_marker"
fi
done
# Fill empty slots to clear previous content
local items_shown=$visible_count
for ((i = items_shown; i < items_per_page; i++)); do
printf "%s\n" "$clear_line"
done
printf "%s\n" "$clear_line"
printf "%s${GRAY}${ICON_NAV_UP}${ICON_NAV_DOWN} | Space Select | Enter Confirm | A All | I Invert | Q Quit${NC}\n" "$clear_line"
}
trap restore_terminal EXIT
trap handle_interrupt INT TERM
# Preserve interrupt character for Ctrl-C
stty -echo -icanon intr ^C 2> /dev/null || true
hide_cursor
if [[ -t 1 ]]; then
clear_screen
fi
# Main loop
while true; do
draw_menu
# Read key
IFS= read -r -s -n1 key || key=""
case "$key" in
$'\x1b')
# Arrow keys or ESC
# Read next 2 chars with timeout (bash 3.2 needs integer)
IFS= read -r -s -n1 -t 1 key2 || key2=""
if [[ "$key2" == "[" ]]; then
IFS= read -r -s -n1 -t 1 key3 || key3=""
case "$key3" in
A) # Up arrow
if [[ $cursor_pos -gt 0 ]]; then
((cursor_pos--))
elif [[ $top_index -gt 0 ]]; then
((top_index--))
fi
;;
B) # Down arrow
local absolute_index=$((top_index + cursor_pos))
local last_index=$((total_items - 1))
if [[ $absolute_index -lt $last_index ]]; then
local visible_count=$((total_items - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -lt $((visible_count - 1)) ]]; then
((cursor_pos++))
elif [[ $((top_index + visible_count)) -lt $total_items ]]; then
((top_index++))
fi
fi
;;
esac
else
# ESC alone (no following chars)
restore_terminal
return 1
fi
;;
" ") # Space - toggle current item
local idx=$((top_index + cursor_pos))
if [[ ${selected[idx]} == true ]]; then
selected[idx]=false
else
selected[idx]=true
fi
;;
"a" | "A") # Select all
for ((i = 0; i < total_items; i++)); do
selected[i]=true
done
;;
"i" | "I") # Invert selection
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
selected[i]=false
else
selected[i]=true
fi
done
;;
"q" | "Q" | $'\x03') # Quit or Ctrl-C
restore_terminal
return 1
;;
"" | $'\n' | $'\r') # Enter - confirm
# Build result
PURGE_SELECTION_RESULT=""
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
[[ -n "$PURGE_SELECTION_RESULT" ]] && PURGE_SELECTION_RESULT+=","
PURGE_SELECTION_RESULT+="$i"
fi
done
restore_terminal
return 0
;;
esac
done
}
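A hedged sketch of the calling convention the selector expects: per-row sizes (in KB) and recent flags arrive via the comma-separated environment variables, and the chosen row indices come back in PURGE_SELECTION_RESULT. The rows and sizes below are invented.
# Illustrative only: driving the selector directly.
export PURGE_CATEGORY_SIZES="512000,2097152"   # KB per row (made-up values)
export PURGE_RECENT_CATEGORIES="false,true"    # rows marked recent default to unselected
if select_purge_categories "demo-app    500MB | node_modules" "other-app     2GB | target"; then
echo "Selected row indices: $PURGE_SELECTION_RESULT"   # e.g. "0"
fi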
# Main cleanup function - scans and prompts user to select artifacts to clean
clean_project_artifacts() {
local -a all_found_items=()
local -a safe_to_clean=()
local -a recently_modified=()
# Set up cleanup on interrupt
# Note: Declared without 'local' so cleanup_scan trap can access them
scan_pids=()
scan_temps=()
# shellcheck disable=SC2329
cleanup_scan() {
# Kill all background scans
for pid in "${scan_pids[@]+"${scan_pids[@]}"}"; do
kill "$pid" 2> /dev/null || true
done
# Clean up temp files
for temp in "${scan_temps[@]+"${scan_temps[@]}"}"; do
rm -f "$temp" 2> /dev/null || true
done
if [[ -t 1 ]]; then
stop_inline_spinner
fi
echo ""
exit 130
}
trap cleanup_scan INT TERM
# Start parallel scanning of all paths at once
if [[ -t 1 ]]; then
start_inline_spinner "Scanning projects..."
fi
# Launch all scans in parallel
for path in "${PURGE_SEARCH_PATHS[@]}"; do
if [[ -d "$path" ]]; then
local scan_output
scan_output=$(mktemp)
scan_temps+=("$scan_output")
# Launch scan in background for true parallelism
scan_purge_targets "$path" "$scan_output" &
local scan_pid=$!
scan_pids+=("$scan_pid")
fi
done
# Wait for all scans to complete
for pid in "${scan_pids[@]+"${scan_pids[@]}"}"; do
wait "$pid" 2> /dev/null || true
done
if [[ -t 1 ]]; then
stop_inline_spinner
fi
# Collect all results
for scan_output in "${scan_temps[@]+"${scan_temps[@]}"}"; do
if [[ -f "$scan_output" ]]; then
while IFS= read -r item; do
if [[ -n "$item" ]]; then
all_found_items+=("$item")
fi
done < "$scan_output"
rm -f "$scan_output"
fi
done
# Clean up trap
trap - INT TERM
if [[ ${#all_found_items[@]} -eq 0 ]]; then
echo ""
echo -e "${GREEN}${ICON_SUCCESS}${NC} Great! No old project artifacts to clean"
printf '\n'
return 2 # Special code: nothing to clean
fi
# Mark recently modified items (for default selection state)
for item in "${all_found_items[@]}"; do
if is_recently_modified "$item"; then
recently_modified+=("$item")
fi
# Add all items to safe_to_clean, let user choose
safe_to_clean+=("$item")
done
# Build menu options - one per artifact
if [[ -t 1 ]]; then
start_inline_spinner "Calculating sizes..."
fi
local -a menu_options=()
local -a item_paths=()
local -a item_sizes=()
local -a item_recent_flags=()
# Helper to get project name from path
# For ~/www/pake/src-tauri/target -> returns "pake"
# For ~/work/code/MyProject/node_modules -> returns "MyProject"
# Strategy: Find the nearest ancestor directory containing a project indicator file
get_project_name() {
local path="$1"
local artifact_name
artifact_name=$(basename "$path")
# Start from the parent of the artifact and walk up
local current_dir
current_dir=$(dirname "$path")
while [[ "$current_dir" != "/" && "$current_dir" != "$HOME" && -n "$current_dir" ]]; do
# Check if current directory contains any project indicator
for indicator in "${PROJECT_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
# Found a project root, return its name
basename "$current_dir"
return 0
fi
done
# Move up one level
current_dir=$(dirname "$current_dir")
done
# Fallback: try the old logic (first directory under search root)
local search_roots=()
if [[ ${#PURGE_SEARCH_PATHS[@]} -gt 0 ]]; then
search_roots=("${PURGE_SEARCH_PATHS[@]}")
else
search_roots=("$HOME/www" "$HOME/dev" "$HOME/Projects")
fi
for root in "${search_roots[@]}"; do
root="${root%/}"
if [[ -n "$root" && "$path" == "$root/"* ]]; then
local relative_path="${path#"$root"/}"
echo "$relative_path" | cut -d'/' -f1
return 0
fi
done
# Final fallback: use grandparent directory
dirname "$(dirname "$path")" | xargs basename
}
# Format display with alignment (like app_selector)
format_purge_display() {
local project_name="$1"
local artifact_type="$2"
local size_str="$3"
# Terminal width for alignment
local terminal_width=$(tput cols 2> /dev/null || echo 80)
local fixed_width=28 # Reserve for type and size
local available_width=$((terminal_width - fixed_width))
# Bounds: 24-35 chars for project name
[[ $available_width -lt 24 ]] && available_width=24
[[ $available_width -gt 35 ]] && available_width=35
# Truncate project name if needed
local truncated_name=$(truncate_by_display_width "$project_name" "$available_width")
local current_width=$(get_display_width "$truncated_name")
local char_count=${#truncated_name}
local padding=$((available_width - current_width))
local printf_width=$((char_count + padding))
# Format: "project_name size | artifact_type"
printf "%-*s %9s | %-13s" "$printf_width" "$truncated_name" "$size_str" "$artifact_type"
}
# Build menu options - one line per artifact
for item in "${safe_to_clean[@]}"; do
local project_name=$(get_project_name "$item")
local artifact_type=$(basename "$item")
local size_kb=$(get_dir_size_kb "$item")
local size_human=$(bytes_to_human "$((size_kb * 1024))")
# Check if recent
local is_recent=false
for recent_item in "${recently_modified[@]+"${recently_modified[@]}"}"; do
if [[ "$item" == "$recent_item" ]]; then
is_recent=true
break
fi
done
menu_options+=("$(format_purge_display "$project_name" "$artifact_type" "$size_human")")
item_paths+=("$item")
item_sizes+=("$size_kb")
item_recent_flags+=("$is_recent")
done
if [[ -t 1 ]]; then
stop_inline_spinner
fi
# Set global vars for selector
export PURGE_CATEGORY_SIZES=$(
IFS=,
echo "${item_sizes[*]}"
)
export PURGE_RECENT_CATEGORIES=$(
IFS=,
echo "${item_recent_flags[*]}"
)
# Interactive selection (only if terminal is available)
PURGE_SELECTION_RESULT=""
if [[ -t 0 ]]; then
if ! select_purge_categories "${menu_options[@]}"; then
unset PURGE_CATEGORY_SIZES PURGE_RECENT_CATEGORIES PURGE_SELECTION_RESULT
return 1
fi
else
# Non-interactive: select all non-recent items
for ((i = 0; i < ${#menu_options[@]}; i++)); do
if [[ ${item_recent_flags[i]} != "true" ]]; then
[[ -n "$PURGE_SELECTION_RESULT" ]] && PURGE_SELECTION_RESULT+=","
PURGE_SELECTION_RESULT+="$i"
fi
done
fi
if [[ -z "$PURGE_SELECTION_RESULT" ]]; then
echo ""
echo -e "${GRAY}No items selected${NC}"
printf '\n'
unset PURGE_CATEGORY_SIZES PURGE_RECENT_CATEGORIES PURGE_SELECTION_RESULT
return 0
fi
# Clean selected items
echo ""
IFS=',' read -r -a selected_indices <<< "$PURGE_SELECTION_RESULT"
local stats_dir="${XDG_CACHE_HOME:-$HOME/.cache}/mole"
local cleaned_count=0
for idx in "${selected_indices[@]}"; do
local item_path="${item_paths[idx]}"
local artifact_type=$(basename "$item_path")
local project_name=$(get_project_name "$item_path")
local size_kb="${item_sizes[idx]}"
local size_human=$(bytes_to_human "$((size_kb * 1024))")
# Safety checks
if [[ -z "$item_path" || "$item_path" == "/" || "$item_path" == "$HOME" || "$item_path" != "$HOME/"* ]]; then
continue
fi
if [[ -t 1 ]]; then
start_inline_spinner "Cleaning $project_name/$artifact_type..."
fi
if [[ -e "$item_path" ]]; then
safe_remove "$item_path" true
if [[ ! -e "$item_path" ]]; then
local current_total=$(cat "$stats_dir/purge_stats" 2> /dev/null || echo "0")
echo "$((current_total + size_kb))" > "$stats_dir/purge_stats"
((cleaned_count++))
fi
fi
if [[ -t 1 ]]; then
stop_inline_spinner
echo -e "${GREEN}${ICON_SUCCESS}${NC} $project_name - $artifact_type ${GREEN}($size_human)${NC}"
fi
done
# Update count
echo "$cleaned_count" > "$stats_dir/purge_count"
unset PURGE_CATEGORY_SIZES PURGE_RECENT_CATEGORIES PURGE_SELECTION_RESULT
}
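A minimal usage sketch, assuming the module's configuration arrays (PURGE_SEARCH_PATHS, PURGE_TARGETS, PROJECT_INDICATORS, MIN_AGE_DAYS) and helpers are already loaded; the values shown are examples, not the shipped defaults.
# Illustrative only: a non-interactive run auto-selects every non-recent artifact.
PURGE_SEARCH_PATHS=("$HOME/www" "$HOME/dev")
PURGE_TARGETS=(node_modules target dist)
MIN_AGE_DAYS=7
clean_project_artifacts < /dev/null || true   # returns 2 when nothing was found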

423
lib/clean/system.ps1 Normal file
View File

@@ -0,0 +1,423 @@
# Mole - System Cleanup Module
# Cleans Windows system files that require administrator access
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_CLEAN_SYSTEM_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_CLEAN_SYSTEM_LOADED) { return }
$script:MOLE_CLEAN_SYSTEM_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\..\core\base.ps1"
. "$scriptDir\..\core\log.ps1"
. "$scriptDir\..\core\file_ops.ps1"
# ============================================================================
# System Temp Files
# ============================================================================
function Clear-SystemTempFiles {
<#
.SYNOPSIS
Clean system-level temporary files (requires admin)
#>
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping system temp cleanup - requires admin"
return
}
# Windows Temp folder
$winTemp = "$env:WINDIR\Temp"
if (Test-Path $winTemp) {
Remove-OldFiles -Path $winTemp -DaysOld 7 -Description "Windows temp files"
}
# SYSTEMROOT temp (usually resolves to the same path as WINDIR\Temp; cleaned only if it differs)
$systemTemp = "$env:SYSTEMROOT\Temp"
if ((Test-Path $systemTemp) -and ($systemTemp -ne $winTemp)) {
Remove-OldFiles -Path $systemTemp -DaysOld 7 -Description "System temp files"
}
}
# ============================================================================
# Windows Logs
# ============================================================================
function Clear-WindowsLogs {
<#
.SYNOPSIS
Clean Windows log files (requires admin)
#>
param([int]$DaysOld = 7)
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Windows logs cleanup - requires admin"
return
}
# Windows Logs directory
$logPaths = @(
"$env:WINDIR\Logs\CBS"
"$env:WINDIR\Logs\DISM"
"$env:WINDIR\Logs\DPX"
"$env:WINDIR\Logs\WindowsUpdate"
"$env:WINDIR\Logs\SIH"
"$env:WINDIR\Logs\waasmedia"
"$env:WINDIR\Debug"
"$env:WINDIR\Panther"
"$env:PROGRAMDATA\Microsoft\Windows\WER\ReportQueue"
"$env:PROGRAMDATA\Microsoft\Windows\WER\ReportArchive"
)
foreach ($path in $logPaths) {
if (Test-Path $path) {
Remove-OldFiles -Path $path -DaysOld $DaysOld -Description "$(Split-Path -Leaf $path) logs"
}
}
# Setup logs (*.log files in Windows directory)
$setupLogs = Get-ChildItem -Path "$env:WINDIR\*.log" -File -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$DaysOld) }
if ($setupLogs) {
$paths = $setupLogs | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Windows setup logs"
}
}
# ============================================================================
# Windows Update Cleanup
# ============================================================================
function Clear-WindowsUpdateFiles {
<#
.SYNOPSIS
Clean Windows Update download cache (requires admin)
#>
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Windows Update cleanup - requires admin"
return
}
# Stop Windows Update service
$wuService = Get-Service -Name wuauserv -ErrorAction SilentlyContinue
$wasRunning = ($null -ne $wuService) -and ($wuService.Status -eq 'Running')
if ($wasRunning) {
if (Test-DryRunMode) {
Write-DryRun "Windows Update cache (service would be restarted)"
return
}
try {
Stop-Service -Name wuauserv -Force -ErrorAction Stop
}
catch {
Write-Debug "Could not stop Windows Update service: $_"
return
}
}
try {
# Clean download cache
$wuDownloadPath = "$env:WINDIR\SoftwareDistribution\Download"
if (Test-Path $wuDownloadPath) {
Clear-DirectoryContents -Path $wuDownloadPath -Description "Windows Update download cache"
}
# Clean DataStore (old update history - be careful!)
# Only clean temp files, not the actual database
$wuDataStore = "$env:WINDIR\SoftwareDistribution\DataStore\Logs"
if (Test-Path $wuDataStore) {
Clear-DirectoryContents -Path $wuDataStore -Description "Windows Update logs"
}
}
finally {
# Always restart service if it was running, even if cleanup failed
if ($wasRunning) {
Start-Service -Name wuauserv -ErrorAction SilentlyContinue
}
}
}
# ============================================================================
# Installer Cleanup
# ============================================================================
function Clear-InstallerCache {
<#
.SYNOPSIS
Clean Windows Installer cache (orphaned patches)
#>
if (-not (Test-IsAdmin)) {
return
}
# Windows Installer patch cache
# WARNING: Be very careful here - only clean truly orphaned files
$installerPath = "$env:WINDIR\Installer"
# Only clean .tmp files and very old .msp files that are likely orphaned
if (Test-Path $installerPath) {
$tmpFiles = Get-ChildItem -Path $installerPath -Filter "*.tmp" -File -ErrorAction SilentlyContinue
if ($tmpFiles) {
$paths = $tmpFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Installer temp files"
}
}
# Installer logs in temp
$installerLogs = Get-ChildItem -Path $env:TEMP -Filter "MSI*.LOG" -File -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-30) }
if ($installerLogs) {
$paths = $installerLogs | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Old MSI logs"
}
}
# ============================================================================
# Component Store Cleanup
# ============================================================================
function Invoke-ComponentStoreCleanup {
<#
.SYNOPSIS
Run Windows Component Store cleanup (DISM)
#>
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping component store cleanup - requires admin"
return
}
if (Test-DryRunMode) {
Write-DryRun "Component Store cleanup (DISM)"
Set-SectionActivity
return
}
try {
Write-Info "Running Component Store cleanup (this may take a while)..."
# Run DISM cleanup
$result = Start-Process -FilePath "dism.exe" `
-ArgumentList "/Online", "/Cleanup-Image", "/StartComponentCleanup" `
-Wait -PassThru -NoNewWindow -ErrorAction Stop
if ($result.ExitCode -eq 0) {
Write-Success "Component Store cleanup"
Set-SectionActivity
}
else {
Write-Debug "DISM returned exit code: $($result.ExitCode)"
}
}
catch {
Write-Debug "Component Store cleanup failed: $_"
}
}
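If wanted, DISM can first be asked whether a cleanup is worthwhile. This is a hedged sketch, not part of the module: /AnalyzeComponentStore also needs an elevated shell, and the exact output wording can vary between Windows builds.
# Illustrative sketch: only run the (slow) cleanup when DISM recommends it.
$analysis = & dism.exe /Online /Cleanup-Image /AnalyzeComponentStore 2>&1
if ($analysis -match 'Cleanup Recommended\s*:\s*Yes') {
    Invoke-ComponentStoreCleanup
}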
# ============================================================================
# Memory Dump Cleanup
# ============================================================================
function Clear-MemoryDumps {
<#
.SYNOPSIS
Clean Windows memory dumps
#>
$dumpPaths = @(
"$env:WINDIR\MEMORY.DMP"
"$env:WINDIR\Minidump"
"$env:LOCALAPPDATA\CrashDumps"
)
foreach ($path in $dumpPaths) {
if (Test-Path $path -PathType Leaf) {
# Single file (MEMORY.DMP)
Remove-SafeItem -Path $path -Description "Memory dump"
}
elseif (Test-Path $path -PathType Container) {
# Directory (Minidump, CrashDumps)
Clear-DirectoryContents -Path $path -Description "$(Split-Path -Leaf $path)"
}
}
}
# ============================================================================
# Font Cache
# ============================================================================
function Clear-SystemFontCache {
<#
.SYNOPSIS
Clear Windows font cache (requires admin and may need restart)
#>
if (-not (Test-IsAdmin)) {
return
}
$fontCacheService = Get-Service -Name "FontCache" -ErrorAction SilentlyContinue
if ($fontCacheService) {
if (Test-DryRunMode) {
Write-DryRun "System font cache"
return
}
try {
# Stop font cache service
Stop-Service -Name "FontCache" -Force -ErrorAction SilentlyContinue
# Clear font cache files
$fontCachePath = "$env:WINDIR\ServiceProfiles\LocalService\AppData\Local\FontCache"
if (Test-Path $fontCachePath) {
Clear-DirectoryContents -Path $fontCachePath -Description "System font cache"
}
# Restart font cache service
Start-Service -Name "FontCache" -ErrorAction SilentlyContinue
}
catch {
Write-Debug "Font cache cleanup failed: $_"
Start-Service -Name "FontCache" -ErrorAction SilentlyContinue
}
}
}
# ============================================================================
# Disk Cleanup Tool Integration
# ============================================================================
function Invoke-DiskCleanupTool {
<#
.SYNOPSIS
Run Windows built-in Disk Cleanup tool with predefined settings
#>
param([switch]$Full)
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Disk Cleanup tool - requires admin"
return
}
if (Test-DryRunMode) {
Write-DryRun "Windows Disk Cleanup tool"
return
}
# Set up registry keys for automated cleanup
$cleanupKey = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Explorer\VolumeCaches"
$cleanupItems = @(
"Active Setup Temp Folders"
"Downloaded Program Files"
"Internet Cache Files"
"Old ChkDsk Files"
"Recycle Bin"
"Setup Log Files"
"System error memory dump files"
"System error minidump files"
"Temporary Files"
"Temporary Setup Files"
"Thumbnail Cache"
"Windows Error Reporting Archive Files"
"Windows Error Reporting Queue Files"
"Windows Error Reporting System Archive Files"
"Windows Error Reporting System Queue Files"
)
if ($Full -and (Test-IsAdmin)) {
$cleanupItems += @(
"Previous Installations"
"Temporary Windows installation files"
"Update Cleanup"
"Windows Defender"
"Windows Upgrade Log Files"
)
}
# Enable cleanup items in registry
foreach ($item in $cleanupItems) {
$itemPath = Join-Path $cleanupKey $item
if (Test-Path $itemPath) {
Set-ItemProperty -Path $itemPath -Name "StateFlags0100" -Value 2 -Type DWord -ErrorAction SilentlyContinue
}
}
try {
# Run disk cleanup
$process = Start-Process -FilePath "cleanmgr.exe" `
-ArgumentList "/sagerun:100" `
-Wait -PassThru -NoNewWindow -ErrorAction Stop
if ($process.ExitCode -eq 0) {
Write-Success "Windows Disk Cleanup"
Set-SectionActivity
}
}
catch {
Write-Debug "Disk Cleanup failed: $_"
}
}
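The StateFlags0100 values written above pair with the /sagerun:100 argument: cleanmgr reads StateFlags<NNNN> from each handler key when run with /sagerun:<NNNN>. A quick way to see which handlers exist on a given machine:
# Illustrative sketch: list the available Disk Cleanup handlers (read-only, no admin needed).
Get-ChildItem 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Explorer\VolumeCaches' |
    Select-Object -ExpandProperty PSChildName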
# ============================================================================
# Main System Cleanup Function
# ============================================================================
function Invoke-SystemCleanup {
<#
.SYNOPSIS
Run all system-level cleanup tasks (requires admin for full effect)
#>
param(
[switch]$IncludeComponentStore,
[switch]$IncludeDiskCleanup
)
Start-Section "System cleanup"
if (-not (Test-IsAdmin)) {
Write-MoleWarning "Running without admin - some cleanup tasks will be skipped"
}
# System temp files
Clear-SystemTempFiles
# Windows logs
Clear-WindowsLogs -DaysOld 7
# Windows Update cache
Clear-WindowsUpdateFiles
# Installer cache
Clear-InstallerCache
# Memory dumps
Clear-MemoryDumps
# Font cache
Clear-SystemFontCache
# Optional: Component Store (can take a long time)
if ($IncludeComponentStore) {
Invoke-ComponentStoreCleanup
}
# Optional: Windows Disk Cleanup tool
if ($IncludeDiskCleanup) {
Invoke-DiskCleanupTool -Full
}
Stop-Section
}
# ============================================================================
# Exports
# ============================================================================
# Functions: Clear-SystemTempFiles, Clear-WindowsLogs, Invoke-SystemCleanup, etc.
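A usage sketch, assuming the repository layout above and an elevated shell; the dot-source path is relative to the repo root.
# Illustrative usage only:
. "$PSScriptRoot\lib\clean\system.ps1"
Invoke-SystemCleanup -IncludeComponentStore -IncludeDiskCleanup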

View File

@@ -1,339 +0,0 @@
#!/bin/bash
# System-Level Cleanup Module (requires sudo).
set -euo pipefail
# System caches, logs, and temp files.
clean_deep_system() {
stop_section_spinner
local cache_cleaned=0
safe_sudo_find_delete "/Library/Caches" "*.cache" "$MOLE_TEMP_FILE_AGE_DAYS" "f" && cache_cleaned=1 || true
safe_sudo_find_delete "/Library/Caches" "*.tmp" "$MOLE_TEMP_FILE_AGE_DAYS" "f" && cache_cleaned=1 || true
safe_sudo_find_delete "/Library/Caches" "*.log" "$MOLE_LOG_AGE_DAYS" "f" && cache_cleaned=1 || true
[[ $cache_cleaned -eq 1 ]] && log_success "System caches"
local tmp_cleaned=0
safe_sudo_find_delete "/private/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" && tmp_cleaned=1 || true
safe_sudo_find_delete "/private/var/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" && tmp_cleaned=1 || true
[[ $tmp_cleaned -eq 1 ]] && log_success "System temp files"
safe_sudo_find_delete "/Library/Logs/DiagnosticReports" "*" "$MOLE_CRASH_REPORT_AGE_DAYS" "f" || true
log_success "System crash reports"
safe_sudo_find_delete "/private/var/log" "*.log" "$MOLE_LOG_AGE_DAYS" "f" || true
safe_sudo_find_delete "/private/var/log" "*.gz" "$MOLE_LOG_AGE_DAYS" "f" || true
log_success "System logs"
if [[ -d "/Library/Updates" && ! -L "/Library/Updates" ]]; then
if ! is_sip_enabled; then
local updates_cleaned=0
while IFS= read -r -d '' item; do
if [[ -z "$item" ]] || [[ ! "$item" =~ ^/Library/Updates/[^/]+$ ]]; then
debug_log "Skipping malformed path: $item"
continue
fi
local item_flags
item_flags=$($STAT_BSD -f%Sf "$item" 2> /dev/null || echo "")
if [[ "$item_flags" == *"restricted"* ]]; then
continue
fi
if safe_sudo_remove "$item"; then
((updates_cleaned++))
fi
done < <(find /Library/Updates -mindepth 1 -maxdepth 1 -print0 2> /dev/null || true)
[[ $updates_cleaned -gt 0 ]] && log_success "System library updates"
fi
fi
if [[ -d "/macOS Install Data" ]]; then
local mtime=$(get_file_mtime "/macOS Install Data")
local age_days=$((($(get_epoch_seconds) - mtime) / 86400))
debug_log "Found macOS Install Data (age: ${age_days} days)"
if [[ $age_days -ge 30 ]]; then
local size_kb=$(get_path_size_kb "/macOS Install Data")
if [[ -n "$size_kb" && "$size_kb" -gt 0 ]]; then
local size_human=$(bytes_to_human "$((size_kb * 1024))")
debug_log "Cleaning macOS Install Data: $size_human (${age_days} days old)"
if safe_sudo_remove "/macOS Install Data"; then
log_success "macOS Install Data ($size_human)"
fi
fi
else
debug_log "Keeping macOS Install Data (only ${age_days} days old, needs 30+)"
fi
fi
start_section_spinner "Scanning system caches..."
local code_sign_cleaned=0
local found_count=0
local last_update_time
last_update_time=$(get_epoch_seconds)
local update_interval=2
while IFS= read -r -d '' cache_dir; do
if safe_remove "$cache_dir" true; then
((code_sign_cleaned++))
fi
((found_count++))
# Optimize: only check time every 50 files
if ((found_count % 50 == 0)); then
local current_time
current_time=$(get_epoch_seconds)
if [[ $((current_time - last_update_time)) -ge $update_interval ]]; then
start_section_spinner "Scanning system caches... ($found_count found)"
last_update_time=$current_time
fi
fi
done < <(run_with_timeout 5 command find /private/var/folders -type d -name "*.code_sign_clone" -path "*/X/*" -print0 2> /dev/null || true)
stop_section_spinner
[[ $code_sign_cleaned -gt 0 ]] && log_success "Browser code signature caches ($code_sign_cleaned items)"
safe_sudo_find_delete "/private/var/db/diagnostics/Special" "*" "$MOLE_LOG_AGE_DAYS" "f" || true
safe_sudo_find_delete "/private/var/db/diagnostics/Persist" "*" "$MOLE_LOG_AGE_DAYS" "f" || true
safe_sudo_find_delete "/private/var/db/DiagnosticPipeline" "*" "$MOLE_LOG_AGE_DAYS" "f" || true
log_success "System diagnostic logs"
safe_sudo_find_delete "/private/var/db/powerlog" "*" "$MOLE_LOG_AGE_DAYS" "f" || true
log_success "Power logs"
safe_sudo_find_delete "/private/var/db/reportmemoryexception/MemoryLimitViolations" "*" "30" "f" || true
log_success "Memory exception reports"
start_section_spinner "Cleaning diagnostic trace logs..."
local diag_logs_cleaned=0
safe_sudo_find_delete "/private/var/db/diagnostics/Persist" "*.tracev3" "30" "f" && diag_logs_cleaned=1 || true
safe_sudo_find_delete "/private/var/db/diagnostics/Special" "*.tracev3" "30" "f" && diag_logs_cleaned=1 || true
stop_section_spinner
[[ $diag_logs_cleaned -eq 1 ]] && log_success "System diagnostic trace logs"
}
# Incomplete Time Machine backups.
clean_time_machine_failed_backups() {
local tm_cleaned=0
if ! command -v tmutil > /dev/null 2>&1; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No incomplete backups found"
return 0
fi
start_section_spinner "Checking Time Machine configuration..."
local spinner_active=true
local tm_info
tm_info=$(run_with_timeout 2 tmutil destinationinfo 2>&1 || echo "failed")
if [[ "$tm_info" == *"No destinations configured"* || "$tm_info" == "failed" ]]; then
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No incomplete backups found"
return 0
fi
if [[ ! -d "/Volumes" ]]; then
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No incomplete backups found"
return 0
fi
if tmutil status 2> /dev/null | grep -q "Running = 1"; then
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
echo -e " ${YELLOW}!${NC} Time Machine backup in progress, skipping cleanup"
return 0
fi
if [[ "$spinner_active" == "true" ]]; then
start_section_spinner "Checking backup volumes..."
fi
# Fast pre-scan for backup volumes to avoid slow tmutil checks.
local -a backup_volumes=()
for volume in /Volumes/*; do
[[ -d "$volume" ]] || continue
[[ "$volume" == "/Volumes/MacintoshHD" || "$volume" == "/" ]] && continue
[[ -L "$volume" ]] && continue
if [[ -d "$volume/Backups.backupdb" ]] || [[ -d "$volume/.MobileBackups" ]]; then
backup_volumes+=("$volume")
fi
done
if [[ ${#backup_volumes[@]} -eq 0 ]]; then
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No incomplete backups found"
return 0
fi
if [[ "$spinner_active" == "true" ]]; then
start_section_spinner "Scanning backup volumes..."
fi
for volume in "${backup_volumes[@]}"; do
local fs_type
fs_type=$(run_with_timeout 1 command df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}' || echo "unknown")
case "$fs_type" in
nfs | smbfs | afpfs | cifs | webdav | unknown) continue ;;
esac
local backupdb_dir="$volume/Backups.backupdb"
if [[ -d "$backupdb_dir" ]]; then
while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue
# Only delete old incomplete backups (safety window).
local file_mtime=$(get_file_mtime "$inprogress_file")
local current_time
current_time=$(get_epoch_seconds)
local hours_old=$(((current_time - file_mtime) / 3600))
if [[ $hours_old -lt $MOLE_TM_BACKUP_SAFE_HOURS ]]; then
continue
fi
local size_kb=$(get_path_size_kb "$inprogress_file")
[[ "$size_kb" -le 0 ]] && continue
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
spinner_active=false
fi
local backup_name=$(basename "$inprogress_file")
local size_human=$(bytes_to_human "$((size_kb * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Incomplete backup: $backup_name ${YELLOW}($size_human dry)${NC}"
((tm_cleaned++))
note_activity
continue
fi
if ! command -v tmutil > /dev/null 2>&1; then
echo -e " ${YELLOW}!${NC} tmutil not available, skipping: $backup_name"
continue
fi
if tmutil delete "$inprogress_file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Incomplete backup: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++))
((files_cleaned++))
((total_size_cleaned += size_kb))
((total_items++))
note_activity
else
echo -e " ${YELLOW}!${NC} Could not delete: $backup_name · try manually with sudo"
fi
done < <(run_with_timeout 15 find "$backupdb_dir" -maxdepth 3 -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi
# APFS bundles.
for bundle in "$volume"/*.backupbundle "$volume"/*.sparsebundle; do
[[ -e "$bundle" ]] || continue
[[ -d "$bundle" ]] || continue
local bundle_name=$(basename "$bundle")
local mounted_path=$(hdiutil info 2> /dev/null | grep -A 5 "image-path.*$bundle_name" | grep "/Volumes/" | awk '{print $1}' | head -1 || echo "")
if [[ -n "$mounted_path" && -d "$mounted_path" ]]; then
while IFS= read -r inprogress_file; do
[[ -d "$inprogress_file" ]] || continue
local file_mtime=$(get_file_mtime "$inprogress_file")
local current_time
current_time=$(get_epoch_seconds)
local hours_old=$(((current_time - file_mtime) / 3600))
if [[ $hours_old -lt $MOLE_TM_BACKUP_SAFE_HOURS ]]; then
continue
fi
local size_kb=$(get_path_size_kb "$inprogress_file")
[[ "$size_kb" -le 0 ]] && continue
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
spinner_active=false
fi
local backup_name=$(basename "$inprogress_file")
local size_human=$(bytes_to_human "$((size_kb * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Incomplete APFS backup in $bundle_name: $backup_name ${YELLOW}($size_human dry)${NC}"
((tm_cleaned++))
note_activity
continue
fi
if ! command -v tmutil > /dev/null 2>&1; then
continue
fi
if tmutil delete "$inprogress_file" 2> /dev/null; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Incomplete APFS backup in $bundle_name: $backup_name ${GREEN}($size_human)${NC}"
((tm_cleaned++))
((files_cleaned++))
((total_size_cleaned += size_kb))
((total_items++))
note_activity
else
echo -e " ${YELLOW}!${NC} Could not delete from bundle: $backup_name"
fi
done < <(run_with_timeout 15 find "$mounted_path" -maxdepth 3 -type d \( -name "*.inProgress" -o -name "*.inprogress" \) 2> /dev/null || true)
fi
done
done
if [[ "$spinner_active" == "true" ]]; then
stop_section_spinner
fi
if [[ $tm_cleaned -eq 0 ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No incomplete backups found"
fi
}
# Local APFS snapshots (keep the most recent).
clean_local_snapshots() {
if ! command -v tmutil > /dev/null 2>&1; then
return 0
fi
start_section_spinner "Checking local snapshots..."
local snapshot_list
snapshot_list=$(tmutil listlocalsnapshots / 2> /dev/null)
stop_section_spinner
[[ -z "$snapshot_list" ]] && return 0
local cleaned_count=0
# Snapshot sizes are not estimated here (that would require tmutil thinlocalsnapshots).
local newest_ts=0
local newest_name=""
local -a snapshots=()
while IFS= read -r line; do
if [[ "$line" =~ com\.apple\.TimeMachine\.([0-9]{4})-([0-9]{2})-([0-9]{2})-([0-9]{6}) ]]; then
local snap_name="${BASH_REMATCH[0]}"
snapshots+=("$snap_name")
local date_str="${BASH_REMATCH[1]}-${BASH_REMATCH[2]}-${BASH_REMATCH[3]} ${BASH_REMATCH[4]:0:2}:${BASH_REMATCH[4]:2:2}:${BASH_REMATCH[4]:4:2}"
local snap_ts=$(date -j -f "%Y-%m-%d %H:%M:%S" "$date_str" "+%s" 2> /dev/null || echo "0")
[[ "$snap_ts" == "0" ]] && continue
if [[ "$snap_ts" -gt "$newest_ts" ]]; then
newest_ts="$snap_ts"
newest_name="$snap_name"
fi
fi
done <<< "$snapshot_list"
[[ ${#snapshots[@]} -eq 0 ]] && return 0
[[ -z "$newest_name" ]] && return 0
local deletable_count=$((${#snapshots[@]} - 1))
[[ $deletable_count -le 0 ]] && return 0
if [[ "$DRY_RUN" != "true" ]]; then
if [[ ! -t 0 ]]; then
echo -e " ${YELLOW}!${NC} ${#snapshots[@]} local snapshot(s) found, skipping non-interactive mode"
echo -e " ${YELLOW}${ICON_WARNING}${NC} ${GRAY}Tip: Snapshots may cause Disk Utility to show different 'Available' values${NC}"
return 0
fi
echo -e " ${YELLOW}!${NC} Time Machine local snapshots found"
echo -e " ${GRAY}macOS can recreate them if needed.${NC}"
echo -e " ${GRAY}The most recent snapshot will be kept.${NC}"
echo -ne " ${PURPLE}${ICON_ARROW}${NC} Remove all local snapshots except the most recent one? ${GREEN}Enter${NC} continue, ${GRAY}Space${NC} skip: "
local choice
if type read_key > /dev/null 2>&1; then
choice=$(read_key)
else
IFS= read -r -s -n 1 choice || choice=""
if [[ -z "$choice" || "$choice" == $'\n' || "$choice" == $'\r' ]]; then
choice="ENTER"
fi
fi
if [[ "$choice" == "ENTER" ]]; then
printf "\r\033[K" # Clear the prompt line
else
echo -e " ${GRAY}Skipped${NC}"
return 0
fi
fi
local snap_name
for snap_name in "${snapshots[@]}"; do
if [[ "$snap_name" =~ com\.apple\.TimeMachine\.([0-9]{4})-([0-9]{2})-([0-9]{2})-([0-9]{6}) ]]; then
if [[ "${BASH_REMATCH[0]}" != "$newest_name" ]]; then
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Local snapshot: $snap_name ${YELLOW}dry-run${NC}"
((cleaned_count++))
note_activity
else
if sudo tmutil deletelocalsnapshots "${BASH_REMATCH[1]}-${BASH_REMATCH[2]}-${BASH_REMATCH[3]}-${BASH_REMATCH[4]}" > /dev/null 2>&1; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Removed snapshot: $snap_name"
((cleaned_count++))
note_activity
else
echo -e " ${YELLOW}!${NC} Failed to remove: $snap_name"
fi
fi
fi
fi
done
if [[ $cleaned_count -gt 0 && "$DRY_RUN" != "true" ]]; then
log_success "Cleaned $cleaned_count local snapshots, kept latest"
fi
}
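For reference, the manual equivalent of what this function automates; the snapshot name is an example.
# Illustrative only: list snapshots, then delete one by its date portion.
tmutil listlocalsnapshots /                         # e.g. com.apple.TimeMachine.2024-05-01-101500
sudo tmutil deletelocalsnapshots 2024-05-01-101500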

352
lib/clean/user.ps1 Normal file
View File

@@ -0,0 +1,352 @@
# Mole - User Cleanup Module
# Cleans user-level temporary files, caches, and downloads
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_CLEAN_USER_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_CLEAN_USER_LOADED) { return }
$script:MOLE_CLEAN_USER_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\..\core\base.ps1"
. "$scriptDir\..\core\log.ps1"
. "$scriptDir\..\core\file_ops.ps1"
# ============================================================================
# Windows Temp Files Cleanup
# ============================================================================
function Clear-UserTempFiles {
<#
.SYNOPSIS
Clean user temporary files
#>
param([int]$DaysOld = 7)
Start-Section "User temp files"
# User temp directory
$userTemp = $env:TEMP
if (Test-Path $userTemp) {
Remove-OldFiles -Path $userTemp -DaysOld $DaysOld -Description "User temp files"
}
# Windows Temp (if accessible)
$winTemp = "$env:WINDIR\Temp"
if ((Test-Path $winTemp) -and (Test-IsAdmin)) {
Remove-OldFiles -Path $winTemp -DaysOld $DaysOld -Description "Windows temp files"
}
Stop-Section
}
# ============================================================================
# Downloads Folder Cleanup
# ============================================================================
function Clear-OldDownloads {
<#
.SYNOPSIS
Clean old files from Downloads folder (with user confirmation pattern)
#>
param([int]$DaysOld = 30)
$downloadsPath = [Environment]::GetFolderPath('UserProfile') + '\Downloads'
if (-not (Test-Path $downloadsPath)) {
return
}
# Find old installers and archives
$patterns = @('*.exe', '*.msi', '*.zip', '*.7z', '*.rar', '*.tar.gz', '*.iso')
$cutoffDate = (Get-Date).AddDays(-$DaysOld)
$oldFiles = @()
foreach ($pattern in $patterns) {
$files = Get-ChildItem -Path $downloadsPath -Filter $pattern -File -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt $cutoffDate }
if ($files) {
$oldFiles += $files
}
}
if ($oldFiles.Count -gt 0) {
$paths = $oldFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Old downloads (>${DaysOld}d)"
}
}
# ============================================================================
# Recycle Bin Cleanup
# ============================================================================
function Clear-RecycleBin {
<#
.SYNOPSIS
Empty the Recycle Bin
#>
if (Test-DryRunMode) {
Write-DryRun "Recycle Bin (would empty)"
Set-SectionActivity
return
}
try {
# Use Shell.Application COM object
$shell = New-Object -ComObject Shell.Application
$recycleBin = $shell.Namespace(0xA) # Recycle Bin
$items = $recycleBin.Items()
if ($items.Count -gt 0) {
# Calculate size
$totalSize = 0
foreach ($item in $items) {
$totalSize += $item.Size
}
# Empty via the built-in cmdlet (Windows 10+); module-qualified so it does not recurse into this function
Microsoft.PowerShell.Management\Clear-RecycleBin -Force -ErrorAction SilentlyContinue
$sizeHuman = Format-ByteSize -Bytes $totalSize
Write-Success "Recycle Bin $($script:Colors.Green)($sizeHuman)$($script:Colors.NC)"
Set-SectionActivity
}
}
catch {
Write-Debug "Could not clear Recycle Bin: $_"
}
}
# ============================================================================
# Recent Files Cleanup
# ============================================================================
function Clear-RecentFiles {
<#
.SYNOPSIS
Clean old recent file shortcuts
#>
param([int]$DaysOld = 30)
$recentPath = "$env:APPDATA\Microsoft\Windows\Recent"
if (Test-Path $recentPath) {
Remove-OldFiles -Path $recentPath -DaysOld $DaysOld -Filter "*.lnk" -Description "Old recent shortcuts"
}
# AutomaticDestinations (jump lists)
$autoDestPath = "$recentPath\AutomaticDestinations"
if (Test-Path $autoDestPath) {
Remove-OldFiles -Path $autoDestPath -DaysOld $DaysOld -Description "Old jump list entries"
}
}
# ============================================================================
# Thumbnail Cache Cleanup
# ============================================================================
function Clear-ThumbnailCache {
<#
.SYNOPSIS
Clean Windows thumbnail cache
#>
$thumbCachePath = "$env:LOCALAPPDATA\Microsoft\Windows\Explorer"
if (-not (Test-Path $thumbCachePath)) {
return
}
# Thumbnail cache files (thumbcache_*.db)
$thumbFiles = Get-ChildItem -Path $thumbCachePath -Filter "thumbcache_*.db" -File -ErrorAction SilentlyContinue
if ($thumbFiles) {
$paths = $thumbFiles | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Thumbnail cache"
}
# Icon cache
$iconCache = "$env:LOCALAPPDATA\IconCache.db"
if (Test-Path $iconCache) {
Remove-SafeItem -Path $iconCache -Description "Icon cache"
}
}
# ============================================================================
# Windows Error Reports Cleanup
# ============================================================================
function Clear-ErrorReports {
<#
.SYNOPSIS
Clean Windows Error Reporting files
#>
param([int]$DaysOld = 7)
$werPaths = @(
"$env:LOCALAPPDATA\Microsoft\Windows\WER"
"$env:LOCALAPPDATA\CrashDumps"
"$env:USERPROFILE\AppData\Local\Microsoft\Windows\WER"
)
foreach ($path in $werPaths) {
if (Test-Path $path) {
$items = Get-ChildItem -Path $path -Recurse -Force -File -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$DaysOld) }
if ($items) {
$paths = $items | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Error reports"
}
}
}
# Memory dumps
$dumpPaths = @(
"$env:LOCALAPPDATA\CrashDumps"
"$env:USERPROFILE\*.dmp"
)
foreach ($path in $dumpPaths) {
$dumps = Get-ChildItem -Path $path -Filter "*.dmp" -ErrorAction SilentlyContinue
if ($dumps) {
$paths = $dumps | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Memory dumps"
}
}
}
# ============================================================================
# Windows Prefetch Cleanup (requires admin)
# ============================================================================
function Clear-Prefetch {
<#
.SYNOPSIS
Clean Windows Prefetch files (requires admin)
#>
param([int]$DaysOld = 14)
if (-not (Test-IsAdmin)) {
Write-Debug "Skipping Prefetch cleanup - requires admin"
return
}
$prefetchPath = "$env:WINDIR\Prefetch"
if (Test-Path $prefetchPath) {
Remove-OldFiles -Path $prefetchPath -DaysOld $DaysOld -Description "Prefetch files"
}
}
# ============================================================================
# Log Files Cleanup
# ============================================================================
function Clear-UserLogs {
<#
.SYNOPSIS
Clean old log files from common locations
#>
param([int]$DaysOld = 7)
$logLocations = @(
"$env:LOCALAPPDATA\Temp\*.log"
"$env:APPDATA\*.log"
"$env:USERPROFILE\*.log"
)
foreach ($location in $logLocations) {
$parent = Split-Path -Parent $location
$filter = Split-Path -Leaf $location
if (Test-Path $parent) {
$logs = Get-ChildItem -Path $parent -Filter $filter -File -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$DaysOld) }
if ($logs) {
$paths = $logs | ForEach-Object { $_.FullName }
Remove-SafeItems -Paths $paths -Description "Old log files"
}
}
}
}
# ============================================================================
# Clipboard History Cleanup
# ============================================================================
function Clear-ClipboardHistory {
<#
.SYNOPSIS
Clear Windows clipboard history
#>
if (Test-DryRunMode) {
Write-DryRun "Clipboard history (would clear)"
return
}
try {
# Load Windows Forms assembly for clipboard access
Add-Type -AssemblyName System.Windows.Forms -ErrorAction SilentlyContinue
# Clear current clipboard
[System.Windows.Forms.Clipboard]::Clear()
# Clear clipboard history (Windows 10 1809+)
$clipboardPath = "$env:LOCALAPPDATA\Microsoft\Windows\Clipboard"
if (Test-Path $clipboardPath) {
Clear-DirectoryContents -Path $clipboardPath -Description "Clipboard history"
}
}
catch {
Write-Debug "Could not clear clipboard: $_"
}
}
# ============================================================================
# Main User Cleanup Function
# ============================================================================
function Invoke-UserCleanup {
<#
.SYNOPSIS
Run all user-level cleanup tasks
#>
param(
[int]$TempDaysOld = 7,
[int]$DownloadsDaysOld = 30,
[int]$LogDaysOld = 7,
[switch]$IncludeDownloads,
[switch]$IncludeRecycleBin
)
Start-Section "User essentials"
# Always clean these
Clear-UserTempFiles -DaysOld $TempDaysOld
Clear-RecentFiles -DaysOld 30
Clear-ThumbnailCache
Clear-ErrorReports -DaysOld 7
Clear-UserLogs -DaysOld $LogDaysOld
Clear-Prefetch -DaysOld 14
# Optional: Downloads cleanup
if ($IncludeDownloads) {
Clear-OldDownloads -DaysOld $DownloadsDaysOld
}
# Optional: Recycle Bin
if ($IncludeRecycleBin) {
Clear-RecycleBin
}
Stop-Section
}
# ============================================================================
# Exports
# ============================================================================
# Functions: Clear-UserTempFiles, Clear-OldDownloads, Clear-RecycleBin, etc.
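A usage sketch assuming the repository layout above; the switches are optional and shown only to illustrate the knobs.
# Illustrative usage only:
. "$PSScriptRoot\lib\clean\user.ps1"
Invoke-UserCleanup -TempDaysOld 7 -DownloadsDaysOld 30 -IncludeDownloads -IncludeRecycleBin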

View File

@@ -1,695 +0,0 @@
#!/bin/bash
# User Data Cleanup Module
set -euo pipefail
clean_user_essentials() {
start_section_spinner "Scanning caches..."
safe_clean ~/Library/Caches/* "User app cache"
stop_section_spinner
start_section_spinner "Scanning empty items..."
clean_empty_library_items
stop_section_spinner
safe_clean ~/Library/Logs/* "User app logs"
if is_path_whitelisted "$HOME/.Trash"; then
note_activity
echo -e " ${GREEN}${ICON_EMPTY}${NC} Trash · whitelist protected"
else
safe_clean ~/.Trash/* "Trash"
fi
}
clean_empty_library_items() {
if [[ ! -d "$HOME/Library" ]]; then
return 0
fi
# 1. Clean top-level empty directories in Library
local -a empty_dirs=()
while IFS= read -r -d '' dir; do
[[ -d "$dir" ]] && empty_dirs+=("$dir")
done < <(find "$HOME/Library" -mindepth 1 -maxdepth 1 -type d -empty -print0 2> /dev/null)
if [[ ${#empty_dirs[@]} -gt 0 ]]; then
safe_clean "${empty_dirs[@]}" "Empty Library folders"
fi
# 2. Clean empty subdirectories in Application Support and other key locations
# Iteratively remove empty directories until no more are found
local -a key_locations=(
"$HOME/Library/Application Support"
"$HOME/Library/Caches"
)
for location in "${key_locations[@]}"; do
[[ -d "$location" ]] || continue
# Limit passes to keep cleanup fast; 3 iterations handle most nested scenarios.
local max_iterations=3
local iteration=0
while [[ $iteration -lt $max_iterations ]]; do
local -a nested_empty_dirs=()
# Find empty directories
while IFS= read -r -d '' dir; do
# Skip if whitelisted
if is_path_whitelisted "$dir"; then
continue
fi
# Skip protected system components
local dir_name=$(basename "$dir")
if is_critical_system_component "$dir_name"; then
continue
fi
[[ -d "$dir" ]] && nested_empty_dirs+=("$dir")
done < <(find "$location" -mindepth 1 -type d -empty -print0 2> /dev/null)
# If no empty dirs found, we're done with this location
if [[ ${#nested_empty_dirs[@]} -eq 0 ]]; then
break
fi
local location_name=$(basename "$location")
safe_clean "${nested_empty_dirs[@]}" "Empty $location_name subdirs"
((iteration++))
done
done
# Empty file cleanup is skipped to avoid removing app sentinel files.
}
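As a point of comparison, a single depth-first pass can remove nested empty directories in one go; this sketch is not what the module does, because it would bypass the whitelist and system-component checks above.
# Illustrative alternative only: -delete implies depth-first traversal, so nested empties go in one pass.
find "$HOME/Library/Caches" -mindepth 1 -type d -empty -delete 2> /dev/null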
# Remove old Google Chrome versions while keeping Current.
clean_chrome_old_versions() {
local -a app_paths=(
"/Applications/Google Chrome.app"
"$HOME/Applications/Google Chrome.app"
)
# Use -f to match Chrome Helper processes as well
if pgrep -f "Google Chrome" > /dev/null 2>&1; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Google Chrome running · old versions cleanup skipped"
return 0
fi
local cleaned_count=0
local total_size=0
local cleaned_any=false
for app_path in "${app_paths[@]}"; do
[[ -d "$app_path" ]] || continue
local versions_dir="$app_path/Contents/Frameworks/Google Chrome Framework.framework/Versions"
[[ -d "$versions_dir" ]] || continue
local current_link="$versions_dir/Current"
[[ -L "$current_link" ]] || continue
local current_version
current_version=$(readlink "$current_link" 2> /dev/null || true)
current_version="${current_version##*/}"
[[ -n "$current_version" ]] || continue
local -a old_versions=()
local dir name
for dir in "$versions_dir"/*; do
[[ -d "$dir" ]] || continue
name=$(basename "$dir")
[[ "$name" == "Current" ]] && continue
[[ "$name" == "$current_version" ]] && continue
if is_path_whitelisted "$dir"; then
continue
fi
old_versions+=("$dir")
done
if [[ ${#old_versions[@]} -eq 0 ]]; then
continue
fi
for dir in "${old_versions[@]}"; do
local size_kb
size_kb=$(get_path_size_kb "$dir" || echo 0)
size_kb="${size_kb:-0}"
total_size=$((total_size + size_kb))
((cleaned_count++))
cleaned_any=true
if [[ "$DRY_RUN" != "true" ]]; then
if has_sudo_session; then
safe_sudo_remove "$dir" > /dev/null 2>&1 || true
else
safe_remove "$dir" true > /dev/null 2>&1 || true
fi
fi
done
done
if [[ "$cleaned_any" == "true" ]]; then
local size_human
size_human=$(bytes_to_human "$((total_size * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Chrome old versions ${YELLOW}(${cleaned_count} dirs, $size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Chrome old versions ${GREEN}(${cleaned_count} dirs, $size_human)${NC}"
fi
((files_cleaned += cleaned_count))
((total_size_cleaned += total_size))
((total_items++))
note_activity
fi
}
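The kept version is whatever the Current symlink points at; checking it by hand looks like this (the output is an example).
# Illustrative only:
readlink "/Applications/Google Chrome.app/Contents/Frameworks/Google Chrome Framework.framework/Versions/Current"
# -> e.g. 126.0.6478.127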
# Remove old Microsoft Edge versions while keeping Current.
clean_edge_old_versions() {
local -a app_paths=(
"/Applications/Microsoft Edge.app"
"$HOME/Applications/Microsoft Edge.app"
)
# Use -f to match Edge Helper processes as well
if pgrep -f "Microsoft Edge" > /dev/null 2>&1; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Microsoft Edge running · old versions cleanup skipped"
return 0
fi
local cleaned_count=0
local total_size=0
local cleaned_any=false
for app_path in "${app_paths[@]}"; do
[[ -d "$app_path" ]] || continue
local versions_dir="$app_path/Contents/Frameworks/Microsoft Edge Framework.framework/Versions"
[[ -d "$versions_dir" ]] || continue
local current_link="$versions_dir/Current"
[[ -L "$current_link" ]] || continue
local current_version
current_version=$(readlink "$current_link" 2> /dev/null || true)
current_version="${current_version##*/}"
[[ -n "$current_version" ]] || continue
local -a old_versions=()
local dir name
for dir in "$versions_dir"/*; do
[[ -d "$dir" ]] || continue
name=$(basename "$dir")
[[ "$name" == "Current" ]] && continue
[[ "$name" == "$current_version" ]] && continue
if is_path_whitelisted "$dir"; then
continue
fi
old_versions+=("$dir")
done
if [[ ${#old_versions[@]} -eq 0 ]]; then
continue
fi
for dir in "${old_versions[@]}"; do
local size_kb
size_kb=$(get_path_size_kb "$dir" || echo 0)
size_kb="${size_kb:-0}"
total_size=$((total_size + size_kb))
((cleaned_count++))
cleaned_any=true
if [[ "$DRY_RUN" != "true" ]]; then
if has_sudo_session; then
safe_sudo_remove "$dir" > /dev/null 2>&1 || true
else
safe_remove "$dir" true > /dev/null 2>&1 || true
fi
fi
done
done
if [[ "$cleaned_any" == "true" ]]; then
local size_human
size_human=$(bytes_to_human "$((total_size * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Edge old versions ${YELLOW}(${cleaned_count} dirs, $size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Edge old versions ${GREEN}(${cleaned_count} dirs, $size_human)${NC}"
fi
((files_cleaned += cleaned_count))
((total_size_cleaned += total_size))
((total_items++))
note_activity
fi
}
# Remove old Microsoft EdgeUpdater versions while keeping latest.
clean_edge_updater_old_versions() {
local updater_dir="$HOME/Library/Application Support/Microsoft/EdgeUpdater/apps/msedge-stable"
[[ -d "$updater_dir" ]] || return 0
if pgrep -f "Microsoft Edge" > /dev/null 2>&1; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Microsoft Edge running · updater cleanup skipped"
return 0
fi
local -a version_dirs=()
local dir
for dir in "$updater_dir"/*; do
[[ -d "$dir" ]] || continue
version_dirs+=("$dir")
done
if [[ ${#version_dirs[@]} -lt 2 ]]; then
return 0
fi
local latest_version
latest_version=$(printf '%s\n' "${version_dirs[@]##*/}" | sort -V | tail -n 1)
[[ -n "$latest_version" ]] || return 0
local cleaned_count=0
local total_size=0
local cleaned_any=false
for dir in "${version_dirs[@]}"; do
local name
name=$(basename "$dir")
[[ "$name" == "$latest_version" ]] && continue
if is_path_whitelisted "$dir"; then
continue
fi
local size_kb
size_kb=$(get_path_size_kb "$dir" || echo 0)
size_kb="${size_kb:-0}"
total_size=$((total_size + size_kb))
((cleaned_count++))
cleaned_any=true
if [[ "$DRY_RUN" != "true" ]]; then
safe_remove "$dir" true > /dev/null 2>&1 || true
fi
done
if [[ "$cleaned_any" == "true" ]]; then
local size_human
size_human=$(bytes_to_human "$((total_size * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Edge updater old versions ${YELLOW}(${cleaned_count} dirs, $size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Edge updater old versions ${GREEN}(${cleaned_count} dirs, $size_human)${NC}"
fi
((files_cleaned += cleaned_count))
((total_size_cleaned += total_size))
((total_items++))
note_activity
fi
}
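The version-sort step is what makes this safe across differently shaped version strings; a quick demonstration with invented versions:
# Illustrative only: sort -V picks the true latest, where plain lexical sort would pick 9.0.1.
printf '%s\n' 121.0.2277.83 9.0.1 121.0.2277.112 | sort -V | tail -n 1
# -> 121.0.2277.112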
scan_external_volumes() {
[[ -d "/Volumes" ]] || return 0
local -a candidate_volumes=()
local -a network_volumes=()
for volume in /Volumes/*; do
[[ -d "$volume" && -w "$volume" && ! -L "$volume" ]] || continue
[[ "$volume" == "/" || "$volume" == "/Volumes/Macintosh HD" ]] && continue
local protocol=""
protocol=$(run_with_timeout 1 command diskutil info "$volume" 2> /dev/null | grep -i "Protocol:" | awk '{print $2}' || echo "")
case "$protocol" in
SMB | NFS | AFP | CIFS | WebDAV)
network_volumes+=("$volume")
continue
;;
esac
local fs_type=""
fs_type=$(run_with_timeout 1 command df -T "$volume" 2> /dev/null | tail -1 | awk '{print $2}' || echo "")
case "$fs_type" in
nfs | smbfs | afpfs | cifs | webdav)
network_volumes+=("$volume")
continue
;;
esac
candidate_volumes+=("$volume")
done
local volume_count=${#candidate_volumes[@]}
local network_count=${#network_volumes[@]}
if [[ $volume_count -eq 0 ]]; then
if [[ $network_count -gt 0 ]]; then
echo -e " ${GRAY}${ICON_LIST}${NC} External volumes (${network_count} network volume(s) skipped)"
note_activity
fi
return 0
fi
start_section_spinner "Scanning $volume_count external volume(s)..."
for volume in "${candidate_volumes[@]}"; do
[[ -d "$volume" && -r "$volume" ]] || continue
local volume_trash="$volume/.Trashes"
if [[ -d "$volume_trash" && "$DRY_RUN" != "true" ]] && ! is_path_whitelisted "$volume_trash"; then
while IFS= read -r -d '' item; do
safe_remove "$item" true || true
done < <(command find "$volume_trash" -mindepth 1 -maxdepth 1 -print0 2> /dev/null || true)
fi
if [[ "$PROTECT_FINDER_METADATA" != "true" ]]; then
clean_ds_store_tree "$volume" "$(basename "$volume") volume (.DS_Store)"
fi
done
stop_section_spinner
}
# Finder metadata (.DS_Store).
clean_finder_metadata() {
stop_section_spinner
if [[ "$PROTECT_FINDER_METADATA" == "true" ]]; then
note_activity
echo -e " ${GREEN}${ICON_EMPTY}${NC} Finder metadata · whitelist protected"
return
fi
clean_ds_store_tree "$HOME" "Home directory (.DS_Store)"
}
# macOS system caches and user-level leftovers.
clean_macos_system_caches() {
stop_section_spinner
# safe_clean already checks protected paths.
safe_clean ~/Library/Saved\ Application\ State/* "Saved application states" || true
safe_clean ~/Library/Caches/com.apple.photoanalysisd "Photo analysis cache" || true
safe_clean ~/Library/Caches/com.apple.akd "Apple ID cache" || true
safe_clean ~/Library/Caches/com.apple.WebKit.Networking/* "WebKit network cache" || true
safe_clean ~/Library/DiagnosticReports/* "Diagnostic reports" || true
safe_clean ~/Library/Caches/com.apple.QuickLook.thumbnailcache "QuickLook thumbnails" || true
safe_clean ~/Library/Caches/Quick\ Look/* "QuickLook cache" || true
safe_clean ~/Library/Caches/com.apple.iconservices* "Icon services cache" || true
safe_clean ~/Downloads/*.download "Safari incomplete downloads" || true
safe_clean ~/Downloads/*.crdownload "Chrome incomplete downloads" || true
safe_clean ~/Downloads/*.part "Partial incomplete downloads" || true
safe_clean ~/Library/Autosave\ Information/* "Autosave information" || true
safe_clean ~/Library/IdentityCaches/* "Identity caches" || true
safe_clean ~/Library/Suggestions/* "Siri suggestions cache" || true
safe_clean ~/Library/Calendars/Calendar\ Cache "Calendar cache" || true
safe_clean ~/Library/Application\ Support/AddressBook/Sources/*/Photos.cache "Address Book photo cache" || true
}
clean_recent_items() {
stop_section_spinner
local shared_dir="$HOME/Library/Application Support/com.apple.sharedfilelist"
local -a recent_lists=(
"$shared_dir/com.apple.LSSharedFileList.RecentApplications.sfl2"
"$shared_dir/com.apple.LSSharedFileList.RecentDocuments.sfl2"
"$shared_dir/com.apple.LSSharedFileList.RecentServers.sfl2"
"$shared_dir/com.apple.LSSharedFileList.RecentHosts.sfl2"
"$shared_dir/com.apple.LSSharedFileList.RecentApplications.sfl"
"$shared_dir/com.apple.LSSharedFileList.RecentDocuments.sfl"
"$shared_dir/com.apple.LSSharedFileList.RecentServers.sfl"
"$shared_dir/com.apple.LSSharedFileList.RecentHosts.sfl"
)
if [[ -d "$shared_dir" ]]; then
for sfl_file in "${recent_lists[@]}"; do
[[ -e "$sfl_file" ]] && safe_clean "$sfl_file" "Recent items list" || true
done
fi
safe_clean ~/Library/Preferences/com.apple.recentitems.plist "Recent items preferences" || true
}
clean_mail_downloads() {
stop_section_spinner
local mail_age_days=${MOLE_MAIL_AGE_DAYS:-}
if ! [[ "$mail_age_days" =~ ^[0-9]+$ ]]; then
mail_age_days=30
fi
local -a mail_dirs=(
"$HOME/Library/Mail Downloads"
"$HOME/Library/Containers/com.apple.mail/Data/Library/Mail Downloads"
)
local count=0
local cleaned_kb=0
for target_path in "${mail_dirs[@]}"; do
if [[ -d "$target_path" ]]; then
local dir_size_kb=0
dir_size_kb=$(get_path_size_kb "$target_path")
if ! [[ "$dir_size_kb" =~ ^[0-9]+$ ]]; then
dir_size_kb=0
fi
local min_kb="${MOLE_MAIL_DOWNLOADS_MIN_KB:-}"
if ! [[ "$min_kb" =~ ^[0-9]+$ ]]; then
min_kb=5120
fi
if [[ "$dir_size_kb" -lt "$min_kb" ]]; then
continue
fi
while IFS= read -r -d '' file_path; do
if [[ -f "$file_path" ]]; then
local file_size_kb=$(get_path_size_kb "$file_path")
if safe_remove "$file_path" true; then
((count++))
((cleaned_kb += file_size_kb))
fi
fi
done < <(command find "$target_path" -type f -mtime +"$mail_age_days" -print0 2> /dev/null || true)
fi
done
if [[ $count -gt 0 ]]; then
local cleaned_mb=$(echo "$cleaned_kb" | awk '{printf "%.1f", $1/1024}' || echo "0.0")
echo " ${GREEN}${ICON_SUCCESS}${NC} Cleaned $count mail attachments (~${cleaned_mb}MB)"
note_activity
fi
}
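# Worked example (illustrative, assumes the defaults above): a Mail Downloads
# folder of 6000KB exceeds the 5120KB minimum, so files untouched for more than
# 30 days are removed; a 2000KB folder is skipped entirely. Equivalent manual check:
#   command find "$HOME/Library/Mail Downloads" -type f -mtime +30 -print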
# Sandboxed app caches.
clean_sandboxed_app_caches() {
stop_section_spinner
safe_clean ~/Library/Containers/com.apple.wallpaper.agent/Data/Library/Caches/* "Wallpaper agent cache"
safe_clean ~/Library/Containers/com.apple.mediaanalysisd/Data/Library/Caches/* "Media analysis cache"
safe_clean ~/Library/Containers/com.apple.AppStore/Data/Library/Caches/* "App Store cache"
safe_clean ~/Library/Containers/com.apple.configurator.xpc.InternetService/Data/tmp/* "Apple Configurator temp files"
local containers_dir="$HOME/Library/Containers"
[[ ! -d "$containers_dir" ]] && return 0
start_section_spinner "Scanning sandboxed apps..."
local total_size=0
local cleaned_count=0
local found_any=false
# Use nullglob to avoid literal globs.
local _ng_state
_ng_state=$(shopt -p nullglob || true)
shopt -s nullglob
for container_dir in "$containers_dir"/*; do
process_container_cache "$container_dir"
done
eval "$_ng_state"
stop_section_spinner
if [[ "$found_any" == "true" ]]; then
local size_human=$(bytes_to_human "$((total_size * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Sandboxed app caches ${YELLOW}($size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Sandboxed app caches ${GREEN}($size_human)${NC}"
fi
((files_cleaned += cleaned_count))
((total_size_cleaned += total_size))
((total_items++))
note_activity
fi
}
# Process a single container cache directory.
process_container_cache() {
local container_dir="$1"
[[ -d "$container_dir" ]] || return 0
local bundle_id=$(basename "$container_dir")
if is_critical_system_component "$bundle_id"; then
return 0
fi
if should_protect_data "$bundle_id" || should_protect_data "$(echo "$bundle_id" | LC_ALL=C tr '[:upper:]' '[:lower:]')"; then
return 0
fi
local cache_dir="$container_dir/Data/Library/Caches"
[[ -d "$cache_dir" ]] || return 0
# Fast non-empty check.
if find "$cache_dir" -mindepth 1 -maxdepth 1 -print -quit 2> /dev/null | grep -q .; then
local size=$(get_path_size_kb "$cache_dir")
((total_size += size))
found_any=true
((cleaned_count++))
if [[ "$DRY_RUN" != "true" ]]; then
# Clean contents safely with local nullglob.
local _ng_state
_ng_state=$(shopt -p nullglob || true)
shopt -s nullglob
for item in "$cache_dir"/*; do
[[ -e "$item" ]] || continue
safe_remove "$item" true || true
done
eval "$_ng_state"
fi
fi
}
# Browser caches (Safari/Chrome/Edge/Firefox).
clean_browsers() {
stop_section_spinner
safe_clean ~/Library/Caches/com.apple.Safari/* "Safari cache"
# Chrome/Chromium.
safe_clean ~/Library/Caches/Google/Chrome/* "Chrome cache"
safe_clean ~/Library/Application\ Support/Google/Chrome/*/Application\ Cache/* "Chrome app cache"
safe_clean ~/Library/Application\ Support/Google/Chrome/*/GPUCache/* "Chrome GPU cache"
safe_clean ~/Library/Caches/Chromium/* "Chromium cache"
safe_clean ~/Library/Caches/com.microsoft.edgemac/* "Edge cache"
safe_clean ~/Library/Caches/company.thebrowser.Browser/* "Arc cache"
safe_clean ~/Library/Caches/company.thebrowser.dia/* "Dia cache"
safe_clean ~/Library/Caches/BraveSoftware/Brave-Browser/* "Brave cache"
local firefox_running=false
if pgrep -x "Firefox" > /dev/null 2>&1; then
firefox_running=true
fi
if [[ "$firefox_running" == "true" ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Firefox is running · cache cleanup skipped"
else
safe_clean ~/Library/Caches/Firefox/* "Firefox cache"
fi
safe_clean ~/Library/Caches/com.operasoftware.Opera/* "Opera cache"
safe_clean ~/Library/Caches/com.vivaldi.Vivaldi/* "Vivaldi cache"
safe_clean ~/Library/Caches/Comet/* "Comet cache"
safe_clean ~/Library/Caches/com.kagi.kagimacOS/* "Orion cache"
safe_clean ~/Library/Caches/zen/* "Zen cache"
if [[ "$firefox_running" == "true" ]]; then
echo -e " ${YELLOW}${ICON_WARNING}${NC} Firefox is running · profile cache cleanup skipped"
else
safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache"
fi
clean_chrome_old_versions
clean_edge_old_versions
clean_edge_updater_old_versions
}
# Cloud storage caches.
clean_cloud_storage() {
stop_section_spinner
safe_clean ~/Library/Caches/com.dropbox.* "Dropbox cache"
safe_clean ~/Library/Caches/com.getdropbox.dropbox "Dropbox cache"
safe_clean ~/Library/Caches/com.google.GoogleDrive "Google Drive cache"
safe_clean ~/Library/Caches/com.baidu.netdisk "Baidu Netdisk cache"
safe_clean ~/Library/Caches/com.alibaba.teambitiondisk "Alibaba Cloud cache"
safe_clean ~/Library/Caches/com.box.desktop "Box cache"
safe_clean ~/Library/Caches/com.microsoft.OneDrive "OneDrive cache"
}
# Office app caches.
clean_office_applications() {
stop_section_spinner
safe_clean ~/Library/Caches/com.microsoft.Word "Microsoft Word cache"
safe_clean ~/Library/Caches/com.microsoft.Excel "Microsoft Excel cache"
safe_clean ~/Library/Caches/com.microsoft.Powerpoint "Microsoft PowerPoint cache"
safe_clean ~/Library/Caches/com.microsoft.Outlook/* "Microsoft Outlook cache"
safe_clean ~/Library/Caches/com.apple.iWork.* "Apple iWork cache"
safe_clean ~/Library/Caches/com.kingsoft.wpsoffice.mac "WPS Office cache"
safe_clean ~/Library/Caches/org.mozilla.thunderbird/* "Thunderbird cache"
safe_clean ~/Library/Caches/com.apple.mail/* "Apple Mail cache"
}
# Virtualization caches.
clean_virtualization_tools() {
stop_section_spinner
safe_clean ~/Library/Caches/com.vmware.fusion "VMware Fusion cache"
safe_clean ~/Library/Caches/com.parallels.* "Parallels cache"
safe_clean ~/VirtualBox\ VMs/.cache "VirtualBox cache"
safe_clean ~/.vagrant.d/tmp/* "Vagrant temporary files"
}
# Application Support logs/caches.
clean_application_support_logs() {
stop_section_spinner
if [[ ! -d "$HOME/Library/Application Support" ]] || ! ls "$HOME/Library/Application Support" > /dev/null 2>&1; then
note_activity
echo -e " ${YELLOW}${ICON_WARNING}${NC} Skipped: No permission to access Application Support"
return 0
fi
start_section_spinner "Scanning Application Support..."
local total_size=0
local cleaned_count=0
local found_any=false
# Enable nullglob for safe globbing.
local _ng_state
_ng_state=$(shopt -p nullglob || true)
shopt -s nullglob
for app_dir in ~/Library/Application\ Support/*; do
[[ -d "$app_dir" ]] || continue
local app_name=$(basename "$app_dir")
local app_name_lower=$(echo "$app_name" | LC_ALL=C tr '[:upper:]' '[:lower:]')
local is_protected=false
if should_protect_data "$app_name"; then
is_protected=true
elif should_protect_data "$app_name_lower"; then
is_protected=true
fi
if [[ "$is_protected" == "true" ]]; then
continue
fi
if is_critical_system_component "$app_name"; then
continue
fi
local -a start_candidates=("$app_dir/log" "$app_dir/logs" "$app_dir/activitylog" "$app_dir/Cache/Cache_Data" "$app_dir/Crashpad/completed")
for candidate in "${start_candidates[@]}"; do
if [[ -d "$candidate" ]]; then
if find "$candidate" -mindepth 1 -maxdepth 1 -print -quit 2> /dev/null | grep -q .; then
local size=$(get_path_size_kb "$candidate")
((total_size += size))
((cleaned_count++))
found_any=true
if [[ "$DRY_RUN" != "true" ]]; then
for item in "$candidate"/*; do
[[ -e "$item" ]] || continue
safe_remove "$item" true > /dev/null 2>&1 || true
done
fi
fi
fi
done
done
# Group Containers logs (explicit allowlist).
local known_group_containers=(
"group.com.apple.contentdelivery"
)
for container in "${known_group_containers[@]}"; do
local container_path="$HOME/Library/Group Containers/$container"
local -a gc_candidates=("$container_path/Logs" "$container_path/Library/Logs")
for candidate in "${gc_candidates[@]}"; do
if [[ -d "$candidate" ]]; then
if find "$candidate" -mindepth 1 -maxdepth 1 -print -quit 2> /dev/null | grep -q .; then
local size=$(get_path_size_kb "$candidate")
((total_size += size))
((cleaned_count++))
found_any=true
if [[ "$DRY_RUN" != "true" ]]; then
for item in "$candidate"/*; do
[[ -e "$item" ]] || continue
safe_remove "$item" true > /dev/null 2>&1 || true
done
fi
fi
fi
done
done
eval "$_ng_state"
stop_section_spinner
if [[ "$found_any" == "true" ]]; then
local size_human=$(bytes_to_human "$((total_size * 1024))")
if [[ "$DRY_RUN" == "true" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} Application Support logs/caches ${YELLOW}($size_human dry)${NC}"
else
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Application Support logs/caches ${GREEN}($size_human)${NC}"
fi
((files_cleaned += cleaned_count))
((total_size_cleaned += total_size))
((total_items++))
note_activity
fi
}
# iOS device backup info.
check_ios_device_backups() {
local backup_dir="$HOME/Library/Application Support/MobileSync/Backup"
# Simplified check without find to avoid hanging.
if [[ -d "$backup_dir" ]]; then
local backup_kb=$(get_path_size_kb "$backup_dir")
if [[ -n "${backup_kb:-}" && "$backup_kb" -gt 102400 ]]; then
local backup_human=$(command du -sh "$backup_dir" 2> /dev/null | awk '{print $1}')
if [[ -n "$backup_human" ]]; then
note_activity
echo -e " Found ${GREEN}${backup_human}${NC} iOS backups"
echo -e " You can delete them manually: ${backup_dir}"
fi
fi
fi
return 0
}
# Apple Silicon specific caches (IS_M_SERIES).
clean_apple_silicon_caches() {
if [[ "${IS_M_SERIES:-false}" != "true" ]]; then
return 0
fi
start_section "Apple Silicon updates"
safe_clean /Library/Apple/usr/share/rosetta/rosetta_update_bundle "Rosetta 2 cache"
safe_clean ~/Library/Caches/com.apple.rosetta.update "Rosetta 2 user cache"
safe_clean ~/Library/Caches/com.apple.amp.mediasevicesd "Apple Silicon media service cache"
end_section
}

File diff suppressed because it is too large


396
lib/core/base.ps1 Normal file
View File

@@ -0,0 +1,396 @@
# Mole - Base Definitions and Utilities
# Core definitions, constants, and basic utility functions used by all modules
#Requires -Version 5.1
Set-StrictMode -Version Latest
$ErrorActionPreference = "Stop"
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_BASE_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_BASE_LOADED) { return }
$script:MOLE_BASE_LOADED = $true
# ============================================================================
# Color Definitions (ANSI escape codes for modern terminals)
# ============================================================================
$script:ESC = [char]27
$script:Colors = @{
Green = "$ESC[0;32m"
Blue = "$ESC[0;34m"
Cyan = "$ESC[0;36m"
Yellow = "$ESC[0;33m"
Purple = "$ESC[0;35m"
PurpleBold = "$ESC[1;35m"
Red = "$ESC[0;31m"
Gray = "$ESC[0;90m"
White = "$ESC[0;37m"
NC = "$ESC[0m" # No Color / Reset
}
# ============================================================================
# Icon Definitions
# ============================================================================
$script:Icons = @{
Confirm = [char]0x25CE # ◎
Admin = [char]0x2699 # ⚙
Success = [char]0x2713 # ✓
Error = [char]0x263B # ☻
Warning = [char]0x25CF # ●
Empty = [char]0x25CB # ○
Solid = [char]0x25CF # ●
List = [char]0x2022 # •
Arrow = [char]0x27A4 # ➤
DryRun = [char]0x2192 # →
NavUp = [char]0x2191 # ↑
NavDown = [char]0x2193 # ↓
Folder = [char]0x25A0 # ■ (folder substitute)
File = [char]0x25A1 # □ (file substitute)
Trash = [char]0x2718 # ✘ (trash substitute)
}
# ============================================================================
# Global Configuration Constants
# ============================================================================
$script:Config = @{
TempFileAgeDays = 7 # Temp file retention (days)
OrphanAgeDays = 60 # Orphaned data retention (days)
MaxParallelJobs = 15 # Parallel job limit
LogAgeDays = 7 # Log retention (days)
CrashReportAgeDays = 7 # Crash report retention (days)
MaxIterations = 100 # Max iterations for scans
ConfigPath = "$env:USERPROFILE\.config\mole"
CachePath = "$env:USERPROFILE\.cache\mole"
WhitelistFile = "$env:USERPROFILE\.config\mole\whitelist.txt"
}
# ============================================================================
# Default Whitelist Patterns (paths to never clean)
# ============================================================================
$script:DefaultWhitelistPatterns = @(
"$env:LOCALAPPDATA\Microsoft\Windows\Explorer" # Windows Explorer cache
"$env:LOCALAPPDATA\Microsoft\Windows\Fonts" # User fonts
"$env:APPDATA\Microsoft\Windows\Recent" # Recent files (used by shell)
"$env:LOCALAPPDATA\Packages\*" # UWP app data
"$env:USERPROFILE\.vscode\extensions" # VS Code extensions
"$env:USERPROFILE\.nuget" # NuGet packages
"$env:USERPROFILE\.cargo" # Rust packages
"$env:USERPROFILE\.rustup" # Rust toolchain
"$env:USERPROFILE\.m2\repository" # Maven repository
"$env:USERPROFILE\.gradle\caches\modules-2\files-*" # Gradle modules
"$env:USERPROFILE\.ollama\models" # Ollama AI models
"$env:LOCALAPPDATA\JetBrains" # JetBrains IDEs
)
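# Illustrative only: user-defined patterns live in $script:Config.WhitelistFile
# (one wildcard pattern per line, '#' lines are comments) and are honored by
# Test-Whitelisted alongside the defaults above, e.g.:
#   Add-Content -Path $script:Config.WhitelistFile -Value "$env:USERPROFILE\Projects\*"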
# ============================================================================
# Protected System Paths (NEVER touch these)
# ============================================================================
$script:ProtectedPaths = @(
"C:\Windows"
"C:\Windows\System32"
"C:\Windows\SysWOW64"
"C:\Program Files"
"C:\Program Files (x86)"
"C:\Program Files\Windows Defender"
"C:\Program Files (x86)\Windows Defender"
"C:\ProgramData\Microsoft\Windows Defender"
"$env:SYSTEMROOT"
"$env:WINDIR"
)
# ============================================================================
# System Utilities
# ============================================================================
function Test-IsAdmin {
<#
.SYNOPSIS
Check if running with administrator privileges
#>
$identity = [Security.Principal.WindowsIdentity]::GetCurrent()
$principal = New-Object Security.Principal.WindowsPrincipal($identity)
return $principal.IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
}
function Get-FreeSpace {
<#
.SYNOPSIS
Get free disk space on system drive
.OUTPUTS
Human-readable string (e.g., "100GB")
#>
param([string]$Drive = $env:SystemDrive)
$disk = Get-WmiObject Win32_LogicalDisk -Filter "DeviceID='$Drive'" -ErrorAction SilentlyContinue
if ($disk) {
return Format-ByteSize -Bytes $disk.FreeSpace
}
return "Unknown"
}
function Get-WindowsVersion {
<#
.SYNOPSIS
Get Windows version information
#>
$os = Get-WmiObject Win32_OperatingSystem
return @{
Name = $os.Caption
Version = $os.Version
Build = $os.BuildNumber
Arch = $os.OSArchitecture
}
}
function Get-CPUCores {
<#
.SYNOPSIS
Get number of CPU cores
#>
return (Get-WmiObject Win32_Processor).NumberOfLogicalProcessors
}
function Get-OptimalParallelJobs {
<#
.SYNOPSIS
Get optimal number of parallel jobs based on CPU cores
#>
param(
[ValidateSet('scan', 'io', 'compute', 'default')]
[string]$OperationType = 'default'
)
$cores = Get-CPUCores
switch ($OperationType) {
'scan' { return [Math]::Min($cores * 2, 32) }
'io' { return [Math]::Min($cores * 2, 32) }
'compute' { return $cores }
default { return [Math]::Min($cores + 2, 20) }
}
}
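# Illustrative only: sizing a throttled scan with the helper above, e.g.
#   $limit = Get-OptimalParallelJobs -OperationType 'scan'   # 2x logical cores, capped at 32
#   Write-Debug "Using up to $limit parallel scan jobs"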
# ============================================================================
# Path Utilities
# ============================================================================
function Test-ProtectedPath {
<#
.SYNOPSIS
Check if a path is protected and should never be modified
#>
param([string]$Path)
$normalizedPath = [System.IO.Path]::GetFullPath($Path).TrimEnd('\')
foreach ($protected in $script:ProtectedPaths) {
$normalizedProtected = [System.IO.Path]::GetFullPath($protected).TrimEnd('\')
if ($normalizedPath -eq $normalizedProtected -or
$normalizedPath.StartsWith("$normalizedProtected\", [StringComparison]::OrdinalIgnoreCase)) {
return $true
}
}
return $false
}
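# Worked example (illustrative): 'C:\Windows\Temp' normalizes to a child of
# 'C:\Windows', so the prefix match above reports it as protected, while a
# user temp directory typically is not:
#   Test-ProtectedPath -Path 'C:\Windows\Temp'   # -> $True (never cleaned)
#   Test-ProtectedPath -Path "$env:TEMP"         # -> typically $False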
function Test-Whitelisted {
<#
.SYNOPSIS
Check if path matches a whitelist pattern
#>
param([string]$Path)
# Check default patterns
foreach ($pattern in $script:DefaultWhitelistPatterns) {
$expandedPattern = [Environment]::ExpandEnvironmentVariables($pattern)
if ($Path -like $expandedPattern) {
return $true
}
}
# Check user whitelist file
if (Test-Path $script:Config.WhitelistFile) {
$userPatterns = Get-Content $script:Config.WhitelistFile -ErrorAction SilentlyContinue
foreach ($pattern in $userPatterns) {
$pattern = $pattern.Trim()
if ($pattern -and -not $pattern.StartsWith('#')) {
if ($Path -like $pattern) {
return $true
}
}
}
}
return $false
}
function Resolve-SafePath {
<#
.SYNOPSIS
Resolve and validate a path for safe operations
#>
param([string]$Path)
try {
$resolved = [System.IO.Path]::GetFullPath($Path)
return $resolved
}
catch {
return $null
}
}
# ============================================================================
# Formatting Utilities
# ============================================================================
function Format-ByteSize {
<#
.SYNOPSIS
Convert bytes to human-readable format
#>
param([long]$Bytes)
if ($Bytes -ge 1TB) {
return "{0:N2}TB" -f ($Bytes / 1TB)
}
elseif ($Bytes -ge 1GB) {
return "{0:N2}GB" -f ($Bytes / 1GB)
}
elseif ($Bytes -ge 1MB) {
return "{0:N1}MB" -f ($Bytes / 1MB)
}
elseif ($Bytes -ge 1KB) {
return "{0:N0}KB" -f ($Bytes / 1KB)
}
else {
return "{0}B" -f $Bytes
}
}
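# Illustrative examples of the thresholds above (output shown for an en-US culture):
#   Format-ByteSize -Bytes 532    # -> "532B"
#   Format-ByteSize -Bytes 1536   # -> "2KB"   (N0 rounds 1.5 up)
#   Format-ByteSize -Bytes 5MB    # -> "5.0MB"
#   Format-ByteSize -Bytes 2GB    # -> "2.00GB"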
function Format-Number {
<#
.SYNOPSIS
Format a number with thousands separators
#>
param([long]$Number)
return $Number.ToString("N0")
}
function Format-TimeSpan {
<#
.SYNOPSIS
Format a timespan to human-readable string
#>
param([TimeSpan]$Duration)
if ($Duration.TotalHours -ge 1) {
return "{0:N1}h" -f $Duration.TotalHours
}
elseif ($Duration.TotalMinutes -ge 1) {
return "{0:N0}m" -f $Duration.TotalMinutes
}
else {
return "{0:N0}s" -f $Duration.TotalSeconds
}
}
# ============================================================================
# Environment Detection
# ============================================================================
function Get-UserHome {
<#
.SYNOPSIS
Get the current user's home directory
#>
return $env:USERPROFILE
}
function Get-TempPath {
<#
.SYNOPSIS
Get the system temp path
#>
return [System.IO.Path]::GetTempPath()
}
function Get-ConfigPath {
<#
.SYNOPSIS
Get Mole config directory, creating it if needed
#>
$path = $script:Config.ConfigPath
if (-not (Test-Path $path)) {
New-Item -ItemType Directory -Path $path -Force | Out-Null
}
return $path
}
function Get-CachePath {
<#
.SYNOPSIS
Get Mole cache directory, creating it if needed
#>
$path = $script:Config.CachePath
if (-not (Test-Path $path)) {
New-Item -ItemType Directory -Path $path -Force | Out-Null
}
return $path
}
# ============================================================================
# Temporary File Management
# ============================================================================
$script:TempFiles = [System.Collections.ArrayList]::new()
$script:TempDirs = [System.Collections.ArrayList]::new()
function New-TempFile {
<#
.SYNOPSIS
Create a tracked temporary file
#>
param([string]$Prefix = "winmole")
$tempPath = [System.IO.Path]::Combine([System.IO.Path]::GetTempPath(), "$Prefix-$([Guid]::NewGuid().ToString('N').Substring(0,8)).tmp")
New-Item -ItemType File -Path $tempPath -Force | Out-Null
[void]$script:TempFiles.Add($tempPath)
return $tempPath
}
function New-TempDirectory {
<#
.SYNOPSIS
Create a tracked temporary directory
#>
param([string]$Prefix = "winmole")
$tempPath = [System.IO.Path]::Combine([System.IO.Path]::GetTempPath(), "$Prefix-$([Guid]::NewGuid().ToString('N').Substring(0,8))")
New-Item -ItemType Directory -Path $tempPath -Force | Out-Null
[void]$script:TempDirs.Add($tempPath)
return $tempPath
}
function Clear-TempFiles {
<#
.SYNOPSIS
Clean up all tracked temporary files and directories
#>
foreach ($file in $script:TempFiles) {
if (Test-Path $file) {
Remove-Item $file -Force -ErrorAction SilentlyContinue
}
}
$script:TempFiles.Clear()
foreach ($dir in $script:TempDirs) {
if (Test-Path $dir) {
Remove-Item $dir -Recurse -Force -ErrorAction SilentlyContinue
}
}
$script:TempDirs.Clear()
}
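# Illustrative only: typical lifecycle of the helpers above inside a command,
# assuming an exit handler registered elsewhere eventually calls Clear-TempFiles:
#   $report = New-TempFile -Prefix "winmole-report"
#   "scan results" | Set-Content -Path $report
#   Clear-TempFiles   # removes every tracked temp file and directory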
# ============================================================================
# Exports (functions and variables are available via dot-sourcing)
# ============================================================================
# Variables: Colors, Icons, Config, ProtectedPaths, DefaultWhitelistPatterns
# Functions: Test-IsAdmin, Get-FreeSpace, Get-WindowsVersion, etc.

View File

@@ -1,864 +0,0 @@
#!/bin/bash
# Mole - Base Definitions and Utilities
# Core definitions, constants, and basic utility functions used by all modules
set -euo pipefail
# Prevent multiple sourcing
if [[ -n "${MOLE_BASE_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_BASE_LOADED=1
# ============================================================================
# Color Definitions
# ============================================================================
readonly ESC=$'\033'
readonly GREEN="${ESC}[0;32m"
readonly BLUE="${ESC}[0;34m"
readonly CYAN="${ESC}[0;36m"
readonly YELLOW="${ESC}[0;33m"
readonly PURPLE="${ESC}[0;35m"
readonly PURPLE_BOLD="${ESC}[1;35m"
readonly RED="${ESC}[0;31m"
readonly GRAY="${ESC}[0;90m"
readonly NC="${ESC}[0m"
# ============================================================================
# Icon Definitions
# ============================================================================
readonly ICON_CONFIRM="◎"
readonly ICON_ADMIN="⚙"
readonly ICON_SUCCESS="✓"
readonly ICON_ERROR="☻"
readonly ICON_WARNING="●"
readonly ICON_EMPTY="○"
readonly ICON_SOLID="●"
readonly ICON_LIST="•"
readonly ICON_ARROW="➤"
readonly ICON_DRY_RUN="→"
readonly ICON_NAV_UP="↑"
readonly ICON_NAV_DOWN="↓"
# ============================================================================
# Global Configuration Constants
# ============================================================================
readonly MOLE_TEMP_FILE_AGE_DAYS=7 # Temp file retention (days)
readonly MOLE_ORPHAN_AGE_DAYS=60 # Orphaned data retention (days)
readonly MOLE_MAX_PARALLEL_JOBS=15 # Parallel job limit
readonly MOLE_MAIL_DOWNLOADS_MIN_KB=5120 # Mail attachment size threshold
readonly MOLE_MAIL_AGE_DAYS=30 # Mail attachment retention (days)
readonly MOLE_LOG_AGE_DAYS=7 # Log retention (days)
readonly MOLE_CRASH_REPORT_AGE_DAYS=7 # Crash report retention (days)
readonly MOLE_SAVED_STATE_AGE_DAYS=30 # Saved state retention (days) - increased for safety
readonly MOLE_TM_BACKUP_SAFE_HOURS=48 # TM backup safety window (hours)
readonly MOLE_MAX_DS_STORE_FILES=500 # Max .DS_Store files to clean per scan
readonly MOLE_MAX_ORPHAN_ITERATIONS=100 # Max iterations for orphaned app data scan
# ============================================================================
# Whitelist Configuration
# ============================================================================
readonly FINDER_METADATA_SENTINEL="FINDER_METADATA"
declare -a DEFAULT_WHITELIST_PATTERNS=(
"$HOME/Library/Caches/ms-playwright*"
"$HOME/.cache/huggingface*"
"$HOME/.m2/repository/*"
"$HOME/.ollama/models/*"
"$HOME/Library/Caches/com.nssurge.surge-mac/*"
"$HOME/Library/Application Support/com.nssurge.surge-mac/*"
"$HOME/Library/Caches/org.R-project.R/R/renv/*"
"$HOME/Library/Caches/pypoetry/virtualenvs*"
"$HOME/Library/Caches/JetBrains*"
"$HOME/Library/Caches/com.jetbrains.toolbox*"
"$HOME/Library/Application Support/JetBrains*"
"$HOME/Library/Caches/com.apple.finder"
"$HOME/Library/Mobile Documents*"
# System-critical caches that affect macOS functionality and stability
# CRITICAL: Removing these will cause system search and UI issues
"$HOME/Library/Caches/com.apple.FontRegistry*"
"$HOME/Library/Caches/com.apple.spotlight*"
"$HOME/Library/Caches/com.apple.Spotlight*"
"$HOME/Library/Caches/CloudKit*"
"$FINDER_METADATA_SENTINEL"
)
declare -a DEFAULT_OPTIMIZE_WHITELIST_PATTERNS=(
"check_brew_health"
"check_touchid"
"check_git_config"
)
# ============================================================================
# BSD Stat Compatibility
# ============================================================================
readonly STAT_BSD="/usr/bin/stat"
# Get file size in bytes
get_file_size() {
local file="$1"
local result
result=$($STAT_BSD -f%z "$file" 2> /dev/null)
echo "${result:-0}"
}
# Get file modification time in epoch seconds
get_file_mtime() {
local file="$1"
[[ -z "$file" ]] && {
echo "0"
return
}
local result
result=$($STAT_BSD -f%m "$file" 2> /dev/null || echo "")
if [[ "$result" =~ ^[0-9]+$ ]]; then
echo "$result"
else
echo "0"
fi
}
# Determine date command once
if [[ -x /bin/date ]]; then
_DATE_CMD="/bin/date"
else
_DATE_CMD="date"
fi
# Get current time in epoch seconds (defensive against locale/aliases)
get_epoch_seconds() {
local result
result=$($_DATE_CMD +%s 2> /dev/null || echo "")
if [[ "$result" =~ ^[0-9]+$ ]]; then
echo "$result"
else
echo "0"
fi
}
# Get file owner username
get_file_owner() {
local file="$1"
$STAT_BSD -f%Su "$file" 2> /dev/null || echo ""
}
# ============================================================================
# System Utilities
# ============================================================================
# Check if System Integrity Protection is enabled
# Returns: 0 if SIP is enabled, 1 if disabled or cannot determine
is_sip_enabled() {
if ! command -v csrutil > /dev/null 2>&1; then
return 0
fi
local sip_status
sip_status=$(csrutil status 2> /dev/null || echo "")
if echo "$sip_status" | grep -qi "enabled"; then
return 0
else
return 1
fi
}
# Check if running in an interactive terminal
is_interactive() {
[[ -t 1 ]]
}
# Detect CPU architecture
# Returns: "Apple Silicon" or "Intel"
detect_architecture() {
if [[ "$(uname -m)" == "arm64" ]]; then
echo "Apple Silicon"
else
echo "Intel"
fi
}
# Get free disk space on root volume
# Returns: human-readable string (e.g., "100G")
get_free_space() {
local target="/"
if [[ -d "/System/Volumes/Data" ]]; then
target="/System/Volumes/Data"
fi
df -h "$target" | awk 'NR==2 {print $4}'
}
# Get Darwin kernel major version (e.g., 24 for 24.2.0)
# Returns 999 on failure so callers fall back to conservative behavior (assume a modern system)
get_darwin_major() {
local kernel
kernel=$(uname -r 2> /dev/null || true)
local major="${kernel%%.*}"
if [[ ! "$major" =~ ^[0-9]+$ ]]; then
# Return high number to skip potentially dangerous operations on unknown systems
major=999
fi
echo "$major"
}
# Check if Darwin kernel major version meets minimum
is_darwin_ge() {
local minimum="$1"
local major
major=$(get_darwin_major)
[[ "$major" -ge "$minimum" ]]
}
# Get optimal parallel jobs for operation type (scan|io|compute|default)
get_optimal_parallel_jobs() {
local operation_type="${1:-default}"
local cpu_cores
cpu_cores=$(sysctl -n hw.ncpu 2> /dev/null || echo 4)
case "$operation_type" in
scan | io)
echo $((cpu_cores * 2))
;;
compute)
echo "$cpu_cores"
;;
*)
echo $((cpu_cores + 2))
;;
esac
}
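# Illustrative only: picking a job ceiling for an I/O-heavy scan, e.g.
#   max_jobs=$(get_optimal_parallel_jobs io)   # 2x CPU cores for scan/io work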
# ============================================================================
# User Context Utilities
# ============================================================================
is_root_user() {
[[ "$(id -u)" == "0" ]]
}
get_user_home() {
local user="$1"
local home=""
if [[ -z "$user" ]]; then
echo ""
return 0
fi
if command -v dscl > /dev/null 2>&1; then
home=$(dscl . -read "/Users/$user" NFSHomeDirectory 2> /dev/null | awk '{print $2}' | head -1 || true)
fi
if [[ -z "$home" ]]; then
home=$(eval echo "~$user" 2> /dev/null || true)
fi
if [[ "$home" == "~"* ]]; then
home=""
fi
echo "$home"
}
get_invoking_user() {
if [[ -n "${SUDO_USER:-}" && "${SUDO_USER:-}" != "root" ]]; then
echo "$SUDO_USER"
return 0
fi
echo "${USER:-}"
}
get_invoking_uid() {
if [[ -n "${SUDO_UID:-}" ]]; then
echo "$SUDO_UID"
return 0
fi
local uid
uid=$(id -u 2> /dev/null || true)
echo "$uid"
}
get_invoking_gid() {
if [[ -n "${SUDO_GID:-}" ]]; then
echo "$SUDO_GID"
return 0
fi
local gid
gid=$(id -g 2> /dev/null || true)
echo "$gid"
}
get_invoking_home() {
if [[ -n "${SUDO_USER:-}" && "${SUDO_USER:-}" != "root" ]]; then
get_user_home "$SUDO_USER"
return 0
fi
echo "${HOME:-}"
}
ensure_user_dir() {
local raw_path="$1"
if [[ -z "$raw_path" ]]; then
return 0
fi
local target_path="$raw_path"
if [[ "$target_path" == "~"* ]]; then
target_path="${target_path/#\~/$HOME}"
fi
mkdir -p "$target_path" 2> /dev/null || true
if ! is_root_user; then
return 0
fi
local sudo_user="${SUDO_USER:-}"
if [[ -z "$sudo_user" || "$sudo_user" == "root" ]]; then
return 0
fi
local user_home
user_home=$(get_user_home "$sudo_user")
if [[ -z "$user_home" ]]; then
return 0
fi
user_home="${user_home%/}"
if [[ "$target_path" != "$user_home" && "$target_path" != "$user_home/"* ]]; then
return 0
fi
local owner_uid="${SUDO_UID:-}"
local owner_gid="${SUDO_GID:-}"
if [[ -z "$owner_uid" || -z "$owner_gid" ]]; then
owner_uid=$(id -u "$sudo_user" 2> /dev/null || true)
owner_gid=$(id -g "$sudo_user" 2> /dev/null || true)
fi
if [[ -z "$owner_uid" || -z "$owner_gid" ]]; then
return 0
fi
local dir="$target_path"
while [[ -n "$dir" && "$dir" != "/" ]]; do
# Early stop: if ownership is already correct, no need to continue up the tree
if [[ -d "$dir" ]]; then
local current_uid
current_uid=$("$STAT_BSD" -f%u "$dir" 2> /dev/null || echo "")
if [[ "$current_uid" == "$owner_uid" ]]; then
break
fi
fi
chown "$owner_uid:$owner_gid" "$dir" 2> /dev/null || true
if [[ "$dir" == "$user_home" ]]; then
break
fi
dir=$(dirname "$dir")
if [[ "$dir" == "." ]]; then
break
fi
done
}
ensure_user_file() {
local raw_path="$1"
if [[ -z "$raw_path" ]]; then
return 0
fi
local target_path="$raw_path"
if [[ "$target_path" == "~"* ]]; then
target_path="${target_path/#\~/$HOME}"
fi
ensure_user_dir "$(dirname "$target_path")"
touch "$target_path" 2> /dev/null || true
if ! is_root_user; then
return 0
fi
local sudo_user="${SUDO_USER:-}"
if [[ -z "$sudo_user" || "$sudo_user" == "root" ]]; then
return 0
fi
local user_home
user_home=$(get_user_home "$sudo_user")
if [[ -z "$user_home" ]]; then
return 0
fi
user_home="${user_home%/}"
if [[ "$target_path" != "$user_home" && "$target_path" != "$user_home/"* ]]; then
return 0
fi
local owner_uid="${SUDO_UID:-}"
local owner_gid="${SUDO_GID:-}"
if [[ -z "$owner_uid" || -z "$owner_gid" ]]; then
owner_uid=$(id -u "$sudo_user" 2> /dev/null || true)
owner_gid=$(id -g "$sudo_user" 2> /dev/null || true)
fi
if [[ -n "$owner_uid" && -n "$owner_gid" ]]; then
chown "$owner_uid:$owner_gid" "$target_path" 2> /dev/null || true
fi
}
# ============================================================================
# Formatting Utilities
# ============================================================================
# Convert bytes to human-readable format (e.g., 1.5GB)
bytes_to_human() {
local bytes="$1"
[[ "$bytes" =~ ^[0-9]+$ ]] || {
echo "0B"
return 1
}
# GB: >= 1073741824 bytes
if ((bytes >= 1073741824)); then
printf "%d.%02dGB\n" $((bytes / 1073741824)) $(((bytes % 1073741824) * 100 / 1073741824))
# MB: >= 1048576 bytes
elif ((bytes >= 1048576)); then
printf "%d.%01dMB\n" $((bytes / 1048576)) $(((bytes % 1048576) * 10 / 1048576))
# KB: >= 1024 bytes (round up)
elif ((bytes >= 1024)); then
printf "%dKB\n" $(((bytes + 512) / 1024))
else
printf "%dB\n" "$bytes"
fi
}
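# Illustrative examples of the branches above:
#   bytes_to_human 800          # -> 800B
#   bytes_to_human 1536         # -> 2KB    (rounded up via +512)
#   bytes_to_human 5242880      # -> 5.0MB
#   bytes_to_human 1610612736   # -> 1.50GB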
# Convert kilobytes to human-readable format
# Args: $1 - size in KB
# Returns: formatted string
bytes_to_human_kb() {
bytes_to_human "$((${1:-0} * 1024))"
}
# Get brand-friendly localized name for an application
get_brand_name() {
local name="$1"
# Detect if system primary language is Chinese (Cached)
if [[ -z "${MOLE_IS_CHINESE_SYSTEM:-}" ]]; then
local sys_lang
sys_lang=$(defaults read -g AppleLanguages 2> /dev/null | grep -o 'zh-Hans\|zh-Hant\|zh' | head -1 || echo "")
if [[ -n "$sys_lang" ]]; then
export MOLE_IS_CHINESE_SYSTEM="true"
else
export MOLE_IS_CHINESE_SYSTEM="false"
fi
fi
local is_chinese="${MOLE_IS_CHINESE_SYSTEM}"
# Return localized names based on system language
if [[ "$is_chinese" == true ]]; then
# Chinese system - prefer Chinese names
case "$name" in
"qiyimac" | "iQiyi") echo "爱奇艺" ;;
"wechat" | "WeChat") echo "微信" ;;
"QQ") echo "QQ" ;;
"VooV Meeting") echo "腾讯会议" ;;
"dingtalk" | "DingTalk") echo "钉钉" ;;
"NeteaseMusic" | "NetEase Music") echo "网易云音乐" ;;
"BaiduNetdisk" | "Baidu NetDisk") echo "百度网盘" ;;
"alipay" | "Alipay") echo "支付宝" ;;
"taobao" | "Taobao") echo "淘宝" ;;
"futunn" | "Futu NiuNiu") echo "富途牛牛" ;;
"tencent lemon" | "Tencent Lemon Cleaner" | "Tencent Lemon") echo "腾讯柠檬清理" ;;
*) echo "$name" ;;
esac
else
# Non-Chinese system - use English names
case "$name" in
"qiyimac" | "爱奇艺") echo "iQiyi" ;;
"wechat" | "微信") echo "WeChat" ;;
"QQ") echo "QQ" ;;
"腾讯会议") echo "VooV Meeting" ;;
"dingtalk" | "钉钉") echo "DingTalk" ;;
"网易云音乐") echo "NetEase Music" ;;
"百度网盘") echo "Baidu NetDisk" ;;
"alipay" | "支付宝") echo "Alipay" ;;
"taobao" | "淘宝") echo "Taobao" ;;
"富途牛牛") echo "Futu NiuNiu" ;;
"腾讯柠檬清理" | "Tencent Lemon Cleaner") echo "Tencent Lemon" ;;
"keynote" | "Keynote") echo "Keynote" ;;
"pages" | "Pages") echo "Pages" ;;
"numbers" | "Numbers") echo "Numbers" ;;
*) echo "$name" ;;
esac
fi
}
# ============================================================================
# Temporary File Management
# ============================================================================
# Tracked temporary files and directories
declare -a MOLE_TEMP_FILES=()
declare -a MOLE_TEMP_DIRS=()
# Create tracked temporary file
create_temp_file() {
local temp
temp=$(mktemp) || return 1
MOLE_TEMP_FILES+=("$temp")
echo "$temp"
}
# Create tracked temporary directory
create_temp_dir() {
local temp
temp=$(mktemp -d) || return 1
MOLE_TEMP_DIRS+=("$temp")
echo "$temp"
}
# Register existing file for cleanup
register_temp_file() {
MOLE_TEMP_FILES+=("$1")
}
# Register existing directory for cleanup
register_temp_dir() {
MOLE_TEMP_DIRS+=("$1")
}
# Create temp file with prefix (for analyze.sh compatibility)
# Compatible with both BSD mktemp (macOS default) and GNU mktemp (coreutils)
mktemp_file() {
local prefix="${1:-mole}"
# Use TMPDIR if set, otherwise /tmp
# Add .XXXXXX suffix to work with both BSD and GNU mktemp
mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}
# Cleanup all tracked temp files and directories
cleanup_temp_files() {
stop_inline_spinner 2> /dev/null || true
local file
if [[ ${#MOLE_TEMP_FILES[@]} -gt 0 ]]; then
for file in "${MOLE_TEMP_FILES[@]}"; do
[[ -f "$file" ]] && rm -f "$file" 2> /dev/null || true
done
fi
if [[ ${#MOLE_TEMP_DIRS[@]} -gt 0 ]]; then
for file in "${MOLE_TEMP_DIRS[@]}"; do
[[ -d "$file" ]] && rm -rf "$file" 2> /dev/null || true # SAFE: cleanup_temp_files
done
fi
MOLE_TEMP_FILES=()
MOLE_TEMP_DIRS=()
}
# ============================================================================
# Section Tracking (for progress indication)
# ============================================================================
# Global section tracking variables
TRACK_SECTION=0
SECTION_ACTIVITY=0
# Start a new section
# Args: $1 - section title
start_section() {
TRACK_SECTION=1
SECTION_ACTIVITY=0
echo ""
echo -e "${PURPLE_BOLD}${ICON_ARROW} $1${NC}"
}
# End a section
# Shows "Nothing to tidy" if no activity was recorded
end_section() {
if [[ "${TRACK_SECTION:-0}" == "1" && "${SECTION_ACTIVITY:-0}" == "0" ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} Nothing to tidy"
fi
TRACK_SECTION=0
}
# Mark activity in current section
note_activity() {
if [[ "${TRACK_SECTION:-0}" == "1" ]]; then
SECTION_ACTIVITY=1
fi
}
# Start a section spinner with optional message
# Usage: start_section_spinner "message"
start_section_spinner() {
local message="${1:-Scanning...}"
stop_inline_spinner 2> /dev/null || true
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "$message"
fi
}
# Stop spinner and clear the line
# Usage: stop_section_spinner
stop_section_spinner() {
stop_inline_spinner 2> /dev/null || true
if [[ -t 1 ]]; then
echo -ne "\r\033[K" >&2 || true
fi
}
# Safe terminal line clearing with terminal type detection
# Usage: safe_clear_lines <num_lines> [tty_device]
# Returns: 0 on success, 1 if terminal doesn't support ANSI
safe_clear_lines() {
local lines="${1:-1}"
local tty_device="${2:-/dev/tty}"
# Use centralized ANSI support check (defined below)
# Note: the forward reference is safe because the whole file is sourced before
# this function is ever called, so is_ansi_supported is already defined by then
is_ansi_supported 2> /dev/null || return 1
# Clear lines one by one (more reliable than multi-line sequences)
local i
for ((i = 0; i < lines; i++)); do
printf "\033[1A\r\033[K" > "$tty_device" 2> /dev/null || return 1
done
return 0
}
# Safe single line clear with fallback
# Usage: safe_clear_line [tty_device]
safe_clear_line() {
local tty_device="${1:-/dev/tty}"
# Use centralized ANSI support check
is_ansi_supported 2> /dev/null || return 1
printf "\r\033[K" > "$tty_device" 2> /dev/null || return 1
return 0
}
# Update progress spinner if enough time has elapsed
# Usage: update_progress_if_needed <completed> <total> <last_update_time_var> [interval]
# Example: update_progress_if_needed "$completed" "$total" last_progress_update 2
# Returns: 0 if updated, 1 if skipped
update_progress_if_needed() {
local completed="$1"
local total="$2"
local last_update_var="$3" # Name of variable holding last update time
local interval="${4:-2}" # Default: update every 2 seconds
# Get current time
local current_time
current_time=$(get_epoch_seconds)
# Get last update time from variable
local last_time
eval "last_time=\${$last_update_var:-0}"
[[ "$last_time" =~ ^[0-9]+$ ]] || last_time=0
# Check if enough time has elapsed
if [[ $((current_time - last_time)) -ge $interval ]]; then
# Update the spinner with progress
stop_section_spinner
start_section_spinner "Scanning items... ($completed/$total)"
# Update the last_update_time variable
eval "$last_update_var=$current_time"
return 0
fi
return 1
}
# ============================================================================
# Spinner Stack Management (prevents nesting issues)
# ============================================================================
# Global spinner stack
declare -a MOLE_SPINNER_STACK=()
# Push current spinner state onto stack
# Usage: push_spinner_state
push_spinner_state() {
local current_state=""
# Save current spinner PID if running
if [[ -n "${MOLE_SPINNER_PID:-}" ]] && kill -0 "$MOLE_SPINNER_PID" 2> /dev/null; then
current_state="running:$MOLE_SPINNER_PID"
else
current_state="stopped"
fi
MOLE_SPINNER_STACK+=("$current_state")
debug_log "Pushed spinner state: $current_state (stack depth: ${#MOLE_SPINNER_STACK[@]})"
}
# Pop and restore spinner state from stack
# Usage: pop_spinner_state
pop_spinner_state() {
if [[ ${#MOLE_SPINNER_STACK[@]} -eq 0 ]]; then
debug_log "Warning: Attempted to pop from empty spinner stack"
return 1
fi
# Stack depth safety check
if [[ ${#MOLE_SPINNER_STACK[@]} -gt 10 ]]; then
debug_log "Warning: Spinner stack depth excessive (${#MOLE_SPINNER_STACK[@]}), possible leak"
fi
local last_idx=$((${#MOLE_SPINNER_STACK[@]} - 1))
local state="${MOLE_SPINNER_STACK[$last_idx]}"
# Remove from stack (Bash 3.2 compatible way)
# Instead of unset, rebuild array without last element
local -a new_stack=()
local i
for ((i = 0; i < last_idx; i++)); do
new_stack+=("${MOLE_SPINNER_STACK[$i]}")
done
MOLE_SPINNER_STACK=("${new_stack[@]}")
debug_log "Popped spinner state: $state (remaining depth: ${#MOLE_SPINNER_STACK[@]})"
# Restore state if needed
if [[ "$state" == running:* ]]; then
# Previous spinner was running - we don't restart it automatically
# This is intentional to avoid UI conflicts
:
fi
return 0
}
# Safe spinner start with stack management
# Usage: safe_start_spinner <message>
safe_start_spinner() {
local message="${1:-Working...}"
# Push current state
push_spinner_state
# Stop any existing spinner
stop_section_spinner 2> /dev/null || true
# Start new spinner
start_section_spinner "$message"
}
# Safe spinner stop with stack management
# Usage: safe_stop_spinner
safe_stop_spinner() {
# Stop current spinner
stop_section_spinner 2> /dev/null || true
# Pop previous state
pop_spinner_state || true
}
# ============================================================================
# Terminal Compatibility Checks
# ============================================================================
# Check if terminal supports ANSI escape codes
# Usage: is_ansi_supported
# Returns: 0 if supported, 1 if not
is_ansi_supported() {
# Check if running in interactive terminal
[[ -t 1 ]] || return 1
# Check TERM variable
[[ -n "${TERM:-}" ]] || return 1
# Check for known ANSI-compatible terminals
case "$TERM" in
xterm* | vt100 | vt220 | screen* | tmux* | ansi | linux | rxvt* | konsole*)
return 0
;;
dumb | unknown)
return 1
;;
*)
# Check terminfo database if available
if command -v tput > /dev/null 2>&1; then
# Test if terminal supports colors (good proxy for ANSI support)
local colors=$(tput colors 2> /dev/null || echo "0")
[[ "$colors" -ge 8 ]] && return 0
fi
return 1
;;
esac
}
# Get terminal capability info
# Usage: get_terminal_info
get_terminal_info() {
local info="Terminal: ${TERM:-unknown}"
if is_ansi_supported; then
info+=" (ANSI supported)"
if command -v tput > /dev/null 2>&1; then
local cols=$(tput cols 2> /dev/null || echo "?")
local lines=$(tput lines 2> /dev/null || echo "?")
local colors=$(tput colors 2> /dev/null || echo "?")
info+=" ${cols}x${lines}, ${colors} colors"
fi
else
info+=" (ANSI not supported)"
fi
echo "$info"
}
# Validate terminal environment before running
# Usage: validate_terminal_environment
# Returns: 0 if OK, 1 with warning if issues detected
validate_terminal_environment() {
local warnings=0
# Check if TERM is set
if [[ -z "${TERM:-}" ]]; then
log_warning "TERM environment variable not set"
((warnings++))
fi
# Check if running in a known problematic terminal
case "${TERM:-}" in
dumb)
log_warning "Running in 'dumb' terminal - limited functionality"
((warnings++))
;;
unknown)
log_warning "Terminal type unknown - may have display issues"
((warnings++))
;;
esac
# Check terminal size if available
if command -v tput > /dev/null 2>&1; then
local cols=$(tput cols 2> /dev/null || echo "80")
if [[ "$cols" -lt 60 ]]; then
log_warning "Terminal width ($cols cols) is narrow - output may wrap"
((warnings++))
fi
fi
# Report compatibility
if [[ $warnings -eq 0 ]]; then
debug_log "Terminal environment validated: $(get_terminal_info)"
return 0
else
debug_log "Terminal compatibility warnings: $warnings"
return 1
fi
}

View File

@@ -1,18 +0,0 @@
#!/bin/bash
# Shared command list for help text and completions.
MOLE_COMMANDS=(
"clean:Free up disk space"
"uninstall:Remove apps completely"
"optimize:Check and maintain system"
"analyze:Explore disk usage"
"status:Monitor system health"
"purge:Remove old project artifacts"
"installer:Find and remove installer files"
"touchid:Configure Touch ID for sudo"
"completion:Setup shell tab completion"
"update:Update to latest version"
"remove:Remove Mole from system"
"help:Show help"
"version:Show version"
)
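# Illustrative only: each entry is "name:description", so help text can be
# rendered with plain parameter expansion, e.g.
#   for entry in "${MOLE_COMMANDS[@]}"; do
#     printf "  %-12s %s\n" "${entry%%:*}" "${entry#*:}"
#   done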

130
lib/core/common.ps1 Normal file
View File

@@ -0,0 +1,130 @@
# Mole - Common Functions Library
# Main entry point that loads all core modules
#Requires -Version 5.1
Set-StrictMode -Version Latest
$ErrorActionPreference = "Stop"
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_COMMON_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_COMMON_LOADED) {
return
}
$script:MOLE_COMMON_LOADED = $true
# Get the directory containing this script
$script:MOLE_CORE_DIR = Split-Path -Parent $MyInvocation.MyCommand.Path
$script:MOLE_LIB_DIR = Split-Path -Parent $script:MOLE_CORE_DIR
$script:MOLE_ROOT_DIR = Split-Path -Parent $script:MOLE_LIB_DIR
# ============================================================================
# Load Core Modules
# ============================================================================
# Base definitions (colors, icons, constants)
. "$script:MOLE_CORE_DIR\base.ps1"
# Logging functions
. "$script:MOLE_CORE_DIR\log.ps1"
# Safe file operations
. "$script:MOLE_CORE_DIR\file_ops.ps1"
# UI components
. "$script:MOLE_CORE_DIR\ui.ps1"
# ============================================================================
# Version Information
# ============================================================================
$script:MOLE_VERSION = "1.0.0"
$script:MOLE_BUILD_DATE = "2026-01-07"
function Get-MoleVersion {
<#
.SYNOPSIS
Get Mole version information
#>
return @{
Version = $script:MOLE_VERSION
BuildDate = $script:MOLE_BUILD_DATE
PowerShell = $PSVersionTable.PSVersion.ToString()
Windows = (Get-WindowsVersion).Version
}
}
# ============================================================================
# Initialization
# ============================================================================
function Initialize-Mole {
<#
.SYNOPSIS
Initialize Mole environment
#>
# Ensure config directory exists
$configPath = Get-ConfigPath
# Ensure cache directory exists
$cachePath = Get-CachePath
# Set up cleanup trap
$null = Register-EngineEvent -SourceIdentifier PowerShell.Exiting -Action {
Clear-TempFiles
}
Write-Debug "Mole initialized"
Write-Debug "Config: $configPath"
Write-Debug "Cache: $cachePath"
}
# ============================================================================
# Admin Elevation
# ============================================================================
function Request-AdminPrivileges {
<#
.SYNOPSIS
Request admin privileges if not already running as admin
.DESCRIPTION
Restarts the script with elevated privileges using UAC
#>
if (-not (Test-IsAdmin)) {
Write-MoleWarning "Some operations require administrator privileges."
if (Read-Confirmation -Prompt "Restart with admin privileges?" -Default $true) {
$scriptPath = $MyInvocation.PSCommandPath
if ($scriptPath) {
Start-Process powershell.exe -ArgumentList "-ExecutionPolicy Bypass -File `"$scriptPath`"" -Verb RunAs
exit
}
}
return $false
}
return $true
}
function Invoke-AsAdmin {
<#
.SYNOPSIS
Run a script block with admin privileges
#>
param(
[Parameter(Mandatory)]
[scriptblock]$ScriptBlock
)
if (Test-IsAdmin) {
& $ScriptBlock
}
else {
$command = $ScriptBlock.ToString()
Start-Process powershell.exe -ArgumentList "-ExecutionPolicy Bypass -Command `"$command`"" -Verb RunAs -Wait
}
}
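# Illustrative only (placeholder action): wrapping a step that needs elevation:
#   Invoke-AsAdmin -ScriptBlock { Stop-Service -Name Spooler }
# When already elevated the block runs in-process; otherwise its string form is
# re-run in a UAC-elevated powershell.exe, so the block must not rely on
# variables captured from the current session.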
# ============================================================================
# Exports (functions are available via dot-sourcing)
# ============================================================================
# All functions from base.ps1, log.ps1, file_ops.ps1, and ui.ps1 are
# automatically available when this file is dot-sourced.

View File

@@ -1,188 +0,0 @@
#!/bin/bash
# Mole - Common Functions Library
# Main entry point that loads all core modules
set -euo pipefail
# Prevent multiple sourcing
if [[ -n "${MOLE_COMMON_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_COMMON_LOADED=1
_MOLE_CORE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Load core modules
source "$_MOLE_CORE_DIR/base.sh"
source "$_MOLE_CORE_DIR/log.sh"
source "$_MOLE_CORE_DIR/timeout.sh"
source "$_MOLE_CORE_DIR/file_ops.sh"
source "$_MOLE_CORE_DIR/ui.sh"
source "$_MOLE_CORE_DIR/app_protection.sh"
# Load sudo management if available
if [[ -f "$_MOLE_CORE_DIR/sudo.sh" ]]; then
source "$_MOLE_CORE_DIR/sudo.sh"
fi
# Update via Homebrew
update_via_homebrew() {
local current_version="$1"
local temp_update temp_upgrade
temp_update=$(mktemp_file "brew_update")
temp_upgrade=$(mktemp_file "brew_upgrade")
# Set up trap for interruption (Ctrl+C) with inline cleanup
trap 'stop_inline_spinner 2>/dev/null; rm -f "$temp_update" "$temp_upgrade" 2>/dev/null; echo ""; exit 130' INT TERM
# Update Homebrew
if [[ -t 1 ]]; then
start_inline_spinner "Updating Homebrew..."
else
echo "Updating Homebrew..."
fi
brew update > "$temp_update" 2>&1 &
local update_pid=$!
wait $update_pid 2> /dev/null || true # Continue even if brew update fails
if [[ -t 1 ]]; then
stop_inline_spinner
fi
# Upgrade Mole
if [[ -t 1 ]]; then
start_inline_spinner "Upgrading Mole..."
else
echo "Upgrading Mole..."
fi
brew upgrade mole > "$temp_upgrade" 2>&1 &
local upgrade_pid=$!
wait $upgrade_pid 2> /dev/null || true # Continue even if brew upgrade fails
local upgrade_output
upgrade_output=$(cat "$temp_upgrade")
if [[ -t 1 ]]; then
stop_inline_spinner
fi
# Clear trap
trap - INT TERM
# Cleanup temp files
rm -f "$temp_update" "$temp_upgrade"
if echo "$upgrade_output" | grep -q "already installed"; then
local installed_version
installed_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo ""
echo -e "${GREEN}${ICON_SUCCESS}${NC} Already on latest version (${installed_version:-$current_version})"
echo ""
elif echo "$upgrade_output" | grep -q "Error:"; then
log_error "Homebrew upgrade failed"
echo "$upgrade_output" | grep "Error:" >&2
return 1
else
echo "$upgrade_output" | grep -Ev "^(==>|Updating Homebrew|Warning:)" || true
local new_version
new_version=$(brew list --versions mole 2> /dev/null | awk '{print $2}')
echo ""
echo -e "${GREEN}${ICON_SUCCESS}${NC} Updated to latest version (${new_version:-$current_version})"
echo ""
fi
# Clear update cache (suppress errors if cache doesn't exist or is locked)
rm -f "$HOME/.cache/mole/version_check" "$HOME/.cache/mole/update_message" 2> /dev/null || true
}
# Remove applications from Dock
remove_apps_from_dock() {
if [[ $# -eq 0 ]]; then
return 0
fi
local plist="$HOME/Library/Preferences/com.apple.dock.plist"
[[ -f "$plist" ]] || return 0
if ! command -v python3 > /dev/null 2>&1; then
return 0
fi
# Prune dock entries using Python helper
python3 - "$@" << 'PY' 2> /dev/null || return 0
import os
import plistlib
import subprocess
import sys
import urllib.parse
plist_path = os.path.expanduser('~/Library/Preferences/com.apple.dock.plist')
if not os.path.exists(plist_path):
sys.exit(0)
def normalise(path):
if not path:
return ''
return os.path.normpath(os.path.realpath(path.rstrip('/')))
targets = {normalise(arg) for arg in sys.argv[1:] if arg}
targets = {t for t in targets if t}
if not targets:
sys.exit(0)
with open(plist_path, 'rb') as fh:
try:
data = plistlib.load(fh)
except Exception:
sys.exit(0)
apps = data.get('persistent-apps')
if not isinstance(apps, list):
sys.exit(0)
changed = False
filtered = []
for item in apps:
try:
url = item['tile-data']['file-data']['_CFURLString']
except (KeyError, TypeError):
filtered.append(item)
continue
if not isinstance(url, str):
filtered.append(item)
continue
parsed = urllib.parse.urlparse(url)
path = urllib.parse.unquote(parsed.path or '')
if not path:
filtered.append(item)
continue
candidate = normalise(path)
if any(candidate == t or candidate.startswith(t + os.sep) for t in targets):
changed = True
continue
filtered.append(item)
if not changed:
sys.exit(0)
data['persistent-apps'] = filtered
with open(plist_path, 'wb') as fh:
try:
plistlib.dump(data, fh, fmt=plistlib.FMT_BINARY)
except Exception:
plistlib.dump(data, fh)
# Restart Dock to apply changes
try:
subprocess.run(['killall', 'Dock'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False)
except Exception:
pass
PY
}
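# Illustrative only: after uninstalling an app bundle, drop its Dock tile too
# (the path below is a hypothetical example):
#   remove_apps_from_dock "/Applications/Example.app"
# Paths are normalized (symlinks resolved, trailing slashes stripped) before
# being compared against the Dock's persistent-apps entries.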

439
lib/core/file_ops.ps1 Normal file
View File

@@ -0,0 +1,439 @@
# Mole - Safe File Operations Module
# Provides safe file deletion and manipulation functions with protection checks
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_FILEOPS_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_FILEOPS_LOADED) { return }
$script:MOLE_FILEOPS_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\base.ps1"
. "$scriptDir\log.ps1"
# ============================================================================
# Global State
# ============================================================================
$script:MoleDryRunMode = $env:MOLE_DRY_RUN -eq "1"
$script:TotalSizeCleaned = 0
$script:FilesCleaned = 0
$script:TotalItems = 0
# ============================================================================
# Safety Validation Functions
# ============================================================================
function Test-SafePath {
<#
.SYNOPSIS
Validate that a path is safe to operate on
.DESCRIPTION
Checks against protected paths and whitelist
.OUTPUTS
$true if safe, $false if protected
#>
param(
[Parameter(Mandatory)]
[string]$Path
)
# Must have a path
if ([string]::IsNullOrWhiteSpace($Path)) {
Write-Debug "Empty path rejected"
return $false
}
# Resolve to full path
$fullPath = Resolve-SafePath -Path $Path
if (-not $fullPath) {
Write-Debug "Could not resolve path: $Path"
return $false
}
# Check protected paths
if (Test-ProtectedPath -Path $fullPath) {
Write-Debug "Protected path rejected: $fullPath"
return $false
}
# Check whitelist
if (Test-Whitelisted -Path $fullPath) {
Write-Debug "Whitelisted path rejected: $fullPath"
return $false
}
return $true
}
function Get-PathSize {
<#
.SYNOPSIS
Get the size of a file or directory in bytes
#>
param(
[Parameter(Mandatory)]
[string]$Path
)
if (-not (Test-Path $Path)) {
return 0
}
try {
if (Test-Path $Path -PathType Container) {
$size = (Get-ChildItem -Path $Path -Recurse -Force -ErrorAction SilentlyContinue |
Measure-Object -Property Length -Sum -ErrorAction SilentlyContinue).Sum
if ($null -eq $size) { return 0 }
return [long]$size
}
else {
return (Get-Item $Path -Force -ErrorAction SilentlyContinue).Length
}
}
catch {
return 0
}
}
function Get-PathSizeKB {
<#
.SYNOPSIS
Get the size of a file or directory in kilobytes
#>
param([string]$Path)
$bytes = Get-PathSize -Path $Path
return [Math]::Ceiling($bytes / 1024)
}
# ============================================================================
# Safe Removal Functions
# ============================================================================
function Remove-SafeItem {
<#
.SYNOPSIS
Safely remove a file or directory with all protection checks
.DESCRIPTION
This is the main safe deletion function. It:
- Validates the path is not protected
- Checks against whitelist
- Supports dry-run mode
- Tracks cleanup statistics
#>
param(
[Parameter(Mandatory)]
[string]$Path,
[string]$Description = "",
[switch]$Force,
[switch]$Recurse
)
# Validate path safety
if (-not (Test-SafePath -Path $Path)) {
Write-Debug "Skipping protected/whitelisted path: $Path"
return $false
}
# Check if path exists
if (-not (Test-Path $Path)) {
Write-Debug "Path does not exist: $Path"
return $false
}
# Get size before removal
$size = Get-PathSize -Path $Path
$sizeKB = [Math]::Ceiling($size / 1024)
$sizeHuman = Format-ByteSize -Bytes $size
# Handle dry run
if ($script:MoleDryRunMode) {
$name = if ($Description) { $Description } else { Split-Path -Leaf $Path }
Write-DryRun "$name $($script:Colors.Yellow)($sizeHuman dry)$($script:Colors.NC)"
Set-SectionActivity
return $true
}
# Perform removal
try {
$isDirectory = Test-Path $Path -PathType Container
if ($isDirectory) {
Remove-Item -Path $Path -Recurse -Force -ErrorAction Stop
}
else {
Remove-Item -Path $Path -Force -ErrorAction Stop
}
# Update statistics
$script:TotalSizeCleaned += $sizeKB
$script:FilesCleaned++
$script:TotalItems++
# Log success
$name = if ($Description) { $Description } else { Split-Path -Leaf $Path }
Write-Success "$name $($script:Colors.Green)($sizeHuman)$($script:Colors.NC)"
Set-SectionActivity
return $true
}
catch {
Write-Debug "Failed to remove $Path : $_"
return $false
}
}
function Remove-SafeItems {
<#
.SYNOPSIS
Safely remove multiple items with a collective description
#>
param(
[Parameter(Mandatory)]
[string[]]$Paths,
[string]$Description = "Items"
)
$totalSize = 0
$removedCount = 0
$failedCount = 0
foreach ($path in $Paths) {
if (-not (Test-SafePath -Path $path)) {
continue
}
if (-not (Test-Path $path)) {
continue
}
$size = Get-PathSize -Path $path
if ($script:MoleDryRunMode) {
$totalSize += $size
$removedCount++
continue
}
try {
$isDirectory = Test-Path $path -PathType Container
if ($isDirectory) {
Remove-Item -Path $path -Recurse -Force -ErrorAction Stop
}
else {
Remove-Item -Path $path -Force -ErrorAction Stop
}
$totalSize += $size
$removedCount++
}
catch {
$failedCount++
Write-Debug "Failed to remove: $path - $_"
}
}
if ($removedCount -gt 0) {
$sizeKB = [Math]::Ceiling($totalSize / 1024)
$sizeHuman = Format-ByteSize -Bytes $totalSize
if ($script:MoleDryRunMode) {
Write-DryRun "$Description $($script:Colors.Yellow)($removedCount items, $sizeHuman dry)$($script:Colors.NC)"
}
else {
$script:TotalSizeCleaned += $sizeKB
$script:FilesCleaned += $removedCount
$script:TotalItems++
Write-Success "$Description $($script:Colors.Green)($removedCount items, $sizeHuman)$($script:Colors.NC)"
}
Set-SectionActivity
}
return @{
Removed = $removedCount
Failed = $failedCount
Size = $totalSize
}
}
# ============================================================================
# Pattern-Based Cleanup Functions
# ============================================================================
function Remove-OldFiles {
<#
.SYNOPSIS
Remove files older than specified days
#>
param(
[Parameter(Mandatory)]
[string]$Path,
[int]$DaysOld = 7,
[string]$Filter = "*",
[string]$Description = "Old files"
)
if (-not (Test-Path $Path)) {
return @{ Removed = 0; Size = 0 }
}
$cutoffDate = (Get-Date).AddDays(-$DaysOld)
$oldFiles = Get-ChildItem -Path $Path -Filter $Filter -File -Force -ErrorAction SilentlyContinue |
Where-Object { $_.LastWriteTime -lt $cutoffDate }
if ($oldFiles) {
$paths = $oldFiles | ForEach-Object { $_.FullName }
return Remove-SafeItems -Paths $paths -Description "$Description (>${DaysOld}d old)"
}
return @{ Removed = 0; Size = 0 }
}
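# Illustrative usage sketch (hypothetical path): prune logs older than 30 days.
#   Remove-OldFiles -Path "$env:LOCALAPPDATA\ExampleApp\Logs" -DaysOld 30 -Filter "*.log" -Description "Example logs"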
function Remove-EmptyDirectories {
<#
.SYNOPSIS
Remove empty directories recursively
#>
param(
[Parameter(Mandatory)]
[string]$Path,
[string]$Description = "Empty directories"
)
if (-not (Test-Path $Path)) {
return @{ Removed = 0 }
}
$removedCount = 0
$maxIterations = 5
for ($i = 0; $i -lt $maxIterations; $i++) {
$emptyDirs = Get-ChildItem -Path $Path -Directory -Recurse -Force -ErrorAction SilentlyContinue |
Where-Object {
(Get-ChildItem -Path $_.FullName -Force -ErrorAction SilentlyContinue | Measure-Object).Count -eq 0
}
if (-not $emptyDirs -or $emptyDirs.Count -eq 0) {
break
}
foreach ($dir in $emptyDirs) {
if (Test-SafePath -Path $dir.FullName) {
if (-not $script:MoleDryRunMode) {
try {
Remove-Item -Path $dir.FullName -Force -ErrorAction Stop
$removedCount++
}
catch {
Write-Debug "Could not remove empty dir: $($dir.FullName)"
}
}
else {
$removedCount++
}
}
        }
        # In dry-run mode nothing is removed, so stop after the first pass to avoid
        # counting the same directories again on every iteration.
        if ($script:MoleDryRunMode) { break }
    }
if ($removedCount -gt 0) {
if ($script:MoleDryRunMode) {
Write-DryRun "$Description $($script:Colors.Yellow)($removedCount dirs dry)$($script:Colors.NC)"
}
else {
Write-Success "$Description $($script:Colors.Green)($removedCount dirs)$($script:Colors.NC)"
}
Set-SectionActivity
}
return @{ Removed = $removedCount }
}
function Clear-DirectoryContents {
<#
.SYNOPSIS
Clear all contents of a directory but keep the directory itself
#>
param(
[Parameter(Mandatory)]
[string]$Path,
[string]$Description = ""
)
if (-not (Test-Path $Path)) {
return @{ Removed = 0; Size = 0 }
}
if (-not (Test-SafePath -Path $Path)) {
return @{ Removed = 0; Size = 0 }
}
$items = Get-ChildItem -Path $Path -Force -ErrorAction SilentlyContinue
if ($items) {
$paths = $items | ForEach-Object { $_.FullName }
$desc = if ($Description) { $Description } else { Split-Path -Leaf $Path }
return Remove-SafeItems -Paths $paths -Description $desc
}
return @{ Removed = 0; Size = 0 }
}
# ============================================================================
# Statistics Functions
# ============================================================================
function Get-CleanupStats {
<#
.SYNOPSIS
Get current cleanup statistics
#>
return @{
TotalSizeKB = $script:TotalSizeCleaned
TotalSizeHuman = Format-ByteSize -Bytes ($script:TotalSizeCleaned * 1024)
FilesCleaned = $script:FilesCleaned
TotalItems = $script:TotalItems
}
}
function Reset-CleanupStats {
<#
.SYNOPSIS
Reset cleanup statistics
#>
$script:TotalSizeCleaned = 0
$script:FilesCleaned = 0
$script:TotalItems = 0
}
function Set-DryRunMode {
<#
.SYNOPSIS
Enable or disable dry-run mode
#>
param([bool]$Enabled)
$script:MoleDryRunMode = $Enabled
}
function Test-DryRunMode {
<#
.SYNOPSIS
Check if dry-run mode is enabled
#>
return $script:MoleDryRunMode
}
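# Illustrative dry-run workflow sketch (hypothetical path): enable dry-run, preview a
# removal, then inspect the counters without deleting anything.
#   Set-DryRunMode -Enabled $true
#   Remove-SafeItem -Path "$env:TEMP\example.tmp" -Description "Example temp file"
#   if (Test-DryRunMode) { Get-CleanupStats }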
# ============================================================================
# Exports (functions are available via dot-sourcing)
# ============================================================================
# Functions: Test-SafePath, Get-PathSize, Remove-SafeItem, etc.

View File

@@ -1,351 +0,0 @@
#!/bin/bash
# Mole - File Operations
# Safe file and directory manipulation with validation
set -euo pipefail
# Prevent multiple sourcing
if [[ -n "${MOLE_FILE_OPS_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_FILE_OPS_LOADED=1
# Ensure dependencies are loaded
_MOLE_CORE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ -z "${MOLE_BASE_LOADED:-}" ]]; then
# shellcheck source=lib/core/base.sh
source "$_MOLE_CORE_DIR/base.sh"
fi
if [[ -z "${MOLE_LOG_LOADED:-}" ]]; then
# shellcheck source=lib/core/log.sh
source "$_MOLE_CORE_DIR/log.sh"
fi
if [[ -z "${MOLE_TIMEOUT_LOADED:-}" ]]; then
# shellcheck source=lib/core/timeout.sh
source "$_MOLE_CORE_DIR/timeout.sh"
fi
# ============================================================================
# Path Validation
# ============================================================================
# Validate path for deletion (absolute, no traversal, not system dir)
validate_path_for_deletion() {
local path="$1"
# Check path is not empty
if [[ -z "$path" ]]; then
log_error "Path validation failed: empty path"
return 1
fi
# Check path is absolute
if [[ "$path" != /* ]]; then
log_error "Path validation failed: path must be absolute: $path"
return 1
fi
# Check for path traversal attempts
# Only reject .. when it appears as a complete path component (/../ or /.. or ../)
# This allows legitimate directory names containing .. (e.g., Firefox's "name..files")
if [[ "$path" =~ (^|/)\.\.(\/|$) ]]; then
log_error "Path validation failed: path traversal not allowed: $path"
return 1
fi
# Check path doesn't contain dangerous characters
if [[ "$path" =~ [[:cntrl:]] ]] || [[ "$path" =~ $'\n' ]]; then
log_error "Path validation failed: contains control characters: $path"
return 1
fi
# Allow deletion of coresymbolicationd cache (safe system cache that can be rebuilt)
case "$path" in
/System/Library/Caches/com.apple.coresymbolicationd/data | /System/Library/Caches/com.apple.coresymbolicationd/data/*)
return 0
;;
esac
# Check path isn't critical system directory
case "$path" in
/ | /bin | /sbin | /usr | /usr/bin | /usr/sbin | /etc | /var | /System | /System/* | /Library/Extensions)
log_error "Path validation failed: critical system directory: $path"
return 1
;;
esac
return 0
}
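# Illustrative checks (hypothetical paths): the first call passes, the others are rejected.
#   validate_path_for_deletion "$HOME/Library/Caches/com.example.app"   # ok
#   validate_path_for_deletion "Library/Caches"                         # rejected: not absolute
#   validate_path_for_deletion "/usr/bin"                               # rejected: critical system dir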
# ============================================================================
# Safe Removal Operations
# ============================================================================
# Safe wrapper around rm -rf with validation
safe_remove() {
local path="$1"
local silent="${2:-false}"
# Validate path
if ! validate_path_for_deletion "$path"; then
return 1
fi
# Check if path exists
if [[ ! -e "$path" ]]; then
return 0
fi
# Dry-run mode: log but don't delete
if [[ "${MOLE_DRY_RUN:-0}" == "1" ]]; then
if [[ "${MO_DEBUG:-}" == "1" ]]; then
local file_type="file"
[[ -d "$path" ]] && file_type="directory"
[[ -L "$path" ]] && file_type="symlink"
local file_size=""
local file_age=""
if [[ -e "$path" ]]; then
local size_kb
size_kb=$(get_path_size_kb "$path" 2> /dev/null || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
file_size=$(bytes_to_human "$((size_kb * 1024))")
fi
if [[ -f "$path" || -d "$path" ]] && ! [[ -L "$path" ]]; then
local mod_time
mod_time=$(stat -f%m "$path" 2> /dev/null || echo "0")
local now
now=$(date +%s 2> /dev/null || echo "0")
if [[ "$mod_time" -gt 0 && "$now" -gt 0 ]]; then
file_age=$(((now - mod_time) / 86400))
fi
fi
fi
debug_file_action "[DRY RUN] Would remove" "$path" "$file_size" "$file_age"
else
debug_log "[DRY RUN] Would remove: $path"
fi
return 0
fi
debug_log "Removing: $path"
# Perform the deletion
# Use || to capture the exit code so set -e won't abort on rm failures
local error_msg
local rm_exit=0
error_msg=$(rm -rf "$path" 2>&1) || rm_exit=$? # safe_remove
if [[ $rm_exit -eq 0 ]]; then
return 0
else
# Check if it's a permission error
if [[ "$error_msg" == *"Permission denied"* ]] || [[ "$error_msg" == *"Operation not permitted"* ]]; then
MOLE_PERMISSION_DENIED_COUNT=${MOLE_PERMISSION_DENIED_COUNT:-0}
MOLE_PERMISSION_DENIED_COUNT=$((MOLE_PERMISSION_DENIED_COUNT + 1))
export MOLE_PERMISSION_DENIED_COUNT
debug_log "Permission denied: $path (may need Full Disk Access)"
else
[[ "$silent" != "true" ]] && log_error "Failed to remove: $path"
fi
return 1
fi
}
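# Illustrative usage sketch (hypothetical path): remove an old cache directory in
# silent mode; MOLE_DRY_RUN=1 is honored automatically.
#   safe_remove "$HOME/Library/Caches/com.example.app" true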
# Safe sudo removal with symlink protection
safe_sudo_remove() {
local path="$1"
# Validate path
if ! validate_path_for_deletion "$path"; then
log_error "Path validation failed for sudo remove: $path"
return 1
fi
# Check if path exists
if [[ ! -e "$path" ]]; then
return 0
fi
# Additional check: reject symlinks for sudo operations
if [[ -L "$path" ]]; then
log_error "Refusing to sudo remove symlink: $path"
return 1
fi
# Dry-run mode: log but don't delete
if [[ "${MOLE_DRY_RUN:-0}" == "1" ]]; then
if [[ "${MO_DEBUG:-}" == "1" ]]; then
local file_type="file"
[[ -d "$path" ]] && file_type="directory"
local file_size=""
local file_age=""
if sudo test -e "$path" 2> /dev/null; then
local size_kb
size_kb=$(sudo du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
if [[ "$size_kb" -gt 0 ]]; then
file_size=$(bytes_to_human "$((size_kb * 1024))")
fi
if sudo test -f "$path" 2> /dev/null || sudo test -d "$path" 2> /dev/null; then
local mod_time
mod_time=$(sudo stat -f%m "$path" 2> /dev/null || echo "0")
local now
now=$(date +%s 2> /dev/null || echo "0")
if [[ "$mod_time" -gt 0 && "$now" -gt 0 ]]; then
file_age=$(((now - mod_time) / 86400))
fi
fi
fi
debug_file_action "[DRY RUN] Would remove (sudo)" "$path" "$file_size" "$file_age"
else
debug_log "[DRY RUN] Would remove (sudo): $path"
fi
return 0
fi
debug_log "Removing (sudo): $path"
# Perform the deletion
if sudo rm -rf "$path" 2> /dev/null; then # SAFE: safe_sudo_remove implementation
return 0
else
log_error "Failed to remove (sudo): $path"
return 1
fi
}
# ============================================================================
# Safe Find and Delete Operations
# ============================================================================
# Safe file discovery and deletion with depth and age limits
safe_find_delete() {
local base_dir="$1"
local pattern="$2"
local age_days="${3:-7}"
local type_filter="${4:-f}"
# Validate base directory exists and is not a symlink
if [[ ! -d "$base_dir" ]]; then
log_error "Directory does not exist: $base_dir"
return 1
fi
if [[ -L "$base_dir" ]]; then
log_error "Refusing to search symlinked directory: $base_dir"
return 1
fi
# Validate type filter
if [[ "$type_filter" != "f" && "$type_filter" != "d" ]]; then
log_error "Invalid type filter: $type_filter (must be 'f' or 'd')"
return 1
fi
debug_log "Finding in $base_dir: $pattern (age: ${age_days}d, type: $type_filter)"
local find_args=("-maxdepth" "5" "-name" "$pattern" "-type" "$type_filter")
if [[ "$age_days" -gt 0 ]]; then
find_args+=("-mtime" "+$age_days")
fi
# Iterate results to respect should_protect_path
while IFS= read -r -d '' match; do
if should_protect_path "$match"; then
continue
fi
safe_remove "$match" true || true
done < <(command find "$base_dir" "${find_args[@]}" -print0 2> /dev/null || true)
return 0
}
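# Illustrative usage sketch (hypothetical directory): delete *.log files older than
# 14 days, still skipping anything should_protect_path flags.
#   safe_find_delete "$HOME/Library/Logs/ExampleApp" "*.log" 14 "f"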
# Safe sudo discovery and deletion
safe_sudo_find_delete() {
local base_dir="$1"
local pattern="$2"
local age_days="${3:-7}"
local type_filter="${4:-f}"
# Validate base directory (use sudo for permission-restricted dirs)
if ! sudo test -d "$base_dir" 2> /dev/null; then
debug_log "Directory does not exist (skipping): $base_dir"
return 0
fi
if sudo test -L "$base_dir" 2> /dev/null; then
log_error "Refusing to search symlinked directory: $base_dir"
return 1
fi
# Validate type filter
if [[ "$type_filter" != "f" && "$type_filter" != "d" ]]; then
log_error "Invalid type filter: $type_filter (must be 'f' or 'd')"
return 1
fi
debug_log "Finding (sudo) in $base_dir: $pattern (age: ${age_days}d, type: $type_filter)"
local find_args=("-maxdepth" "5" "-name" "$pattern" "-type" "$type_filter")
if [[ "$age_days" -gt 0 ]]; then
find_args+=("-mtime" "+$age_days")
fi
# Iterate results to respect should_protect_path
while IFS= read -r -d '' match; do
if should_protect_path "$match"; then
continue
fi
safe_sudo_remove "$match" || true
done < <(sudo find "$base_dir" "${find_args[@]}" -print0 2> /dev/null || true)
return 0
}
# ============================================================================
# Size Calculation
# ============================================================================
# Get path size in KB (returns 0 if not found)
get_path_size_kb() {
local path="$1"
[[ -z "$path" || ! -e "$path" ]] && {
echo "0"
return
}
  # Direct execution without timeout overhead - critical for performance in loops
  # Use || true so a du failure (e.g. permission error) doesn't exit the script under set -e
  # With pipefail the pipeline would otherwise fail when du fails; the || guard absorbs it.
local size
size=$(command du -sk "$path" 2> /dev/null | awk 'NR==1 {print $1; exit}' || true)
# Ensure size is a valid number (fix for non-numeric du output)
if [[ "$size" =~ ^[0-9]+$ ]]; then
echo "$size"
else
echo "0"
fi
}
# Calculate total size for multiple paths
calculate_total_size() {
local files="$1"
local total_kb=0
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
local size_kb
size_kb=$(get_path_size_kb "$file")
      # Assignment form avoids a non-zero exit status when the sum is 0 (set -e safety)
      total_kb=$((total_kb + size_kb))
fi
done <<< "$files"
echo "$total_kb"
}
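# Illustrative usage sketch (hypothetical paths): sum the sizes of a newline-separated list.
#   files=$'/tmp/example-a.log\n/tmp/example-b.log'
#   total_kb=$(calculate_total_size "$files")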

285
lib/core/log.ps1 Normal file
View File

@@ -0,0 +1,285 @@
# Mole - Logging Module
# Provides consistent logging functions with colors and icons
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_LOG_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_LOG_LOADED) { return }
$script:MOLE_LOG_LOADED = $true
# Import base module
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\base.ps1"
# ============================================================================
# Log Configuration
# ============================================================================
$script:LogConfig = @{
DebugEnabled = $env:MOLE_DEBUG -eq "1"
LogFile = $null
Verbose = $false
}
# ============================================================================
# Core Logging Functions
# ============================================================================
function Write-LogMessage {
<#
.SYNOPSIS
Internal function to write formatted log message
#>
param(
[string]$Message,
[string]$Level,
[string]$Color,
[string]$Icon
)
$timestamp = Get-Date -Format "HH:mm:ss"
$colorCode = $script:Colors[$Color]
$nc = $script:Colors.NC
$formattedIcon = if ($Icon) { "$Icon " } else { "" }
$output = " ${colorCode}${formattedIcon}${nc}${Message}"
Write-Host $output
# Also write to log file if configured
if ($script:LogConfig.LogFile) {
"$timestamp [$Level] $Message" | Out-File -Append -FilePath $script:LogConfig.LogFile -Encoding UTF8
}
}
function Write-Info {
<#
.SYNOPSIS
Write an informational message
#>
param([string]$Message)
Write-LogMessage -Message $Message -Level "INFO" -Color "Cyan" -Icon $script:Icons.List
}
function Write-Success {
<#
.SYNOPSIS
Write a success message
#>
param([string]$Message)
Write-LogMessage -Message $Message -Level "SUCCESS" -Color "Green" -Icon $script:Icons.Success
}
function Write-MoleWarning {
<#
.SYNOPSIS
Write a warning message
#>
param([string]$Message)
Write-LogMessage -Message $Message -Level "WARN" -Color "Yellow" -Icon $script:Icons.Warning
}
function Write-MoleError {
<#
.SYNOPSIS
Write an error message
#>
param([string]$Message)
Write-LogMessage -Message $Message -Level "ERROR" -Color "Red" -Icon $script:Icons.Error
}
function Write-Debug {
<#
.SYNOPSIS
Write a debug message (only if debug mode is enabled)
#>
param([string]$Message)
if ($script:LogConfig.DebugEnabled) {
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
Write-Host " ${gray}[DEBUG] $Message${nc}"
}
}
function Write-DryRun {
<#
.SYNOPSIS
Write a dry-run message (action that would be taken)
#>
param([string]$Message)
Write-LogMessage -Message $Message -Level "DRYRUN" -Color "Yellow" -Icon $script:Icons.DryRun
}
# ============================================================================
# Section Functions (for progress indication)
# ============================================================================
$script:CurrentSection = @{
Active = $false
Activity = $false
Name = ""
}
function Start-Section {
<#
.SYNOPSIS
Start a new section with a title
#>
param([string]$Title)
$script:CurrentSection.Active = $true
$script:CurrentSection.Activity = $false
$script:CurrentSection.Name = $Title
$purple = $script:Colors.PurpleBold
$nc = $script:Colors.NC
$arrow = $script:Icons.Arrow
Write-Host ""
Write-Host "${purple}${arrow} ${Title}${nc}"
}
function Stop-Section {
<#
.SYNOPSIS
End the current section
#>
if ($script:CurrentSection.Active -and -not $script:CurrentSection.Activity) {
Write-Success "Nothing to tidy"
}
$script:CurrentSection.Active = $false
}
function Set-SectionActivity {
<#
.SYNOPSIS
Mark that activity occurred in current section
#>
if ($script:CurrentSection.Active) {
$script:CurrentSection.Activity = $true
}
}
# ============================================================================
# Progress Spinner
# ============================================================================
$script:SpinnerFrames = @('|', '/', '-', '\')
$script:SpinnerIndex = 0
$script:SpinnerJob = $null
function Start-Spinner {
<#
.SYNOPSIS
Start an inline spinner with message
#>
param([string]$Message = "Working...")
$script:SpinnerIndex = 0
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
Write-Host -NoNewline " ${gray}$($script:SpinnerFrames[0]) $Message${nc}"
}
function Update-Spinner {
<#
.SYNOPSIS
Update the spinner animation
#>
param([string]$Message)
$script:SpinnerIndex = ($script:SpinnerIndex + 1) % $script:SpinnerFrames.Count
$frame = $script:SpinnerFrames[$script:SpinnerIndex]
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
# Move cursor to beginning of line and clear
Write-Host -NoNewline "`r ${gray}$frame $Message${nc} "
}
function Stop-Spinner {
<#
.SYNOPSIS
Stop the spinner and clear the line
#>
Write-Host -NoNewline "`r `r"
}
# ============================================================================
# Progress Bar
# ============================================================================
function Write-Progress {
<#
.SYNOPSIS
Write a progress bar
#>
param(
[int]$Current,
[int]$Total,
[string]$Message = "",
[int]$Width = 30
)
$percent = if ($Total -gt 0) { [Math]::Round(($Current / $Total) * 100) } else { 0 }
$filled = [Math]::Round(($Width * $Current) / [Math]::Max($Total, 1))
$empty = $Width - $filled
$bar = ("[" + ("=" * $filled) + (" " * $empty) + "]")
$cyan = $script:Colors.Cyan
$nc = $script:Colors.NC
Write-Host -NoNewline "`r ${cyan}$bar${nc} ${percent}% $Message "
}
function Complete-Progress {
<#
.SYNOPSIS
Clear the progress bar line
#>
    Write-Host -NoNewline ("`r" + (" " * 80) + "`r")
}
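# Illustrative usage sketch: drive the progress bar over a hypothetical work list.
#   $total = 20
#   for ($n = 1; $n -le $total; $n++) {
#       Write-Progress -Current $n -Total $total -Message "Scanning"
#       Start-Sleep -Milliseconds 50
#   }
#   Complete-Progress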
# ============================================================================
# Log File Management
# ============================================================================
function Set-LogFile {
<#
.SYNOPSIS
Set a log file for persistent logging
#>
param([string]$Path)
$script:LogConfig.LogFile = $Path
$dir = Split-Path -Parent $Path
if ($dir -and -not (Test-Path $dir)) {
New-Item -ItemType Directory -Path $dir -Force | Out-Null
}
}
function Enable-DebugMode {
<#
.SYNOPSIS
Enable debug logging
#>
$script:LogConfig.DebugEnabled = $true
}
function Disable-DebugMode {
<#
.SYNOPSIS
Disable debug logging
#>
$script:LogConfig.DebugEnabled = $false
}
# ============================================================================
# Exports (functions are available via dot-sourcing)
# ============================================================================
# Functions: Write-Info, Write-Success, Write-MoleWarning, Write-MoleError, etc.

View File

@@ -1,291 +0,0 @@
#!/bin/bash
# Mole - Logging System
# Centralized logging with rotation support
set -euo pipefail
# Prevent multiple sourcing
if [[ -n "${MOLE_LOG_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_LOG_LOADED=1
# Ensure base.sh is loaded for colors and icons
if [[ -z "${MOLE_BASE_LOADED:-}" ]]; then
_MOLE_CORE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=lib/core/base.sh
source "$_MOLE_CORE_DIR/base.sh"
fi
# ============================================================================
# Logging Configuration
# ============================================================================
readonly LOG_FILE="${HOME}/.config/mole/mole.log"
readonly DEBUG_LOG_FILE="${HOME}/.config/mole/mole_debug_session.log"
readonly LOG_MAX_SIZE_DEFAULT=1048576 # 1MB
# Ensure log directory and file exist with correct ownership
ensure_user_file "$LOG_FILE"
# ============================================================================
# Log Rotation
# ============================================================================
# Rotate log file if it exceeds maximum size
rotate_log_once() {
# Skip if already checked this session
[[ -n "${MOLE_LOG_ROTATED:-}" ]] && return 0
export MOLE_LOG_ROTATED=1
local max_size="$LOG_MAX_SIZE_DEFAULT"
if [[ -f "$LOG_FILE" ]] && [[ $(get_file_size "$LOG_FILE") -gt "$max_size" ]]; then
mv "$LOG_FILE" "${LOG_FILE}.old" 2> /dev/null || true
ensure_user_file "$LOG_FILE"
fi
}
# ============================================================================
# Logging Functions
# ============================================================================
# Log informational message
log_info() {
echo -e "${BLUE}$1${NC}"
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
echo "[$timestamp] INFO: $1" >> "$LOG_FILE" 2> /dev/null || true
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo "[$timestamp] INFO: $1" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log success message
log_success() {
echo -e " ${GREEN}${ICON_SUCCESS}${NC} $1"
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
echo "[$timestamp] SUCCESS: $1" >> "$LOG_FILE" 2> /dev/null || true
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo "[$timestamp] SUCCESS: $1" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log warning message
log_warning() {
echo -e "${YELLOW}$1${NC}"
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
echo "[$timestamp] WARNING: $1" >> "$LOG_FILE" 2> /dev/null || true
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo "[$timestamp] WARNING: $1" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log error message
log_error() {
echo -e "${YELLOW}${ICON_ERROR}${NC} $1" >&2
local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
echo "[$timestamp] ERROR: $1" >> "$LOG_FILE" 2> /dev/null || true
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo "[$timestamp] ERROR: $1" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Debug logging (active when MO_DEBUG=1)
debug_log() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo -e "${GRAY}[DEBUG]${NC} $*" >&2
echo "[$(date '+%Y-%m-%d %H:%M:%S')] DEBUG: $*" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Enhanced debug logging for operations
debug_operation_start() {
local operation_name="$1"
local operation_desc="${2:-}"
if [[ "${MO_DEBUG:-}" == "1" ]]; then
# Output to stderr for immediate feedback
echo -e "${GRAY}[DEBUG] === $operation_name ===${NC}" >&2
[[ -n "$operation_desc" ]] && echo -e "${GRAY}[DEBUG] $operation_desc${NC}" >&2
# Also log to file
{
echo ""
echo "=== $operation_name ==="
[[ -n "$operation_desc" ]] && echo "Description: $operation_desc"
} >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log detailed operation information
debug_operation_detail() {
local detail_type="$1" # e.g., "Method", "Target", "Expected Outcome"
local detail_value="$2"
if [[ "${MO_DEBUG:-}" == "1" ]]; then
# Output to stderr
echo -e "${GRAY}[DEBUG] $detail_type: $detail_value${NC}" >&2
# Also log to file
echo "$detail_type: $detail_value" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log individual file action with metadata
debug_file_action() {
local action="$1" # e.g., "Would remove", "Removing"
local file_path="$2"
local file_size="${3:-}"
local file_age="${4:-}"
if [[ "${MO_DEBUG:-}" == "1" ]]; then
local msg=" - $file_path"
[[ -n "$file_size" ]] && msg+=" ($file_size"
[[ -n "$file_age" ]] && msg+=", ${file_age} days old"
[[ -n "$file_size" ]] && msg+=")"
# Output to stderr
echo -e "${GRAY}[DEBUG] $action: $msg${NC}" >&2
# Also log to file
echo "$action: $msg" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log risk level for operations
debug_risk_level() {
local risk_level="$1" # LOW, MEDIUM, HIGH
local reason="$2"
if [[ "${MO_DEBUG:-}" == "1" ]]; then
local color="$GRAY"
case "$risk_level" in
LOW) color="$GREEN" ;;
MEDIUM) color="$YELLOW" ;;
HIGH) color="$RED" ;;
esac
# Output to stderr with color
echo -e "${GRAY}[DEBUG] Risk Level: ${color}${risk_level}${GRAY} ($reason)${NC}" >&2
# Also log to file
echo "Risk Level: $risk_level ($reason)" >> "$DEBUG_LOG_FILE" 2> /dev/null || true
fi
}
# Log system information for debugging
log_system_info() {
# Only allow once per session
[[ -n "${MOLE_SYS_INFO_LOGGED:-}" ]] && return 0
export MOLE_SYS_INFO_LOGGED=1
# Reset debug log file for this new session
ensure_user_file "$DEBUG_LOG_FILE"
: > "$DEBUG_LOG_FILE"
# Start block in debug log file
{
echo "----------------------------------------------------------------------"
echo "Mole Debug Session - $(date '+%Y-%m-%d %H:%M:%S')"
echo "----------------------------------------------------------------------"
echo "User: $USER"
echo "Hostname: $(hostname)"
echo "Architecture: $(uname -m)"
echo "Kernel: $(uname -r)"
if command -v sw_vers > /dev/null; then
echo "macOS: $(sw_vers -productVersion) ($(sw_vers -buildVersion))"
fi
echo "Shell: ${SHELL:-unknown} (${TERM:-unknown})"
# Check sudo status non-interactively
if sudo -n true 2> /dev/null; then
echo "Sudo Access: Active"
else
echo "Sudo Access: Required"
fi
echo "----------------------------------------------------------------------"
} >> "$DEBUG_LOG_FILE" 2> /dev/null || true
# Notification to stderr
echo -e "${GRAY}[DEBUG] Debug logging enabled. Session log: $DEBUG_LOG_FILE${NC}" >&2
}
# ============================================================================
# Command Execution Wrappers
# ============================================================================
# Run command silently (ignore errors)
run_silent() {
"$@" > /dev/null 2>&1 || true
}
# Run command with error logging
run_logged() {
local cmd="$1"
# Log to main file, and also to debug file if enabled
if [[ "${MO_DEBUG:-}" == "1" ]]; then
if ! "$@" 2>&1 | tee -a "$LOG_FILE" | tee -a "$DEBUG_LOG_FILE" > /dev/null; then
log_warning "Command failed: $cmd"
return 1
fi
else
if ! "$@" 2>&1 | tee -a "$LOG_FILE" > /dev/null; then
log_warning "Command failed: $cmd"
return 1
fi
fi
return 0
}
# ============================================================================
# Formatted Output
# ============================================================================
# Print formatted summary block
print_summary_block() {
local heading=""
local -a details=()
local saw_heading=false
# Parse arguments
for arg in "$@"; do
if [[ "$saw_heading" == "false" ]]; then
saw_heading=true
heading="$arg"
else
details+=("$arg")
fi
done
local divider="======================================================================"
# Print with dividers
echo ""
echo "$divider"
if [[ -n "$heading" ]]; then
echo -e "${BLUE}${heading}${NC}"
fi
# Print details
for detail in "${details[@]}"; do
[[ -z "$detail" ]] && continue
echo -e "${detail}"
done
echo "$divider"
# If debug mode is on, remind user about the log file location
if [[ "${MO_DEBUG:-}" == "1" ]]; then
echo -e "${GRAY}Debug session log saved to:${NC} ${DEBUG_LOG_FILE}"
fi
}
# ============================================================================
# Initialize Logging
# ============================================================================
# Perform log rotation check on module load
rotate_log_once
# If debug mode is enabled, log system info immediately
if [[ "${MO_DEBUG:-}" == "1" ]]; then
log_system_info
fi

View File

@@ -1,319 +0,0 @@
#!/bin/bash
# Sudo Session Manager
# Unified sudo authentication and keepalive management
set -euo pipefail
# ============================================================================
# Touch ID and Clamshell Detection
# ============================================================================
check_touchid_support() {
# Check sudo_local first (Sonoma+)
if [[ -f /etc/pam.d/sudo_local ]]; then
grep -q "pam_tid.so" /etc/pam.d/sudo_local 2> /dev/null
return $?
fi
# Fallback to checking sudo directly
if [[ -f /etc/pam.d/sudo ]]; then
grep -q "pam_tid.so" /etc/pam.d/sudo 2> /dev/null
return $?
fi
return 1
}
# Detect clamshell mode (lid closed)
is_clamshell_mode() {
# ioreg is missing (not macOS) -> treat as lid open
if ! command -v ioreg > /dev/null 2>&1; then
return 1
fi
# Check if lid is closed; ignore pipeline failures so set -e doesn't exit
local clamshell_state=""
clamshell_state=$( (ioreg -r -k AppleClamshellState -d 4 2> /dev/null |
grep "AppleClamshellState" |
head -1) || true)
if [[ "$clamshell_state" =~ \"AppleClamshellState\"\ =\ Yes ]]; then
return 0 # Lid is closed
fi
return 1 # Lid is open
}
_request_password() {
local tty_path="$1"
local attempts=0
local show_hint=true
# Extra safety: ensure sudo cache is cleared before password input
sudo -k 2> /dev/null
# Save original terminal settings and ensure they're restored on exit
local stty_orig
stty_orig=$(stty -g < "$tty_path" 2> /dev/null || echo "")
trap '[[ -n "${stty_orig:-}" ]] && stty "${stty_orig:-}" < "$tty_path" 2> /dev/null || true' RETURN
while ((attempts < 3)); do
local password=""
# Show hint on first attempt about Touch ID appearing again
if [[ $show_hint == true ]] && check_touchid_support; then
echo -e "${GRAY}Note: Touch ID dialog may appear once more - just cancel it${NC}" > "$tty_path"
show_hint=false
fi
printf "${PURPLE}${ICON_ARROW}${NC} Password: " > "$tty_path"
# Disable terminal echo to hide password input
stty -echo -icanon min 1 time 0 < "$tty_path" 2> /dev/null || true
IFS= read -r password < "$tty_path" || password=""
# Restore terminal echo immediately
stty echo icanon < "$tty_path" 2> /dev/null || true
printf "\n" > "$tty_path"
if [[ -z "$password" ]]; then
unset password
((attempts++))
if [[ $attempts -lt 3 ]]; then
echo -e "${YELLOW}${ICON_WARNING}${NC} Password cannot be empty" > "$tty_path"
fi
continue
fi
# Verify password with sudo
# NOTE: macOS PAM will trigger Touch ID before password auth - this is system behavior
if printf '%s\n' "$password" | sudo -S -p "" -v > /dev/null 2>&1; then
unset password
return 0
fi
unset password
((attempts++))
if [[ $attempts -lt 3 ]]; then
echo -e "${YELLOW}${ICON_WARNING}${NC} Incorrect password, try again" > "$tty_path"
fi
done
return 1
}
request_sudo_access() {
local prompt_msg="${1:-Admin access required}"
# Check if already have sudo access
if sudo -n true 2> /dev/null; then
return 0
fi
# Get TTY path
local tty_path="/dev/tty"
if [[ ! -r "$tty_path" || ! -w "$tty_path" ]]; then
tty_path=$(tty 2> /dev/null || echo "")
if [[ -z "$tty_path" || ! -r "$tty_path" || ! -w "$tty_path" ]]; then
log_error "No interactive terminal available"
return 1
fi
fi
sudo -k
# Check if in clamshell mode - if yes, skip Touch ID entirely
if is_clamshell_mode; then
echo -e "${PURPLE}${ICON_ARROW}${NC} ${prompt_msg}"
if _request_password "$tty_path"; then
# Clear all prompt lines (use safe clearing method)
safe_clear_lines 3 "$tty_path"
return 0
fi
return 1
fi
# Not in clamshell mode - try Touch ID if configured
if ! check_touchid_support; then
echo -e "${PURPLE}${ICON_ARROW}${NC} ${prompt_msg}"
if _request_password "$tty_path"; then
# Clear all prompt lines (use safe clearing method)
safe_clear_lines 3 "$tty_path"
return 0
fi
return 1
fi
# Touch ID is available and not in clamshell mode
echo -e "${PURPLE}${ICON_ARROW}${NC} ${prompt_msg} ${GRAY}(Touch ID or password)${NC}"
# Start sudo in background so we can monitor and control it
sudo -v < /dev/null > /dev/null 2>&1 &
local sudo_pid=$!
# Wait for sudo to complete or timeout (5 seconds)
local elapsed=0
local timeout=50 # 50 * 0.1s = 5 seconds
while ((elapsed < timeout)); do
if ! kill -0 "$sudo_pid" 2> /dev/null; then
# Process exited
wait "$sudo_pid" 2> /dev/null
local exit_code=$?
if [[ $exit_code -eq 0 ]] && sudo -n true 2> /dev/null; then
# Touch ID succeeded - clear the prompt line
safe_clear_lines 1 "$tty_path"
return 0
fi
# Touch ID failed or cancelled
break
fi
sleep 0.1
((elapsed++))
done
# Touch ID failed/cancelled - clean up thoroughly before password input
# Kill the sudo process if still running
if kill -0 "$sudo_pid" 2> /dev/null; then
kill -9 "$sudo_pid" 2> /dev/null
wait "$sudo_pid" 2> /dev/null || true
fi
# Clear sudo state immediately
sudo -k 2> /dev/null
# IMPORTANT: Wait longer for macOS to fully close Touch ID UI and SecurityAgent
# Without this delay, subsequent sudo calls may re-trigger Touch ID
sleep 1
# Clear any leftover prompts on the screen
safe_clear_line "$tty_path"
# Now use our password input (this should not trigger Touch ID again)
if _request_password "$tty_path"; then
# Clear all prompt lines (use safe clearing method)
safe_clear_lines 3 "$tty_path"
return 0
fi
return 1
}
# ============================================================================
# Sudo Session Management
# ============================================================================
# Global state
MOLE_SUDO_KEEPALIVE_PID=""
MOLE_SUDO_ESTABLISHED="false"
# Start sudo keepalive
_start_sudo_keepalive() {
# Start background keepalive process with all outputs redirected
# This is critical: command substitution waits for all file descriptors to close
(
# Initial delay to let sudo cache stabilize after password entry
# This prevents immediately triggering Touch ID again
sleep 2
local retry_count=0
while true; do
if ! sudo -n -v 2> /dev/null; then
((retry_count++))
if [[ $retry_count -ge 3 ]]; then
exit 1
fi
sleep 5
continue
fi
retry_count=0
sleep 30
kill -0 "$$" 2> /dev/null || exit
done
) > /dev/null 2>&1 &
local pid=$!
echo $pid
}
# Stop sudo keepalive
_stop_sudo_keepalive() {
local pid="${1:-}"
if [[ -n "$pid" ]]; then
kill "$pid" 2> /dev/null || true
wait "$pid" 2> /dev/null || true
fi
}
# Check if sudo session is active
has_sudo_session() {
sudo -n true 2> /dev/null
}
# Request administrative access
request_sudo() {
local prompt_msg="${1:-Admin access required}"
if has_sudo_session; then
return 0
fi
  # Delegate to the robust implementation above (request_sudo_access)
if request_sudo_access "$prompt_msg"; then
return 0
else
return 1
fi
}
# Maintain active sudo session with keepalive
ensure_sudo_session() {
local prompt="${1:-Admin access required}"
# Check if already established
if has_sudo_session && [[ "$MOLE_SUDO_ESTABLISHED" == "true" ]]; then
return 0
fi
# Stop old keepalive if exists
if [[ -n "$MOLE_SUDO_KEEPALIVE_PID" ]]; then
_stop_sudo_keepalive "$MOLE_SUDO_KEEPALIVE_PID"
MOLE_SUDO_KEEPALIVE_PID=""
fi
# Request sudo access
if ! request_sudo "$prompt"; then
MOLE_SUDO_ESTABLISHED="false"
return 1
fi
# Start keepalive
MOLE_SUDO_KEEPALIVE_PID=$(_start_sudo_keepalive)
MOLE_SUDO_ESTABLISHED="true"
return 0
}
# Stop sudo session and cleanup
stop_sudo_session() {
if [[ -n "$MOLE_SUDO_KEEPALIVE_PID" ]]; then
_stop_sudo_keepalive "$MOLE_SUDO_KEEPALIVE_PID"
MOLE_SUDO_KEEPALIVE_PID=""
fi
MOLE_SUDO_ESTABLISHED="false"
}
# Register cleanup on script exit
register_sudo_cleanup() {
trap stop_sudo_session EXIT INT TERM
}
# Predict if operation requires administrative access
will_need_sudo() {
local -a operations=("$@")
for op in "${operations[@]}"; do
case "$op" in
system_update | appstore_update | macos_update | firewall | touchid | rosetta | system_fix)
return 0
;;
esac
done
return 1
}
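# Illustrative flow sketch (hypothetical operation list): only prompt for admin
# access when one of the selected operations actually needs it.
#   register_sudo_cleanup
#   if will_need_sudo "system_update" "touchid"; then
#     ensure_sudo_session "Admin access required for system tasks" || exit 1
#   fi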

View File

@@ -1,156 +0,0 @@
#!/bin/bash
# Mole - Timeout Control
# Command execution with timeout support
set -euo pipefail
# Prevent multiple sourcing
if [[ -n "${MOLE_TIMEOUT_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_TIMEOUT_LOADED=1
# ============================================================================
# Timeout Command Initialization
# ============================================================================
# Initialize timeout command (prefer gtimeout from coreutils, fallback to timeout)
# Sets MO_TIMEOUT_BIN to the available timeout command
#
# Recommendation: Install coreutils for reliable timeout support
# brew install coreutils
#
# The shell-based fallback has known limitations:
# - May not clean up all child processes
# - Has race conditions in edge cases
# - Less reliable than native timeout command
if [[ -z "${MO_TIMEOUT_INITIALIZED:-}" ]]; then
MO_TIMEOUT_BIN=""
for candidate in gtimeout timeout; do
if command -v "$candidate" > /dev/null 2>&1; then
MO_TIMEOUT_BIN="$candidate"
if [[ "${MO_DEBUG:-0}" == "1" ]]; then
echo "[TIMEOUT] Using command: $candidate" >&2
fi
break
fi
done
# Log warning if no timeout command available
if [[ -z "$MO_TIMEOUT_BIN" ]] && [[ "${MO_DEBUG:-0}" == "1" ]]; then
echo "[TIMEOUT] No timeout command found, using shell fallback" >&2
echo "[TIMEOUT] Install coreutils for better reliability: brew install coreutils" >&2
fi
export MO_TIMEOUT_INITIALIZED=1
fi
# ============================================================================
# Timeout Execution
# ============================================================================
# Run command with timeout
# Uses gtimeout/timeout if available, falls back to shell-based implementation
#
# Args:
# $1 - duration in seconds (0 or invalid = no timeout)
# $@ - command and arguments to execute
#
# Returns:
# Command exit code, or 124 if timed out (matches gtimeout behavior)
#
# Environment:
# MO_DEBUG - Set to 1 to enable debug logging to stderr
#
# Implementation notes:
# - Prefers gtimeout (coreutils) or timeout for reliability
# - Shell fallback uses SIGTERM → SIGKILL escalation
# - Attempts process group cleanup to handle child processes
# - Returns exit code 124 on timeout (standard timeout exit code)
#
# Known limitations of shell-based fallback:
# - Race condition: If command exits during signal delivery, the signal
# may target a reused PID (very rare, requires quick PID reuse)
# - Zombie processes: Brief zombies until wait completes
# - Nested children: SIGKILL may not reach all descendants
# - No process group: Cannot guarantee cleanup of detached children
#
# For mission-critical timeouts, install coreutils.
run_with_timeout() {
local duration="${1:-0}"
shift || true
# No timeout if duration is invalid or zero
if [[ ! "$duration" =~ ^[0-9]+(\.[0-9]+)?$ ]] || [[ $(echo "$duration <= 0" | bc -l 2> /dev/null) -eq 1 ]]; then
"$@"
return $?
fi
# Use timeout command if available (preferred path)
if [[ -n "${MO_TIMEOUT_BIN:-}" ]]; then
if [[ "${MO_DEBUG:-0}" == "1" ]]; then
echo "[TIMEOUT] Running with ${duration}s timeout: $*" >&2
fi
"$MO_TIMEOUT_BIN" "$duration" "$@"
return $?
fi
# ========================================================================
# Shell-based fallback implementation
# ========================================================================
if [[ "${MO_DEBUG:-0}" == "1" ]]; then
echo "[TIMEOUT] Shell fallback (${duration}s): $*" >&2
fi
# Start command in background
"$@" &
local cmd_pid=$!
# Start timeout killer in background
(
# Wait for timeout duration
sleep "$duration"
# Check if process still exists
if kill -0 "$cmd_pid" 2> /dev/null; then
# Try to kill process group first (negative PID), fallback to single process
# Process group kill is best effort - may not work if setsid was used
kill -TERM -"$cmd_pid" 2> /dev/null || kill -TERM "$cmd_pid" 2> /dev/null || true
# Grace period for clean shutdown
sleep 2
# Escalate to SIGKILL if still alive
if kill -0 "$cmd_pid" 2> /dev/null; then
kill -KILL -"$cmd_pid" 2> /dev/null || kill -KILL "$cmd_pid" 2> /dev/null || true
fi
fi
) &
local killer_pid=$!
# Wait for command to complete
local exit_code=0
set +e
wait "$cmd_pid" 2> /dev/null
exit_code=$?
set -e
# Clean up killer process
if kill -0 "$killer_pid" 2> /dev/null; then
kill "$killer_pid" 2> /dev/null || true
wait "$killer_pid" 2> /dev/null || true
fi
# Check if command was killed by timeout (exit codes 143=SIGTERM, 137=SIGKILL)
if [[ $exit_code -eq 143 || $exit_code -eq 137 ]]; then
# Command was killed by timeout
if [[ "${MO_DEBUG:-0}" == "1" ]]; then
echo "[TIMEOUT] Command timed out after ${duration}s" >&2
fi
return 124
fi
# Command completed normally (or with its own error)
return "$exit_code"
}
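# Illustrative usage sketch (hypothetical command): allow 10 seconds, then treat
# exit code 124 as a timeout, matching gtimeout semantics.
#   rc=0
#   run_with_timeout 10 du -sk "$HOME/Library/Caches" || rc=$?
#   [[ $rc -eq 124 ]] && echo "timed out"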

449
lib/core/ui.ps1 Normal file
View File

@@ -0,0 +1,449 @@
# Mole - UI Module
# Provides interactive UI components (menus, confirmations, etc.)
#Requires -Version 5.1
Set-StrictMode -Version Latest
# Prevent multiple sourcing
if ((Get-Variable -Name 'MOLE_UI_LOADED' -Scope Script -ErrorAction SilentlyContinue) -and $script:MOLE_UI_LOADED) { return }
$script:MOLE_UI_LOADED = $true
# Import dependencies
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$scriptDir\base.ps1"
. "$scriptDir\log.ps1"
# ============================================================================
# Terminal Utilities
# ============================================================================
function Get-TerminalSize {
<#
.SYNOPSIS
Get terminal width and height
#>
try {
return @{
Width = $Host.UI.RawUI.WindowSize.Width
Height = $Host.UI.RawUI.WindowSize.Height
}
}
catch {
return @{ Width = 80; Height = 24 }
}
}
function Clear-Line {
<#
.SYNOPSIS
Clear the current line
#>
$width = (Get-TerminalSize).Width
Write-Host -NoNewline ("`r" + (" " * ($width - 1)) + "`r")
}
function Move-CursorUp {
<#
.SYNOPSIS
Move cursor up N lines
#>
param([int]$Lines = 1)
Write-Host -NoNewline "$([char]27)[$Lines`A"
}
function Move-CursorDown {
<#
.SYNOPSIS
Move cursor down N lines
#>
param([int]$Lines = 1)
Write-Host -NoNewline "$([char]27)[$Lines`B"
}
# ============================================================================
# Confirmation Dialogs
# ============================================================================
function Read-Confirmation {
<#
.SYNOPSIS
Ask for yes/no confirmation
#>
param(
[Parameter(Mandatory)]
[string]$Prompt,
[bool]$Default = $false
)
$cyan = $script:Colors.Cyan
$nc = $script:Colors.NC
$hint = if ($Default) { "[Y/n]" } else { "[y/N]" }
Write-Host -NoNewline " ${cyan}$($script:Icons.Confirm)${nc} $Prompt $hint "
$response = Read-Host
if ([string]::IsNullOrWhiteSpace($response)) {
return $Default
}
return $response -match '^[Yy]'
}
function Read-ConfirmationDestructive {
<#
.SYNOPSIS
Ask for confirmation on destructive operations (requires typing 'yes')
#>
param(
[Parameter(Mandatory)]
[string]$Prompt,
[string]$ConfirmText = "yes"
)
$red = $script:Colors.Red
$nc = $script:Colors.NC
Write-Host ""
Write-Host " ${red}$($script:Icons.Warning) WARNING: $Prompt${nc}"
Write-Host " Type '$ConfirmText' to confirm: " -NoNewline
$response = Read-Host
return $response -eq $ConfirmText
}
# ============================================================================
# Menu Components
# ============================================================================
function Show-Menu {
<#
.SYNOPSIS
Display an interactive menu and return selected option
.PARAMETER Title
Menu title
.PARAMETER Options
Array of menu options (hashtables with Name and optionally Description, Action)
.PARAMETER AllowBack
Show back/exit option
#>
param(
[string]$Title = "Menu",
[Parameter(Mandatory)]
[array]$Options,
[switch]$AllowBack
)
$selected = 0
$maxIndex = $Options.Count - 1
# Add back option if allowed
if ($AllowBack) {
$Options = $Options + @{ Name = "Back"; Description = "Return to previous menu" }
$maxIndex++
}
$purple = $script:Colors.PurpleBold
$cyan = $script:Colors.Cyan
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
# Hide cursor
Write-Host -NoNewline "$([char]27)[?25l"
try {
while ($true) {
# Clear screen and show menu
Clear-Host
Write-Host ""
Write-Host " ${purple}$($script:Icons.Arrow) $Title${nc}"
Write-Host ""
for ($i = 0; $i -le $maxIndex; $i++) {
$option = $Options[$i]
$name = if ($option -is [hashtable]) { $option.Name } else { $option.ToString() }
$desc = if ($option -is [hashtable] -and $option.Description) { " - $($option.Description)" } else { "" }
if ($i -eq $selected) {
Write-Host " ${cyan}> $name${nc}${gray}$desc${nc}"
}
else {
Write-Host " $name${gray}$desc${nc}"
}
}
Write-Host ""
Write-Host " ${gray}Use arrows or j/k to navigate, Enter to select, q to quit${nc}"
# Read key - handle both VirtualKeyCode and escape sequences
$key = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
# Debug: uncomment to see key codes
# Write-Host "VKey: $($key.VirtualKeyCode), Char: $([int]$key.Character)"
# Handle escape sequences for arrow keys (some terminals send these)
$moved = $false
if ($key.VirtualKeyCode -eq 0 -or $key.Character -eq [char]27) {
# Escape sequence - read the next characters
if ($Host.UI.RawUI.KeyAvailable) {
$key2 = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
if ($key2.Character -eq '[' -and $Host.UI.RawUI.KeyAvailable) {
$key3 = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
switch ($key3.Character) {
'A' { # Up arrow escape sequence
$selected = if ($selected -gt 0) { $selected - 1 } else { $maxIndex }
$moved = $true
}
'B' { # Down arrow escape sequence
$selected = if ($selected -lt $maxIndex) { $selected + 1 } else { 0 }
$moved = $true
}
}
}
}
}
if (-not $moved) {
switch ($key.VirtualKeyCode) {
38 { # Up arrow
$selected = if ($selected -gt 0) { $selected - 1 } else { $maxIndex }
}
40 { # Down arrow
$selected = if ($selected -lt $maxIndex) { $selected + 1 } else { 0 }
}
13 { # Enter
# Show cursor
Write-Host -NoNewline "$([char]27)[?25h"
if ($AllowBack -and $selected -eq $maxIndex) {
return $null # Back selected
}
return $Options[$selected]
}
default {
switch ($key.Character) {
'k' { $selected = if ($selected -gt 0) { $selected - 1 } else { $maxIndex } }
'j' { $selected = if ($selected -lt $maxIndex) { $selected + 1 } else { 0 } }
'q' {
Write-Host -NoNewline "$([char]27)[?25h"
return $null
}
}
}
}
}
}
}
finally {
# Ensure cursor is shown
Write-Host -NoNewline "$([char]27)[?25h"
}
}
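# Illustrative usage sketch (hypothetical option set): options are hashtables with a
# Name and optional Description; Show-Menu returns the chosen hashtable or $null.
#   $choice = Show-Menu -Title "Example" -Options @(
#       @{ Name = "Clean";  Description = "Run cleanup" },
#       @{ Name = "Status"; Description = "Show status" }
#   ) -AllowBack
#   if ($choice) { Write-Info "Selected: $($choice.Name)" }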
function Show-SelectionList {
<#
.SYNOPSIS
Display a multi-select list
#>
param(
[string]$Title = "Select Items",
[Parameter(Mandatory)]
[array]$Items,
[switch]$MultiSelect
)
$cursor = 0
$selected = @{}
$maxIndex = $Items.Count - 1
$purple = $script:Colors.PurpleBold
$cyan = $script:Colors.Cyan
$green = $script:Colors.Green
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
Write-Host -NoNewline "$([char]27)[?25l"
try {
while ($true) {
Clear-Host
Write-Host ""
Write-Host " ${purple}$($script:Icons.Arrow) $Title${nc}"
if ($MultiSelect) {
Write-Host " ${gray}Space to toggle, Enter to confirm${nc}"
}
Write-Host ""
for ($i = 0; $i -le $maxIndex; $i++) {
$item = $Items[$i]
$name = if ($item -is [hashtable]) { $item.Name } else { $item.ToString() }
$check = if ($selected[$i]) { "$($script:Icons.Success)" } else { "$($script:Icons.Empty)" }
if ($i -eq $cursor) {
Write-Host " ${cyan}> ${check} $name${nc}"
}
else {
$checkColor = if ($selected[$i]) { $green } else { $gray }
Write-Host " ${checkColor}${check}${nc} $name"
}
}
Write-Host ""
Write-Host " ${gray}j/k or arrows to navigate, space to select, Enter to confirm, q to cancel${nc}"
$key = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
# Handle escape sequences for arrow keys (some terminals send these)
$moved = $false
if ($key.VirtualKeyCode -eq 0 -or $key.Character -eq [char]27) {
# Escape sequence - read the next characters
if ($Host.UI.RawUI.KeyAvailable) {
$key2 = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
if ($key2.Character -eq '[' -and $Host.UI.RawUI.KeyAvailable) {
$key3 = $Host.UI.RawUI.ReadKey("NoEcho,IncludeKeyDown")
switch ($key3.Character) {
'A' { # Up arrow escape sequence
$cursor = if ($cursor -gt 0) { $cursor - 1 } else { $maxIndex }
$moved = $true
}
'B' { # Down arrow escape sequence
$cursor = if ($cursor -lt $maxIndex) { $cursor + 1 } else { 0 }
$moved = $true
}
}
}
}
}
if (-not $moved) {
switch ($key.VirtualKeyCode) {
38 { $cursor = if ($cursor -gt 0) { $cursor - 1 } else { $maxIndex } }
40 { $cursor = if ($cursor -lt $maxIndex) { $cursor + 1 } else { 0 } }
32 { # Space
if ($MultiSelect) {
$selected[$cursor] = -not $selected[$cursor]
}
else {
$selected = @{ $cursor = $true }
}
}
13 { # Enter
Write-Host -NoNewline "$([char]27)[?25h"
$result = @()
foreach ($selKey in $selected.Keys) {
if ($selected[$selKey]) {
$result += $Items[$selKey]
}
}
return $result
}
default {
switch ($key.Character) {
'k' { $cursor = if ($cursor -gt 0) { $cursor - 1 } else { $maxIndex } }
'j' { $cursor = if ($cursor -lt $maxIndex) { $cursor + 1 } else { 0 } }
' ' {
if ($MultiSelect) {
$selected[$cursor] = -not $selected[$cursor]
}
else {
$selected = @{ $cursor = $true }
}
}
'q' {
Write-Host -NoNewline "$([char]27)[?25h"
return @()
}
}
}
}
}
}
}
finally {
Write-Host -NoNewline "$([char]27)[?25h"
}
}
# ============================================================================
# Banner / Header
# ============================================================================
function Show-Banner {
<#
.SYNOPSIS
Display the Mole ASCII banner
#>
$purple = $script:Colors.Purple
$cyan = $script:Colors.Cyan
$nc = $script:Colors.NC
Write-Host ""
Write-Host " ${purple}MOLE${nc}"
Write-Host " ${cyan}Windows System Maintenance${nc}"
Write-Host ""
}
function Show-Header {
<#
.SYNOPSIS
Display a section header
#>
param(
[Parameter(Mandatory)]
[string]$Title,
[string]$Subtitle = ""
)
$purple = $script:Colors.PurpleBold
$gray = $script:Colors.Gray
$nc = $script:Colors.NC
Write-Host ""
Write-Host " ${purple}$Title${nc}"
if ($Subtitle) {
Write-Host " ${gray}$Subtitle${nc}"
}
Write-Host ""
}
# ============================================================================
# Summary Display
# ============================================================================
function Show-Summary {
<#
.SYNOPSIS
Display cleanup summary
#>
param(
[long]$SizeBytes = 0,
[int]$ItemCount = 0,
[string]$Action = "Cleaned"
)
$green = $script:Colors.Green
$cyan = $script:Colors.Cyan
$nc = $script:Colors.NC
$sizeHuman = Format-ByteSize -Bytes $SizeBytes
Write-Host ""
Write-Host " $($green)━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━$($nc)"
Write-Host " $($green)$($script:Icons.Success)$($nc) $($Action): $($cyan)$($sizeHuman)$($nc) across $($cyan)$($ItemCount)$($nc) items"
Write-Host " $($green)━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━$($nc)"
Write-Host ""
}
# ============================================================================
# Exports (functions are available via dot-sourcing)
# ============================================================================
# Functions: Show-Menu, Show-Banner, Read-Confirmation, etc.

View File

@@ -1,434 +0,0 @@
#!/bin/bash
# Mole - UI Components
# Terminal UI utilities: cursor control, keyboard input, spinners, menus
set -euo pipefail
if [[ -n "${MOLE_UI_LOADED:-}" ]]; then
return 0
fi
readonly MOLE_UI_LOADED=1
_MOLE_CORE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
[[ -z "${MOLE_BASE_LOADED:-}" ]] && source "$_MOLE_CORE_DIR/base.sh"
# Cursor control
clear_screen() { printf '\033[2J\033[H'; }
hide_cursor() { [[ -t 1 ]] && printf '\033[?25l' >&2 || true; }
show_cursor() { [[ -t 1 ]] && printf '\033[?25h' >&2 || true; }
# Calculate display width (CJK characters count as 2)
get_display_width() {
local str="$1"
# Optimized pure bash implementation without forks
local width
# Save current locale
local old_lc="${LC_ALL:-}"
# Get Char Count (UTF-8)
  # Export so the locale change applies to the expansion (plain assignment often works
  # in newer bash, but export is safer for any subshells or commands)
export LC_ALL=en_US.UTF-8
local char_count=${#str}
# Get Byte Count (C)
export LC_ALL=C
local byte_count=${#str}
# Restore Locale immediately
if [[ -n "$old_lc" ]]; then
export LC_ALL="$old_lc"
else
unset LC_ALL
fi
if [[ $byte_count -eq $char_count ]]; then
echo "$char_count"
return
fi
# CJK Heuristic:
# Most CJK chars are 3 bytes in UTF-8 and width 2.
# ASCII chars are 1 byte and width 1.
# Width ~= CharCount + (ByteCount - CharCount) / 2
# "中" (1 char, 3 bytes) -> 1 + (2)/2 = 2.
# "A" (1 char, 1 byte) -> 1 + 0 = 1.
# This is an approximation but very fast and sufficient for App names.
# Integer arithmetic in bash automatically handles floor.
local extra_bytes=$((byte_count - char_count))
local padding=$((extra_bytes / 2))
width=$((char_count + padding))
# Adjust for zero-width joiners and emoji variation selectors (common in filenames/emojis)
# These characters add bytes but no visible width; subtract their count if present.
local zwj=$'\u200d' # zero-width joiner
local vs16=$'\ufe0f' # emoji variation selector
local zero_width=0
local without_zwj=${str//$zwj/}
zero_width=$((zero_width + (char_count - ${#without_zwj})))
local without_vs=${str//$vs16/}
zero_width=$((zero_width + (char_count - ${#without_vs})))
if ((zero_width > 0 && width > zero_width)); then
width=$((width - zero_width))
fi
echo "$width"
}
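# Illustrative examples of the heuristic: ASCII counts as 1 column, CJK as 2.
#   get_display_width "Safari"   # -> 6
#   get_display_width "微信"     # -> 4 (2 chars x width 2)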
# Truncate string by display width (handles CJK)
truncate_by_display_width() {
local str="$1"
local max_width="$2"
local current_width
current_width=$(get_display_width "$str")
if [[ $current_width -le $max_width ]]; then
echo "$str"
return
fi
  # Fallback: pure bash character iteration.
  # We need the width of *each* character to truncate at the right spot,
  # so the whole-string width formula cannot be applied here.
  # Iterating char-by-char with an inlined width check stays fast because it never forks 'wc'.
# CRITICAL: Switch to UTF-8 for correct character iteration
local old_lc="${LC_ALL:-}"
export LC_ALL=en_US.UTF-8
local truncated=""
local width=0
local i=0
local char char_width
local strlen=${#str} # Re-calculate in UTF-8
# Optimization: If total width <= max_width, return original string (checked above)
while [[ $i -lt $strlen ]]; do
char="${str:$i:1}"
    # Inlined width check to keep the loop fast (no per-character function call).
    # We are already in UTF-8, so ${#char} is 1; measuring the byte length here would
    # require switching locale inside the loop, which is too slow. Heuristic:
    # ASCII characters are width 1, everything else is treated as wide.
    if [[ "$char" =~ [[:ascii:]] ]]; then
      char_width=1
    else
      # Non-ASCII characters in app/file names are mostly CJK, which display as
      # width 2, so assume 2 in this fast path.
      char_width=2
fi
if ((width + char_width + 3 > max_width)); then
break
fi
truncated+="$char"
((width += char_width))
((i++))
done
# Restore locale
if [[ -n "$old_lc" ]]; then
export LC_ALL="$old_lc"
else
unset LC_ALL
fi
echo "${truncated}..."
}
# Read single keyboard input
read_key() {
local key rest read_status
IFS= read -r -s -n 1 key
read_status=$?
[[ $read_status -ne 0 ]] && {
echo "QUIT"
return 0
}
if [[ "${MOLE_READ_KEY_FORCE_CHAR:-}" == "1" ]]; then
[[ -z "$key" ]] && {
echo "ENTER"
return 0
}
case "$key" in
$'\n' | $'\r') echo "ENTER" ;;
$'\x7f' | $'\x08') echo "DELETE" ;;
$'\x1b') echo "QUIT" ;;
[[:print:]]) echo "CHAR:$key" ;;
*) echo "OTHER" ;;
esac
return 0
fi
[[ -z "$key" ]] && {
echo "ENTER"
return 0
}
case "$key" in
$'\n' | $'\r') echo "ENTER" ;;
' ') echo "SPACE" ;;
'/') echo "FILTER" ;;
'q' | 'Q') echo "QUIT" ;;
'R') echo "RETRY" ;;
'm' | 'M') echo "MORE" ;;
'u' | 'U') echo "UPDATE" ;;
't' | 'T') echo "TOUCHID" ;;
'j' | 'J') echo "DOWN" ;;
'k' | 'K') echo "UP" ;;
'h' | 'H') echo "LEFT" ;;
'l' | 'L') echo "RIGHT" ;;
$'\x03') echo "QUIT" ;;
$'\x7f' | $'\x08') echo "DELETE" ;;
$'\x1b')
if IFS= read -r -s -n 1 -t 1 rest 2> /dev/null; then
if [[ "$rest" == "[" ]]; then
if IFS= read -r -s -n 1 -t 1 rest2 2> /dev/null; then
case "$rest2" in
"A") echo "UP" ;; "B") echo "DOWN" ;;
"C") echo "RIGHT" ;; "D") echo "LEFT" ;;
"3")
IFS= read -r -s -n 1 -t 1 rest3 2> /dev/null
[[ "$rest3" == "~" ]] && echo "DELETE" || echo "OTHER"
;;
*) echo "OTHER" ;;
esac
else echo "QUIT"; fi
elif [[ "$rest" == "O" ]]; then
if IFS= read -r -s -n 1 -t 1 rest2 2> /dev/null; then
case "$rest2" in
"A") echo "UP" ;; "B") echo "DOWN" ;;
"C") echo "RIGHT" ;; "D") echo "LEFT" ;;
*) echo "OTHER" ;;
esac
else echo "OTHER"; fi
else echo "OTHER"; fi
else echo "QUIT"; fi
;;
[[:print:]]) echo "CHAR:$key" ;;
*) echo "OTHER" ;;
esac
}
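# Illustrative event-loop sketch (commented out; the handler names are
# hypothetical and not defined in this file):
#   while true; do
#     case "$(read_key)" in
#       UP) move_cursor_up ;;       # hypothetical handler
#       DOWN) move_cursor_down ;;   # hypothetical handler
#       ENTER) confirm_selection; break ;;
#       QUIT) break ;;
#       CHAR:*) : ;;                # printable character follows the "CHAR:" prefix
#     esac
#   done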
drain_pending_input() {
local drained=0
while IFS= read -r -s -n 1 -t 0.01 _ 2> /dev/null; do
((drained++))
[[ $drained -gt 100 ]] && break
done
}
# Format menu option display
show_menu_option() {
local number="$1"
local text="$2"
local selected="$3"
if [[ "$selected" == "true" ]]; then
echo -e "${CYAN}${ICON_ARROW} $number. $text${NC}"
else
echo " $number. $text"
fi
}
# Background spinner implementation
INLINE_SPINNER_PID=""
INLINE_SPINNER_STOP_FILE=""
start_inline_spinner() {
stop_inline_spinner 2> /dev/null || true
local message="$1"
if [[ -t 1 ]]; then
# Create unique stop flag file for this spinner instance
INLINE_SPINNER_STOP_FILE="${TMPDIR:-/tmp}/mole_spinner_$$_$RANDOM.stop"
(
local stop_file="$INLINE_SPINNER_STOP_FILE"
local chars
chars="$(mo_spinner_chars)"
[[ -z "$chars" ]] && chars="|/-\\"
local i=0
# Cooperative exit: check for stop file instead of relying on signals
while [[ ! -f "$stop_file" ]]; do
local c="${chars:$((i % ${#chars})):1}"
# Output to stderr to avoid interfering with stdout
printf "\r${MOLE_SPINNER_PREFIX:-}${BLUE}%s${NC} %s" "$c" "$message" >&2 || break
((i++))
sleep 0.1
done
# Clean up stop file before exiting
rm -f "$stop_file" 2> /dev/null || true
exit 0
) &
INLINE_SPINNER_PID=$!
disown 2> /dev/null || true
else
echo -n " ${BLUE}|${NC} $message" >&2 || true
fi
}
stop_inline_spinner() {
if [[ -n "$INLINE_SPINNER_PID" ]]; then
# Cooperative stop: create stop file to signal spinner to exit
if [[ -n "$INLINE_SPINNER_STOP_FILE" ]]; then
touch "$INLINE_SPINNER_STOP_FILE" 2> /dev/null || true
fi
# Wait briefly for cooperative exit
local wait_count=0
while kill -0 "$INLINE_SPINNER_PID" 2> /dev/null && [[ $wait_count -lt 5 ]]; do
sleep 0.05 2> /dev/null || true
((wait_count++))
done
# Only use SIGKILL as last resort if process is stuck
if kill -0 "$INLINE_SPINNER_PID" 2> /dev/null; then
kill -KILL "$INLINE_SPINNER_PID" 2> /dev/null || true
fi
wait "$INLINE_SPINNER_PID" 2> /dev/null || true
# Cleanup
rm -f "$INLINE_SPINNER_STOP_FILE" 2> /dev/null || true
INLINE_SPINNER_PID=""
INLINE_SPINNER_STOP_FILE=""
# Clear the line - use \033[2K to clear entire line, not just to end
[[ -t 1 ]] && printf "\r\033[2K" >&2 || true
fi
}
# Run command with a terminal spinner
with_spinner() {
local msg="$1"
shift || true
local timeout=180
start_inline_spinner "$msg"
local exit_code=0
if [[ -n "${MOLE_TIMEOUT_BIN:-}" ]]; then
"$MOLE_TIMEOUT_BIN" "$timeout" "$@" > /dev/null 2>&1 || exit_code=$?
else "$@" > /dev/null 2>&1 || exit_code=$?; fi
stop_inline_spinner
return $exit_code
}
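# Illustrative usage sketch (commented out; the wrapped command is only an example):
#   with_spinner "Updating Homebrew metadata" brew update
#   echo "exit status: $?"   # non-zero if the command failed or hit the timeout
# Output of the wrapped command is discarded; only its exit status is returned.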
# Get spinner characters
mo_spinner_chars() {
printf "%s" "|/-\\"
}
# Format relative time for compact display (e.g., 3d ago)
format_last_used_summary() {
local value="$1"
case "$value" in
"" | "Unknown")
echo "Unknown"
return 0
;;
"Never" | "Recent" | "Today" | "Yesterday" | "This year" | "Old")
echo "$value"
return 0
;;
esac
if [[ $value =~ ^([0-9]+)[[:space:]]+days?\ ago$ ]]; then
echo "${BASH_REMATCH[1]}d ago"
return 0
fi
if [[ $value =~ ^([0-9]+)[[:space:]]+weeks?\ ago$ ]]; then
echo "${BASH_REMATCH[1]}w ago"
return 0
fi
if [[ $value =~ ^([0-9]+)[[:space:]]+months?\ ago$ ]]; then
echo "${BASH_REMATCH[1]}m ago"
return 0
fi
if [[ $value =~ ^([0-9]+)[[:space:]]+month\(s\)\ ago$ ]]; then
echo "${BASH_REMATCH[1]}m ago"
return 0
fi
if [[ $value =~ ^([0-9]+)[[:space:]]+years?\ ago$ ]]; then
echo "${BASH_REMATCH[1]}y ago"
return 0
fi
echo "$value"
}
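# Examples of the mapping (comments only):
#   format_last_used_summary "3 days ago"    # -> "3d ago"
#   format_last_used_summary "2 weeks ago"   # -> "2w ago"
#   format_last_used_summary "Yesterday"     # -> "Yesterday" (passed through)
#   format_last_used_summary ""              # -> "Unknown"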
# Check if terminal has Full Disk Access
# Returns 0 if FDA is granted, 1 if denied, 2 if unknown
has_full_disk_access() {
# Cache the result to avoid repeated checks
if [[ -n "${MOLE_HAS_FDA:-}" ]]; then
if [[ "$MOLE_HAS_FDA" == "1" ]]; then
return 0
elif [[ "$MOLE_HAS_FDA" == "unknown" ]]; then
return 2
else
return 1
fi
fi
# Test access to protected directories that require FDA
# Strategy: Try to access directories that are commonly protected
# If ANY of them are accessible, we likely have FDA
# If ALL fail, we definitely don't have FDA
local -a protected_dirs=(
"$HOME/Library/Safari/LocalStorage"
"$HOME/Library/Mail/V10"
"$HOME/Library/Messages/chat.db"
)
local accessible_count=0
local tested_count=0
for test_path in "${protected_dirs[@]}"; do
# Only test when the protected path exists
if [[ -e "$test_path" ]]; then
tested_count=$((tested_count + 1))
# Try to stat the ACTUAL protected path - this requires FDA
if stat "$test_path" > /dev/null 2>&1; then
accessible_count=$((accessible_count + 1))
fi
fi
done
# Three possible outcomes:
# 1. tested_count = 0: Can't determine (test paths don't exist) → unknown
# 2. tested_count > 0 && accessible_count > 0: Has FDA → yes
# 3. tested_count > 0 && accessible_count = 0: No FDA → no
if [[ $tested_count -eq 0 ]]; then
# Can't determine - test paths don't exist, treat as unknown
export MOLE_HAS_FDA="unknown"
return 2
elif [[ $accessible_count -gt 0 ]]; then
# At least one path is accessible → has FDA
export MOLE_HAS_FDA=1
return 0
else
# Tested paths exist but not accessible → no FDA
export MOLE_HAS_FDA=0
return 1
fi
}
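# Illustrative caller sketch (commented out); note the three-way result:
#   has_full_disk_access
#   case $? in
#     0) : ;;  # FDA granted - protected user data paths are readable
#     1) echo "Grant Full Disk Access in System Settings for deeper cleaning" ;;
#     2) : ;;  # unknown - none of the probe paths exist on this machine
#   esac
# The result is cached in MOLE_HAS_FDA, so repeated calls are cheap.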

View File

@@ -1,191 +0,0 @@
#!/bin/bash
# Auto-fix Manager
# Unified auto-fix suggestions and execution
set -euo pipefail
# Show system suggestions with auto-fix markers
show_suggestions() {
local has_suggestions=false
local can_auto_fix=false
local -a auto_fix_items=()
local -a manual_items=()
local skip_security_autofix=false
if [[ "${MOLE_SECURITY_FIXES_SHOWN:-}" == "true" ]]; then
skip_security_autofix=true
fi
# Security suggestions
if [[ "$skip_security_autofix" == "false" && -n "${FIREWALL_DISABLED:-}" && "${FIREWALL_DISABLED}" == "true" ]]; then
auto_fix_items+=("Enable Firewall for better security")
has_suggestions=true
can_auto_fix=true
fi
if [[ -n "${FILEVAULT_DISABLED:-}" && "${FILEVAULT_DISABLED}" == "true" ]]; then
manual_items+=("Enable FileVault|System Settings → Privacy & Security → FileVault")
has_suggestions=true
fi
# Configuration suggestions
if [[ "$skip_security_autofix" == "false" && -n "${TOUCHID_NOT_CONFIGURED:-}" && "${TOUCHID_NOT_CONFIGURED}" == "true" ]]; then
auto_fix_items+=("Enable Touch ID for sudo")
has_suggestions=true
can_auto_fix=true
fi
if [[ -n "${ROSETTA_NOT_INSTALLED:-}" && "${ROSETTA_NOT_INSTALLED}" == "true" ]]; then
auto_fix_items+=("Install Rosetta 2 for Intel app support")
has_suggestions=true
can_auto_fix=true
fi
# Health suggestions
if [[ -n "${CACHE_SIZE_GB:-}" ]]; then
local cache_gb="${CACHE_SIZE_GB:-0}"
if (($(echo "$cache_gb > 5" | bc -l 2> /dev/null || echo 0))); then
manual_items+=("Free up ${cache_gb}GB by cleaning caches|Run: mo clean")
has_suggestions=true
fi
fi
if [[ -n "${BREW_HAS_WARNINGS:-}" && "${BREW_HAS_WARNINGS}" == "true" ]]; then
manual_items+=("Fix Homebrew warnings|Run: brew doctor to see details")
has_suggestions=true
fi
if [[ -n "${DISK_FREE_GB:-}" && "${DISK_FREE_GB:-0}" -lt 50 ]]; then
if [[ -z "${CACHE_SIZE_GB:-}" ]] || (($(echo "${CACHE_SIZE_GB:-0} <= 5" | bc -l 2> /dev/null || echo 1))); then
manual_items+=("Low disk space (${DISK_FREE_GB}GB free)|Run: mo analyze to find large files")
has_suggestions=true
fi
fi
# Display suggestions
echo -e "${BLUE}${ICON_ARROW}${NC} Suggestions"
if [[ "$has_suggestions" == "false" ]]; then
echo -e " ${GREEN}${NC} All looks good"
export HAS_AUTO_FIX_SUGGESTIONS="false"
return
fi
# Show auto-fix items
if [[ ${#auto_fix_items[@]} -gt 0 ]]; then
for item in "${auto_fix_items[@]}"; do
echo -e " ${YELLOW}${ICON_WARNING}${NC} ${item} ${GREEN}[auto]${NC}"
done
fi
# Show manual items
if [[ ${#manual_items[@]} -gt 0 ]]; then
for item in "${manual_items[@]}"; do
local title="${item%%|*}"
local hint="${item#*|}"
echo -e " ${YELLOW}${ICON_WARNING}${NC} ${title}"
echo -e " ${GRAY}${hint}${NC}"
done
fi
# Export for use in auto-fix
export HAS_AUTO_FIX_SUGGESTIONS="$can_auto_fix"
}
# Ask user if they want to auto-fix
# Returns: 0 if yes, 1 if no
ask_for_auto_fix() {
if [[ "${HAS_AUTO_FIX_SUGGESTIONS:-false}" != "true" ]]; then
return 1
fi
echo -ne "${PURPLE}${ICON_ARROW}${NC} Auto-fix issues now? ${GRAY}Enter confirm / Space cancel${NC}: "
local key
if ! key=$(read_key); then
echo "no"
echo ""
return 1
fi
if [[ "$key" == "ENTER" ]]; then
echo "yes"
echo ""
return 0
else
echo "no"
echo ""
return 1
fi
}
# Perform auto-fixes
# Returns: number of fixes applied
perform_auto_fix() {
local fixed_count=0
local -a fixed_items=()
# Ensure sudo access
if ! has_sudo_session; then
if ! ensure_sudo_session "System fixes require admin access"; then
echo -e "${YELLOW}Skipping auto fixes (admin authentication required)${NC}"
echo ""
return 0
fi
fi
# Fix Firewall
if [[ -n "${FIREWALL_DISABLED:-}" && "${FIREWALL_DISABLED}" == "true" ]]; then
echo -e "${BLUE}Enabling Firewall...${NC}"
if sudo /usr/libexec/ApplicationFirewall/socketfilterfw --setglobalstate on > /dev/null 2>&1; then
echo -e "${GREEN}${NC} Firewall enabled"
((fixed_count++))
fixed_items+=("Firewall enabled")
else
echo -e "${RED}${NC} Failed to enable Firewall"
fi
echo ""
fi
# Fix Touch ID
if [[ -n "${TOUCHID_NOT_CONFIGURED:-}" && "${TOUCHID_NOT_CONFIGURED}" == "true" ]]; then
echo -e "${BLUE}${ICON_ARROW}${NC} Configuring Touch ID for sudo..."
local pam_file="/etc/pam.d/sudo"
if sudo bash -c "grep -q 'pam_tid.so' '$pam_file' 2>/dev/null || sed -i '' '2i\\
auth sufficient pam_tid.so
' '$pam_file'" 2> /dev/null; then
echo -e "${GREEN}${NC} Touch ID configured"
((fixed_count++))
fixed_items+=("Touch ID configured for sudo")
else
echo -e "${RED}${NC} Failed to configure Touch ID"
fi
echo ""
fi
# Install Rosetta 2
if [[ -n "${ROSETTA_NOT_INSTALLED:-}" && "${ROSETTA_NOT_INSTALLED}" == "true" ]]; then
echo -e "${BLUE}Installing Rosetta 2...${NC}"
if sudo softwareupdate --install-rosetta --agree-to-license 2>&1 | grep -qE "(Installing|Installed|already installed)"; then
echo -e "${GREEN}${NC} Rosetta 2 installed"
((fixed_count++))
fixed_items+=("Rosetta 2 installed")
else
echo -e "${RED}${NC} Failed to install Rosetta 2"
fi
echo ""
fi
if [[ $fixed_count -gt 0 ]]; then
AUTO_FIX_SUMMARY="Auto fixes applied: ${fixed_count} issue(s)"
if [[ ${#fixed_items[@]} -gt 0 ]]; then
AUTO_FIX_DETAILS=$(printf '%s\n' "${fixed_items[@]}")
else
AUTO_FIX_DETAILS=""
fi
else
AUTO_FIX_SUMMARY="Auto fixes skipped: No changes were required"
AUTO_FIX_DETAILS=""
fi
export AUTO_FIX_SUMMARY AUTO_FIX_DETAILS
return 0
}
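# Typical call sequence (sketch only; the real wiring lives in the caller):
#   show_suggestions
#   if ask_for_auto_fix; then
#     perform_auto_fix
#     echo "$AUTO_FIX_SUMMARY"
#   fi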

View File

@@ -1,117 +0,0 @@
#!/bin/bash
# Purge paths management functionality
# Opens config file for editing and shows current status
set -euo pipefail
# Get script directory and source dependencies
_MOLE_MANAGE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$_MOLE_MANAGE_DIR/../core/common.sh"
# Only source project.sh if not already loaded (has readonly vars)
if [[ -z "${PURGE_TARGETS:-}" ]]; then
source "$_MOLE_MANAGE_DIR/../clean/project.sh"
fi
# Config file path (use :- to avoid re-declaration if already set)
PURGE_PATHS_CONFIG="${PURGE_PATHS_CONFIG:-$HOME/.config/mole/purge_paths}"
# Ensure config file exists with helpful template
ensure_config_template() {
if [[ ! -f "$PURGE_PATHS_CONFIG" ]]; then
ensure_user_dir "$(dirname "$PURGE_PATHS_CONFIG")"
cat > "$PURGE_PATHS_CONFIG" << 'EOF'
# Mole Purge Paths - Directories to scan for project artifacts
# Add one path per line (supports ~ for home directory)
# Delete all paths or this file to use defaults
#
# Example:
# ~/Documents/MyProjects
# ~/Work/ClientA
# ~/Work/ClientB
EOF
fi
}
# Main management function
manage_purge_paths() {
ensure_config_template
local display_config="${PURGE_PATHS_CONFIG/#$HOME/~}"
# Clear screen
if [[ -t 1 ]]; then
printf '\033[2J\033[H'
fi
echo -e "${PURPLE_BOLD}Purge Paths Configuration${NC}"
echo ""
# Show current status
echo -e "${YELLOW}Current Scan Paths:${NC}"
# Reload config
load_purge_config
if [[ ${#PURGE_SEARCH_PATHS[@]} -gt 0 ]]; then
for path in "${PURGE_SEARCH_PATHS[@]}"; do
local display_path="${path/#$HOME/~}"
if [[ -d "$path" ]]; then
echo -e " ${GREEN}${NC} $display_path"
else
echo -e " ${GRAY}${NC} $display_path ${GRAY}(not found)${NC}"
fi
done
fi
# Check if using custom config
local custom_count=0
if [[ -f "$PURGE_PATHS_CONFIG" ]]; then
while IFS= read -r line; do
line="${line#"${line%%[![:space:]]*}"}"
line="${line%"${line##*[![:space:]]}"}"
[[ -z "$line" || "$line" =~ ^# ]] && continue
((custom_count++))
done < "$PURGE_PATHS_CONFIG"
fi
echo ""
if [[ $custom_count -gt 0 ]]; then
echo -e "${GRAY}Using custom config with $custom_count path(s)${NC}"
else
echo -e "${GRAY}Using ${#DEFAULT_PURGE_SEARCH_PATHS[@]} default paths${NC}"
fi
echo ""
echo -e "${YELLOW}Default Paths:${NC}"
for path in "${DEFAULT_PURGE_SEARCH_PATHS[@]}"; do
echo -e " ${GRAY}-${NC} ${path/#$HOME/~}"
done
echo ""
echo -e "${YELLOW}Config File:${NC} $display_config"
echo ""
# Open in editor
local editor="${EDITOR:-${VISUAL:-vim}}"
echo -e "Opening in ${CYAN}$editor${NC}..."
echo -e "${GRAY}Save and exit to apply changes. Leave empty to use defaults.${NC}"
echo ""
# Wait for user to read
sleep 1
# Open editor
"$editor" "$PURGE_PATHS_CONFIG"
# Reload and show updated status
load_purge_config
echo ""
echo -e "${GREEN}${ICON_SUCCESS}${NC} Configuration updated"
echo -e "${GRAY}Run 'mo purge' to clean with new paths${NC}"
echo ""
}
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
manage_purge_paths
fi

View File

@@ -1,141 +0,0 @@
#!/bin/bash
# Update Manager
# Unified update execution for all update types
set -euo pipefail
# Format Homebrew update label for display
format_brew_update_label() {
local total="${BREW_OUTDATED_COUNT:-0}"
if [[ -z "$total" || "$total" -le 0 ]]; then
return
fi
local -a details=()
local formulas="${BREW_FORMULA_OUTDATED_COUNT:-0}"
local casks="${BREW_CASK_OUTDATED_COUNT:-0}"
((formulas > 0)) && details+=("${formulas} formula")
((casks > 0)) && details+=("${casks} cask")
local detail_str="(${total} updates)"
if ((${#details[@]} > 0)); then
detail_str="($(
IFS=', '
printf '%s' "${details[*]}"
))"
fi
printf " • Homebrew %s" "$detail_str"
}
brew_has_outdated() {
local kind="${1:-formula}"
command -v brew > /dev/null 2>&1 || return 1
if [[ "$kind" == "cask" ]]; then
brew outdated --cask --quiet 2> /dev/null | grep -q .
else
brew outdated --quiet 2> /dev/null | grep -q .
fi
}
# Ask user if they want to update
# Returns: 0 if yes, 1 if no
ask_for_updates() {
local has_updates=false
local -a update_list=()
local brew_entry
brew_entry=$(format_brew_update_label || true)
if [[ -n "$brew_entry" ]]; then
has_updates=true
update_list+=("$brew_entry")
fi
if [[ -n "${APPSTORE_UPDATE_COUNT:-}" && "${APPSTORE_UPDATE_COUNT:-0}" -gt 0 ]]; then
has_updates=true
update_list+=(" • App Store (${APPSTORE_UPDATE_COUNT} apps)")
fi
if [[ -n "${MACOS_UPDATE_AVAILABLE:-}" && "${MACOS_UPDATE_AVAILABLE}" == "true" ]]; then
has_updates=true
update_list+=(" • macOS system")
fi
if [[ -n "${MOLE_UPDATE_AVAILABLE:-}" && "${MOLE_UPDATE_AVAILABLE}" == "true" ]]; then
has_updates=true
update_list+=(" • Mole")
fi
if [[ "$has_updates" == "false" ]]; then
return 1
fi
echo -e "${BLUE}AVAILABLE UPDATES${NC}"
for item in "${update_list[@]}"; do
echo -e "$item"
done
echo ""
# Mole is the only update applied automatically; prompt for it when available
if [[ "${MOLE_UPDATE_AVAILABLE:-}" == "true" ]]; then
echo ""
echo -ne "${YELLOW}Update Mole now?${NC} ${GRAY}Enter confirm / ESC cancel${NC}: "
local key
if ! key=$(read_key); then
echo "skip"
echo ""
return 1
fi
if [[ "$key" == "ENTER" ]]; then
echo "yes"
echo ""
return 0
fi
fi
echo ""
echo -e "${YELLOW}💡 Run ${GREEN}brew upgrade${YELLOW} to update${NC}"
return 1
}
# Perform all pending updates
# Returns: 0 if all succeeded, 1 if some failed
perform_updates() {
# Only handle Mole updates here; Homebrew/App Store/macOS are manual (tips shown in ask_for_updates)
local updated_count=0
local total_count=0
if [[ -n "${MOLE_UPDATE_AVAILABLE:-}" && "${MOLE_UPDATE_AVAILABLE}" == "true" ]]; then
echo -e "${BLUE}Updating Mole...${NC}"
local mole_bin="${SCRIPT_DIR}/../../mole"
[[ ! -f "$mole_bin" ]] && mole_bin=$(command -v mole 2> /dev/null || echo "")
if [[ -x "$mole_bin" ]]; then
if "$mole_bin" update 2>&1 | grep -qE "(Updated|latest version)"; then
echo -e "${GREEN}${NC} Mole updated"
reset_mole_cache
((updated_count++))
else
echo -e "${RED}${NC} Mole update failed"
fi
else
echo -e "${RED}${NC} Mole executable not found"
fi
echo ""
total_count=1
fi
if [[ $total_count -eq 0 ]]; then
echo -e "${GRAY}No updates to perform${NC}"
return 0
elif [[ $updated_count -eq $total_count ]]; then
echo -e "${GREEN}All updates completed (${updated_count}/${total_count})${NC}"
return 0
else
echo -e "${RED}Update failed (${updated_count}/${total_count})${NC}"
return 1
fi
}
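# Typical call sequence (sketch only; actual wiring lives in the caller):
#   if ask_for_updates; then
#     perform_updates || echo "Some updates failed"
#   fi
# Only Mole itself is updated automatically; Homebrew, App Store, and macOS
# updates are surfaced as manual tips.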

View File

@@ -1,430 +0,0 @@
#!/bin/bash
# Whitelist management functionality
# Shows actual files that would be deleted by dry-run
set -euo pipefail
# Get script directory and source dependencies
_MOLE_MANAGE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$_MOLE_MANAGE_DIR/../core/common.sh"
source "$_MOLE_MANAGE_DIR/../ui/menu_simple.sh"
# Config file paths
readonly WHITELIST_CONFIG_CLEAN="$HOME/.config/mole/whitelist"
readonly WHITELIST_CONFIG_OPTIMIZE="$HOME/.config/mole/whitelist_optimize"
readonly WHITELIST_CONFIG_OPTIMIZE_LEGACY="$HOME/.config/mole/whitelist_checks"
# Default whitelist patterns defined in lib/core/common.sh:
# - DEFAULT_WHITELIST_PATTERNS
# - FINDER_METADATA_SENTINEL
# Save whitelist patterns to config (defaults to "clean" for legacy callers)
save_whitelist_patterns() {
local mode="clean"
if [[ $# -gt 0 ]]; then
case "$1" in
clean | optimize)
mode="$1"
shift
;;
esac
fi
local -a patterns
patterns=("$@")
local config_file
local header_text
if [[ "$mode" == "optimize" ]]; then
config_file="$WHITELIST_CONFIG_OPTIMIZE"
header_text="# Mole Optimization Whitelist - These checks will be skipped during optimization"
else
config_file="$WHITELIST_CONFIG_CLEAN"
header_text="# Mole Whitelist - Protected paths won't be deleted\n# Default protections: Playwright browsers, HuggingFace models, Maven repo, Ollama models, Surge Mac, R renv, Finder metadata\n# Add one pattern per line to keep items safe."
fi
ensure_user_file "$config_file"
echo -e "$header_text" > "$config_file"
if [[ ${#patterns[@]} -gt 0 ]]; then
local -a unique_patterns=()
for pattern in "${patterns[@]}"; do
local duplicate="false"
if [[ ${#unique_patterns[@]} -gt 0 ]]; then
for existing in "${unique_patterns[@]}"; do
if patterns_equivalent "$pattern" "$existing"; then
duplicate="true"
break
fi
done
fi
[[ "$duplicate" == "true" ]] && continue
unique_patterns+=("$pattern")
done
if [[ ${#unique_patterns[@]} -gt 0 ]]; then
printf '\n' >> "$config_file"
for pattern in "${unique_patterns[@]}"; do
echo "$pattern" >> "$config_file"
done
fi
fi
}
# Get all cache items with their patterns
get_all_cache_items() {
# Format: "display_name|pattern|category"
cat << 'EOF'
Apple Mail cache|$HOME/Library/Caches/com.apple.mail/*|system_cache
Gradle build cache (Android Studio, Gradle projects)|$HOME/.gradle/caches/*|ide_cache
Gradle daemon processes cache|$HOME/.gradle/daemon/*|ide_cache
Xcode DerivedData (build outputs, indexes)|$HOME/Library/Developer/Xcode/DerivedData/*|ide_cache
Xcode archives (built app packages)|$HOME/Library/Developer/Xcode/Archives/*|ide_cache
Xcode internal cache files|$HOME/Library/Caches/com.apple.dt.Xcode/*|ide_cache
Xcode iOS device support symbols|$HOME/Library/Developer/Xcode/iOS DeviceSupport/*/Symbols/System/Library/Caches/*|ide_cache
Maven local repository (Java dependencies)|$HOME/.m2/repository/*|ide_cache
JetBrains IDEs data (IntelliJ, PyCharm, WebStorm, GoLand)|$HOME/Library/Application Support/JetBrains/*|ide_cache
JetBrains IDEs cache|$HOME/Library/Caches/JetBrains/*|ide_cache
Android Studio cache and indexes|$HOME/Library/Caches/Google/AndroidStudio*/*|ide_cache
Android build cache|$HOME/.android/build-cache/*|ide_cache
VS Code runtime cache|$HOME/Library/Application Support/Code/Cache/*|ide_cache
VS Code extension and update cache|$HOME/Library/Application Support/Code/CachedData/*|ide_cache
VS Code system cache (Cursor, VSCodium)|$HOME/Library/Caches/com.microsoft.VSCode/*|ide_cache
Cursor editor cache|$HOME/Library/Caches/com.todesktop.230313mzl4w4u92/*|ide_cache
Bazel build cache|$HOME/.cache/bazel/*|compiler_cache
Go build cache and module cache|$HOME/Library/Caches/go-build/*|compiler_cache
Go module cache|$HOME/go/pkg/mod/cache/*|compiler_cache
Rust Cargo registry cache|$HOME/.cargo/registry/cache/*|compiler_cache
Rust documentation cache|$HOME/.rustup/toolchains/*/share/doc/*|compiler_cache
Rustup toolchain downloads|$HOME/.rustup/downloads/*|compiler_cache
ccache compiler cache|$HOME/.ccache/*|compiler_cache
sccache distributed compiler cache|$HOME/.cache/sccache/*|compiler_cache
SBT Scala build cache|$HOME/.sbt/*|compiler_cache
Ivy dependency cache|$HOME/.ivy2/cache/*|compiler_cache
Turbo monorepo build cache|$HOME/.turbo/*|compiler_cache
Next.js build cache|$HOME/.next/*|compiler_cache
Vite build cache|$HOME/.vite/*|compiler_cache
Parcel bundler cache|$HOME/.parcel-cache/*|compiler_cache
pre-commit hooks cache|$HOME/.cache/pre-commit/*|compiler_cache
Ruff Python linter cache|$HOME/.cache/ruff/*|compiler_cache
MyPy type checker cache|$HOME/.cache/mypy/*|compiler_cache
Pytest test cache|$HOME/.pytest_cache/*|compiler_cache
Flutter SDK cache|$HOME/.cache/flutter/*|compiler_cache
Swift Package Manager cache|$HOME/.cache/swift-package-manager/*|compiler_cache
Zig compiler cache|$HOME/.cache/zig/*|compiler_cache
Deno cache|$HOME/Library/Caches/deno/*|compiler_cache
CocoaPods cache (iOS dependencies)|$HOME/Library/Caches/CocoaPods/*|package_manager
npm package cache|$HOME/.npm/_cacache/*|package_manager
pip Python package cache|$HOME/.cache/pip/*|package_manager
uv Python package cache|$HOME/.cache/uv/*|package_manager
R renv global cache (virtual environments)|$HOME/Library/Caches/org.R-project.R/R/renv/*|package_manager
Homebrew downloaded packages|$HOME/Library/Caches/Homebrew/*|package_manager
Yarn package manager cache|$HOME/.cache/yarn/*|package_manager
pnpm package store|$HOME/.pnpm-store/*|package_manager
Composer PHP dependencies cache|$HOME/.composer/cache/*|package_manager
RubyGems cache|$HOME/.gem/cache/*|package_manager
Conda packages cache|$HOME/.conda/pkgs/*|package_manager
Anaconda packages cache|$HOME/anaconda3/pkgs/*|package_manager
PyTorch model cache|$HOME/.cache/torch/*|ai_ml_cache
TensorFlow model and dataset cache|$HOME/.cache/tensorflow/*|ai_ml_cache
HuggingFace models and datasets|$HOME/.cache/huggingface/*|ai_ml_cache
Playwright browser binaries|$HOME/Library/Caches/ms-playwright*|ai_ml_cache
Selenium WebDriver binaries|$HOME/.cache/selenium/*|ai_ml_cache
Ollama local AI models|$HOME/.ollama/models/*|ai_ml_cache
Weights & Biases ML experiments cache|$HOME/.cache/wandb/*|ai_ml_cache
Safari web browser cache|$HOME/Library/Caches/com.apple.Safari/*|browser_cache
Chrome browser cache|$HOME/Library/Caches/Google/Chrome/*|browser_cache
Firefox browser cache|$HOME/Library/Caches/Firefox/*|browser_cache
Brave browser cache|$HOME/Library/Caches/BraveSoftware/Brave-Browser/*|browser_cache
Surge proxy cache|$HOME/Library/Caches/com.nssurge.surge-mac/*|network_tools
Surge configuration and data|$HOME/Library/Application Support/com.nssurge.surge-mac/*|network_tools
Docker Desktop image cache|$HOME/Library/Containers/com.docker.docker/Data/*|container_cache
Podman container cache|$HOME/.local/share/containers/cache/*|container_cache
Font cache|$HOME/Library/Caches/com.apple.FontRegistry/*|system_cache
Spotlight metadata cache|$HOME/Library/Caches/com.apple.spotlight/*|system_cache
CloudKit cache|$HOME/Library/Caches/CloudKit/*|system_cache
Trash|$HOME/.Trash|system_cache
EOF
# Add FINDER_METADATA with constant reference
echo "Finder metadata (.DS_Store)|$FINDER_METADATA_SENTINEL|system_cache"
}
# Get all optimize items with their patterns
get_optimize_whitelist_items() {
# Format: "display_name|pattern|category"
cat << 'EOF'
macOS Firewall check|firewall|security_check
Gatekeeper check|gatekeeper|security_check
macOS system updates check|check_macos_updates|update_check
Mole updates check|check_mole_update|update_check
Homebrew health check (doctor)|check_brew_health|health_check
SIP status check|check_sip|security_check
FileVault status check|check_filevault|security_check
TouchID sudo check|check_touchid|config_check
Rosetta 2 check|check_rosetta|config_check
Git configuration check|check_git_config|config_check
Login items check|check_login_items|config_check
EOF
}
patterns_equivalent() {
local first="${1/#~/$HOME}"
local second="${2/#~/$HOME}"
# Only exact string match, no glob expansion
[[ "$first" == "$second" ]] && return 0
return 1
}
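# Examples (comments only): "~" is expanded to $HOME before the exact comparison,
# so these two spellings count as the same pattern:
#   patterns_equivalent "~/.npm/_cacache/*" "$HOME/.npm/_cacache/*"   # -> 0 (equivalent)
#   patterns_equivalent "~/.npm/*" "$HOME/.npm/_cacache/*"            # -> 1 (no glob matching)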
load_whitelist() {
local mode="${1:-clean}"
local -a patterns=()
local config_file
local legacy_file=""
if [[ "$mode" == "optimize" ]]; then
config_file="$WHITELIST_CONFIG_OPTIMIZE"
legacy_file="$WHITELIST_CONFIG_OPTIMIZE_LEGACY"
else
config_file="$WHITELIST_CONFIG_CLEAN"
fi
local using_legacy="false"
if [[ ! -f "$config_file" && -n "$legacy_file" && -f "$legacy_file" ]]; then
config_file="$legacy_file"
using_legacy="true"
fi
if [[ -f "$config_file" ]]; then
while IFS= read -r line; do
# shellcheck disable=SC2295
line="${line#"${line%%[![:space:]]*}"}"
# shellcheck disable=SC2295
line="${line%"${line##*[![:space:]]}"}"
[[ -z "$line" || "$line" =~ ^# ]] && continue
patterns+=("$line")
done < "$config_file"
else
if [[ "$mode" == "clean" ]]; then
patterns=("${DEFAULT_WHITELIST_PATTERNS[@]}")
elif [[ "$mode" == "optimize" ]]; then
patterns=("${DEFAULT_OPTIMIZE_WHITELIST_PATTERNS[@]}")
fi
fi
if [[ ${#patterns[@]} -gt 0 ]]; then
local -a unique_patterns=()
for pattern in "${patterns[@]}"; do
local duplicate="false"
if [[ ${#unique_patterns[@]} -gt 0 ]]; then
for existing in "${unique_patterns[@]}"; do
if patterns_equivalent "$pattern" "$existing"; then
duplicate="true"
break
fi
done
fi
[[ "$duplicate" == "true" ]] && continue
unique_patterns+=("$pattern")
done
CURRENT_WHITELIST_PATTERNS=("${unique_patterns[@]}")
# Migrate legacy optimize config to the new path automatically
if [[ "$mode" == "optimize" && "$using_legacy" == "true" && "$config_file" != "$WHITELIST_CONFIG_OPTIMIZE" ]]; then
save_whitelist_patterns "$mode" "${CURRENT_WHITELIST_PATTERNS[@]}"
fi
else
CURRENT_WHITELIST_PATTERNS=()
fi
}
is_whitelisted() {
local pattern="$1"
local check_pattern="${pattern/#\~/$HOME}"
if [[ ${#CURRENT_WHITELIST_PATTERNS[@]} -eq 0 ]]; then
return 1
fi
for existing in "${CURRENT_WHITELIST_PATTERNS[@]}"; do
local existing_expanded="${existing/#\~/$HOME}"
# Only use exact string match to prevent glob expansion security issues
if [[ "$check_pattern" == "$existing_expanded" ]]; then
return 0
fi
done
return 1
}
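# Illustrative guard sketch (commented out; clean_path is a hypothetical helper):
#   load_whitelist "clean"
#   if is_whitelisted "$HOME/.ollama/models/*"; then
#     echo "protected, skipping"
#   else
#     clean_path "$HOME/.ollama/models/*"   # hypothetical
#   fi
# Matching is an exact string comparison after "~" expansion; globs are never expanded.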
manage_whitelist() {
local mode="${1:-clean}"
manage_whitelist_categories "$mode"
}
manage_whitelist_categories() {
local mode="$1"
# Load currently enabled patterns from both sources
load_whitelist "$mode"
# Build cache items list
local -a cache_items=()
local -a cache_patterns=()
local -a menu_options=()
local index=0
# Choose source based on mode
local items_source
local menu_title
local active_config_file
if [[ "$mode" == "optimize" ]]; then
items_source=$(get_optimize_whitelist_items)
active_config_file="$WHITELIST_CONFIG_OPTIMIZE"
local display_config="${active_config_file/#$HOME/~}"
menu_title="Whitelist Manager Select system checks to ignore
${GRAY}Edit: ${display_config}${NC}"
else
items_source=$(get_all_cache_items)
active_config_file="$WHITELIST_CONFIG_CLEAN"
local display_config="${active_config_file/#$HOME/~}"
menu_title="Whitelist Manager Select caches to protect
${GRAY}Edit: ${display_config}${NC}"
fi
while IFS='|' read -r display_name pattern _; do
# Expand $HOME in pattern
pattern="${pattern/\$HOME/$HOME}"
cache_items+=("$display_name")
cache_patterns+=("$pattern")
menu_options+=("$display_name")
((index++)) || true
done <<< "$items_source"
# Identify custom patterns (not in predefined list)
local -a custom_patterns=()
if [[ ${#CURRENT_WHITELIST_PATTERNS[@]} -gt 0 ]]; then
for current_pattern in "${CURRENT_WHITELIST_PATTERNS[@]}"; do
local is_predefined=false
for predefined_pattern in "${cache_patterns[@]}"; do
if patterns_equivalent "$current_pattern" "$predefined_pattern"; then
is_predefined=true
break
fi
done
if [[ "$is_predefined" == "false" ]]; then
custom_patterns+=("$current_pattern")
fi
done
fi
# Prioritize already-selected items to appear first
local -a selected_cache_items=()
local -a selected_cache_patterns=()
local -a selected_menu_options=()
local -a remaining_cache_items=()
local -a remaining_cache_patterns=()
local -a remaining_menu_options=()
for ((i = 0; i < ${#cache_patterns[@]}; i++)); do
if is_whitelisted "${cache_patterns[i]}"; then
selected_cache_items+=("${cache_items[i]}")
selected_cache_patterns+=("${cache_patterns[i]}")
selected_menu_options+=("${menu_options[i]}")
else
remaining_cache_items+=("${cache_items[i]}")
remaining_cache_patterns+=("${cache_patterns[i]}")
remaining_menu_options+=("${menu_options[i]}")
fi
done
cache_items=()
cache_patterns=()
menu_options=()
if [[ ${#selected_cache_items[@]} -gt 0 ]]; then
cache_items=("${selected_cache_items[@]}")
cache_patterns=("${selected_cache_patterns[@]}")
menu_options=("${selected_menu_options[@]}")
fi
if [[ ${#remaining_cache_items[@]} -gt 0 ]]; then
cache_items+=("${remaining_cache_items[@]}")
cache_patterns+=("${remaining_cache_patterns[@]}")
menu_options+=("${remaining_menu_options[@]}")
fi
if [[ ${#selected_cache_patterns[@]} -gt 0 ]]; then
local -a preselected_indices=()
for ((i = 0; i < ${#selected_cache_patterns[@]}; i++)); do
preselected_indices+=("$i")
done
local IFS=','
export MOLE_PRESELECTED_INDICES="${preselected_indices[*]}"
else
unset MOLE_PRESELECTED_INDICES
fi
MOLE_SELECTION_RESULT=""
paginated_multi_select "$menu_title" "${menu_options[@]}"
local exit_code=$?
unset MOLE_PRESELECTED_INDICES
# Cancelled or failed selection
if [[ $exit_code -ne 0 ]]; then
return 1
fi
# Convert selected indices to patterns
local -a selected_patterns=()
if [[ -n "$MOLE_SELECTION_RESULT" ]]; then
local -a selected_indices
IFS=',' read -ra selected_indices <<< "$MOLE_SELECTION_RESULT"
for idx in "${selected_indices[@]}"; do
if [[ $idx -ge 0 && $idx -lt ${#cache_patterns[@]} ]]; then
local pattern="${cache_patterns[$idx]}"
# Convert back to portable format with ~
pattern="${pattern/#$HOME/~}"
selected_patterns+=("$pattern")
fi
done
fi
# Merge custom patterns with selected patterns
local -a all_patterns=()
if [[ ${#selected_patterns[@]} -gt 0 ]]; then
all_patterns=("${selected_patterns[@]}")
fi
if [[ ${#custom_patterns[@]} -gt 0 ]]; then
for custom_pattern in "${custom_patterns[@]}"; do
all_patterns+=("$custom_pattern")
done
fi
# Save to whitelist config (bash 3.2 + set -u safe)
if [[ ${#all_patterns[@]} -gt 0 ]]; then
save_whitelist_patterns "$mode" "${all_patterns[@]}"
else
save_whitelist_patterns "$mode"
fi
local total_protected=$((${#selected_patterns[@]} + ${#custom_patterns[@]}))
local -a summary_lines=()
summary_lines+=("Whitelist Updated")
if [[ ${#custom_patterns[@]} -gt 0 ]]; then
summary_lines+=("Protected ${#selected_patterns[@]} predefined + ${#custom_patterns[@]} custom patterns")
else
summary_lines+=("Protected ${total_protected} cache(s)")
fi
local display_config="${active_config_file/#$HOME/~}"
summary_lines+=("Config: ${GRAY}${display_config}${NC}")
print_summary_block "${summary_lines[@]}"
printf '\n'
}
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
manage_whitelist
fi

View File

@@ -1,53 +0,0 @@
#!/bin/bash
# System Configuration Maintenance Module.
# Fix broken preferences and login items.
set -euo pipefail
# Remove corrupted preference files.
fix_broken_preferences() {
local prefs_dir="$HOME/Library/Preferences"
[[ -d "$prefs_dir" ]] || return 0
local broken_count=0
while IFS= read -r plist_file; do
[[ -f "$plist_file" ]] || continue
local filename
filename=$(basename "$plist_file")
case "$filename" in
com.apple.* | .GlobalPreferences* | loginwindow.plist)
continue
;;
esac
plutil -lint "$plist_file" > /dev/null 2>&1 && continue
safe_remove "$plist_file" true > /dev/null 2>&1 || true
((broken_count++))
done < <(command find "$prefs_dir" -maxdepth 1 -name "*.plist" -type f 2> /dev/null || true)
# Check ByHost preferences.
local byhost_dir="$prefs_dir/ByHost"
if [[ -d "$byhost_dir" ]]; then
while IFS= read -r plist_file; do
[[ -f "$plist_file" ]] || continue
local filename
filename=$(basename "$plist_file")
case "$filename" in
com.apple.* | .GlobalPreferences*)
continue
;;
esac
plutil -lint "$plist_file" > /dev/null 2>&1 && continue
safe_remove "$plist_file" true > /dev/null 2>&1 || true
((broken_count++))
done < <(command find "$byhost_dir" -name "*.plist" -type f 2> /dev/null || true)
fi
echo "$broken_count"
}
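# Illustrative caller sketch (commented out); the function prints only the
# count of removed plists, so capture it with command substitution:
#   repaired=$(fix_broken_preferences)
#   echo "Repaired $repaired corrupted preference files"
# Apple-owned plists (com.apple.*, .GlobalPreferences*, loginwindow.plist)
# are never touched; validity is judged with 'plutil -lint'.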

View File

@@ -1,779 +0,0 @@
#!/bin/bash
# Optimization Tasks
set -euo pipefail
# Config constants (override via env).
readonly MOLE_TM_THIN_TIMEOUT="${MOLE_TM_THIN_TIMEOUT:-180}"
readonly MOLE_TM_THIN_VALUE="${MOLE_TM_THIN_VALUE:-9999999999}"
readonly MOLE_SQLITE_MAX_SIZE="${MOLE_SQLITE_MAX_SIZE:-104857600}" # 100MB
# Dry-run aware output.
opt_msg() {
local message="$1"
if [[ "${MOLE_DRY_RUN:-0}" == "1" ]]; then
echo -e " ${YELLOW}${ICON_DRY_RUN}${NC} $message"
else
echo -e " ${GREEN}${NC} $message"
fi
}
run_launchctl_unload() {
local plist_file="$1"
local need_sudo="${2:-false}"
if [[ "${MOLE_DRY_RUN:-0}" == "1" ]]; then
return 0
fi
if [[ "$need_sudo" == "true" ]]; then
sudo launchctl unload "$plist_file" 2> /dev/null || true
else
launchctl unload "$plist_file" 2> /dev/null || true
fi
}
needs_permissions_repair() {
local owner
owner=$(stat -f %Su "$HOME" 2> /dev/null || echo "")
if [[ -n "$owner" && "$owner" != "$USER" ]]; then
return 0
fi
local -a paths=(
"$HOME"
"$HOME/Library"
"$HOME/Library/Preferences"
)
local path
for path in "${paths[@]}"; do
if [[ -e "$path" && ! -w "$path" ]]; then
return 0
fi
done
return 1
}
has_bluetooth_hid_connected() {
local bt_report
bt_report=$(system_profiler SPBluetoothDataType 2> /dev/null || echo "")
if ! echo "$bt_report" | grep -q "Connected: Yes"; then
return 1
fi
if echo "$bt_report" | grep -Eiq "Keyboard|Trackpad|Mouse|HID"; then
return 0
fi
return 1
}
is_ac_power() {
pmset -g batt 2> /dev/null | grep -q "AC Power"
}
is_memory_pressure_high() {
if ! command -v memory_pressure > /dev/null 2>&1; then
return 1
fi
local mp_output
mp_output=$(memory_pressure -Q 2> /dev/null || echo "")
if echo "$mp_output" | grep -Eiq "warning|critical"; then
return 0
fi
return 1
}
flush_dns_cache() {
if [[ "${MOLE_DRY_RUN:-0}" == "1" ]]; then
MOLE_DNS_FLUSHED=1
return 0
fi
if sudo dscacheutil -flushcache 2> /dev/null && sudo killall -HUP mDNSResponder 2> /dev/null; then
MOLE_DNS_FLUSHED=1
return 0
fi
return 1
}
# Basic system maintenance.
opt_system_maintenance() {
if flush_dns_cache; then
opt_msg "DNS cache flushed"
fi
local spotlight_status
spotlight_status=$(mdutil -s / 2> /dev/null || echo "")
if echo "$spotlight_status" | grep -qi "Indexing disabled"; then
echo -e " ${GRAY}${ICON_EMPTY}${NC} Spotlight indexing disabled"
else
opt_msg "Spotlight index verified"
fi
}
# Refresh Finder caches (QuickLook/icon services).
opt_cache_refresh() {
local total_cache_size=0
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Finder Cache Refresh" "Refresh QuickLook thumbnails and icon services"
debug_operation_detail "Method" "Remove cache files and rebuild via qlmanage"
debug_operation_detail "Expected outcome" "Faster Finder preview generation, fixed icon display issues"
debug_risk_level "LOW" "Caches are automatically rebuilt"
local -a cache_targets=(
"$HOME/Library/Caches/com.apple.QuickLook.thumbnailcache"
"$HOME/Library/Caches/com.apple.iconservices.store"
"$HOME/Library/Caches/com.apple.iconservices"
)
debug_operation_detail "Files to be removed" ""
for target_path in "${cache_targets[@]}"; do
if [[ -e "$target_path" ]]; then
local size_kb
size_kb=$(get_path_size_kb "$target_path" 2> /dev/null || echo "0")
local size_human="unknown"
if [[ "$size_kb" -gt 0 ]]; then
size_human=$(bytes_to_human "$((size_kb * 1024))")
fi
debug_file_action " Will remove" "$target_path" "$size_human" ""
fi
done
fi
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
qlmanage -r cache > /dev/null 2>&1 || true
qlmanage -r > /dev/null 2>&1 || true
fi
local -a cache_targets=(
"$HOME/Library/Caches/com.apple.QuickLook.thumbnailcache"
"$HOME/Library/Caches/com.apple.iconservices.store"
"$HOME/Library/Caches/com.apple.iconservices"
)
for target_path in "${cache_targets[@]}"; do
if [[ -e "$target_path" ]]; then
if ! should_protect_path "$target_path"; then
local size_kb
size_kb=$(get_path_size_kb "$target_path" 2> /dev/null || echo "0")
if [[ "$size_kb" =~ ^[0-9]+$ ]]; then
total_cache_size=$((total_cache_size + size_kb))
fi
safe_remove "$target_path" true > /dev/null 2>&1
fi
fi
done
export OPTIMIZE_CACHE_CLEANED_KB="${total_cache_size}"
opt_msg "QuickLook thumbnails refreshed"
opt_msg "Icon services cache rebuilt"
}
# Removed: opt_maintenance_scripts - macOS handles log rotation automatically via launchd
# Removed: opt_radio_refresh - Interrupts active user connections (WiFi, Bluetooth), degrading UX
# Old saved states cleanup.
opt_saved_state_cleanup() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "App Saved State Cleanup" "Remove old application saved states"
debug_operation_detail "Method" "Find and remove .savedState folders older than $MOLE_SAVED_STATE_AGE_DAYS days"
debug_operation_detail "Location" "$HOME/Library/Saved Application State"
debug_operation_detail "Expected outcome" "Reduced disk usage, apps start with clean state"
debug_risk_level "LOW" "Old saved states, apps will create new ones"
fi
local state_dir="$HOME/Library/Saved Application State"
if [[ -d "$state_dir" ]]; then
while IFS= read -r -d '' state_path; do
if should_protect_path "$state_path"; then
continue
fi
safe_remove "$state_path" true > /dev/null 2>&1
done < <(command find "$state_dir" -type d -name "*.savedState" -mtime "+$MOLE_SAVED_STATE_AGE_DAYS" -print0 2> /dev/null)
fi
opt_msg "App saved states optimized"
}
# Removed: opt_swap_cleanup - Direct virtual memory operations pose system crash risk
# Removed: opt_startup_cache - Modern macOS has no such mechanism
# Removed: opt_local_snapshots - Deletes user Time Machine recovery points, breaks backup continuity
opt_fix_broken_configs() {
local spinner_started="false"
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking preferences..."
spinner_started="true"
fi
local broken_prefs=$(fix_broken_preferences)
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
export OPTIMIZE_CONFIGS_REPAIRED="${broken_prefs}"
if [[ $broken_prefs -gt 0 ]]; then
opt_msg "Repaired $broken_prefs corrupted preference files"
else
opt_msg "All preference files valid"
fi
}
# DNS cache refresh.
opt_network_optimization() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Network Optimization" "Refresh DNS cache and restart mDNSResponder"
debug_operation_detail "Method" "Flush DNS cache via dscacheutil and killall mDNSResponder"
debug_operation_detail "Expected outcome" "Faster DNS resolution, fixed network connectivity issues"
debug_risk_level "LOW" "DNS cache is automatically rebuilt"
fi
if [[ "${MOLE_DNS_FLUSHED:-0}" == "1" ]]; then
opt_msg "DNS cache already refreshed"
opt_msg "mDNSResponder already restarted"
return 0
fi
if flush_dns_cache; then
opt_msg "DNS cache refreshed"
opt_msg "mDNSResponder restarted"
else
echo -e " ${YELLOW}!${NC} Failed to refresh DNS cache"
fi
}
# SQLite vacuum for Mail/Messages/Safari (safety checks applied).
opt_sqlite_vacuum() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Database Optimization" "Vacuum SQLite databases for Mail, Safari, and Messages"
debug_operation_detail "Method" "Run VACUUM command on databases after integrity check"
debug_operation_detail "Safety checks" "Skip if apps are running, verify integrity first, 20s timeout"
debug_operation_detail "Expected outcome" "Reduced database size, faster app performance"
debug_risk_level "LOW" "Only optimizes databases, does not delete data"
fi
if ! command -v sqlite3 > /dev/null 2>&1; then
echo -e " ${GRAY}-${NC} Database optimization already optimal (sqlite3 unavailable)"
return 0
fi
local -a busy_apps=()
local -a check_apps=("Mail" "Safari" "Messages")
local app
for app in "${check_apps[@]}"; do
if pgrep -x "$app" > /dev/null 2>&1; then
busy_apps+=("$app")
fi
done
if [[ ${#busy_apps[@]} -gt 0 ]]; then
echo -e " ${YELLOW}!${NC} Close these apps before database optimization: ${busy_apps[*]}"
return 0
fi
local spinner_started="false"
if [[ "${MOLE_DRY_RUN:-0}" != "1" && -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Optimizing databases..."
spinner_started="true"
fi
local -a db_paths=(
"$HOME/Library/Mail/V*/MailData/Envelope Index*"
"$HOME/Library/Messages/chat.db"
"$HOME/Library/Safari/History.db"
"$HOME/Library/Safari/TopSites.db"
)
local vacuumed=0
local timed_out=0
local failed=0
local skipped=0
for pattern in "${db_paths[@]}"; do
while IFS= read -r db_file; do
[[ ! -f "$db_file" ]] && continue
[[ "$db_file" == *"-wal" || "$db_file" == *"-shm" ]] && continue
should_protect_path "$db_file" && continue
if ! file "$db_file" 2> /dev/null | grep -q "SQLite"; then
continue
fi
# Skip large DBs (>100MB).
local file_size
file_size=$(get_file_size "$db_file")
if [[ "$file_size" -gt "$MOLE_SQLITE_MAX_SIZE" ]]; then
((skipped++))
continue
fi
# Skip if freelist is tiny (already compact).
local page_info=""
page_info=$(run_with_timeout 5 sqlite3 "$db_file" "PRAGMA page_count; PRAGMA freelist_count;" 2> /dev/null || echo "")
local page_count=""
local freelist_count=""
page_count=$(echo "$page_info" | awk 'NR==1 {print $1}' 2> /dev/null || echo "")
freelist_count=$(echo "$page_info" | awk 'NR==2 {print $1}' 2> /dev/null || echo "")
if [[ "$page_count" =~ ^[0-9]+$ && "$freelist_count" =~ ^[0-9]+$ && "$page_count" -gt 0 ]]; then
if ((freelist_count * 100 < page_count * 5)); then
((skipped++))
continue
fi
fi
# Verify integrity before VACUUM.
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
local integrity_check=""
set +e
integrity_check=$(run_with_timeout 10 sqlite3 "$db_file" "PRAGMA integrity_check;" 2> /dev/null)
local integrity_status=$?
set -e
if [[ $integrity_status -ne 0 ]] || ! echo "$integrity_check" | grep -q "ok"; then
((skipped++))
continue
fi
fi
local exit_code=0
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
set +e
run_with_timeout 20 sqlite3 "$db_file" "VACUUM;" 2> /dev/null
exit_code=$?
set -e
if [[ $exit_code -eq 0 ]]; then
((vacuumed++))
elif [[ $exit_code -eq 124 ]]; then
((timed_out++))
else
((failed++))
fi
else
((vacuumed++))
fi
done < <(compgen -G "$pattern" || true)
done
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
export OPTIMIZE_DATABASES_COUNT="${vacuumed}"
if [[ $vacuumed -gt 0 ]]; then
opt_msg "Optimized $vacuumed databases for Mail, Safari, Messages"
elif [[ $timed_out -eq 0 && $failed -eq 0 ]]; then
opt_msg "All databases already optimized"
else
echo -e " ${YELLOW}!${NC} Database optimization incomplete"
fi
if [[ $skipped -gt 0 ]]; then
opt_msg "Already optimal for $skipped databases"
fi
if [[ $timed_out -gt 0 ]]; then
echo -e " ${YELLOW}!${NC} Timed out on $timed_out databases"
fi
if [[ $failed -gt 0 ]]; then
echo -e " ${YELLOW}!${NC} Failed on $failed databases"
fi
}
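# Freelist threshold worked example (comments only): VACUUM is skipped when free
# pages are under 5% of total pages, i.e. freelist_count * 100 < page_count * 5.
#   page_count=10000, freelist_count=200  -> 20000 < 50000  -> skip (2% free)
#   page_count=10000, freelist_count=800  -> 80000 >= 50000 -> vacuum (8% free)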
# LaunchServices rebuild ("Open with" issues).
opt_launch_services_rebuild() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "LaunchServices Rebuild" "Rebuild LaunchServices database"
debug_operation_detail "Method" "Run lsregister -r on system, user, and local domains"
debug_operation_detail "Purpose" "Fix \"Open with\" menu issues and file associations"
debug_operation_detail "Expected outcome" "Correct app associations, fixed duplicate entries"
debug_risk_level "LOW" "Database is automatically rebuilt"
fi
if [[ -t 1 ]]; then
start_inline_spinner ""
fi
local lsregister="/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister"
if [[ -f "$lsregister" ]]; then
local success=0
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
set +e
"$lsregister" -r -domain local -domain user -domain system > /dev/null 2>&1
success=$?
if [[ $success -ne 0 ]]; then
"$lsregister" -r -domain local -domain user > /dev/null 2>&1
success=$?
fi
set -e
else
success=0
fi
if [[ -t 1 ]]; then
stop_inline_spinner
fi
if [[ $success -eq 0 ]]; then
opt_msg "LaunchServices repaired"
opt_msg "File associations refreshed"
else
echo -e " ${YELLOW}!${NC} Failed to rebuild LaunchServices"
fi
else
if [[ -t 1 ]]; then
stop_inline_spinner
fi
echo -e " ${YELLOW}!${NC} lsregister not found"
fi
}
# Font cache rebuild.
opt_font_cache_rebuild() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Font Cache Rebuild" "Clear and rebuild font cache"
debug_operation_detail "Method" "Run atsutil databases -remove"
debug_operation_detail "Expected outcome" "Fixed font display issues, removed corrupted font cache"
debug_risk_level "LOW" "System automatically rebuilds font database"
fi
local success=false
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
if sudo atsutil databases -remove > /dev/null 2>&1; then
success=true
fi
else
success=true
fi
if [[ "$success" == "true" ]]; then
opt_msg "Font cache cleared"
opt_msg "System will rebuild font database automatically"
else
echo -e " ${YELLOW}!${NC} Failed to clear font cache"
fi
}
# Removed high-risk optimizations:
# - opt_startup_items_cleanup: Risk of deleting legitimate app helpers
# - opt_dyld_cache_update: Low benefit, time-consuming, auto-managed by macOS
# - opt_system_services_refresh: Risk of data loss when killing system services
# Memory pressure relief.
opt_memory_pressure_relief() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Memory Pressure Relief" "Release inactive memory if pressure is high"
debug_operation_detail "Method" "Run purge command to clear inactive memory"
debug_operation_detail "Condition" "Only runs if memory pressure is warning/critical"
debug_operation_detail "Expected outcome" "More available memory, improved responsiveness"
debug_risk_level "LOW" "Safe system command, does not affect active processes"
fi
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
if ! is_memory_pressure_high; then
opt_msg "Memory pressure already optimal"
return 0
fi
if sudo purge > /dev/null 2>&1; then
opt_msg "Inactive memory released"
opt_msg "System responsiveness improved"
else
echo -e " ${YELLOW}!${NC} Failed to release memory pressure"
fi
else
opt_msg "Inactive memory released"
opt_msg "System responsiveness improved"
fi
}
# Network stack reset (route + ARP).
opt_network_stack_optimize() {
local route_flushed="false"
local arp_flushed="false"
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
local route_ok=true
local dns_ok=true
if ! route -n get default > /dev/null 2>&1; then
route_ok=false
fi
if ! dscacheutil -q host -a name "example.com" > /dev/null 2>&1; then
dns_ok=false
fi
if [[ "$route_ok" == "true" && "$dns_ok" == "true" ]]; then
opt_msg "Network stack already optimal"
return 0
fi
if sudo route -n flush > /dev/null 2>&1; then
route_flushed="true"
fi
if sudo arp -a -d > /dev/null 2>&1; then
arp_flushed="true"
fi
else
route_flushed="true"
arp_flushed="true"
fi
if [[ "$route_flushed" == "true" ]]; then
opt_msg "Network routing table refreshed"
fi
if [[ "$arp_flushed" == "true" ]]; then
opt_msg "ARP cache cleared"
else
if [[ "$route_flushed" == "true" ]]; then
return 0
fi
echo -e " ${YELLOW}!${NC} Failed to optimize network stack"
fi
}
# User directory permissions repair.
opt_disk_permissions_repair() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Disk Permissions Repair" "Reset user directory permissions"
debug_operation_detail "Method" "Run diskutil resetUserPermissions on user home directory"
debug_operation_detail "Condition" "Only runs if permissions issues are detected"
debug_operation_detail "Expected outcome" "Fixed file access issues, correct ownership"
debug_risk_level "MEDIUM" "Requires sudo, modifies permissions"
fi
local user_id
user_id=$(id -u)
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
if ! needs_permissions_repair; then
opt_msg "User directory permissions already optimal"
return 0
fi
if [[ -t 1 ]]; then
start_inline_spinner "Repairing disk permissions..."
fi
local success=false
if sudo diskutil resetUserPermissions / "$user_id" > /dev/null 2>&1; then
success=true
fi
if [[ -t 1 ]]; then
stop_inline_spinner
fi
if [[ "$success" == "true" ]]; then
opt_msg "User directory permissions repaired"
opt_msg "File access issues resolved"
else
echo -e " ${YELLOW}!${NC} Failed to repair permissions (may not be needed)"
fi
else
opt_msg "User directory permissions repaired"
opt_msg "File access issues resolved"
fi
}
# Bluetooth reset (skip if HID/audio active).
opt_bluetooth_reset() {
if [[ "${MO_DEBUG:-}" == "1" ]]; then
debug_operation_start "Bluetooth Reset" "Restart Bluetooth daemon"
debug_operation_detail "Method" "Kill bluetoothd daemon (auto-restarts)"
debug_operation_detail "Safety" "Skips if active Bluetooth keyboard/mouse/audio detected"
debug_operation_detail "Expected outcome" "Fixed Bluetooth connectivity issues"
debug_risk_level "LOW" "Daemon auto-restarts, connections auto-reconnect"
fi
local spinner_started="false"
if [[ -t 1 ]]; then
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking Bluetooth..."
spinner_started="true"
fi
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
if has_bluetooth_hid_connected; then
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
opt_msg "Bluetooth already optimal"
return 0
fi
local bt_audio_active=false
local audio_info
audio_info=$(system_profiler SPAudioDataType 2> /dev/null || echo "")
local default_output
default_output=$(echo "$audio_info" | awk '/Default Output Device: Yes/,/^$/' 2> /dev/null || echo "")
if echo "$default_output" | grep -qi "Transport:.*Bluetooth"; then
bt_audio_active=true
fi
if [[ "$bt_audio_active" == "false" ]]; then
if system_profiler SPBluetoothDataType 2> /dev/null | grep -q "Connected: Yes"; then
local -a media_apps=("Music" "Spotify" "VLC" "QuickTime Player" "TV" "Podcasts" "Safari" "Google Chrome" "Chrome" "Firefox" "Arc" "IINA" "mpv")
for app in "${media_apps[@]}"; do
if pgrep -x "$app" > /dev/null 2>&1; then
bt_audio_active=true
break
fi
done
fi
fi
if [[ "$bt_audio_active" == "true" ]]; then
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
opt_msg "Bluetooth already optimal"
return 0
fi
if sudo pkill -TERM bluetoothd > /dev/null 2>&1; then
sleep 1
if pgrep -x bluetoothd > /dev/null 2>&1; then
sudo pkill -KILL bluetoothd > /dev/null 2>&1 || true
fi
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
opt_msg "Bluetooth module restarted"
opt_msg "Connectivity issues resolved"
else
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
opt_msg "Bluetooth already optimal"
fi
else
if [[ "$spinner_started" == "true" ]]; then
stop_inline_spinner
fi
opt_msg "Bluetooth module restarted"
opt_msg "Connectivity issues resolved"
fi
}
# Spotlight index check/rebuild (only if slow).
opt_spotlight_index_optimize() {
local spotlight_status
spotlight_status=$(mdutil -s / 2> /dev/null || echo "")
if echo "$spotlight_status" | grep -qi "Indexing disabled"; then
echo -e " ${GRAY}${ICON_EMPTY}${NC} Spotlight indexing is disabled"
return 0
fi
if echo "$spotlight_status" | grep -qi "Indexing enabled" && ! echo "$spotlight_status" | grep -qi "Indexing and searching disabled"; then
local slow_count=0
local test_start test_end test_duration
for _ in 1 2; do
test_start=$(get_epoch_seconds)
mdfind "kMDItemFSName == 'Applications'" > /dev/null 2>&1 || true
test_end=$(get_epoch_seconds)
test_duration=$((test_end - test_start))
if [[ $test_duration -gt 3 ]]; then
((slow_count++))
fi
sleep 1
done
if [[ $slow_count -ge 2 ]]; then
if ! is_ac_power; then
opt_msg "Spotlight index already optimal"
return 0
fi
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
echo -e " ${BLUE}${NC} Spotlight search is slow, rebuilding index (may take 1-2 hours)"
if sudo mdutil -E / > /dev/null 2>&1; then
opt_msg "Spotlight index rebuild started"
echo -e " ${GRAY}Indexing will continue in background${NC}"
else
echo -e " ${YELLOW}!${NC} Failed to rebuild Spotlight index"
fi
else
opt_msg "Spotlight index rebuild started"
fi
else
opt_msg "Spotlight index already optimal"
fi
else
opt_msg "Spotlight index verified"
fi
}
# Dock cache refresh.
opt_dock_refresh() {
local dock_support="$HOME/Library/Application Support/Dock"
local refreshed=false
if [[ -d "$dock_support" ]]; then
while IFS= read -r db_file; do
if [[ -f "$db_file" ]]; then
safe_remove "$db_file" true > /dev/null 2>&1 && refreshed=true
fi
done < <(find "$dock_support" -name "*.db" -type f 2> /dev/null || true)
fi
local dock_plist="$HOME/Library/Preferences/com.apple.dock.plist"
if [[ -f "$dock_plist" ]]; then
touch "$dock_plist" 2> /dev/null || true
fi
if [[ "${MOLE_DRY_RUN:-0}" != "1" ]]; then
killall Dock 2> /dev/null || true
fi
if [[ "$refreshed" == "true" ]]; then
opt_msg "Dock cache cleared"
fi
opt_msg "Dock refreshed"
}
# Dispatch optimization by action name.
execute_optimization() {
local action="$1"
local path="${2:-}"
case "$action" in
system_maintenance) opt_system_maintenance ;;
cache_refresh) opt_cache_refresh ;;
saved_state_cleanup) opt_saved_state_cleanup ;;
fix_broken_configs) opt_fix_broken_configs ;;
network_optimization) opt_network_optimization ;;
sqlite_vacuum) opt_sqlite_vacuum ;;
launch_services_rebuild) opt_launch_services_rebuild ;;
font_cache_rebuild) opt_font_cache_rebuild ;;
dock_refresh) opt_dock_refresh ;;
memory_pressure_relief) opt_memory_pressure_relief ;;
network_stack_optimize) opt_network_stack_optimize ;;
disk_permissions_repair) opt_disk_permissions_repair ;;
bluetooth_reset) opt_bluetooth_reset ;;
spotlight_index_optimize) opt_spotlight_index_optimize ;;
*)
echo -e "${YELLOW}${ICON_ERROR}${NC} Unknown action: $action"
return 1
;;
esac
}
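# Illustrative dispatch sketch (commented out; action names map 1:1 to the
# case branches above):
#   execute_optimization "dock_refresh"
#   execute_optimization "sqlite_vacuum"
#   execute_optimization "does_not_exist"   # prints "Unknown action" and returns 1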

View File

@@ -1,192 +0,0 @@
#!/bin/bash
# App selection functionality
set -euo pipefail
# Note: get_display_width() is now defined in lib/core/ui.sh
# Format app info for display
format_app_display() {
local display_name="$1" size="$2" last_used="$3"
# Use common function from ui.sh to format last used time
local compact_last_used
compact_last_used=$(format_last_used_summary "$last_used")
# Format size
local size_str="Unknown"
[[ "$size" != "0" && "$size" != "" && "$size" != "Unknown" ]] && size_str="$size"
# Calculate available width for app name based on terminal width
# Accept pre-calculated max_name_width (5th param) to avoid recalculation in loops
local terminal_width="${4:-$(tput cols 2> /dev/null || echo 80)}"
local max_name_width="${5:-}"
local available_width
if [[ -n "$max_name_width" ]]; then
# Use pre-calculated width from caller
available_width=$max_name_width
else
# Fallback: calculate it (slower, but works for standalone calls)
# Fixed elements: " ○ " (4) + " " (1) + size (9) + " | " (3) + max_last (7) = 24
local fixed_width=24
available_width=$((terminal_width - fixed_width))
# Dynamic minimum for better spacing on wide terminals
local min_width=18
if [[ $terminal_width -ge 120 ]]; then
min_width=48
elif [[ $terminal_width -ge 100 ]]; then
min_width=38
elif [[ $terminal_width -ge 80 ]]; then
min_width=25
fi
[[ $available_width -lt $min_width ]] && available_width=$min_width
[[ $available_width -gt 60 ]] && available_width=60
fi
# Truncate long names if needed (based on display width, not char count)
local truncated_name
truncated_name=$(truncate_by_display_width "$display_name" "$available_width")
# Get actual display width after truncation
local current_display_width
current_display_width=$(get_display_width "$truncated_name")
# Calculate the printf field width: %-*s pads by character count, not display
# width, so widen the field by (available_width - display_width) extra columns.
local char_count=${#truncated_name}
local padding_needed=$((available_width - current_display_width))
local printf_width=$((char_count + padding_needed))
# Use dynamic column width with corrected padding
printf "%-*s %9s | %s" "$printf_width" "$truncated_name" "$size_str" "$compact_last_used"
}
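# Padding worked example (comments only), assuming available_width=30:
#   name="微信"    -> 2 chars, display width 4
#     printf_width = 2 + (30 - 4) = 28, so %-28s fills 30 visible columns
#   name="Safari"  -> 6 chars, display width 6
#     printf_width = 6 + (30 - 6) = 30, aligning to the same visible column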
# Global variable to store selection result (bash 3.2 compatible)
MOLE_SELECTION_RESULT=""
# Main app selection function
# shellcheck disable=SC2154 # apps_data is set by caller
select_apps_for_uninstall() {
if [[ ${#apps_data[@]} -eq 0 ]]; then
log_warning "No applications available for uninstallation"
return 1
fi
# Build menu options
# Show loading for large lists (formatting can be slow due to width calculations)
local app_count=${#apps_data[@]}
local terminal_width=$(tput cols 2> /dev/null || echo 80)
if [[ $app_count -gt 100 ]]; then
if [[ -t 2 ]]; then
printf "\rPreparing %d applications... " "$app_count" >&2
fi
fi
# Pre-scan to get actual max name width
local max_name_width=0
for app_data in "${apps_data[@]}"; do
IFS='|' read -r _ _ display_name _ _ _ _ <<< "$app_data"
local name_width=$(get_display_width "$display_name")
[[ $name_width -gt $max_name_width ]] && max_name_width=$name_width
done
# Constrain based on terminal width: fixed=24, min varies by terminal width, max=60
local fixed_width=24
local available=$((terminal_width - fixed_width))
# Dynamic minimum: wider terminals get larger minimum for better spacing
local min_width=18
if [[ $terminal_width -ge 120 ]]; then
min_width=48 # Wide terminals: very generous spacing
elif [[ $terminal_width -ge 100 ]]; then
min_width=38 # Medium-wide terminals: generous spacing
elif [[ $terminal_width -ge 80 ]]; then
min_width=25 # Standard terminals
fi
[[ $max_name_width -lt $min_width ]] && max_name_width=$min_width
[[ $available -lt $max_name_width ]] && max_name_width=$available
[[ $max_name_width -gt 60 ]] && max_name_width=60
local -a menu_options=()
# Prepare metadata (comma-separated) for sorting/filtering inside the menu
local epochs_csv=""
local sizekb_csv=""
local idx=0
for app_data in "${apps_data[@]}"; do
# Keep extended field 7 (size_kb) if present
IFS='|' read -r epoch _ display_name _ size last_used size_kb <<< "$app_data"
menu_options+=("$(format_app_display "$display_name" "$size" "$last_used" "$terminal_width" "$max_name_width")")
# Build csv lists (avoid trailing commas)
if [[ $idx -eq 0 ]]; then
epochs_csv="${epoch:-0}"
sizekb_csv="${size_kb:-0}"
else
epochs_csv+=",${epoch:-0}"
sizekb_csv+=",${size_kb:-0}"
fi
((idx++))
done
# Clear loading message
if [[ $app_count -gt 100 ]]; then
if [[ -t 2 ]]; then
printf "\r\033[K" >&2
fi
fi
# Expose metadata for the paginated menu (optional inputs)
# - MOLE_MENU_META_EPOCHS: numeric last_used_epoch per item
# - MOLE_MENU_META_SIZEKB: numeric size in KB per item
# The menu will gracefully fallback if these are unset or malformed.
export MOLE_MENU_META_EPOCHS="$epochs_csv"
export MOLE_MENU_META_SIZEKB="$sizekb_csv"
# Optional: allow default sort override via env (date|name|size)
# export MOLE_MENU_SORT_DEFAULT="${MOLE_MENU_SORT_DEFAULT:-date}"
# Use paginated menu - result will be stored in MOLE_SELECTION_RESULT
# Note: paginated_multi_select enters alternate screen and handles clearing
MOLE_SELECTION_RESULT=""
paginated_multi_select "Select Apps to Remove" "${menu_options[@]}"
local exit_code=$?
# Clean env leakage for safety
unset MOLE_MENU_META_EPOCHS MOLE_MENU_META_SIZEKB
# leave MOLE_MENU_SORT_DEFAULT untouched if user set it globally
# Refresh signal handling
if [[ $exit_code -eq 10 ]]; then
return 10
fi
if [[ $exit_code -ne 0 ]]; then
return 1
fi
if [[ -z "$MOLE_SELECTION_RESULT" ]]; then
echo "No apps selected"
return 1
fi
# Build selected apps array (global variable in bin/uninstall.sh)
selected_apps=()
# Parse indices and build selected apps array
IFS=',' read -r -a indices_array <<< "$MOLE_SELECTION_RESULT"
for idx in "${indices_array[@]}"; do
if [[ "$idx" =~ ^[0-9]+$ ]] && [[ $idx -ge 0 ]] && [[ $idx -lt ${#apps_data[@]} ]]; then
selected_apps+=("${apps_data[idx]}")
fi
done
return 0
}
# Export function for external use
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
echo "This is a library file. Source it from other scripts." >&2
exit 1
fi
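# Illustrative caller sketch (not from the original repo). It assumes this file and
# its ui dependencies are already sourced, and that each apps_data record follows the
# field order implied by the reads above: epoch|path|name|bundle_id|size|last_used|size_kb.
demo_select_apps() {
    apps_data=("1700000000|/Applications/Foo.app|Foo|com.example.foo|120MB|2 months ago|122880")
    selected_apps=()
    if select_apps_for_uninstall; then
        printf 'Selected %d app(s)\n' "${#selected_apps[@]}"
    elif [[ $? -eq 10 ]]; then
        echo "Refresh requested (exit code 10)"
    fi
}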

View File

@@ -1,911 +0,0 @@
#!/bin/bash
# Paginated menu with arrow key navigation
set -euo pipefail
# Terminal control functions
enter_alt_screen() {
if command -v tput > /dev/null 2>&1 && [[ -t 1 ]]; then
tput smcup 2> /dev/null || true
fi
}
leave_alt_screen() {
if command -v tput > /dev/null 2>&1 && [[ -t 1 ]]; then
tput rmcup 2> /dev/null || true
fi
}
# Get terminal height with fallback
_pm_get_terminal_height() {
local height=0
# Try stty size first (most reliable, real-time)
# Use </dev/tty to ensure we read from terminal even if stdin is redirected
if [[ -t 0 ]] || [[ -t 2 ]]; then
height=$(stty size < /dev/tty 2> /dev/null | awk '{print $1}')
fi
# Fallback to tput
if [[ -z "$height" || $height -le 0 ]]; then
if command -v tput > /dev/null 2>&1; then
height=$(tput lines 2> /dev/null || echo "24")
else
height=24
fi
fi
echo "$height"
}
# Calculate dynamic items per page based on terminal height
_pm_calculate_items_per_page() {
local term_height=$(_pm_get_terminal_height)
# Reserved: header(1) + blank(1) + blank(1) + footer(1-2) = 4-5 rows
# Use 5 to be safe (leaves 1 row buffer when footer wraps to 2 lines)
local reserved=5
local available=$((term_height - reserved))
# Ensure minimum and maximum bounds
if [[ $available -lt 1 ]]; then
echo 1
elif [[ $available -gt 50 ]]; then
echo 50
else
echo "$available"
fi
}
# Parse CSV into newline list (Bash 3.2)
_pm_parse_csv_to_array() {
local csv="${1:-}"
if [[ -z "$csv" ]]; then
return 0
fi
local IFS=','
for _tok in $csv; do
printf "%s\n" "$_tok"
done
}
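# Usage sketch (illustrative): collect the emitted lines into a Bash 3.2 compatible
# array, mirroring how the menu ingests MOLE_MENU_META_EPOCHS further below.
demo_parse_csv() {
    local -a vals=()
    local v
    while IFS= read -r v; do vals+=("$v"); done < <(_pm_parse_csv_to_array "10,20,30")
    printf '%s\n' "${vals[@]}"   # prints 10, 20 and 30 on separate lines
}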
# Main paginated multi-select menu function
paginated_multi_select() {
local title="$1"
shift
local -a items=("$@")
local external_alt_screen=false
if [[ "${MOLE_MANAGED_ALT_SCREEN:-}" == "1" || "${MOLE_MANAGED_ALT_SCREEN:-}" == "true" ]]; then
external_alt_screen=true
fi
# Validation
if [[ ${#items[@]} -eq 0 ]]; then
echo "No items provided" >&2
return 1
fi
local total_items=${#items[@]}
local items_per_page=$(_pm_calculate_items_per_page)
local cursor_pos=0
local top_index=0
local filter_query=""
local filter_mode="false" # filter mode toggle
local sort_mode="${MOLE_MENU_SORT_MODE:-${MOLE_MENU_SORT_DEFAULT:-date}}" # date|name|size
local sort_reverse="${MOLE_MENU_SORT_REVERSE:-false}"
# Live query vs applied query
local applied_query=""
local searching="false"
# Metadata (optional)
# epochs[i] -> last_used_epoch (numeric) for item i
# sizekb[i] -> size in KB (numeric) for item i
local -a epochs=()
local -a sizekb=()
local has_metadata="false"
if [[ -n "${MOLE_MENU_META_EPOCHS:-}" ]]; then
while IFS= read -r v; do epochs+=("${v:-0}"); done < <(_pm_parse_csv_to_array "$MOLE_MENU_META_EPOCHS")
has_metadata="true"
fi
if [[ -n "${MOLE_MENU_META_SIZEKB:-}" ]]; then
while IFS= read -r v; do sizekb+=("${v:-0}"); done < <(_pm_parse_csv_to_array "$MOLE_MENU_META_SIZEKB")
has_metadata="true"
fi
# If no metadata, force name sorting and disable sorting controls
if [[ "$has_metadata" == "false" && "$sort_mode" != "name" ]]; then
sort_mode="name"
fi
# Index mappings
local -a orig_indices=()
local -a view_indices=()
local i
for ((i = 0; i < total_items; i++)); do
orig_indices[i]=$i
view_indices[i]=$i
done
# Escape for shell globbing without upsetting highlighters
_pm_escape_glob() {
local s="${1-}" out="" c
local i len=${#s}
for ((i = 0; i < len; i++)); do
c="${s:i:1}"
case "$c" in
$'\\' | '*' | '?' | '[' | ']') out+="\\$c" ;;
*) out+="$c" ;;
esac
done
printf '%s' "$out"
}
# Case-insensitive substring match
_pm_match() {
local hay="$1" q="$2"
q="$(_pm_escape_glob "$q")"
local pat="*${q}*"
shopt -s nocasematch
local ok=1
# shellcheck disable=SC2254 # intentional glob match with a computed pattern
case "$hay" in
$pat) ok=0 ;;
esac
shopt -u nocasematch
return $ok
}
local -a selected=()
local selected_count=0 # Cache selection count to avoid O(n) loops on every draw
# Initialize selection array
for ((i = 0; i < total_items; i++)); do
selected[i]=false
done
if [[ -n "${MOLE_PRESELECTED_INDICES:-}" ]]; then
local cleaned_preselect="${MOLE_PRESELECTED_INDICES//[[:space:]]/}"
local -a initial_indices=()
IFS=',' read -ra initial_indices <<< "$cleaned_preselect"
for idx in "${initial_indices[@]}"; do
if [[ "$idx" =~ ^[0-9]+$ && $idx -ge 0 && $idx -lt $total_items ]]; then
# Only count if not already selected (handles duplicates)
if [[ ${selected[idx]} != true ]]; then
selected[idx]=true
((selected_count++))
fi
fi
done
fi
# Preserve original TTY settings so we can restore them reliably
local original_stty=""
if [[ -t 0 ]] && command -v stty > /dev/null 2>&1; then
original_stty=$(stty -g 2> /dev/null || echo "")
fi
restore_terminal() {
show_cursor
if [[ -n "${original_stty-}" ]]; then
stty "${original_stty}" 2> /dev/null || stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
else
stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
fi
if [[ "${external_alt_screen:-false}" == false ]]; then
leave_alt_screen
fi
}
# Cleanup function
cleanup() {
trap - EXIT INT TERM
export MOLE_MENU_SORT_MODE="$sort_mode"
export MOLE_MENU_SORT_REVERSE="$sort_reverse"
restore_terminal
unset MOLE_READ_KEY_FORCE_CHAR
}
# Interrupt handler
# shellcheck disable=SC2329
handle_interrupt() {
cleanup
exit 130 # Standard exit code for Ctrl+C
}
trap cleanup EXIT
trap handle_interrupt INT TERM
# Setup terminal - preserve interrupt character
stty -echo -icanon intr ^C 2> /dev/null || true
if [[ $external_alt_screen == false ]]; then
enter_alt_screen
# Clear screen once on entry to alt screen
printf "\033[2J\033[H" >&2
else
printf "\033[H" >&2
fi
hide_cursor
# Helper functions
# shellcheck disable=SC2329
print_line() { printf "\r\033[2K%s\n" "$1" >&2; }
# Print footer lines wrapping only at separators
_print_wrapped_controls() {
local sep="$1"
shift
local -a segs=("$@")
local cols="${COLUMNS:-}"
[[ -z "$cols" ]] && cols=$(tput cols 2> /dev/null || echo 80)
[[ "$cols" =~ ^[0-9]+$ ]] || cols=80
_strip_ansi_len() {
local text="$1"
local stripped
stripped=$(printf "%s" "$text" | LC_ALL=C awk '{gsub(/\033\[[0-9;]*[A-Za-z]/,""); print}' || true)
[[ -z "$stripped" ]] && stripped="$text"
printf "%d" "${#stripped}"
}
local line="" s candidate
local clear_line=$'\r\033[2K'
for s in "${segs[@]}"; do
if [[ -z "$line" ]]; then
candidate="$s"
else
candidate="$line${sep}${s}"
fi
local candidate_len
candidate_len=$(_strip_ansi_len "$candidate")
[[ -z "$candidate_len" ]] && candidate_len=0
if ((candidate_len > cols)); then
printf "%s%s\n" "$clear_line" "$line" >&2
line="$s"
else
line="$candidate"
fi
done
printf "%s%s\n" "$clear_line" "$line" >&2
}
# Rebuild the view_indices applying filter and sort
rebuild_view() {
# Filter
local -a filtered=()
local effective_query=""
if [[ "$filter_mode" == "true" ]]; then
# Live editing: empty query -> show all items
effective_query="$filter_query"
if [[ -z "$effective_query" ]]; then
filtered=("${orig_indices[@]}")
else
local idx
for ((idx = 0; idx < total_items; idx++)); do
if _pm_match "${items[idx]}" "$effective_query"; then
filtered+=("$idx")
fi
done
fi
else
# Normal mode: use applied query; empty -> show all
effective_query="$applied_query"
if [[ -z "$effective_query" ]]; then
filtered=("${orig_indices[@]}")
else
local idx
for ((idx = 0; idx < total_items; idx++)); do
if _pm_match "${items[idx]}" "$effective_query"; then
filtered+=("$idx")
fi
done
fi
fi
# Sort (skip if no metadata)
if [[ "$has_metadata" == "false" ]]; then
# No metadata: use the filtered list as-is (items keep the caller's order, assumed to be name-sorted)
view_indices=("${filtered[@]}")
elif [[ ${#filtered[@]} -eq 0 ]]; then
view_indices=()
else
# Build sort key
local sort_key
if [[ "$sort_mode" == "date" ]]; then
# Date: ascending by default (oldest first)
sort_key="-k1,1n"
[[ "$sort_reverse" == "true" ]] && sort_key="-k1,1nr"
elif [[ "$sort_mode" == "size" ]]; then
# Size: descending by default (largest first)
sort_key="-k1,1nr"
[[ "$sort_reverse" == "true" ]] && sort_key="-k1,1n"
else
# Name: ascending by default (A to Z)
sort_key="-k1,1f"
[[ "$sort_reverse" == "true" ]] && sort_key="-k1,1fr"
fi
# Create temporary file for sorting
local tmpfile
tmpfile=$(mktemp 2> /dev/null) || tmpfile=""
if [[ -n "$tmpfile" ]]; then
local k id
for id in "${filtered[@]}"; do
case "$sort_mode" in
date) k="${epochs[id]:-0}" ;;
size) k="${sizekb[id]:-0}" ;;
name | *) k="${items[id]}|${id}" ;;
esac
printf "%s\t%s\n" "$k" "$id" >> "$tmpfile"
done
view_indices=()
while IFS=$'\t' read -r _key _id; do
[[ -z "$_id" ]] && continue
view_indices+=("$_id")
done < <(LC_ALL=C sort -t $'\t' $sort_key -- "$tmpfile" 2> /dev/null)
rm -f "$tmpfile"
else
# Fallback: no sorting
view_indices=("${filtered[@]}")
fi
fi
# Clamp cursor into visible range
local visible_count=${#view_indices[@]}
local max_top
if [[ $visible_count -gt $items_per_page ]]; then
max_top=$((visible_count - items_per_page))
else
max_top=0
fi
[[ $top_index -gt $max_top ]] && top_index=$max_top
local current_visible=$((visible_count - top_index))
[[ $current_visible -gt $items_per_page ]] && current_visible=$items_per_page
if [[ $cursor_pos -ge $current_visible ]]; then
cursor_pos=$((current_visible > 0 ? current_visible - 1 : 0))
fi
[[ $cursor_pos -lt 0 ]] && cursor_pos=0
}
# Initial view (default sort)
rebuild_view
render_item() {
# $1: visible row index (0..items_per_page-1 in current window)
# $2: is_current flag
local vrow=$1 is_current=$2
local idx=$((top_index + vrow))
local real="${view_indices[idx]:--1}"
[[ $real -lt 0 ]] && return
local checkbox="$ICON_EMPTY"
[[ ${selected[real]} == true ]] && checkbox="$ICON_SOLID"
if [[ $is_current == true ]]; then
printf "\r\033[2K${CYAN}${ICON_ARROW} %s %s${NC}\n" "$checkbox" "${items[real]}" >&2
else
printf "\r\033[2K %s %s\n" "$checkbox" "${items[real]}" >&2
fi
}
# Draw the complete menu
draw_menu() {
# Recalculate items_per_page dynamically to handle window resize
items_per_page=$(_pm_calculate_items_per_page)
printf "\033[H" >&2
local clear_line="\r\033[2K"
# Use cached selection count (maintained incrementally on toggle)
# No need to loop through all items anymore!
# Header only
printf "${clear_line}${PURPLE_BOLD}%s${NC} ${GRAY}%d/%d selected${NC}\n" "${title}" "$selected_count" "$total_items" >&2
# Visible slice
local visible_total=${#view_indices[@]}
if [[ $visible_total -eq 0 ]]; then
if [[ "$filter_mode" == "true" ]]; then
# While editing: do not show "No items available"
for ((i = 0; i < items_per_page; i++)); do
printf "${clear_line}\n" >&2
done
printf "${clear_line}${GRAY}Type to filter | Delete | Enter Confirm | ESC Cancel${NC}\n" >&2
printf "${clear_line}" >&2
return
else
if [[ "$searching" == "true" ]]; then
printf "${clear_line}Searching…\n" >&2
for ((i = 0; i < items_per_page; i++)); do
printf "${clear_line}\n" >&2
done
printf "${clear_line}${GRAY}${ICON_NAV_UP}${ICON_NAV_DOWN} | Space | Enter | / Filter | Q Exit${NC}\n" >&2
printf "${clear_line}" >&2
return
else
# Post-search: truly empty list
printf "${clear_line}No items available\n" >&2
for ((i = 0; i < items_per_page; i++)); do
printf "${clear_line}\n" >&2
done
printf "${clear_line}${GRAY}${ICON_NAV_UP}${ICON_NAV_DOWN} | Space | Enter | / Filter | Q Exit${NC}\n" >&2
printf "${clear_line}" >&2
return
fi
fi
fi
local visible_count=$((visible_total - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
[[ $visible_count -le 0 ]] && visible_count=1
if [[ $cursor_pos -ge $visible_count ]]; then
cursor_pos=$((visible_count - 1))
[[ $cursor_pos -lt 0 ]] && cursor_pos=0
fi
printf "${clear_line}\n" >&2
# Items for current window
local start_idx=$top_index
local end_idx=$((top_index + items_per_page - 1))
[[ $end_idx -ge $visible_total ]] && end_idx=$((visible_total - 1))
for ((i = start_idx; i <= end_idx; i++)); do
[[ $i -lt 0 ]] && continue
local is_current=false
[[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
render_item $((i - start_idx)) $is_current
done
# Fill empty slots to clear previous content
local items_shown=$((end_idx - start_idx + 1))
[[ $items_shown -lt 0 ]] && items_shown=0
for ((i = items_shown; i < items_per_page; i++)); do
printf "${clear_line}\n" >&2
done
printf "${clear_line}\n" >&2
# Build sort and filter status
local sort_label=""
case "$sort_mode" in
date) sort_label="Date" ;;
name) sort_label="Name" ;;
size) sort_label="Size" ;;
esac
local sort_status="${sort_label}"
local filter_status=""
if [[ "$filter_mode" == "true" ]]; then
filter_status="${filter_query:-_}"
elif [[ -n "$applied_query" ]]; then
filter_status="${applied_query}"
else
filter_status="—"
fi
# Footer: single line with controls
local sep=" ${GRAY}|${NC} "
# Helper to calculate display length without ANSI codes
_calc_len() {
local text="$1"
local stripped
stripped=$(printf "%s" "$text" | LC_ALL=C awk '{gsub(/\033\[[0-9;]*[A-Za-z]/,""); print}')
printf "%d" "${#stripped}"
}
# Common menu items
local nav="${GRAY}${ICON_NAV_UP}${ICON_NAV_DOWN}${NC}"
local space_select="${GRAY}Space Select${NC}"
local space="${GRAY}Space${NC}"
local enter="${GRAY}Enter${NC}"
local exit="${GRAY}Q Exit${NC}"
if [[ "$filter_mode" == "true" ]]; then
# Filter mode: simple controls without sort
local -a _segs_filter=(
"${GRAY}Search: ${filter_status}${NC}"
"${GRAY}Delete${NC}"
"${GRAY}Enter Confirm${NC}"
"${GRAY}ESC Cancel${NC}"
)
_print_wrapped_controls "$sep" "${_segs_filter[@]}"
else
# Normal mode - prepare dynamic items
local reverse_arrow="↑"
[[ "$sort_reverse" == "true" ]] && reverse_arrow="↓"
local filter_text="/ Search"
[[ -n "$applied_query" ]] && filter_text="/ Clear"
local refresh="${GRAY}R Refresh${NC}"
local search="${GRAY}${filter_text}${NC}"
local sort_ctrl="${GRAY}S ${sort_status}${NC}"
local order_ctrl="${GRAY}O ${reverse_arrow}${NC}"
if [[ "$has_metadata" == "true" ]]; then
if [[ -n "$applied_query" ]]; then
# Filtering active: hide sort controls
local -a _segs_all=("$nav" "$space" "$enter" "$refresh" "$search" "$exit")
_print_wrapped_controls "$sep" "${_segs_all[@]}"
else
# Normal: show full controls with dynamic reduction
local term_width="${COLUMNS:-}"
[[ -z "$term_width" ]] && term_width=$(tput cols 2> /dev/null || echo 80)
[[ "$term_width" =~ ^[0-9]+$ ]] || term_width=80
# Level 0: Full controls
local -a _segs=("$nav" "$space_select" "$enter" "$refresh" "$search" "$sort_ctrl" "$order_ctrl" "$exit")
# Calculate width
local total_len=0 seg_count=${#_segs[@]}
for i in "${!_segs[@]}"; do
total_len=$((total_len + $(_calc_len "${_segs[i]}")))
[[ $i -lt $((seg_count - 1)) ]] && total_len=$((total_len + 3))
done
# Level 1: Remove "Space Select"
if [[ $total_len -gt $term_width ]]; then
_segs=("$nav" "$enter" "$refresh" "$search" "$sort_ctrl" "$order_ctrl" "$exit")
total_len=0
seg_count=${#_segs[@]}
for i in "${!_segs[@]}"; do
total_len=$((total_len + $(_calc_len "${_segs[i]}")))
[[ $i -lt $((seg_count - 1)) ]] && total_len=$((total_len + 3))
done
# Level 2: Remove "S ${sort_status}"
if [[ $total_len -gt $term_width ]]; then
_segs=("$nav" "$enter" "$refresh" "$search" "$order_ctrl" "$exit")
fi
fi
_print_wrapped_controls "$sep" "${_segs[@]}"
fi
else
# Without metadata: basic controls
local -a _segs_simple=("$nav" "$space_select" "$enter" "$refresh" "$search" "$exit")
_print_wrapped_controls "$sep" "${_segs_simple[@]}"
fi
fi
printf "${clear_line}" >&2
}
# Track previous cursor position for incremental rendering
local prev_cursor_pos=$cursor_pos
local prev_top_index=$top_index
local need_full_redraw=true
# Main interaction loop
while true; do
if [[ "$need_full_redraw" == "true" ]]; then
draw_menu
need_full_redraw=false
# Update tracking variables after full redraw
prev_cursor_pos=$cursor_pos
prev_top_index=$top_index
fi
local key
key=$(read_key)
case "$key" in
"QUIT")
if [[ "$filter_mode" == "true" ]]; then
filter_mode="false"
unset MOLE_READ_KEY_FORCE_CHAR
filter_query=""
applied_query=""
top_index=0
cursor_pos=0
rebuild_view
need_full_redraw=true
continue
fi
cleanup
return 1
;;
"UP")
if [[ ${#view_indices[@]} -eq 0 ]]; then
:
elif [[ $cursor_pos -gt 0 ]]; then
# Simple cursor move - only redraw affected rows
local old_cursor=$cursor_pos
((cursor_pos--))
local new_cursor=$cursor_pos
# Calculate terminal row positions (+3: row 1=header, row 2=blank, row 3=first item)
local old_row=$((old_cursor + 3))
local new_row=$((new_cursor + 3))
# Quick redraw: update only the two affected rows
printf "\033[%d;1H" "$old_row" >&2
render_item "$old_cursor" false
printf "\033[%d;1H" "$new_row" >&2
render_item "$new_cursor" true
# CRITICAL: Move cursor to footer to avoid visual artifacts
printf "\033[%d;1H" "$((items_per_page + 4))" >&2
prev_cursor_pos=$cursor_pos
continue # Skip full redraw
elif [[ $top_index -gt 0 ]]; then
((top_index--))
prev_cursor_pos=$cursor_pos
prev_top_index=$top_index
need_full_redraw=true # Scrolling requires full redraw
fi
;;
"DOWN")
if [[ ${#view_indices[@]} -eq 0 ]]; then
:
else
local absolute_index=$((top_index + cursor_pos))
local last_index=$((${#view_indices[@]} - 1))
if [[ $absolute_index -lt $last_index ]]; then
local visible_count=$((${#view_indices[@]} - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -lt $((visible_count - 1)) ]]; then
# Simple cursor move - only redraw affected rows
local old_cursor=$cursor_pos
((cursor_pos++))
local new_cursor=$cursor_pos
# Calculate terminal row positions (+3: row 1=header, row 2=blank, row 3=first item)
local old_row=$((old_cursor + 3))
local new_row=$((new_cursor + 3))
# Quick redraw: update only the two affected rows
printf "\033[%d;1H" "$old_row" >&2
render_item "$old_cursor" false
printf "\033[%d;1H" "$new_row" >&2
render_item "$new_cursor" true
# CRITICAL: Move cursor to footer to avoid visual artifacts
printf "\033[%d;1H" "$((items_per_page + 4))" >&2
prev_cursor_pos=$cursor_pos
continue # Skip full redraw
elif [[ $((top_index + visible_count)) -lt ${#view_indices[@]} ]]; then
((top_index++))
visible_count=$((${#view_indices[@]} - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -ge $visible_count ]]; then
cursor_pos=$((visible_count - 1))
fi
prev_cursor_pos=$cursor_pos
prev_top_index=$top_index
need_full_redraw=true # Scrolling requires full redraw
fi
fi
fi
;;
"SPACE")
local idx=$((top_index + cursor_pos))
if [[ $idx -lt ${#view_indices[@]} ]]; then
local real="${view_indices[idx]}"
if [[ ${selected[real]} == true ]]; then
selected[real]=false
((selected_count--))
else
selected[real]=true
((selected_count++))
fi
# Incremental update: only redraw header (for count) and current row
# Header is at row 1
printf "\033[1;1H\033[2K${PURPLE_BOLD}%s${NC} ${GRAY}%d/%d selected${NC}\n" "${title}" "$selected_count" "$total_items" >&2
# Redraw current item row (+3: row 1=header, row 2=blank, row 3=first item)
local item_row=$((cursor_pos + 3))
printf "\033[%d;1H" "$item_row" >&2
render_item "$cursor_pos" true
# Move cursor to footer to avoid visual artifacts (items + header + 2 blanks)
printf "\033[%d;1H" "$((items_per_page + 4))" >&2
continue # Skip full redraw
fi
;;
"RETRY")
# RETRY toggles reverse order (only if metadata available); the plain R key is handled below as Refresh
if [[ "$has_metadata" == "true" ]]; then
if [[ "$sort_reverse" == "true" ]]; then
sort_reverse="false"
else
sort_reverse="true"
fi
rebuild_view
need_full_redraw=true
fi
;;
"CHAR:s" | "CHAR:S")
if [[ "$filter_mode" == "true" ]]; then
local ch="${key#CHAR:}"
filter_query+="$ch"
need_full_redraw=true
elif [[ "$has_metadata" == "true" ]]; then
# Cycle sort mode (only if metadata available)
case "$sort_mode" in
date) sort_mode="name" ;;
name) sort_mode="size" ;;
size) sort_mode="date" ;;
esac
rebuild_view
need_full_redraw=true
fi
;;
"FILTER")
# / key: toggle between filter and return
if [[ -n "$applied_query" ]]; then
# Already filtering, clear and return to full list
applied_query=""
filter_query=""
top_index=0
cursor_pos=0
rebuild_view
need_full_redraw=true
else
# Enter filter mode
filter_mode="true"
export MOLE_READ_KEY_FORCE_CHAR=1
filter_query=""
top_index=0
cursor_pos=0
rebuild_view
need_full_redraw=true
fi
;;
"CHAR:j")
if [[ "$filter_mode" != "true" ]]; then
# Down navigation
if [[ ${#view_indices[@]} -gt 0 ]]; then
local absolute_index=$((top_index + cursor_pos))
local last_index=$((${#view_indices[@]} - 1))
if [[ $absolute_index -lt $last_index ]]; then
local visible_count=$((${#view_indices[@]} - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -lt $((visible_count - 1)) ]]; then
((cursor_pos++))
elif [[ $((top_index + visible_count)) -lt ${#view_indices[@]} ]]; then
((top_index++))
fi
fi
fi
else
filter_query+="j"
fi
;;
"CHAR:k")
if [[ "$filter_mode" != "true" ]]; then
# Up navigation
if [[ ${#view_indices[@]} -gt 0 ]]; then
if [[ $cursor_pos -gt 0 ]]; then
((cursor_pos--))
elif [[ $top_index -gt 0 ]]; then
((top_index--))
fi
fi
else
filter_query+="k"
fi
;;
"CHAR:f" | "CHAR:F")
if [[ "$filter_mode" == "true" ]]; then
filter_query+="${key#CHAR:}"
fi
# F is currently unbound in normal mode to avoid conflict with Refresh (R)
;;
"CHAR:r" | "CHAR:R")
if [[ "$filter_mode" == "true" ]]; then
filter_query+="${key#CHAR:}"
else
# Trigger Refresh signal (Unified with Analyze)
cleanup
return 10
fi
;;
"CHAR:o" | "CHAR:O")
if [[ "$filter_mode" == "true" ]]; then
filter_query+="${key#CHAR:}"
elif [[ "$has_metadata" == "true" ]]; then
# O toggles reverse order (Unified Sort Order)
if [[ "$sort_reverse" == "true" ]]; then
sort_reverse="false"
else
sort_reverse="true"
fi
rebuild_view
need_full_redraw=true
fi
;;
"DELETE")
# Backspace filter
if [[ "$filter_mode" == "true" && -n "$filter_query" ]]; then
filter_query="${filter_query%?}"
need_full_redraw=true
fi
;;
CHAR:*)
if [[ "$filter_mode" == "true" ]]; then
local ch="${key#CHAR:}"
# avoid accidental leading spaces
if [[ -n "$filter_query" || "$ch" != " " ]]; then
filter_query+="$ch"
need_full_redraw=true
fi
fi
;;
"ENTER")
if [[ "$filter_mode" == "true" ]]; then
applied_query="$filter_query"
filter_mode="false"
unset MOLE_READ_KEY_FORCE_CHAR
top_index=0
cursor_pos=0
searching="true"
draw_menu # paint "searching..."
drain_pending_input # drop any extra keypresses (e.g., double-Enter)
rebuild_view
searching="false"
draw_menu
continue
fi
# In normal mode: smart Enter behavior
# 1. Check if any items are already selected
local has_selection=false
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
has_selection=true
break
fi
done
# 2. If nothing selected, auto-select current item
if [[ $has_selection == false ]]; then
local idx=$((top_index + cursor_pos))
if [[ $idx -lt ${#view_indices[@]} ]]; then
local real="${view_indices[idx]}"
selected[real]=true
((selected_count++))
fi
fi
# 3. Confirm and exit with current selections
local -a selected_indices=()
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
selected_indices+=("$i")
fi
done
local final_result=""
if [[ ${#selected_indices[@]} -gt 0 ]]; then
local IFS=','
final_result="${selected_indices[*]}"
fi
trap - EXIT INT TERM
MOLE_SELECTION_RESULT="$final_result"
export MOLE_MENU_SORT_MODE="$sort_mode"
export MOLE_MENU_SORT_REVERSE="$sort_reverse"
restore_terminal
return 0
;;
esac
# Drain any accumulated input after processing (e.g., mouse wheel events)
# This prevents buffered events from causing jumps, without blocking keyboard input
drain_pending_input
done
}
# Export function for external use
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
echo "This is a library file. Source it from other scripts." >&2
exit 1
fi
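# Illustrative driver sketch (not part of the original file): exercising the optional
# metadata/sort protocol documented above. The item labels, epochs and sizes are
# invented sample values; MOLE_SELECTION_RESULT is the global the menu fills with a
# comma-separated list of original item indices when Enter confirms a selection.
demo_paginated_menu() {
    export MOLE_MENU_META_EPOCHS="1690000000,1710000000,1600000000"  # last-used epoch per item
    export MOLE_MENU_META_SIZEKB="1024,204800,512"                   # size in KB per item
    MOLE_SELECTION_RESULT=""
    local rc=0
    paginated_multi_select "Demo" "App A" "App B" "App C" || rc=$?
    unset MOLE_MENU_META_EPOCHS MOLE_MENU_META_SIZEKB
    if [[ $rc -eq 0 && -n "$MOLE_SELECTION_RESULT" ]]; then
        echo "Picked indices: $MOLE_SELECTION_RESULT"
    elif [[ $rc -eq 10 ]]; then
        echo "Refresh requested"
    fi
}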

View File

@@ -1,318 +0,0 @@
#!/bin/bash
# Paginated menu with arrow key navigation
set -euo pipefail
# Terminal control functions
enter_alt_screen() { tput smcup 2> /dev/null || true; }
leave_alt_screen() { tput rmcup 2> /dev/null || true; }
# Get terminal height with fallback
_ms_get_terminal_height() {
local height=0
# Try stty size first (most reliable, real-time)
# Use </dev/tty to ensure we read from terminal even if stdin is redirected
if [[ -t 0 ]] || [[ -t 2 ]]; then
height=$(stty size < /dev/tty 2> /dev/null | awk '{print $1}')
fi
# Fallback to tput
if [[ -z "$height" || $height -le 0 ]]; then
if command -v tput > /dev/null 2>&1; then
height=$(tput lines 2> /dev/null || echo "24")
else
height=24
fi
fi
echo "$height"
}
# Calculate dynamic items per page based on terminal height
_ms_calculate_items_per_page() {
local term_height=$(_ms_get_terminal_height)
# Layout: header(1) + spacing(1) + spacing(1) + footer(1) + clear(1) = 5 fixed rows around the items
local reserved=6 # One extra row of buffer so the header is never overwritten
local available=$((term_height - reserved))
# Ensure minimum and maximum bounds
if [[ $available -lt 1 ]]; then
echo 1
elif [[ $available -gt 50 ]]; then
echo 50
else
echo "$available"
fi
}
# Main paginated multi-select menu function
paginated_multi_select() {
local title="$1"
shift
local -a items=("$@")
local external_alt_screen=false
if [[ "${MOLE_MANAGED_ALT_SCREEN:-}" == "1" || "${MOLE_MANAGED_ALT_SCREEN:-}" == "true" ]]; then
external_alt_screen=true
fi
# Validation
if [[ ${#items[@]} -eq 0 ]]; then
echo "No items provided" >&2
return 1
fi
local total_items=${#items[@]}
local items_per_page=$(_ms_calculate_items_per_page)
local cursor_pos=0
local top_index=0
local -a selected=()
# Initialize selection array
for ((i = 0; i < total_items; i++)); do
selected[i]=false
done
if [[ -n "${MOLE_PRESELECTED_INDICES:-}" ]]; then
local cleaned_preselect="${MOLE_PRESELECTED_INDICES//[[:space:]]/}"
local -a initial_indices=()
IFS=',' read -ra initial_indices <<< "$cleaned_preselect"
for idx in "${initial_indices[@]}"; do
if [[ "$idx" =~ ^[0-9]+$ && $idx -ge 0 && $idx -lt $total_items ]]; then
selected[idx]=true
fi
done
fi
# Preserve original TTY settings so we can restore them reliably
local original_stty=""
if [[ -t 0 ]] && command -v stty > /dev/null 2>&1; then
original_stty=$(stty -g 2> /dev/null || echo "")
fi
restore_terminal() {
show_cursor
if [[ -n "${original_stty-}" ]]; then
stty "${original_stty}" 2> /dev/null || stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
else
stty sane 2> /dev/null || stty echo icanon 2> /dev/null || true
fi
if [[ "${external_alt_screen:-false}" == false ]]; then
leave_alt_screen
fi
}
# Cleanup function
cleanup() {
trap - EXIT INT TERM
restore_terminal
}
# Interrupt handler
# shellcheck disable=SC2329
handle_interrupt() {
cleanup
exit 130 # Standard exit code for Ctrl+C
}
trap cleanup EXIT
trap handle_interrupt INT TERM
# Setup terminal - preserve interrupt character
stty -echo -icanon intr ^C 2> /dev/null || true
if [[ $external_alt_screen == false ]]; then
enter_alt_screen
# Clear screen once on entry to alt screen
printf "\033[2J\033[H" >&2
else
printf "\033[H" >&2
fi
hide_cursor
# Helper functions
# shellcheck disable=SC2329
print_line() { printf "\r\033[2K%s\n" "$1" >&2; }
render_item() {
local idx=$1 is_current=$2
local checkbox="$ICON_EMPTY"
[[ ${selected[idx]} == true ]] && checkbox="$ICON_SOLID"
if [[ $is_current == true ]]; then
printf "\r\033[2K${CYAN}${ICON_ARROW} %s %s${NC}\n" "$checkbox" "${items[idx]}" >&2
else
printf "\r\033[2K %s %s\n" "$checkbox" "${items[idx]}" >&2
fi
}
# Draw the complete menu
draw_menu() {
# Recalculate items_per_page dynamically to handle window resize
items_per_page=$(_ms_calculate_items_per_page)
# Move to home position without clearing (reduces flicker)
printf "\033[H" >&2
# Clear each line as we go instead of clearing entire screen
local clear_line="\r\033[2K"
# Count selections for header display
local selected_count=0
for ((i = 0; i < total_items; i++)); do
[[ ${selected[i]} == true ]] && ((selected_count++))
done
# Header
printf "${clear_line}${PURPLE_BOLD}%s${NC} ${GRAY}%d/%d selected${NC}\n" "${title}" "$selected_count" "$total_items" >&2
if [[ $total_items -eq 0 ]]; then
printf "${clear_line}${GRAY}No items available${NC}\n" >&2
printf "${clear_line}\n" >&2
printf "${clear_line}${GRAY}Q${NC} Quit\n" >&2
printf "${clear_line}" >&2
return
fi
if [[ $top_index -gt $((total_items - 1)) ]]; then
if [[ $total_items -gt $items_per_page ]]; then
top_index=$((total_items - items_per_page))
else
top_index=0
fi
fi
local visible_count=$((total_items - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
[[ $visible_count -le 0 ]] && visible_count=1
if [[ $cursor_pos -ge $visible_count ]]; then
cursor_pos=$((visible_count - 1))
[[ $cursor_pos -lt 0 ]] && cursor_pos=0
fi
printf "${clear_line}\n" >&2
# Items for current window
local start_idx=$top_index
local end_idx=$((top_index + items_per_page - 1))
[[ $end_idx -ge $total_items ]] && end_idx=$((total_items - 1))
for ((i = start_idx; i <= end_idx; i++)); do
[[ $i -lt 0 ]] && continue
local is_current=false
[[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
render_item $i $is_current
done
# Fill empty slots to clear previous content
local items_shown=$((end_idx - start_idx + 1))
[[ $items_shown -lt 0 ]] && items_shown=0
for ((i = items_shown; i < items_per_page; i++)); do
printf "${clear_line}\n" >&2
done
# Clear any remaining lines at bottom
printf "${clear_line}\n" >&2
printf "${clear_line}${GRAY}${ICON_NAV_UP}${ICON_NAV_DOWN} | Space | Enter | Q Exit${NC}\n" >&2
# Clear one more line to ensure no artifacts
printf "${clear_line}" >&2
}
# Main interaction loop
while true; do
draw_menu
local key=$(read_key)
case "$key" in
"QUIT")
cleanup
return 1
;;
"UP")
if [[ $total_items -eq 0 ]]; then
:
elif [[ $cursor_pos -gt 0 ]]; then
((cursor_pos--))
elif [[ $top_index -gt 0 ]]; then
((top_index--))
fi
;;
"DOWN")
if [[ $total_items -eq 0 ]]; then
:
else
local absolute_index=$((top_index + cursor_pos))
if [[ $absolute_index -lt $((total_items - 1)) ]]; then
local visible_count=$((total_items - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -lt $((visible_count - 1)) ]]; then
((cursor_pos++))
elif [[ $((top_index + visible_count)) -lt $total_items ]]; then
((top_index++))
visible_count=$((total_items - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -ge $visible_count ]]; then
cursor_pos=$((visible_count - 1))
fi
fi
fi
fi
;;
"SPACE")
local idx=$((top_index + cursor_pos))
if [[ $idx -lt $total_items ]]; then
if [[ ${selected[idx]} == true ]]; then
selected[idx]=false
else
selected[idx]=true
fi
fi
;;
"ALL")
for ((i = 0; i < total_items; i++)); do
selected[i]=true
done
;;
"NONE")
for ((i = 0; i < total_items; i++)); do
selected[i]=false
done
;;
"ENTER")
# Store result in global variable instead of returning via stdout
local -a selected_indices=()
for ((i = 0; i < total_items; i++)); do
if [[ ${selected[i]} == true ]]; then
selected_indices+=("$i")
fi
done
# Allow empty selection - don't auto-select cursor position
# This fixes the bug where unselecting all items would still select the last cursor position
local final_result=""
if [[ ${#selected_indices[@]} -gt 0 ]]; then
local IFS=','
final_result="${selected_indices[*]}"
fi
# Remove the trap to avoid cleanup on normal exit
trap - EXIT INT TERM
# Store result in global variable
MOLE_SELECTION_RESULT="$final_result"
# Manually cleanup terminal before returning
restore_terminal
return 0
;;
esac
done
}
# Export function for external use
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
echo "This is a library file. Source it from other scripts." >&2
exit 1
fi
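# Illustrative sketch: pre-ticking entries via MOLE_PRESELECTED_INDICES, which this
# simpler menu (like the richer variant) parses as a comma-separated list of item
# indices. The item labels are invented.
demo_preselected_menu() {
    export MOLE_PRESELECTED_INDICES="0,2"   # start with the 1st and 3rd items selected
    MOLE_SELECTION_RESULT=""
    local rc=0
    paginated_multi_select "Demo" "Item one" "Item two" "Item three" || rc=$?
    unset MOLE_PRESELECTED_INDICES
    if [[ $rc -eq 0 ]]; then
        echo "Confirmed: ${MOLE_SELECTION_RESULT:-none}"
    fi
}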

View File

@@ -1,492 +0,0 @@
#!/bin/bash
set -euo pipefail
# Ensure common.sh is loaded.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
[[ -z "${MOLE_COMMON_LOADED:-}" ]] && source "$SCRIPT_DIR/lib/core/common.sh"
# Batch uninstall with a single confirmation.
# User data detection patterns (prompt user to backup if found).
readonly SENSITIVE_DATA_PATTERNS=(
"\.warp" # Warp terminal configs/themes
"/\.config/" # Standard Unix config directory
"/themes/" # Theme customizations
"/settings/" # Settings directories
"/Application Support/[^/]+/User Data" # Chrome/Electron user data
"/Preferences/[^/]+\.plist" # User preference files
"/Documents/" # User documents
"/\.ssh/" # SSH keys and configs (critical)
"/\.gnupg/" # GPG keys (critical)
)
# Join patterns into a single regex for grep.
SENSITIVE_DATA_REGEX=$(
IFS='|'
echo "${SENSITIVE_DATA_PATTERNS[*]}"
)
# Decode and validate base64 file list (safe for set -e).
decode_file_list() {
local encoded="$1"
local app_name="$2"
local decoded
# macOS uses -D, GNU uses -d. Always return 0 for set -e safety.
if ! decoded=$(printf '%s' "$encoded" | base64 -D 2> /dev/null); then
if ! decoded=$(printf '%s' "$encoded" | base64 -d 2> /dev/null); then
log_error "Failed to decode file list for $app_name" >&2
echo ""
return 0 # Return success with empty string
fi
fi
if [[ "$decoded" =~ $'\0' ]]; then
log_warning "File list for $app_name contains null bytes, rejecting" >&2
echo ""
return 0 # Return success with empty string
fi
while IFS= read -r line; do
if [[ -n "$line" && ! "$line" =~ ^/ ]]; then
log_warning "Invalid path in file list for $app_name: $line" >&2
echo ""
return 0 # Return success with empty string
fi
done <<< "$decoded"
echo "$decoded"
return 0
}
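# Encoding sketch (illustrative; the sample paths are invented): the matching encode
# step used later in this file flattens a newline-separated file list onto a single
# record line, and decode_file_list reverses it portably across BSD (-D) and GNU (-d) base64.
demo_encode_decode_roundtrip() {
    local files=$'/Users/me/Library/Caches/com.example.foo\n/Users/me/Library/Preferences/com.example.foo.plist'
    local encoded
    encoded=$(printf '%s' "$files" | base64 | tr -d '\n')
    decode_file_list "$encoded" "Foo"   # echoes the original newline-separated list
}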
# Note: find_app_files() and calculate_total_size() are in lib/core/common.sh.
# Stop Launch Agents/Daemons for an app.
stop_launch_services() {
local bundle_id="$1"
local has_system_files="${2:-false}"
[[ -z "$bundle_id" || "$bundle_id" == "unknown" ]] && return 0
if [[ -d ~/Library/LaunchAgents ]]; then
while IFS= read -r -d '' plist; do
launchctl unload "$plist" 2> /dev/null || true
done < <(find ~/Library/LaunchAgents -maxdepth 1 -name "${bundle_id}*.plist" -print0 2> /dev/null)
fi
if [[ "$has_system_files" == "true" ]]; then
if [[ -d /Library/LaunchAgents ]]; then
while IFS= read -r -d '' plist; do
sudo launchctl unload "$plist" 2> /dev/null || true
done < <(find /Library/LaunchAgents -maxdepth 1 -name "${bundle_id}*.plist" -print0 2> /dev/null)
fi
if [[ -d /Library/LaunchDaemons ]]; then
while IFS= read -r -d '' plist; do
sudo launchctl unload "$plist" 2> /dev/null || true
done < <(find /Library/LaunchDaemons -maxdepth 1 -name "${bundle_id}*.plist" -print0 2> /dev/null)
fi
fi
}
# Remove files (handles symlinks, optional sudo).
remove_file_list() {
local file_list="$1"
local use_sudo="${2:-false}"
local count=0
while IFS= read -r file; do
[[ -n "$file" && -e "$file" ]] || continue
if [[ -L "$file" ]]; then
if [[ "$use_sudo" == "true" ]]; then
sudo rm "$file" 2> /dev/null && ((count++)) || true
else
rm "$file" 2> /dev/null && ((count++)) || true
fi
else
if [[ "$use_sudo" == "true" ]]; then
safe_sudo_remove "$file" && ((count++)) || true
else
safe_remove "$file" true && ((count++)) || true
fi
fi
done <<< "$file_list"
echo "$count"
}
# Batch uninstall with single confirmation.
batch_uninstall_applications() {
local total_size_freed=0
# shellcheck disable=SC2154
if [[ ${#selected_apps[@]} -eq 0 ]]; then
log_warning "No applications selected for uninstallation"
return 0
fi
# Pre-scan: running apps, sudo needs, size.
local -a running_apps=()
local -a sudo_apps=()
local total_estimated_size=0
local -a app_details=()
if [[ -t 1 ]]; then start_inline_spinner "Scanning files..."; fi
for selected_app in "${selected_apps[@]}"; do
[[ -z "$selected_app" ]] && continue
IFS='|' read -r _ app_path app_name bundle_id _ _ <<< "$selected_app"
# Check running app by bundle executable if available.
local exec_name=""
if [[ -e "$app_path/Contents/Info.plist" ]]; then
exec_name=$(defaults read "$app_path/Contents/Info.plist" CFBundleExecutable 2> /dev/null || echo "")
fi
local check_pattern="${exec_name:-$app_name}"
if pgrep -x "$check_pattern" > /dev/null 2>&1; then
running_apps+=("$app_name")
fi
# Sudo needed if bundle owner/dir is not writable or system files exist.
local needs_sudo=false
local app_owner=$(get_file_owner "$app_path")
local current_user=$(whoami)
if [[ ! -w "$(dirname "$app_path")" ]] ||
[[ "$app_owner" == "root" ]] ||
[[ -n "$app_owner" && "$app_owner" != "$current_user" ]]; then
needs_sudo=true
fi
# Size estimate includes related and system files.
local app_size_kb=$(get_path_size_kb "$app_path")
local related_files=$(find_app_files "$bundle_id" "$app_name")
local related_size_kb=$(calculate_total_size "$related_files")
# system_files is a newline-separated string, not an array.
# shellcheck disable=SC2178,SC2128
local system_files=$(find_app_system_files "$bundle_id" "$app_name")
# shellcheck disable=SC2128
local system_size_kb=$(calculate_total_size "$system_files")
local total_kb=$((app_size_kb + related_size_kb + system_size_kb))
((total_estimated_size += total_kb))
# shellcheck disable=SC2128
if [[ -n "$system_files" ]]; then
needs_sudo=true
fi
if [[ "$needs_sudo" == "true" ]]; then
sudo_apps+=("$app_name")
fi
# Check for sensitive user data once.
local has_sensitive_data="false"
if [[ -n "$related_files" ]] && echo "$related_files" | grep -qE "$SENSITIVE_DATA_REGEX"; then
has_sensitive_data="true"
fi
# Store details for later use (base64 keeps lists on one line).
local encoded_files
encoded_files=$(printf '%s' "$related_files" | base64 | tr -d '\n')
local encoded_system_files
encoded_system_files=$(printf '%s' "$system_files" | base64 | tr -d '\n')
app_details+=("$app_name|$app_path|$bundle_id|$total_kb|$encoded_files|$encoded_system_files|$has_sensitive_data|$needs_sudo")
done
if [[ -t 1 ]]; then stop_inline_spinner; fi
local size_display=$(bytes_to_human "$((total_estimated_size * 1024))")
echo ""
echo -e "${PURPLE_BOLD}Files to be removed:${NC}"
echo ""
# Warn if user data is detected.
local has_user_data=false
for detail in "${app_details[@]}"; do
IFS='|' read -r _ _ _ _ _ _ has_sensitive_data <<< "$detail"
if [[ "$has_sensitive_data" == "true" ]]; then
has_user_data=true
break
fi
done
if [[ "$has_user_data" == "true" ]]; then
echo -e "${YELLOW}${ICON_WARNING}${NC} ${YELLOW}Note: Some apps contain user configurations/themes${NC}"
echo ""
fi
for detail in "${app_details[@]}"; do
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo_flag <<< "$detail"
local related_files=$(decode_file_list "$encoded_files" "$app_name")
local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
local app_size_display=$(bytes_to_human "$((total_kb * 1024))")
echo -e "${BLUE}${ICON_CONFIRM}${NC} ${app_name} ${GRAY}(${app_size_display})${NC}"
echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${app_path/$HOME/~}"
# Show related files (limit to 5).
local file_count=0
local max_files=5
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $file_count -lt $max_files ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${file/$HOME/~}"
fi
((file_count++))
fi
done <<< "$related_files"
# Show system files (limit to 5).
local sys_file_count=0
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $sys_file_count -lt $max_files ]]; then
echo -e " ${BLUE}${ICON_SOLID}${NC} System: $file"
fi
((sys_file_count++))
fi
done <<< "$system_files"
local total_hidden=$((file_count > max_files ? file_count - max_files : 0))
((total_hidden += sys_file_count > max_files ? sys_file_count - max_files : 0))
if [[ $total_hidden -gt 0 ]]; then
echo -e " ${GRAY} ... and ${total_hidden} more files${NC}"
fi
done
# Confirmation before requesting sudo.
local app_total=${#selected_apps[@]}
local app_text="app"
[[ $app_total -gt 1 ]] && app_text="apps"
echo ""
local removal_note="Remove ${app_total} ${app_text}"
[[ -n "$size_display" ]] && removal_note+=" (${size_display})"
if [[ ${#running_apps[@]} -gt 0 ]]; then
removal_note+=" ${YELLOW}[Running]${NC}"
fi
echo -ne "${PURPLE}${ICON_ARROW}${NC} ${removal_note} ${GREEN}Enter${NC} confirm, ${GRAY}ESC${NC} cancel: "
drain_pending_input # Clean up any pending input before confirmation
IFS= read -r -s -n1 key || key=""
drain_pending_input # Clean up any escape sequence remnants
case "$key" in
$'\e' | q | Q)
echo ""
echo ""
return 0
;;
"" | $'\n' | $'\r' | y | Y)
printf "\r\033[K" # Clear the prompt line
;;
*)
echo ""
echo ""
return 0
;;
esac
# Request sudo if needed.
if [[ ${#sudo_apps[@]} -gt 0 ]]; then
if ! sudo -n true 2> /dev/null; then
if ! request_sudo_access "Admin required for system apps: ${sudo_apps[*]}"; then
echo ""
log_error "Admin access denied"
return 1
fi
fi
# Keep sudo alive during uninstall.
parent_pid=$$
(while true; do
if ! kill -0 "$parent_pid" 2> /dev/null; then
exit 0
fi
sudo -n true
sleep 60
done 2> /dev/null) &
sudo_keepalive_pid=$!
fi
if [[ -t 1 ]]; then start_inline_spinner "Uninstalling apps..."; fi
# Perform uninstallations (silent mode, show results at end).
if [[ -t 1 ]]; then stop_inline_spinner; fi
local success_count=0 failed_count=0
local -a failed_items=()
local -a success_items=()
for detail in "${app_details[@]}"; do
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo <<< "$detail"
local related_files=$(decode_file_list "$encoded_files" "$app_name")
local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
local reason=""
# Stop Launch Agents/Daemons before removal.
local has_system_files="false"
[[ -n "$system_files" ]] && has_system_files="true"
stop_launch_services "$bundle_id" "$has_system_files"
if ! force_kill_app "$app_name" "$app_path"; then
reason="still running"
fi
# Remove the application only if not running.
if [[ -z "$reason" ]]; then
if [[ "$needs_sudo" == true ]]; then
if ! safe_sudo_remove "$app_path"; then
local app_owner=$(get_file_owner "$app_path")
local current_user=$(whoami)
if [[ -n "$app_owner" && "$app_owner" != "$current_user" && "$app_owner" != "root" ]]; then
reason="owned by $app_owner"
else
reason="permission denied"
fi
fi
else
safe_remove "$app_path" true || reason="remove failed"
fi
fi
# Remove related files if app removal succeeded.
if [[ -z "$reason" ]]; then
remove_file_list "$related_files" "false" > /dev/null
remove_file_list "$system_files" "true" > /dev/null
# Clean up macOS defaults (preference domains).
if [[ -n "$bundle_id" && "$bundle_id" != "unknown" ]]; then
if defaults read "$bundle_id" &> /dev/null; then
defaults delete "$bundle_id" 2> /dev/null || true
fi
# ByHost preferences (machine-specific).
if [[ -d ~/Library/Preferences/ByHost ]]; then
find ~/Library/Preferences/ByHost -maxdepth 1 -name "${bundle_id}.*.plist" -delete 2> /dev/null || true
fi
fi
((total_size_freed += total_kb))
((success_count++))
((files_cleaned++))
((total_items++))
success_items+=("$app_name")
else
((failed_count++))
failed_items+=("$app_name:$reason")
fi
done
# Summary
local freed_display
freed_display=$(bytes_to_human "$((total_size_freed * 1024))")
local summary_status="success"
local -a summary_details=()
if [[ $success_count -gt 0 ]]; then
local success_list="${success_items[*]}"
local success_text="app"
[[ $success_count -gt 1 ]] && success_text="apps"
local success_line="Removed ${success_count} ${success_text}"
if [[ -n "$freed_display" ]]; then
success_line+=", freed ${GREEN}${freed_display}${NC}"
fi
# Format app list with max 3 per line.
if [[ -n "$success_list" ]]; then
local idx=0
local is_first_line=true
local current_line=""
for app_name in "${success_items[@]}"; do
local display_item="${GREEN}${app_name}${NC}"
if ((idx % 3 == 0)); then
if [[ -n "$current_line" ]]; then
summary_details+=("$current_line")
fi
if [[ "$is_first_line" == true ]]; then
current_line="${success_line}: $display_item"
is_first_line=false
else
current_line="$display_item"
fi
else
current_line="$current_line, $display_item"
fi
((idx++))
done
if [[ -n "$current_line" ]]; then
summary_details+=("$current_line")
fi
else
summary_details+=("$success_line")
fi
fi
if [[ $failed_count -gt 0 ]]; then
summary_status="warn"
local failed_names=()
for item in "${failed_items[@]}"; do
local name=${item%%:*}
failed_names+=("$name")
done
local failed_list="${failed_names[*]}"
local reason_summary="could not be removed"
if [[ $failed_count -eq 1 ]]; then
local first_reason=${failed_items[0]#*:}
case "$first_reason" in
still*running*) reason_summary="is still running" ;;
remove*failed*) reason_summary="could not be removed" ;;
permission*denied*) reason_summary="permission denied" ;;
owned*by*) reason_summary="$first_reason (try with sudo)" ;;
*) reason_summary="$first_reason" ;;
esac
fi
summary_details+=("Failed: ${RED}${failed_list}${NC} ${reason_summary}")
fi
if [[ $success_count -eq 0 && $failed_count -eq 0 ]]; then
summary_status="info"
summary_details+=("No applications were uninstalled.")
fi
local title="Uninstall complete"
if [[ "$summary_status" == "warn" ]]; then
title="Uninstall incomplete"
fi
print_summary_block "$title" "${summary_details[@]}"
printf '\n'
# Clean up Dock entries for uninstalled apps.
if [[ $success_count -gt 0 ]]; then
local -a removed_paths=()
for detail in "${app_details[@]}"; do
IFS='|' read -r app_name app_path _ _ _ _ <<< "$detail"
for success_name in "${success_items[@]}"; do
if [[ "$success_name" == "$app_name" ]]; then
removed_paths+=("$app_path")
break
fi
done
done
if [[ ${#removed_paths[@]} -gt 0 ]]; then
remove_apps_from_dock "${removed_paths[@]}" 2> /dev/null || true
fi
fi
# Clean up sudo keepalive if it was started.
if [[ -n "${sudo_keepalive_pid:-}" ]]; then
kill "$sudo_keepalive_pid" 2> /dev/null || true
wait "$sudo_keepalive_pid" 2> /dev/null || true
sudo_keepalive_pid=""
fi
# Invalidate cache if any apps were successfully uninstalled.
if [[ $success_count -gt 0 ]]; then
local cache_file="$HOME/.cache/mole/app_scan_cache"
rm -f "$cache_file" 2> /dev/null || true
fi
((total_size_cleaned += total_size_freed))
unset failed_items
}
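# Record-format note (illustrative, inferred from the reads above): each app_details
# entry is one pipe-delimited line with eight fields,
#   name|app_path|bundle_id|total_kb|encoded_files|encoded_system_files|has_sensitive_data|needs_sudo
# and a hypothetical consumer would unpack it the same way the loops above do.
demo_parse_app_detail() {
    local detail="Foo|/Applications/Foo.app|com.example.foo|122880|<b64>|<b64>|false|false"
    local app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo
    IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo <<< "$detail"
    printf '%s: %s KB, needs_sudo=%s\n' "$app_name" "$total_kb" "$needs_sudo"
}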