Mirror of https://github.com/tw93/Mole.git (synced 2026-02-07 10:00:41 +00:00)

Merge branch 'dev' into dev
@@ -135,9 +135,11 @@ scan_installed_apps() {
) &
pids+=($!)
debug_log "Waiting for ${#pids[@]} background processes: ${pids[*]}"
for pid in "${pids[@]}"; do
wait "$pid" 2> /dev/null || true
done
if [[ ${#pids[@]} -gt 0 ]]; then
for pid in "${pids[@]}"; do
wait "$pid" 2> /dev/null || true
done
fi
debug_log "All background processes completed"
cat "$scan_tmp_dir"/*.txt >> "$installed_bundles" 2> /dev/null || true
safe_remove "$scan_tmp_dir" true
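Editor's note: the new guard above only enters the wait loop when the pids array is non-empty, which likely protects the expansion of an empty array (an error under set -u on older bash). A minimal, self-contained sketch of the same fan-out/wait/merge pattern, using hypothetical paths rather than the project's helpers:

demo_parallel_scan() {
    # Launch one background worker per source directory and remember its PID.
    local tmp_dir
    tmp_dir=$(mktemp -d)
    local -a pids=()
    local i=0
    for src in /Applications "$HOME/Applications"; do
        (ls "$src" 2> /dev/null > "$tmp_dir/scan_$i.txt") &
        pids+=($!)
        i=$((i + 1))
    done
    # Wait only if workers were actually started.
    if [[ ${#pids[@]} -gt 0 ]]; then
        for pid in "${pids[@]}"; do
            wait "$pid" 2> /dev/null || true
        done
    fi
    # Merge per-worker output, then clean up.
    cat "$tmp_dir"/*.txt 2> /dev/null
    rm -rf "$tmp_dir"
}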
@@ -279,29 +281,31 @@ clean_orphaned_app_data() {
for pat in "${pattern_arr[@]}"; do
file_patterns+=("$base_path/$pat")
done
for item_path in "${file_patterns[@]}"; do
local iteration_count=0
for match in $item_path; do
[[ -e "$match" ]] || continue
((iteration_count++))
if [[ $iteration_count -gt $MOLE_MAX_ORPHAN_ITERATIONS ]]; then
break
fi
local bundle_id=$(basename "$match")
bundle_id="${bundle_id%.savedState}"
bundle_id="${bundle_id%.binarycookies}"
if is_bundle_orphaned "$bundle_id" "$match" "$installed_bundles"; then
local size_kb
size_kb=$(get_path_size_kb "$match")
if [[ -z "$size_kb" || "$size_kb" == "0" ]]; then
continue
if [[ ${#file_patterns[@]} -gt 0 ]]; then
for item_path in "${file_patterns[@]}"; do
local iteration_count=0
for match in $item_path; do
[[ -e "$match" ]] || continue
((iteration_count++))
if [[ $iteration_count -gt $MOLE_MAX_ORPHAN_ITERATIONS ]]; then
break
fi
safe_clean "$match" "Orphaned $label: $bundle_id"
((orphaned_count++))
((total_orphaned_kb += size_kb))
fi
local bundle_id=$(basename "$match")
bundle_id="${bundle_id%.savedState}"
bundle_id="${bundle_id%.binarycookies}"
if is_bundle_orphaned "$bundle_id" "$match" "$installed_bundles"; then
local size_kb
size_kb=$(get_path_size_kb "$match")
if [[ -z "$size_kb" || "$size_kb" == "0" ]]; then
continue
fi
safe_clean "$match" "Orphaned $label: $bundle_id"
((orphaned_count++))
((total_orphaned_kb += size_kb))
fi
done
done
done
fi
done
stop_section_spinner
if [[ $orphaned_count -gt 0 ]]; then

@@ -75,6 +75,41 @@ clean_dev_rust() {
safe_clean ~/.cargo/git/* "Cargo git cache"
safe_clean ~/.rustup/downloads/* "Rust downloads cache"
}

# Helper: Check for multiple versions in a directory.
# Args: $1=directory, $2=tool_name, $3+=additional_lines
check_multiple_versions() {
local dir="$1"
local tool_name="$2"
shift 2
local -a additional_lines=("$@")

if [[ ! -d "$dir" ]]; then
return 0
fi

local count
count=$(find "$dir" -mindepth 1 -maxdepth 1 -type d 2> /dev/null | wc -l | tr -d ' ')

if [[ "$count" -gt 1 ]]; then
note_activity
echo -e " Found ${GREEN}${count}${NC} ${tool_name}"
for line in "${additional_lines[@]}"; do
echo -e " $line"
done
fi
}

# Check for multiple Rust toolchains.
check_rust_toolchains() {
command -v rustup > /dev/null 2>&1 || return 0

check_multiple_versions \
"$HOME/.rustup/toolchains" \
"Rust toolchains" \
"You can list them with: ${GRAY}rustup toolchain list${NC}" \
"Remove unused with: ${GRAY}rustup toolchain uninstall <name>${NC}"
}
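Editor's sketch (not part of the commit): check_multiple_versions is written generically, so other ecosystems could reuse it the same way check_rust_toolchains and check_android_ndk below do. A hypothetical caller for nvm-managed Node installs, assuming the same GRAY/NC color variables are available:

check_node_versions() {
    # nvm keeps one directory per installed Node version.
    [[ -d "$HOME/.nvm/versions/node" ]] || return 0

    check_multiple_versions \
        "$HOME/.nvm/versions/node" \
        "Node versions" \
        "List them with: ${GRAY}nvm ls${NC}" \
        "Remove unused with: ${GRAY}nvm uninstall <version>${NC}"
}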
# Docker caches (guarded by daemon check).
clean_dev_docker() {
if command -v docker > /dev/null 2>&1; then
@@ -130,19 +165,13 @@ clean_dev_frontend() {
safe_clean ~/.cache/eslint/* "ESLint cache"
safe_clean ~/.cache/prettier/* "Prettier cache"
}
# Mobile dev caches (can be large).
# Check for multiple Android NDK versions.
check_android_ndk() {
local ndk_dir="$HOME/Library/Android/sdk/ndk"
if [[ -d "$ndk_dir" ]]; then
local count
count=$(find "$ndk_dir" -mindepth 1 -maxdepth 1 -type d 2> /dev/null | wc -l | tr -d ' ')
if [[ "$count" -gt 1 ]]; then
note_activity
echo -e " Found ${GREEN}${count}${NC} Android NDK versions"
echo -e " You can delete unused versions manually: ${ndk_dir}"
fi
fi
check_multiple_versions \
"$HOME/Library/Android/sdk/ndk" \
"Android NDK versions" \
"Manage in: ${GRAY}Android Studio → SDK Manager${NC}" \
"Or manually at: ${GRAY}\$HOME/Library/Android/sdk/ndk${NC}"
}

clean_dev_mobile() {
@@ -233,6 +262,23 @@ clean_dev_misc() {
safe_clean ~/Library/Caches/SentryCrash/* "Sentry crash reports"
safe_clean ~/Library/Caches/KSCrash/* "KSCrash reports"
safe_clean ~/Library/Caches/com.crashlytics.data/* "Crashlytics data"
safe_clean ~/Library/Application\ Support/Antigravity/Cache/* "Antigravity cache"
safe_clean ~/Library/Application\ Support/Antigravity/Code\ Cache/* "Antigravity code cache"
safe_clean ~/Library/Application\ Support/Antigravity/GPUCache/* "Antigravity GPU cache"
safe_clean ~/Library/Application\ Support/Antigravity/DawnGraphiteCache/* "Antigravity Dawn cache"
safe_clean ~/Library/Application\ Support/Antigravity/DawnWebGPUCache/* "Antigravity WebGPU cache"
# Filo (Electron)
safe_clean ~/Library/Application\ Support/Filo/production/Cache/* "Filo cache"
safe_clean ~/Library/Application\ Support/Filo/production/Code\ Cache/* "Filo code cache"
safe_clean ~/Library/Application\ Support/Filo/production/GPUCache/* "Filo GPU cache"
safe_clean ~/Library/Application\ Support/Filo/production/DawnGraphiteCache/* "Filo Dawn cache"
safe_clean ~/Library/Application\ Support/Filo/production/DawnWebGPUCache/* "Filo WebGPU cache"
# Claude (Electron)
safe_clean ~/Library/Application\ Support/Claude/Cache/* "Claude cache"
safe_clean ~/Library/Application\ Support/Claude/Code\ Cache/* "Claude code cache"
safe_clean ~/Library/Application\ Support/Claude/GPUCache/* "Claude GPU cache"
safe_clean ~/Library/Application\ Support/Claude/DawnGraphiteCache/* "Claude Dawn cache"
safe_clean ~/Library/Application\ Support/Claude/DawnWebGPUCache/* "Claude WebGPU cache"
}
# Shell and VCS leftovers.
clean_dev_shell() {
@@ -256,24 +302,28 @@ clean_sqlite_temp_files() {
return 0
}
# Elixir/Erlang ecosystem.
# Note: ~/.mix/archives contains installed Mix tools - excluded from cleanup
clean_dev_elixir() {
# safe_clean ~/.mix/archives/* "Mix cache"
safe_clean ~/.hex/cache/* "Hex cache"
}
# Haskell ecosystem.
# Note: ~/.stack/programs contains Stack-installed GHC compilers - excluded from cleanup
clean_dev_haskell() {
safe_clean ~/.cabal/packages/* "Cabal install cache"
# safe_clean ~/.stack/programs/* "Stack cache"
}
# OCaml ecosystem.
clean_dev_ocaml() {
safe_clean ~/.opam/download-cache/* "Opam cache"
}
# Editor caches.
# Note: ~/Library/Application Support/Code/User/workspaceStorage contains workspace settings - excluded from cleanup
clean_dev_editors() {
safe_clean ~/Library/Caches/com.microsoft.VSCode/Cache/* "VS Code cached data"
safe_clean ~/Library/Application\ Support/Code/CachedData/* "VS Code cached data"
# safe_clean ~/Library/Application\ Support/Code/User/workspaceStorage/* "VS Code workspace storage"
safe_clean ~/Library/Application\ Support/Code/DawnGraphiteCache/* "VS Code Dawn cache"
safe_clean ~/Library/Application\ Support/Code/DawnWebGPUCache/* "VS Code WebGPU cache"
safe_clean ~/Library/Application\ Support/Code/GPUCache/* "VS Code GPU cache"
safe_clean ~/Library/Application\ Support/Code/CachedExtensionVSIXs/* "VS Code extension cache"
safe_clean ~/Library/Caches/Zed/* "Zed cache"
}
# Main developer tools cleanup sequence.
@@ -284,6 +334,7 @@ clean_developer_tools() {
clean_dev_python
clean_dev_go
clean_dev_rust
check_rust_toolchains
clean_dev_docker
clean_dev_cloud
clean_dev_nix

@@ -65,6 +65,14 @@ readonly PURGE_CONFIG_FILE="$HOME/.config/mole/purge_paths"
PURGE_SEARCH_PATHS=()

# Project indicators for container detection.
# Monorepo indicators (higher priority)
readonly MONOREPO_INDICATORS=(
"lerna.json"
"pnpm-workspace.yaml"
"nx.json"
"rush.json"
)

readonly PROJECT_INDICATORS=(
"package.json"
"Cargo.toml"
@@ -348,7 +356,7 @@ scan_purge_targets() {
# Escape regex special characters in target names for fd patterns
local escaped_targets=()
for target in "${PURGE_TARGETS[@]}"; do
escaped_targets+=("$(printf '%s' "$target" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g')")
escaped_targets+=("^$(printf '%s' "$target" | sed -e 's/[][(){}.^$*+?|\\]/\\&/g')\$")
done
local pattern="($(
IFS='|'
@@ -762,6 +770,18 @@ clean_project_artifacts() {
for pid in "${scan_pids[@]+"${scan_pids[@]}"}"; do
wait "$pid" 2> /dev/null || true
done

# Stop the scanning monitor (removes purge_scanning file to signal completion)
local stats_dir="${XDG_CACHE_HOME:-$HOME/.cache}/mole"
rm -f "$stats_dir/purge_scanning" 2> /dev/null || true

# Give monitor process time to exit and clear its output
if [[ -t 1 ]]; then
sleep 0.2
# Clear the scanning line but preserve the title
printf '\n\033[K'
fi

# Collect all results
for scan_output in "${scan_temps[@]+"${scan_temps[@]}"}"; do
if [[ -f "$scan_output" ]]; then
@@ -803,71 +823,204 @@ clean_project_artifacts() {
# Strategy: Find the nearest ancestor directory containing a project indicator file
get_project_name() {
local path="$1"
local artifact_name
artifact_name=$(basename "$path")

# Start from the parent of the artifact and walk up
local current_dir
current_dir=$(dirname "$path")
local monorepo_root=""
local project_root=""

# Single pass: check both monorepo and project indicators
while [[ "$current_dir" != "/" && "$current_dir" != "$HOME" && -n "$current_dir" ]]; do
# Check if current directory contains any project indicator
for indicator in "${PROJECT_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
# Found a project root, return its name
basename "$current_dir"
return 0
fi
done
# Move up one level
# First check for monorepo indicators (higher priority)
if [[ -z "$monorepo_root" ]]; then
for indicator in "${MONOREPO_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
monorepo_root="$current_dir"
break
fi
done
fi

# Then check for project indicators (save first match)
if [[ -z "$project_root" ]]; then
for indicator in "${PROJECT_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
project_root="$current_dir"
break
fi
done
fi

# If we found monorepo, we can stop (monorepo always wins)
if [[ -n "$monorepo_root" ]]; then
break
fi

# If we found project but still checking for monorepo above
# (only stop if we're beyond reasonable depth)
local depth=$(echo "${current_dir#"$HOME"}" | LC_ALL=C tr -cd '/' | wc -c | tr -d ' ')
if [[ -n "$project_root" && $depth -lt 2 ]]; then
break
fi

current_dir=$(dirname "$current_dir")
done

# Fallback: try the old logic (first directory under search root)
local search_roots=()
if [[ ${#PURGE_SEARCH_PATHS[@]} -gt 0 ]]; then
search_roots=("${PURGE_SEARCH_PATHS[@]}")
# Determine result: monorepo > project > fallback
local result=""
if [[ -n "$monorepo_root" ]]; then
result=$(basename "$monorepo_root")
elif [[ -n "$project_root" ]]; then
result=$(basename "$project_root")
else
search_roots=("$HOME/www" "$HOME/dev" "$HOME/Projects")
# Fallback: first directory under search root
local search_roots=()
if [[ ${#PURGE_SEARCH_PATHS[@]} -gt 0 ]]; then
search_roots=("${PURGE_SEARCH_PATHS[@]}")
else
search_roots=("$HOME/www" "$HOME/dev" "$HOME/Projects")
fi
for root in "${search_roots[@]}"; do
root="${root%/}"
if [[ -n "$root" && "$path" == "$root/"* ]]; then
local relative_path="${path#"$root"/}"
result=$(echo "$relative_path" | cut -d'/' -f1)
break
fi
done

# Final fallback: use grandparent directory
if [[ -z "$result" ]]; then
result=$(dirname "$(dirname "$path")" | xargs basename)
fi
fi
for root in "${search_roots[@]}"; do
root="${root%/}"
if [[ -n "$root" && "$path" == "$root/"* ]]; then
local relative_path="${path#"$root"/}"
echo "$relative_path" | cut -d'/' -f1
return 0

echo "$result"
}

# Helper to get project path (more complete than just project name)
# For ~/www/pake/src-tauri/target -> returns "~/www/pake"
# For ~/work/code/MyProject/node_modules -> returns "~/work/code/MyProject"
# Shows the full path relative to HOME with ~ prefix for better clarity
get_project_path() {
local path="$1"

local current_dir
current_dir=$(dirname "$path")
local monorepo_root=""
local project_root=""

# Single pass: check both monorepo and project indicators
while [[ "$current_dir" != "/" && "$current_dir" != "$HOME" && -n "$current_dir" ]]; do
# First check for monorepo indicators (higher priority)
if [[ -z "$monorepo_root" ]]; then
for indicator in "${MONOREPO_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
monorepo_root="$current_dir"
break
fi
done
fi

# Then check for project indicators (save first match)
if [[ -z "$project_root" ]]; then
for indicator in "${PROJECT_INDICATORS[@]}"; do
if [[ -e "$current_dir/$indicator" ]]; then
project_root="$current_dir"
break
fi
done
fi

# If we found monorepo, we can stop (monorepo always wins)
if [[ -n "$monorepo_root" ]]; then
break
fi

# If we found project but still checking for monorepo above
local depth=$(echo "${current_dir#"$HOME"}" | LC_ALL=C tr -cd '/' | wc -c | tr -d ' ')
if [[ -n "$project_root" && $depth -lt 2 ]]; then
break
fi

current_dir=$(dirname "$current_dir")
done

# Determine result: monorepo > project > fallback
local result=""
if [[ -n "$monorepo_root" ]]; then
result="$monorepo_root"
elif [[ -n "$project_root" ]]; then
result="$project_root"
else
# Fallback: use parent directory of artifact
result=$(dirname "$path")
fi

# Convert to ~ format for cleaner display
result="${result/#$HOME/~}"
echo "$result"
}
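Editor's illustration (hypothetical layout, not part of the commit): how the two helpers differ for the example path in the comments above, assuming ~/www/pake carries a monorepo marker such as pnpm-workspace.yaml while src-tauri only holds a Cargo.toml.

# get_project_name "$HOME/www/pake/src-tauri/target"   # -> "pake"       (monorepo root wins)
# get_project_path "$HOME/www/pake/src-tauri/target"   # -> "~/www/pake" (same root, shown as a ~ path)
# Without a monorepo marker, both helpers fall back to the nearest project root
# (here "src-tauri" / "~/www/pake/src-tauri") or to the search-root logic.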
# Helper to get artifact display name
# For duplicate artifact names within same project, include parent directory for context
get_artifact_display_name() {
local path="$1"
local artifact_name=$(basename "$path")
local project_name=$(get_project_name "$path")
local parent_name=$(basename "$(dirname "$path")")

# Check if there are other items with same artifact name AND same project
local has_duplicate=false
for other_item in "${safe_to_clean[@]}"; do
if [[ "$other_item" != "$path" && "$(basename "$other_item")" == "$artifact_name" ]]; then
# Same artifact name, check if same project
if [[ "$(get_project_name "$other_item")" == "$project_name" ]]; then
has_duplicate=true
break
fi
fi
done

# Final fallback: use grandparent directory
dirname "$(dirname "$path")" | xargs basename
# If duplicate exists in same project and parent is not the project itself, show parent/artifact
if [[ "$has_duplicate" == "true" && "$parent_name" != "$project_name" && "$parent_name" != "." && "$parent_name" != "/" ]]; then
echo "$parent_name/$artifact_name"
else
echo "$artifact_name"
fi
}
# Format display with alignment (like app_selector)
format_purge_display() {
local project_name="$1"
local project_path="$1"
local artifact_type="$2"
local size_str="$3"
# Terminal width for alignment
local terminal_width=$(tput cols 2> /dev/null || echo 80)
local fixed_width=28 # Reserve for type and size
local fixed_width=28 # Reserve for size and artifact type (9 + 3 + 16)
local available_width=$((terminal_width - fixed_width))
# Bounds: 24-35 chars for project name
[[ $available_width -lt 24 ]] && available_width=24
[[ $available_width -gt 35 ]] && available_width=35
# Truncate project name if needed
local truncated_name=$(truncate_by_display_width "$project_name" "$available_width")
local current_width=$(get_display_width "$truncated_name")
local char_count=${#truncated_name}
# Bounds: 30-50 chars for project path (increased to accommodate full paths)
[[ $available_width -lt 30 ]] && available_width=30
[[ $available_width -gt 50 ]] && available_width=50
# Truncate project path if needed
local truncated_path=$(truncate_by_display_width "$project_path" "$available_width")
local current_width=$(get_display_width "$truncated_path")
local char_count=${#truncated_path}
local padding=$((available_width - current_width))
local printf_width=$((char_count + padding))
# Format: "project_name size | artifact_type"
printf "%-*s %9s | %-13s" "$printf_width" "$truncated_name" "$size_str" "$artifact_type"
# Format: "project_path size | artifact_type"
printf "%-*s %9s | %-13s" "$printf_width" "$truncated_path" "$size_str" "$artifact_type"
}
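Editor's note (illustration only): printf's %-*s field width counts characters, not terminal columns, so wide (e.g. CJK) project paths would misalign. The function above therefore converts a column budget into the character width printf needs:

#   padding      = available_width - display_width(path)   # columns still missing
#   printf_width = char_count(path) + padding              # field width handed to printf
# Example: a 6-character name that occupies 12 columns inside a 20-column budget
# gives padding = 8 and printf_width = 14, i.e. the name plus 8 trailing spaces = 20 columns.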
# Build menu options - one line per artifact
for item in "${safe_to_clean[@]}"; do
local project_name=$(get_project_name "$item")
local artifact_type=$(basename "$item")
local project_path=$(get_project_path "$item")
local artifact_type=$(get_artifact_display_name "$item")
local size_kb=$(get_dir_size_kb "$item")

# Skip empty directories (0 bytes)
if [[ $size_kb -eq 0 ]]; then
continue
fi

local size_human=$(bytes_to_human "$((size_kb * 1024))")
# Check if recent
local is_recent=false
@@ -877,11 +1030,48 @@ clean_project_artifacts() {
break
fi
done
menu_options+=("$(format_purge_display "$project_name" "$artifact_type" "$size_human")")
menu_options+=("$(format_purge_display "$project_path" "$artifact_type" "$size_human")")
item_paths+=("$item")
item_sizes+=("$size_kb")
item_recent_flags+=("$is_recent")
done

# Sort by size descending (largest first) - requested in issue #311
# Use external sort for better performance with many items
if [[ ${#item_sizes[@]} -gt 0 ]]; then
# Create temporary file with index|size pairs
local sort_temp
sort_temp=$(mktemp)
for ((i = 0; i < ${#item_sizes[@]}; i++)); do
printf '%d|%d\n' "$i" "${item_sizes[i]}"
done > "$sort_temp"

# Sort by size (field 2) descending, extract indices
local -a sorted_indices=()
while IFS='|' read -r idx size; do
sorted_indices+=("$idx")
done < <(sort -t'|' -k2,2nr "$sort_temp")
rm -f "$sort_temp"

# Rebuild arrays in sorted order
local -a sorted_menu_options=()
local -a sorted_item_paths=()
local -a sorted_item_sizes=()
local -a sorted_item_recent_flags=()

for idx in "${sorted_indices[@]}"; do
sorted_menu_options+=("${menu_options[idx]}")
sorted_item_paths+=("${item_paths[idx]}")
sorted_item_sizes+=("${item_sizes[idx]}")
sorted_item_recent_flags+=("${item_recent_flags[idx]}")
done

# Replace original arrays with sorted versions
menu_options=("${sorted_menu_options[@]}")
item_paths=("${sorted_item_paths[@]}")
item_sizes=("${sorted_item_sizes[@]}")
item_recent_flags=("${sorted_item_recent_flags[@]}")
fi
if [[ -t 1 ]]; then
stop_inline_spinner
fi
@@ -925,7 +1115,7 @@ clean_project_artifacts() {
for idx in "${selected_indices[@]}"; do
local item_path="${item_paths[idx]}"
local artifact_type=$(basename "$item_path")
local project_name=$(get_project_name "$item_path")
local project_path=$(get_project_path "$item_path")
local size_kb="${item_sizes[idx]}"
local size_human=$(bytes_to_human "$((size_kb * 1024))")
# Safety checks
@@ -933,7 +1123,7 @@ clean_project_artifacts() {
continue
fi
if [[ -t 1 ]]; then
start_inline_spinner "Cleaning $project_name/$artifact_type..."
start_inline_spinner "Cleaning $project_path/$artifact_type..."
fi
if [[ -e "$item_path" ]]; then
safe_remove "$item_path" true
@@ -945,7 +1135,7 @@ clean_project_artifacts() {
fi
if [[ -t 1 ]]; then
stop_inline_spinner
echo -e "${GREEN}${ICON_SUCCESS}${NC} $project_name - $artifact_type ${GREEN}($size_human)${NC}"
echo -e "${GREEN}${ICON_SUCCESS}${NC} $project_path - $artifact_type ${GREEN}($size_human)${NC}"
fi
done
# Update count

@@ -22,7 +22,7 @@ clean_empty_library_items() {
return 0
fi

# 1. Clean top-level empty directories in Library
# 1. Clean top-level empty directories and files in Library
local -a empty_dirs=()
while IFS= read -r -d '' dir; do
[[ -d "$dir" ]] && empty_dirs+=("$dir")
@@ -32,6 +32,24 @@ clean_empty_library_items() {
safe_clean "${empty_dirs[@]}" "Empty Library folders"
fi

# Clean empty files in Library root (skipping .localized and other sentinels)
local -a empty_files=()
while IFS= read -r -d '' file; do
[[ -f "$file" ]] || continue
# Protect .localized and potential system sentinels
if [[ "$(basename "$file")" == ".localized" ]]; then
continue
fi
if is_path_whitelisted "$file"; then
continue
fi
empty_files+=("$file")
done < <(find "$HOME/Library" -mindepth 1 -maxdepth 1 -type f -empty -print0 2> /dev/null)

if [[ ${#empty_files[@]} -gt 0 ]]; then
safe_clean "${empty_files[@]}" "Empty Library files"
fi

# 2. Clean empty subdirectories in Application Support and other key locations
# Iteratively remove empty directories until no more are found
local -a key_locations=(

@@ -99,91 +99,119 @@ update_via_homebrew() {
rm -f "$HOME/.cache/mole/version_check" "$HOME/.cache/mole/update_message" 2> /dev/null || true
}

# Get Homebrew cask name for an application bundle
get_brew_cask_name() {
local app_path="$1"
[[ -z "$app_path" || ! -d "$app_path" ]] && return 1

# Check if brew command exists
command -v brew > /dev/null 2>&1 || return 1

local app_bundle_name
app_bundle_name=$(basename "$app_path")

# 1. Search in Homebrew Caskroom for the app bundle (most reliable for name mismatches)
# Checks /opt/homebrew (Apple Silicon) and /usr/local (Intel)
# Note: Modern Homebrew uses symlinks in Caskroom, not directories
local cask_match
for room in "/opt/homebrew/Caskroom" "/usr/local/Caskroom"; do
[[ -d "$room" ]] || continue
# Path is room/token/version/App.app (can be directory or symlink)
cask_match=$(find "$room" -maxdepth 3 -name "$app_bundle_name" 2> /dev/null | head -1 || echo "")
if [[ -n "$cask_match" ]]; then
local relative="${cask_match#"$room"/}"
echo "${relative%%/*}"
return 0
fi
done

# 2. Check for symlink from Caskroom
if [[ -L "$app_path" ]]; then
local target
target=$(readlink "$app_path")
for room in "/opt/homebrew/Caskroom" "/usr/local/Caskroom"; do
if [[ "$target" == "$room/"* ]]; then
local relative="${target#"$room"/}"
echo "${relative%%/*}"
return 0
fi
done
fi

# 3. Fallback: Direct list check (handles some cases where app is moved)
local app_name_only="${app_bundle_name%.app}"
local cask_name
cask_name=$(brew list --cask 2> /dev/null | grep -Fx "$(echo "$app_name_only" | LC_ALL=C tr '[:upper:]' '[:lower:]')" || echo "")
if [[ -n "$cask_name" ]]; then
if brew info --cask "$cask_name" 2> /dev/null | grep -q "$app_path"; then
echo "$cask_name"
return 0
fi
fi

return 1
}

# Remove applications from Dock
remove_apps_from_dock() {
if [[ $# -eq 0 ]]; then
return 0
fi

local plist="$HOME/Library/Preferences/com.apple.dock.plist"
[[ -f "$plist" ]] || return 0
local -a targets=()
for arg in "$@"; do
[[ -n "$arg" ]] && targets+=("$arg")
done

if ! command -v python3 > /dev/null 2>&1; then
if [[ ${#targets[@]} -eq 0 ]]; then
return 0
fi

# Prune dock entries using Python helper
python3 - "$@" << 'PY' 2> /dev/null || return 0
import os
import plistlib
import subprocess
import sys
import urllib.parse
# Use pure shell (PlistBuddy) to remove items from Dock
# This avoids dependencies on Python 3 or osascript (AppleScript)
local plist="$HOME/Library/Preferences/com.apple.dock.plist"
[[ -f "$plist" ]] || return 0

plist_path = os.path.expanduser('~/Library/Preferences/com.apple.dock.plist')
if not os.path.exists(plist_path):
sys.exit(0)
command -v PlistBuddy > /dev/null 2>&1 || return 0

def normalise(path):
if not path:
return ''
return os.path.normpath(os.path.realpath(path.rstrip('/')))
local changed=false
for target in "${targets[@]}"; do
local app_path="$target"
local app_name
app_name=$(basename "$app_path" .app)

targets = {normalise(arg) for arg in sys.argv[1:] if arg}
targets = {t for t in targets if t}
if not targets:
sys.exit(0)
# Normalize path for comparison - realpath might fail if app is already deleted
local full_path
full_path=$(cd "$(dirname "$app_path")" 2> /dev/null && pwd || echo "")
[[ -n "$full_path" ]] && full_path="$full_path/$(basename "$app_path")"

with open(plist_path, 'rb') as fh:
try:
data = plistlib.load(fh)
except Exception:
sys.exit(0)
# Find the index of the app in persistent-apps
local i=0
while true; do
local label
label=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-label" "$plist" 2> /dev/null || echo "")
[[ -z "$label" ]] && break

apps = data.get('persistent-apps')
if not isinstance(apps, list):
sys.exit(0)
local url
url=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-data:_CFURLString" "$plist" 2> /dev/null || echo "")

changed = False
filtered = []
for item in apps:
try:
url = item['tile-data']['file-data']['_CFURLString']
except (KeyError, TypeError):
filtered.append(item)
continue
# Match by label or by path (parsing the CFURLString which is usually a file:// URL)
if [[ "$label" == "$app_name" ]] || [[ "$url" == *"$app_name.app"* ]]; then
# Double check path if possible to avoid false positives for similarly named apps
if [[ -n "$full_path" && "$url" == *"$full_path"* ]] || [[ "$label" == "$app_name" ]]; then
if /usr/libexec/PlistBuddy -c "Delete :persistent-apps:$i" "$plist" 2> /dev/null; then
changed=true
# After deletion, current index i now points to the next item
continue
fi
fi
fi
((i++))
done
done

if not isinstance(url, str):
filtered.append(item)
continue

parsed = urllib.parse.urlparse(url)
path = urllib.parse.unquote(parsed.path or '')
if not path:
filtered.append(item)
continue

candidate = normalise(path)
if any(candidate == t or candidate.startswith(t + os.sep) for t in targets):
changed = True
continue

filtered.append(item)

if not changed:
sys.exit(0)

data['persistent-apps'] = filtered
with open(plist_path, 'wb') as fh:
try:
plistlib.dump(data, fh, fmt=plistlib.FMT_BINARY)
except Exception:
plistlib.dump(data, fh)

# Restart Dock to apply changes
try:
subprocess.run(['killall', 'Dock'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False)
except Exception:
pass
PY
if [[ "$changed" == "true" ]]; then
# Restart Dock to apply changes from the plist
killall Dock 2> /dev/null || true
fi
}
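Editor's sketch (read-only companion, not part of the commit): the same PlistBuddy indexing scheme used above can preview what sits in the Dock before anything is deleted.

list_dock_entries() {
    local plist="$HOME/Library/Preferences/com.apple.dock.plist"
    [[ -f "$plist" ]] || return 0
    local i=0 label
    # Print fails once the index runs past the last persistent-apps entry.
    while label=$(/usr/libexec/PlistBuddy -c "Print :persistent-apps:$i:tile-data:file-label" "$plist" 2> /dev/null); do
        printf '%d\t%s\n' "$i" "$label"
        i=$((i + 1))
    done
}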
@@ -632,10 +632,29 @@ paginated_multi_select() {
prev_cursor_pos=$cursor_pos
continue # Skip full redraw
elif [[ $top_index -gt 0 ]]; then
# Scroll up - redraw visible items only
((top_index--))

# Redraw all visible items (faster than full screen redraw)
local start_idx=$top_index
local end_idx=$((top_index + items_per_page - 1))
local visible_total=${#view_indices[@]}
[[ $end_idx -ge $visible_total ]] && end_idx=$((visible_total - 1))

for ((i = start_idx; i <= end_idx; i++)); do
local row=$((i - start_idx + 3)) # +3 for header
printf "\033[%d;1H" "$row" >&2
local is_current=false
[[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
render_item $((i - start_idx)) $is_current
done

# Move cursor to footer
printf "\033[%d;1H" "$((items_per_page + 4))" >&2

prev_cursor_pos=$cursor_pos
prev_top_index=$top_index
need_full_redraw=true # Scrolling requires full redraw
continue
fi
;;
"DOWN")
@@ -670,15 +689,34 @@ paginated_multi_select() {
prev_cursor_pos=$cursor_pos
continue # Skip full redraw
elif [[ $((top_index + visible_count)) -lt ${#view_indices[@]} ]]; then
# Scroll down - redraw visible items only
((top_index++))
visible_count=$((${#view_indices[@]} - top_index))
[[ $visible_count -gt $items_per_page ]] && visible_count=$items_per_page
if [[ $cursor_pos -ge $visible_count ]]; then
cursor_pos=$((visible_count - 1))
fi

# Redraw all visible items (faster than full screen redraw)
local start_idx=$top_index
local end_idx=$((top_index + items_per_page - 1))
local visible_total=${#view_indices[@]}
[[ $end_idx -ge $visible_total ]] && end_idx=$((visible_total - 1))

for ((i = start_idx; i <= end_idx; i++)); do
local row=$((i - start_idx + 3)) # +3 for header
printf "\033[%d;1H" "$row" >&2
local is_current=false
[[ $((i - start_idx)) -eq $cursor_pos ]] && is_current=true
render_item $((i - start_idx)) $is_current
done

# Move cursor to footer
printf "\033[%d;1H" "$((items_per_page + 4))" >&2

prev_cursor_pos=$cursor_pos
prev_top_index=$top_index
need_full_redraw=true # Scrolling requires full redraw
continue
fi
fi
fi
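Editor's note (illustration only): both scroll branches above lean on plain ANSI control sequences instead of a full-screen redraw; rows are offset by 3 because the menu header occupies the top of the screen.

# printf '\033[%d;1H' "$row" >&2   # CSI row;1H - move the cursor to row $row, column 1
# printf '\033[K' >&2              # CSI K      - erase from the cursor to the end of the line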
@@ -146,49 +146,72 @@ batch_uninstall_applications() {
running_apps+=("$app_name")
fi

# Sudo needed if bundle owner/dir is not writable or system files exist.
local needs_sudo=false
local app_owner=$(get_file_owner "$app_path")
local current_user=$(whoami)
if [[ ! -w "$(dirname "$app_path")" ]] ||
[[ "$app_owner" == "root" ]] ||
[[ -n "$app_owner" && "$app_owner" != "$current_user" ]]; then
needs_sudo=true
# Check if it's a Homebrew cask
local cask_name=""
cask_name=$(get_brew_cask_name "$app_path" || echo "")
local is_brew_cask="false"
[[ -n "$cask_name" ]] && is_brew_cask="true"

# For Homebrew casks, skip detailed file scanning since brew handles it
if [[ "$is_brew_cask" == "true" ]]; then
local app_size_kb=$(get_path_size_kb "$app_path")
local total_kb=$app_size_kb
((total_estimated_size += total_kb))

# Homebrew may need sudo for system-wide installations
local needs_sudo=false
if [[ "$app_path" == "/Applications/"* ]]; then
needs_sudo=true
sudo_apps+=("$app_name")
fi

# Store minimal details for Homebrew apps
app_details+=("$app_name|$app_path|$bundle_id|$total_kb|||false|$needs_sudo|$is_brew_cask|$cask_name")
else
# For non-Homebrew apps, do full file scanning
local needs_sudo=false
local app_owner=$(get_file_owner "$app_path")
local current_user=$(whoami)
if [[ ! -w "$(dirname "$app_path")" ]] ||
[[ "$app_owner" == "root" ]] ||
[[ -n "$app_owner" && "$app_owner" != "$current_user" ]]; then
needs_sudo=true
fi

# Size estimate includes related and system files.
local app_size_kb=$(get_path_size_kb "$app_path")
local related_files=$(find_app_files "$bundle_id" "$app_name")
local related_size_kb=$(calculate_total_size "$related_files")
# system_files is a newline-separated string, not an array.
# shellcheck disable=SC2178,SC2128
local system_files=$(find_app_system_files "$bundle_id" "$app_name")
# shellcheck disable=SC2128
local system_size_kb=$(calculate_total_size "$system_files")
local total_kb=$((app_size_kb + related_size_kb + system_size_kb))
((total_estimated_size += total_kb))

# shellcheck disable=SC2128
if [[ -n "$system_files" ]]; then
needs_sudo=true
fi

if [[ "$needs_sudo" == "true" ]]; then
sudo_apps+=("$app_name")
fi

# Check for sensitive user data once.
local has_sensitive_data="false"
if [[ -n "$related_files" ]] && echo "$related_files" | grep -qE "$SENSITIVE_DATA_REGEX"; then
has_sensitive_data="true"
fi

# Store details for later use (base64 keeps lists on one line).
local encoded_files
encoded_files=$(printf '%s' "$related_files" | base64 | tr -d '\n')
local encoded_system_files
encoded_system_files=$(printf '%s' "$system_files" | base64 | tr -d '\n')
app_details+=("$app_name|$app_path|$bundle_id|$total_kb|$encoded_files|$encoded_system_files|$has_sensitive_data|$needs_sudo|$is_brew_cask|$cask_name")
fi

# Size estimate includes related and system files.
local app_size_kb=$(get_path_size_kb "$app_path")
local related_files=$(find_app_files "$bundle_id" "$app_name")
local related_size_kb=$(calculate_total_size "$related_files")
# system_files is a newline-separated string, not an array.
# shellcheck disable=SC2178,SC2128
local system_files=$(find_app_system_files "$bundle_id" "$app_name")
# shellcheck disable=SC2128
local system_size_kb=$(calculate_total_size "$system_files")
local total_kb=$((app_size_kb + related_size_kb + system_size_kb))
((total_estimated_size += total_kb))

# shellcheck disable=SC2128
if [[ -n "$system_files" ]]; then
needs_sudo=true
fi

if [[ "$needs_sudo" == "true" ]]; then
sudo_apps+=("$app_name")
fi

# Check for sensitive user data once.
local has_sensitive_data="false"
if [[ -n "$related_files" ]] && echo "$related_files" | grep -qE "$SENSITIVE_DATA_REGEX"; then
has_sensitive_data="true"
fi

# Store details for later use (base64 keeps lists on one line).
local encoded_files
encoded_files=$(printf '%s' "$related_files" | base64 | tr -d '\n')
local encoded_system_files
encoded_system_files=$(printf '%s' "$system_files" | base64 | tr -d '\n')
app_details+=("$app_name|$app_path|$bundle_id|$total_kb|$encoded_files|$encoded_system_files|$has_sensitive_data|$needs_sudo")
done
if [[ -t 1 ]]; then stop_inline_spinner; fi
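Editor's sketch: decode_file_list is called below but defined outside this diff. A minimal counterpart to the base64 encoding above might look like the following (hypothetical - the real helper may differ, for example in how it reports decode errors for the app name):

decode_file_list() {
    local encoded="$1"
    local app_name="$2"   # kept for parity with the call sites; unused in this sketch
    [[ -n "$encoded" ]] || return 0
    # Note: the decode flag is spelled -d, -D or --decode depending on the base64 implementation.
    printf '%s' "$encoded" | base64 -d 2> /dev/null || true
}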
@@ -214,41 +237,49 @@ batch_uninstall_applications() {
fi

for detail in "${app_details[@]}"; do
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo_flag <<< "$detail"
local related_files=$(decode_file_list "$encoded_files" "$app_name")
local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo_flag is_brew_cask cask_name <<< "$detail"
local app_size_display=$(bytes_to_human "$((total_kb * 1024))")

echo -e "${BLUE}${ICON_CONFIRM}${NC} ${app_name} ${GRAY}(${app_size_display})${NC}"
echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${app_path/$HOME/~}"
local brew_tag=""
[[ "$is_brew_cask" == "true" ]] && brew_tag=" ${CYAN}[Brew]${NC}"
echo -e "${BLUE}${ICON_CONFIRM}${NC} ${app_name}${brew_tag} ${GRAY}(${app_size_display})${NC}"

# Show related files (limit to 5).
local file_count=0
local max_files=5
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $file_count -lt $max_files ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${file/$HOME/~}"
# For Homebrew apps, [Brew] tag is enough indication
# For non-Homebrew apps, show detailed file list
if [[ "$is_brew_cask" != "true" ]]; then
local related_files=$(decode_file_list "$encoded_files" "$app_name")
local system_files=$(decode_file_list "$encoded_system_files" "$app_name")

echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${app_path/$HOME/~}"

# Show related files (limit to 5).
local file_count=0
local max_files=5
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $file_count -lt $max_files ]]; then
echo -e " ${GREEN}${ICON_SUCCESS}${NC} ${file/$HOME/~}"
fi
((file_count++))
fi
((file_count++))
fi
done <<< "$related_files"
done <<< "$related_files"

# Show system files (limit to 5).
local sys_file_count=0
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $sys_file_count -lt $max_files ]]; then
echo -e " ${BLUE}${ICON_SOLID}${NC} System: $file"
# Show system files (limit to 5).
local sys_file_count=0
while IFS= read -r file; do
if [[ -n "$file" && -e "$file" ]]; then
if [[ $sys_file_count -lt $max_files ]]; then
echo -e " ${BLUE}${ICON_SOLID}${NC} System: $file"
fi
((sys_file_count++))
fi
((sys_file_count++))
fi
done <<< "$system_files"
done <<< "$system_files"

local total_hidden=$((file_count > max_files ? file_count - max_files : 0))
((total_hidden += sys_file_count > max_files ? sys_file_count - max_files : 0))
if [[ $total_hidden -gt 0 ]]; then
echo -e " ${GRAY} ... and ${total_hidden} more files${NC}"
local total_hidden=$((file_count > max_files ? file_count - max_files : 0))
((total_hidden += sys_file_count > max_files ? sys_file_count - max_files : 0))
if [[ $total_hidden -gt 0 ]]; then
echo -e " ${GRAY} ... and ${total_hidden} more files${NC}"
fi
fi
done

@@ -275,7 +306,7 @@ batch_uninstall_applications() {
return 0
;;
"" | $'\n' | $'\r' | y | Y)
printf "\r\033[K" # Clear the prompt line
echo "" # Move to next line
;;
*)
echo ""
@@ -305,19 +336,29 @@ batch_uninstall_applications() {
sudo_keepalive_pid=$!
fi

if [[ -t 1 ]]; then start_inline_spinner "Uninstalling apps..."; fi

# Perform uninstallations (silent mode, show results at end).
if [[ -t 1 ]]; then stop_inline_spinner; fi
# Perform uninstallations with per-app progress feedback
local success_count=0 failed_count=0
local -a failed_items=()
local -a success_items=()
local current_index=0
for detail in "${app_details[@]}"; do
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo <<< "$detail"
((current_index++))
IFS='|' read -r app_name app_path bundle_id total_kb encoded_files encoded_system_files has_sensitive_data needs_sudo is_brew_cask cask_name <<< "$detail"
local related_files=$(decode_file_list "$encoded_files" "$app_name")
local system_files=$(decode_file_list "$encoded_system_files" "$app_name")
local reason=""

# Show progress for current app
local brew_tag=""
[[ "$is_brew_cask" == "true" ]] && brew_tag=" ${CYAN}[Brew]${NC}"
if [[ -t 1 ]]; then
if [[ ${#app_details[@]} -gt 1 ]]; then
start_inline_spinner "[$current_index/${#app_details[@]}] Uninstalling ${app_name}${brew_tag}..."
else
start_inline_spinner "Uninstalling ${app_name}${brew_tag}..."
fi
fi

# Stop Launch Agents/Daemons before removal.
local has_system_files="false"
[[ -n "$system_files" ]] && has_system_files="true"
@@ -329,7 +370,19 @@ batch_uninstall_applications() {

# Remove the application only if not running.
if [[ -z "$reason" ]]; then
if [[ "$needs_sudo" == true ]]; then
if [[ "$is_brew_cask" == "true" && -n "$cask_name" ]]; then
# Use brew uninstall --cask with progress indicator
local brew_output_file=$(mktemp)
if ! run_with_timeout 120 brew uninstall --cask "$cask_name" > "$brew_output_file" 2>&1; then
# Fallback to manual removal if brew fails
if [[ "$needs_sudo" == true ]]; then
safe_sudo_remove "$app_path" || reason="remove failed"
else
safe_remove "$app_path" true || reason="remove failed"
fi
fi
rm -f "$brew_output_file"
elif [[ "$needs_sudo" == true ]]; then
if ! safe_sudo_remove "$app_path"; then
local app_owner=$(get_file_owner "$app_path")
local current_user=$(whoami)
@@ -361,12 +414,32 @@ batch_uninstall_applications() {
fi
fi

# Stop spinner and show success
if [[ -t 1 ]]; then
stop_inline_spinner
if [[ ${#app_details[@]} -gt 1 ]]; then
echo -e "\r\033[K${GREEN}✓${NC} [$current_index/${#app_details[@]}] ${app_name}"
else
echo -e "\r\033[K${GREEN}✓${NC} ${app_name}"
fi
fi

((total_size_freed += total_kb))
((success_count++))
((files_cleaned++))
((total_items++))
success_items+=("$app_name")
else
# Stop spinner and show failure
if [[ -t 1 ]]; then
stop_inline_spinner
if [[ ${#app_details[@]} -gt 1 ]]; then
echo -e "\r\033[K${RED}✗${NC} [$current_index/${#app_details[@]}] ${app_name} ${GRAY}($reason)${NC}"
else
echo -e "\r\033[K${RED}✗${NC} ${app_name} failed: $reason"
fi
fi

((failed_count++))
failed_items+=("$app_name:$reason")
fi
@@ -454,6 +527,7 @@ batch_uninstall_applications() {
title="Uninstall incomplete"
fi

echo ""
print_summary_block "$title" "${summary_details[@]}"
printf '\n'