Mirror of https://github.com/tw93/Mole.git (synced 2026-02-16 10:06:10 +00:00)

Commit: Code support format detection
.editorconfig (new file, 25 lines)
@@ -0,0 +1,25 @@
# EditorConfig for Mole project
# https://editorconfig.org

root = true

[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.{sh,bash}]
indent_style = space
indent_size = 4
# shfmt will use these settings automatically

[*.{yml,yaml}]
indent_style = space
indent_size = 2

[*.md]
trim_trailing_whitespace = false

[Makefile]
indent_style = tab
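The `# shfmt will use these settings automatically` comment refers to shfmt's EditorConfig support: when shfmt is invoked without explicit formatting flags, it picks its options up from `.editorconfig`, so ad-hoc runs match the `[*.{sh,bash}]` section above. A minimal sketch (the target path is just one script from this repository):

```bash
# Sketch: preview or apply formatting for a single script.
# With no -i/-ci/-sr flags on the command line, shfmt falls back to the
# [*.{sh,bash}] settings in .editorconfig (4-space indent).
shfmt -d bin/clean.sh   # print a diff of what would change
shfmt -w bin/clean.sh   # rewrite the file in place
```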
.github/workflows/tests.yml (vendored, 19 lines changed)
@@ -1,4 +1,4 @@
-name: Mole Tests
+name: Tests

 on:
   push:
@@ -13,10 +13,17 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v4

-      - name: Install bats-core
-        run: |
-          brew update
-          brew install bats-core
+      - name: Install tools
+        run: brew install bats-core shfmt shellcheck

-      - name: Run test suite
+      - name: Check formatting
+        run: ./scripts/format.sh --check
+
+      - name: Run shellcheck
+        run: |
+          find . -type f \( -name "*.sh" -o -name "mole" \) \
+            ! -path "./.git/*" \
+            -exec shellcheck -S warning {} +
+
+      - name: Run tests
         run: tests/run.sh
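The three CI gates can be reproduced locally before pushing; the commands below simply mirror the workflow steps above.

```bash
# Run the same checks locally that the workflow runs (from the repo root).
brew install bats-core shfmt shellcheck   # one-time tool setup

./scripts/format.sh --check               # formatting gate

find . -type f \( -name "*.sh" -o -name "mole" \) \
    ! -path "./.git/*" \
    -exec shellcheck -S warning {} +      # lint gate

tests/run.sh                              # bats test suite
```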
CONTRIBUTING.md (new file, 54 lines)
@@ -0,0 +1,54 @@
# Contributing to Mole

## Setup

```bash
# Install tools
brew install shfmt shellcheck bats-core

# Install git hooks (optional)
./scripts/install-hooks.sh
```

## Development

```bash
# Format code
./scripts/format.sh

# Run tests
./tests/run.sh

# Check quality
shellcheck -S warning mole bin/*.sh lib/*.sh
```

## Git Hooks

The pre-commit hook auto-formats your code. Install it with:

```bash
./scripts/install-hooks.sh
```

Skip it when needed: `git commit --no-verify`

## Code Style

- Bash 3.2+ compatible
- 4-space indent
- Use `set -euo pipefail`
- Quote all variables
- BSD commands, not GNU

Config: `.editorconfig` and `.shellcheckrc`

## Pull Requests

1. Fork and create a branch
2. Make changes
3. Format: `./scripts/format.sh`
4. Test: `./tests/run.sh`
5. Commit and push
6. Open a PR

CI checks formatting, lints, and runs the tests.
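The "BSD commands, not GNU" rule exists because macOS ships the BSD userland. A small illustration of the differences that show up in this codebase (the demo path is hypothetical; the GNU spellings are shown only for contrast):

```bash
# BSD (macOS) command forms vs. their GNU counterparts.
file="$HOME/mole_style_demo.txt"   # hypothetical demo file, quoted per the style rule
echo "foo" > "$file"

stat -f%z "$file"                # BSD stat: size in bytes (GNU spells this: stat -c%s)
sed -i '' 's/foo/bar/' "$file"   # BSD sed: -i requires a (possibly empty) backup suffix
rm -f "$file"
```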
@@ -78,7 +78,7 @@ scan_large_files() {
             size=$(stat -f%z "$file" 2> /dev/null || echo "0")
             echo "$size|$file"
         fi
-    done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \
+    done < <(mdfind -onlyin "$target_path" "kMDItemFSSize > $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
         sort -t'|' -k1 -rn > "$output_file"
 }

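This `2>/dev/null | \` to `2> /dev/null |` rewrite recurs through most of the hunks that follow: shfmt's `-sr` option puts a space after redirect operators, and the trailing backslash is dropped because a line that ends with `|` already continues onto the next line. A tiny standalone illustration:

```bash
# A pipeline broken after the pipe needs no continuation backslash.
printf '3\n1\n2\n' |
    sort -n |
    head -2
```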
@@ -99,7 +99,7 @@ scan_medium_files() {
             echo "$size|$file"
         fi
     done < <(mdfind -onlyin "$target_path" \
-        "kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2>/dev/null) | \
+        "kMDItemFSSize > $MIN_MEDIUM_FILE_SIZE && kMDItemFSSize < $MIN_LARGE_FILE_SIZE" 2> /dev/null) |
         sort -t'|' -k1 -rn > "$output_file"
 }

@@ -112,16 +112,16 @@ scan_directories() {
     # Check if we can use parallel processing
     if command -v xargs &> /dev/null && [[ $depth -eq 1 ]]; then
         # Fast parallel scan for depth 1
-        find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \
-            xargs -0 -P 4 -I {} du -sk {} 2>/dev/null | \
-            sort -rn | \
+        find "$target_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
+            xargs -0 -P 4 -I {} du -sk {} 2> /dev/null |
+            sort -rn |
             while IFS=$'\t' read -r size path; do
                 echo "$((size * 1024))|$path"
             done > "$output_file"
     else
         # Standard du scan
-        du -d "$depth" -k "$target_path" 2>/dev/null | \
-            sort -rn | \
+        du -d "$depth" -k "$target_path" 2> /dev/null |
+            sort -rn |
             while IFS=$'\t' read -r size path; do
                 # Skip if path is the target itself at depth > 0
                 if [[ "$path" != "$target_path" ]]; then
@@ -651,7 +651,7 @@ display_cleanup_suggestions_compact() {
             "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null | wc -l | tr -d ' ')
         if [[ $dmg_count -gt 0 ]]; then
             local dmg_size=$(mdfind -onlyin "$CURRENT_PATH" \
-                "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2>/dev/null | \
+                "kMDItemFSSize > 500000000 && kMDItemDisplayName == '*.dmg'" 2> /dev/null |
                 xargs stat -f%z 2> /dev/null | awk '{sum+=$1} END {print sum}')
             local dmg_human
             dmg_human=$(bytes_to_human "$dmg_size")
@@ -678,7 +678,7 @@ display_cleanup_suggestions_compact() {

     # Check for duplicates in current path
     if command -v mdfind &> /dev/null; then
-        local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \
+        local dup_count=$(mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
             xargs -I {} stat -f "%z" {} 2> /dev/null | sort | uniq -d | wc -l | tr -d ' ')
         if [[ $dup_count -gt 5 ]]; then
             [[ -z "$top_suggestion" ]] && top_suggestion="$dup_count potential duplicate files detected"
@@ -771,8 +771,8 @@ display_cleanup_suggestions() {
     # Check for duplicate files (by size, quick heuristic)
     if command -v mdfind &> /dev/null; then
         local temp_dup="$TEMP_PREFIX.dup_check"
-        mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2>/dev/null | \
-            xargs -I {} stat -f "%z" {} 2>/dev/null | \
+        mdfind -onlyin "$CURRENT_PATH" "kMDItemFSSize > 10000000" 2> /dev/null |
+            xargs -I {} stat -f "%z" {} 2> /dev/null |
             sort | uniq -d | wc -l | tr -d ' ' > "$temp_dup" 2> /dev/null || echo "0" > "$temp_dup"
         local dup_count
         dup_count=$(cat "$temp_dup" 2> /dev/null || echo "0")
@@ -842,19 +842,23 @@ get_file_info() {

         case "$ext" in
             dmg | iso | pkg | zip | tar | gz | rar | 7z)
-                badge="$BADGE_BUNDLE" ; type="Bundle"
+                badge="$BADGE_BUNDLE"
+                type="Bundle"
                 ;;
             mov | mp4 | avi | mkv | webm | jpg | jpeg | png | gif | heic)
-                badge="$BADGE_MEDIA" ; type="Media"
+                badge="$BADGE_MEDIA"
+                type="Media"
                 ;;
             pdf | key | ppt | pptx)
                 type="Document"
                 ;;
             log)
-                badge="$BADGE_LOG" ; type="Log"
+                badge="$BADGE_LOG"
+                type="Log"
                 ;;
             app)
-                badge="$BADGE_APP" ; type="App"
+                badge="$BADGE_APP"
+                type="App"
                 ;;
         esac

@@ -1114,7 +1118,7 @@ display_recent_large_files() {

     # Find files created in last 30 days, larger than 100MB
     mdfind -onlyin "$CURRENT_PATH" \
-        "kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2>/dev/null | \
+        "kMDItemFSSize > 100000000 && kMDItemContentCreationDate >= \$time.today(-30)" 2> /dev/null |
         while IFS= read -r file; do
             if [[ -f "$file" ]]; then
                 local size
@@ -1162,7 +1166,7 @@ get_subdirectories() {
     local target="$1"
     local temp_file="$2"

-    find "$target" -mindepth 1 -maxdepth 1 -type d 2>/dev/null | \
+    find "$target" -mindepth 1 -maxdepth 1 -type d 2> /dev/null |
         while IFS= read -r dir; do
             local size
             size=$(du -sk "$dir" 2> /dev/null | cut -f1)
@@ -1396,13 +1400,13 @@ scan_directory_contents_fast() {
     fi

     # Ultra-fast file scanning - batch stat for maximum speed
-    find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2>/dev/null | \
+    find "$dir_path" -mindepth 1 -maxdepth 1 -type f -print0 2> /dev/null |
         xargs -0 -n 20 -P "$num_jobs" stat -f "%z|file|%N" 2> /dev/null > "$temp_files" &
     local file_pid=$!

     # Smart directory scanning with aggressive optimization
     # Strategy: Fast estimation first, accurate on-demand
-    find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2>/dev/null | \
+    find "$dir_path" -mindepth 1 -maxdepth 1 -type d -print0 2> /dev/null |
         xargs -0 -n 1 -P "$num_jobs" sh -c '
             dir="$1"
             size=""
@@ -1498,7 +1502,7 @@ scan_directory_contents_fast() {

     # Combine and sort - only keep top items
     # Ensure we handle empty files gracefully
-    > "$output_file"
+    true > "$output_file"
     if [[ -f "$temp_dirs" ]] || [[ -f "$temp_files" ]]; then
         cat "$temp_dirs" "$temp_files" 2> /dev/null | sort -t'|' -k1 -rn | head -"$max_items" > "$output_file" || true
     fi
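The `> "$output_file"` to `true > "$output_file"` change here and in the next two hunks keeps the truncate-the-file behaviour while giving the redirection an explicit command. All three forms below are equivalent:

```bash
# Three equivalent ways to truncate (or create) an empty file.
out="/tmp/mole_truncate_demo.$$"   # hypothetical temp path
> "$out"
: > "$out"
true > "$out"
rm -f "$out"
```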
@@ -1531,7 +1535,7 @@ combine_initial_scan_results() {
     local temp_large="$TEMP_PREFIX.large"
     local temp_dirs="$TEMP_PREFIX.dirs"

-    > "$output_file"
+    true > "$output_file"

     # Add directories
     if [[ -f "$temp_dirs" ]]; then
@@ -1760,7 +1764,7 @@ interactive_drill_down() {
         # Use || true to prevent exit on scan failure
         scan_directory_contents_fast "$current_path" "$temp_items" 50 true || {
             # Scan failed - create empty result file
-            > "$temp_items"
+            true > "$temp_items"
         }

         # Save to cache for next time (only if not empty)
@@ -1886,8 +1890,10 @@ interactive_drill_down() {
             local badge="$BADGE_FILE" color="${NC}"
             if [[ "$type" == "dir" ]]; then
                 badge="$BADGE_DIR" color="${BLUE}"
-                if [[ $size -gt 10737418240 ]]; then color="${RED}"
-                elif [[ $size -gt 1073741824 ]]; then color="${YELLOW}"
+                if [[ $size -gt 10737418240 ]]; then
+                    color="${RED}"
+                elif [[ $size -gt 1073741824 ]]; then
+                    color="${YELLOW}"
                 fi
             else
                 local ext="${name##*.}"
bin/clean.sh (36 lines changed)
@@ -229,7 +229,7 @@ safe_clean() {
         if [[ ${#WHITELIST_PATTERNS[@]} -gt 0 ]]; then
             for w in "${WHITELIST_PATTERNS[@]}"; do
                 # Match both exact path and glob pattern
-                if [[ "$path" == "$w" ]] || [[ "$path" == $w ]]; then
+                if [[ "$path" == "$w" ]] || [[ "$path" == "$w" ]]; then
                     skip=true
                     ((skipped_count++))
                     break
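Quoting the right-hand side here changes semantics, not just style: unquoted, `$w` is treated as a glob pattern, while the quoted form compares literal strings (the same quoting is applied later to the `case "$bundle_id" in "$pattern")` checks). A small illustration with hypothetical paths:

```bash
# Unquoted vs. quoted right-hand side of == inside [[ ]].
w="$HOME/Library/Caches/*"        # a whitelist entry containing a glob
path="$HOME/Library/Caches/foo"

[[ "$path" == $w ]] && echo "matches as a glob"    # unquoted: * acts as a wildcard
[[ "$path" == "$w" ]] || echo "no literal match"   # quoted: exact string comparison only
```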
@@ -253,15 +253,18 @@ safe_clean() {
         # Show progress indicator for potentially slow operations
         if [[ ${#existing_paths[@]} -gt 3 ]]; then
             if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi
-            local temp_dir=$(create_temp_dir)
+            local temp_dir
+            temp_dir=$(create_temp_dir)

             # Parallel processing (bash 3.2 compatible)
             local -a pids=()
             local idx=0
             for path in "${existing_paths[@]}"; do
                 (
-                    local size=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0")
-                    local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ')
+                    local size
+                    size=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
+                    local count
+                    count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
                     # Use index + PID for unique filename
                     local tmp_file="$temp_dir/result_${idx}.$$"
                     echo "$size $count" > "$tmp_file"
@@ -304,8 +307,10 @@ safe_clean() {
         if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Checking items with whitelist safety..."; fi

         for path in "${existing_paths[@]}"; do
-            local size_bytes=$(du -sk "$path" 2>/dev/null | awk '{print $1}' || echo "0")
-            local count=$(find "$path" -type f 2>/dev/null | wc -l | tr -d ' ')
+            local size_bytes
+            size_bytes=$(du -sk "$path" 2> /dev/null | awk '{print $1}' || echo "0")
+            local count
+            count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')

             if [[ "$count" -gt 0 && "$size_bytes" -gt 0 ]]; then
                 if [[ "$DRY_RUN" != "true" ]]; then
@@ -319,7 +324,10 @@ safe_clean() {
     fi

     # Clear progress / stop spinner before showing result
-    if [[ -t 1 ]]; then stop_inline_spinner; echo -ne "\r\033[K"; fi
+    if [[ -t 1 ]]; then
+        stop_inline_spinner
+        echo -ne "\r\033[K"
+    fi

     if [[ $removed_any -eq 1 ]]; then
         # Convert KB to bytes for bytes_to_human()
@@ -526,7 +534,6 @@ perform_cleanup() {
     safe_clean ~/Downloads/*.part "Incomplete downloads (partial)"
     end_section

-
     # ===== 3. macOS System Caches =====
     start_section "macOS system caches"
     safe_clean ~/Library/Saved\ Application\ State/* "Saved application states"
@@ -542,7 +549,6 @@ perform_cleanup() {
     safe_clean ~/Library/Application\ Support/CloudDocs/session/db/* "iCloud session cache"
     end_section

-
     # ===== 4. Sandboxed App Caches =====
     start_section "Sandboxed app caches"
     # Clean specific high-usage apps first for better user feedback
@@ -553,7 +559,6 @@ perform_cleanup() {
     safe_clean ~/Library/Containers/*/Data/Library/Caches/* "Sandboxed app caches"
     end_section

-
     # ===== 5. Browsers =====
     start_section "Browser cleanup"
     # Safari (cache only, NOT local storage or databases to preserve login states)
@@ -577,7 +582,6 @@ perform_cleanup() {
     safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache"
     end_section

-
     # ===== 6. Cloud Storage =====
     start_section "Cloud storage caches"
     # Only cache files, not sync state or login credentials
@@ -590,7 +594,6 @@ perform_cleanup() {
     safe_clean ~/Library/Caches/com.microsoft.OneDrive "OneDrive cache"
     end_section

-
     # ===== 7. Office Applications =====
     start_section "Office applications"
     safe_clean ~/Library/Caches/com.microsoft.Word "Microsoft Word cache"
@@ -603,7 +606,6 @@ perform_cleanup() {
     safe_clean ~/Library/Caches/com.apple.mail/* "Apple Mail cache"
     end_section

-
     # ===== 8. Developer tools =====
     start_section "Developer tools"
     # Node.js ecosystem
@@ -676,7 +678,8 @@ perform_cleanup() {
     safe_clean /usr/local/var/homebrew/locks/* "Homebrew lock files (Intel)"
     if command -v brew > /dev/null 2>&1; then
         if [[ "$DRY_RUN" != "true" ]]; then
-            clean_tool_cache "Homebrew cleanup" brew cleanup
+            # Use -s (scrub cache) for faster cleanup, --prune=all removes old versions
+            MOLE_CMD_TIMEOUT=300 clean_tool_cache "Homebrew cleanup" brew cleanup -s --prune=all
         else
             echo -e " ${YELLOW}→${NC} Homebrew (would cleanup)"
         fi
@@ -818,7 +821,6 @@ perform_cleanup() {

     end_section

-
     # ===== 10. Applications =====
     start_section "Applications"

@@ -983,7 +985,6 @@ perform_cleanup() {

     end_section

-
     # ===== 11. Virtualization Tools =====
     start_section "Virtualization tools"
     safe_clean ~/Library/Caches/com.vmware.fusion "VMware Fusion cache"
@@ -992,7 +993,6 @@ perform_cleanup() {
     safe_clean ~/.vagrant.d/tmp/* "Vagrant temporary files"
     end_section

-
     # ===== 12. Application Support logs cleanup =====
     start_section "Application Support logs"

@@ -1022,7 +1022,6 @@ perform_cleanup() {

     end_section

-
     # ===== 13. Orphaned app data cleanup =====
     # Deep cleanup of leftover files from uninstalled apps
     #
@@ -1525,7 +1524,6 @@ perform_cleanup() {
     printf '\n'
 }

-
 main() {
     # Parse args (only dry-run and help for minimal impact)
     for arg in "$@"; do
@@ -59,7 +59,6 @@ fi
 selected_apps=() # Global array for app selection
 declare -a apps_data=()
 declare -a selection_state=()
-current_line=0
 total_items=0
 files_cleaned=0
 total_size_cleaned=0
@@ -108,8 +107,10 @@ scan_applications() {
     # Quick count of current apps (system + user directories)
     local current_app_count
     current_app_count=$(
-        (find /Applications -name "*.app" -maxdepth 1 2>/dev/null;
-            find ~/Applications -name "*.app" -maxdepth 1 2>/dev/null) | wc -l | tr -d ' '
+        (
+            find /Applications -name "*.app" -maxdepth 1 2> /dev/null
+            find ~/Applications -name "*.app" -maxdepth 1 2> /dev/null
+        ) | wc -l | tr -d ' '
     )

     # Check if cache is valid unless explicitly disabled
@@ -347,7 +348,10 @@ scan_applications() {
     fi

     # Sort by last used (oldest first) and cache the result
-    sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || { rm -f "$temp_file"; return 1; }
+    sort -t'|' -k1,1n "$temp_file" > "${temp_file}.sorted" || {
+        rm -f "$temp_file"
+        return 1
+    }
     rm -f "$temp_file"

     # Update cache with app count metadata
@@ -461,12 +465,13 @@ uninstall_applications() {
         done <<< "$system_files"
     fi

+    local size_display
     if [[ $total_kb -gt 1048576 ]]; then # > 1GB
-        local size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}')
+        size_display=$(echo "$total_kb" | awk '{printf "%.2fGB", $1/1024/1024}')
     elif [[ $total_kb -gt 1024 ]]; then # > 1MB
-        local size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}')
+        size_display=$(echo "$total_kb" | awk '{printf "%.1fMB", $1/1024}')
     else
-        local size_display="${total_kb}KB"
+        size_display="${total_kb}KB"
     fi

     echo -e " ${BLUE}Total size: $size_display${NC}"
@@ -521,12 +526,13 @@ uninstall_applications() {
     echo -e "${PURPLE}${ICON_ARROW} Uninstallation Summary${NC}"

     if [[ $total_size_freed -gt 0 ]]; then
+        local freed_display
         if [[ $total_size_freed -gt 1048576 ]]; then # > 1GB
-            local freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}')
+            freed_display=$(echo "$total_size_freed" | awk '{printf "%.2fGB", $1/1024/1024}')
         elif [[ $total_size_freed -gt 1024 ]]; then # > 1MB
-            local freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}')
+            freed_display=$(echo "$total_size_freed" | awk '{printf "%.1fMB", $1/1024}')
         else
-            local freed_display="${total_size_freed}KB"
+            freed_display="${total_size_freed}KB"
         fi

         echo -e " ${GREEN}${ICON_SUCCESS}${NC} Freed $freed_display of disk space"
@@ -634,7 +640,9 @@ main() {
     clear
     local selection_count=${#selected_apps[@]}
     if [[ $selection_count -eq 0 ]]; then
-        echo "No apps selected"; rm -f "$apps_file"; return 0
+        echo "No apps selected"
+        rm -f "$apps_file"
+        return 0
     fi
     # Show selected apps, max 3 per line
     echo -e "${BLUE}${ICON_CONFIRM}${NC} Selected ${selection_count} app(s):"
install.sh (46 lines changed)
@@ -13,14 +13,28 @@ NC='\033[0m'
 # Simple spinner
 _SPINNER_PID=""
 start_line_spinner() {
-    local msg="$1"; [[ ! -t 1 ]] && { echo -e "${BLUE}|${NC} $msg"; return; }
-    local chars="${MO_SPINNER_CHARS:-|/-\\}"; [[ -z "$chars" ]] && chars='|/-\\'
+    local msg="$1"
+    [[ ! -t 1 ]] && {
+        echo -e "${BLUE}|${NC} $msg"
+        return
+    }
+    local chars="${MO_SPINNER_CHARS:-|/-\\}"
+    [[ -z "$chars" ]] && chars='|/-\\'
     local i=0
-    ( while true; do c="${chars:$((i % ${#chars})):1}"; printf "\r${BLUE}%s${NC} %s" "$c" "$msg"; ((i++)); sleep 0.12; done ) &
+    (while true; do
+        c="${chars:$((i % ${#chars})):1}"
+        printf "\r${BLUE}%s${NC} %s" "$c" "$msg"
+        ((i++))
+        sleep 0.12
+    done) &
     _SPINNER_PID=$!
 }
-stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then kill "$_SPINNER_PID" 2>/dev/null || true; wait "$_SPINNER_PID" 2>/dev/null || true; _SPINNER_PID=""; printf "\r\033[K"; fi; }
+stop_line_spinner() { if [[ -n "$_SPINNER_PID" ]]; then
+    kill "$_SPINNER_PID" 2> /dev/null || true
+    wait "$_SPINNER_PID" 2> /dev/null || true
+    _SPINNER_PID=""
+    printf "\r\033[K"
+fi; }

 # Verbosity (0 = quiet, 1 = verbose)
 VERBOSE=1
@@ -457,7 +471,9 @@ uninstall_mole() {
         echo " $CONFIG_DIR"
     else
         echo ""
-        read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r; echo ""; if [[ $REPLY =~ ^[Yy]$ ]]; then
+        read -p "Remove configuration directory $CONFIG_DIR? (y/N): " -n 1 -r
+        echo ""
+        if [[ $REPLY =~ ^[Yy]$ ]]; then
             rm -rf "$CONFIG_DIR"
             log_success "Removed configuration"
         else
@@ -571,9 +587,21 @@ perform_update() {
     # Update with minimal output (suppress info/success, show errors only)
     local old_verbose=$VERBOSE
     VERBOSE=0
-    create_directories || { VERBOSE=$old_verbose; log_error "Failed to create directories"; exit 1; }
-    install_files || { VERBOSE=$old_verbose; log_error "Failed to install files"; exit 1; }
-    verify_installation || { VERBOSE=$old_verbose; log_error "Failed to verify installation"; exit 1; }
+    create_directories || {
+        VERBOSE=$old_verbose
+        log_error "Failed to create directories"
+        exit 1
+    }
+    install_files || {
+        VERBOSE=$old_verbose
+        log_error "Failed to install files"
+        exit 1
+    }
+    verify_installation || {
+        VERBOSE=$old_verbose
+        log_error "Failed to verify installation"
+        exit 1
+    }
     setup_path
     VERBOSE=$old_verbose

@@ -129,7 +129,11 @@ batch_uninstall_applications() {
             return 1
         fi
     fi
-    (while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null) &
+    (while true; do
+        sudo -n true
+        sleep 60
+        kill -0 "$$" || exit
+    done 2> /dev/null) &
     sudo_keepalive_pid=$!
 fi

@@ -349,7 +349,6 @@ get_directory_size_bytes() {
     du -sk "$path" 2> /dev/null | cut -f1 | awk '{print $1 * 1024}' || echo "0"
 }

-
 # Permission checks
 check_sudo() {
     if ! sudo -n true 2> /dev/null; then
@@ -467,10 +466,6 @@ load_config() {
     MOLE_MAX_LOG_SIZE="${MOLE_MAX_LOG_SIZE:-1048576}"
 }

-
-
-
-
 # Initialize configuration on sourcing
 load_config

@@ -570,7 +565,6 @@ stop_spinner() {
 # User Interaction - Confirmation Dialogs
 # ============================================================================

-
 # ============================================================================
 # Temporary File Management
 # ============================================================================
@@ -677,7 +671,8 @@ parallel_execute() {
 # Usage: with_spinner "Message" cmd arg...
 # Set MOLE_SPINNER_PREFIX=" " for indented spinner (e.g., in clean context)
 with_spinner() {
-    local msg="$1"; shift || true
+    local msg="$1"
+    shift || true
     local timeout="${MOLE_CMD_TIMEOUT:-180}" # Default 3min timeout

     if [[ -t 1 ]]; then
@@ -727,7 +722,8 @@ with_spinner() {
 # ============================================================================
 # clean_tool_cache "Label" command...
 clean_tool_cache() {
-    local label="$1"; shift || true
+    local label="$1"
+    shift || true
     if [[ "$DRY_RUN" == "true" ]]; then
         echo -e " ${YELLOW}→${NC} $label (would clean)"
         return 0
@@ -782,13 +778,21 @@ confirm_prompt() {
     echo -n "$message (Enter=OK / ESC q=Cancel): "
     IFS= read -r -s -n1 _key || _key=""
     case "$_key" in
-        $'\e'|q|Q) echo ""; return 1 ;;
-        ""|$'\n'|$'\r'|y|Y) echo ""; return 0 ;;
-        *) echo ""; return 1 ;;
+        $'\e' | q | Q)
+            echo ""
+            return 1
+            ;;
+        "" | $'\n' | $'\r' | y | Y)
+            echo ""
+            return 0
+            ;;
+        *)
+            echo ""
+            return 1
+            ;;
     esac
 }


 # Get optimal parallel job count based on CPU cores

 # =========================================================================
@@ -796,7 +800,8 @@ confirm_prompt() {
 # =========================================================================
 bytes_to_human_kb() { bytes_to_human "$((${1:-0} * 1024))"; }
 print_space_stat() {
-    local freed_kb="$1"; shift || true
+    local freed_kb="$1"
+    shift || true
     local current_free
     current_free=$(get_free_space)
     local human
@@ -810,8 +815,18 @@ print_space_stat() {
 register_temp_file() { MOLE_TEMP_FILES+=("$1"); }
 register_temp_dir() { MOLE_TEMP_DIRS+=("$1"); }

-mktemp_file() { local f; f=$(mktemp) || return 1; register_temp_file "$f"; echo "$f"; }
-mktemp_dir() { local d; d=$(mktemp -d) || return 1; register_temp_dir "$d"; echo "$d"; }
+mktemp_file() {
+    local f
+    f=$(mktemp) || return 1
+    register_temp_file "$f"
+    echo "$f"
+}
+mktemp_dir() {
+    local d
+    d=$(mktemp -d) || return 1
+    register_temp_dir "$d"
+    echo "$d"
+}

 # =========================================================================
 # Uninstall helper abstractions
@@ -955,7 +970,8 @@ map_uninstall_reason() {

 batch_safe_clean() {
     # Usage: batch_safe_clean "Label" path1 path2 ...
-    local label="$1"; shift || true
+    local label="$1"
+    shift || true
     local -a paths=("$@")
     if [[ ${#paths[@]} -eq 0 ]]; then return 0; fi
     safe_clean "${paths[@]}" "$label"
@@ -1381,7 +1397,6 @@ readonly DATA_PROTECTED_BUNDLES=(
     "org.sparkle-project.Sparkle" # Sparkle (update framework)
 )

-
 # Legacy function - preserved for backward compatibility
 # Use should_protect_from_uninstall() or should_protect_data() instead
 readonly PRESERVED_BUNDLE_PATTERNS=("${SYSTEM_CRITICAL_BUNDLES[@]}" "${DATA_PROTECTED_BUNDLES[@]}")
@@ -1390,7 +1405,7 @@ should_preserve_bundle() {
     for pattern in "${PRESERVED_BUNDLE_PATTERNS[@]}"; do
         # Use case for safer glob matching
         case "$bundle_id" in
-            $pattern) return 0 ;;
+            "$pattern") return 0 ;;
         esac
     done
     return 1
@@ -1402,7 +1417,7 @@ should_protect_from_uninstall() {
     for pattern in "${SYSTEM_CRITICAL_BUNDLES[@]}"; do
         # Use case for safer glob matching
         case "$bundle_id" in
-            $pattern) return 0 ;;
+            "$pattern") return 0 ;;
         esac
     done
     return 1
@@ -1415,7 +1430,7 @@ should_protect_data() {
     for pattern in "${SYSTEM_CRITICAL_BUNDLES[@]}" "${DATA_PROTECTED_BUNDLES[@]}"; do
         # Use case for safer glob matching
         case "$bundle_id" in
-            $pattern) return 0 ;;
+            "$pattern") return 0 ;;
         esac
     done
     return 1
@@ -1677,7 +1692,8 @@ calculate_total_size() {

     while IFS= read -r file; do
         if [[ -n "$file" && -e "$file" ]]; then
-            local size_kb=$(du -sk "$file" 2>/dev/null | awk '{print $1}' || echo "0")
+            local size_kb
+            size_kb=$(du -sk "$file" 2> /dev/null | awk '{print $1}' || echo "0")
             ((total_kb += size_kb))
         fi
     done <<< "$files"
@@ -114,7 +114,6 @@ patterns_equivalent() {
     return 1
 }

-
 load_whitelist() {
     local -a patterns=()

@@ -163,14 +162,13 @@ is_whitelisted() {
         if [[ "$check_pattern" == "$existing_expanded" ]]; then
             return 0
         fi
-        if [[ "$check_pattern" == $existing_expanded ]]; then
+        if [[ "$check_pattern" == "$existing_expanded" ]]; then
             return 0
         fi
     done
     return 1
 }

-
 manage_whitelist() {
     manage_whitelist_categories
 }
@@ -286,7 +284,6 @@ manage_whitelist_categories() {
     printf '\n'
 }

-
 if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
     manage_whitelist
 fi
mole (27 lines changed)
@@ -28,7 +28,7 @@ MOLE_TAGLINE="can dig deep to clean your Mac."
 # Get latest version from remote repository
 get_latest_version() {
     curl -fsSL --connect-timeout 2 --max-time 3 -H "Cache-Control: no-cache" \
-        "https://raw.githubusercontent.com/tw93/mole/main/mole" 2>/dev/null | \
+        "https://raw.githubusercontent.com/tw93/mole/main/mole" 2> /dev/null |
         grep '^VERSION=' | head -1 | sed 's/VERSION="\(.*\)"/\1/'
 }

@@ -180,7 +180,10 @@ update_mole() {

     local installer_url="https://raw.githubusercontent.com/tw93/mole/main/install.sh"
     local tmp_installer
-    tmp_installer="$(mktemp_file)" || { log_error "Update failed"; exit 1; }
+    tmp_installer="$(mktemp_file)" || {
+        log_error "Update failed"
+        exit 1
+    }

     # Download installer with progress
     if command -v curl > /dev/null 2>&1; then
@@ -318,7 +321,9 @@ remove_mole() {
     printf '\n'

     # Check if anything to remove
-    if [[ "$is_homebrew" == "false" && ${#manual_installs[@]:-0} -eq 0 && ${#alias_installs[@]:-0} -eq 0 ]]; then
+    local manual_count=${#manual_installs[@]}
+    local alias_count=${#alias_installs[@]}
+    if [[ "$is_homebrew" == "false" && ${manual_count:-0} -eq 0 && ${alias_count:-0} -eq 0 ]]; then
         printf '%s\n\n' "${YELLOW}No Mole installation detected${NC}"
         exit 0
     fi
@@ -362,14 +367,14 @@ remove_mole() {
         fi
     fi
     # Remove manual installations (silent)
-    if [[ ${#manual_installs[@]:-0} -gt 0 ]]; then
+    if [[ ${manual_count:-0} -gt 0 ]]; then
         for install in "${manual_installs[@]}"; do
             if [[ -f "$install" ]]; then
                 rm -f "$install" 2> /dev/null || has_error=true
             fi
         done
     fi
-    if [[ ${#alias_installs[@]} -gt 0 ]]; then
+    if [[ ${alias_count:-0} -gt 0 ]]; then
         for alias in "${alias_installs[@]}"; do
             if [[ -f "$alias" ]]; then
                 rm -f "$alias" 2> /dev/null || true
@@ -509,7 +514,11 @@ interactive_main_menu() {
                     ;;
                 2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
                 3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
-                4) clear; show_help; exit 0 ;;
+                4)
+                    clear
+                    show_help
+                    exit 0
+                    ;;
                 5) cleanup_and_exit ;;
             esac
             ;;
@@ -522,7 +531,11 @@ interactive_main_menu() {
                     ;;
                 2) exec "$SCRIPT_DIR/bin/uninstall.sh" ;;
                 3) exec "$SCRIPT_DIR/bin/analyze.sh" ;;
-                4) clear; show_help; exit 0 ;;
+                4)
+                    clear
+                    show_help
+                    exit 0
+                    ;;
                 5) cleanup_and_exit ;;
             esac
             ;;
scripts/format.sh (new executable file, 60 lines)
@@ -0,0 +1,60 @@
#!/bin/bash
# Format all shell scripts in the Mole project
#
# Usage:
#   ./scripts/format.sh          # Format all scripts
#   ./scripts/format.sh --check  # Check only, don't modify

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

CHECK_ONLY=false

# Parse arguments
if [[ "${1:-}" == "--check" ]]; then
    CHECK_ONLY=true
elif [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
    cat << 'EOF'
Usage: ./scripts/format.sh [--check]

Format shell scripts using shfmt.

Options:
  --check    Check formatting without modifying files
  --help     Show this help

Install: brew install shfmt
EOF
    exit 0
fi

# Check if shfmt is installed
if ! command -v shfmt > /dev/null 2>&1; then
    echo "Error: shfmt not installed"
    echo "Install: brew install shfmt"
    exit 1
fi

# Find all shell scripts
cd "$PROJECT_ROOT"

# shfmt options: -i 4 (4 spaces), -ci (indent switch cases), -sr (space after redirect)
if [[ "$CHECK_ONLY" == "true" ]]; then
    echo "Checking formatting..."
    if shfmt -i 4 -ci -sr -d . > /dev/null 2>&1; then
        echo "✓ All scripts properly formatted"
        exit 0
    else
        echo "✗ Some scripts need formatting:"
        shfmt -i 4 -ci -sr -d .
        echo ""
        echo "Run './scripts/format.sh' to fix"
        exit 1
    fi
else
    echo "Formatting scripts..."
    shfmt -i 4 -ci -sr -w .
    echo "✓ Done"
fi
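For reference, a short sketch of what the `-ci` flag produces (case branches indented one level); the `-sr` space-after-redirect effect is the `2> /dev/null` style already seen throughout this commit. Illustrative snippet, not taken from the repository:

```bash
#!/bin/bash
# Output shape of `shfmt -i 4 -ci -sr` on a small case statement.
x="a"
case "$x" in
    a) echo "one" ;;
    *) echo "other" ;;
esac
du -sk "$HOME" 2> /dev/null | sort -rn
```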
scripts/install-hooks.sh (new executable file, 44 lines)
@@ -0,0 +1,44 @@
#!/bin/bash
# Install git hooks for Mole project
#
# Usage:
#   ./scripts/install-hooks.sh

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'

cd "$PROJECT_ROOT"

# Check if this is a git repository
if [ ! -d ".git" ]; then
    echo "Error: Not a git repository"
    exit 1
fi

echo -e "${BLUE}Installing git hooks...${NC}"

# Install pre-commit hook
if [ -f ".git/hooks/pre-commit" ]; then
    echo "Pre-commit hook already exists, creating backup..."
    mv .git/hooks/pre-commit .git/hooks/pre-commit.backup
fi

ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit

echo -e "${GREEN}✓ Pre-commit hook installed${NC}"
echo ""
echo "The hook will:"
echo "  • Auto-format shell scripts before commit"
echo "  • Run shellcheck on changed files"
echo "  • Show warnings but won't block commits"
echo ""
echo "To uninstall:"
echo "  rm .git/hooks/pre-commit"
echo ""
scripts/pre-commit.sh (new executable file, 67 lines)
@@ -0,0 +1,67 @@
#!/bin/bash
# Git pre-commit hook for Mole
# Automatically formats shell scripts before commit
#
# Installation:
#   ln -s ../../scripts/pre-commit.sh .git/hooks/pre-commit
#   chmod +x .git/hooks/pre-commit
#
# Or use the install script:
#   ./scripts/install-hooks.sh

set -e

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Only check shell files that are staged
STAGED_SH_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep -E '\.sh$|^mole$' || true)

if [ -z "$STAGED_SH_FILES" ]; then
    exit 0
fi

echo -e "${YELLOW}Running pre-commit checks on shell files...${NC}"

# Check if shfmt is installed
if ! command -v shfmt &> /dev/null; then
    echo -e "${RED}shfmt is not installed. Install with: brew install shfmt${NC}"
    exit 1
fi

# Check if shellcheck is installed
if ! command -v shellcheck &> /dev/null; then
    echo -e "${RED}shellcheck is not installed. Install with: brew install shellcheck${NC}"
    exit 1
fi

NEEDS_FORMAT=0

# Check formatting
for file in $STAGED_SH_FILES; do
    if ! shfmt -i 4 -ci -sr -d "$file" > /dev/null 2>&1; then
        echo -e "${YELLOW}Formatting $file...${NC}"
        shfmt -i 4 -ci -sr -w "$file"
        git add "$file"
        NEEDS_FORMAT=1
    fi
done

# Run shellcheck
for file in $STAGED_SH_FILES; do
    if ! shellcheck -S warning "$file" > /dev/null 2>&1; then
        echo -e "${YELLOW}ShellCheck warnings in $file:${NC}"
        shellcheck -S warning "$file"
        echo -e "${YELLOW}Continuing with commit (warnings are non-critical)...${NC}"
    fi
done

if [ $NEEDS_FORMAT -eq 1 ]; then
    echo -e "${GREEN}✓ Files formatted and re-staged${NC}"
fi

echo -e "${GREEN}✓ Pre-commit checks passed${NC}"
exit 0
@@ -80,7 +80,8 @@ teardown() {
 }

 @test "bytes_to_human converts byte counts into readable units" {
-    output="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
+    output="$(
+        HOME="$HOME" bash --noprofile --norc << 'EOF'
 source "$PROJECT_ROOT/lib/common.sh"
 bytes_to_human 512
 bytes_to_human 2048
@@ -39,7 +39,8 @@ create_app_artifacts() {
 @test "find_app_files discovers user-level leftovers" {
     create_app_artifacts

-    result="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
+    result="$(
+        HOME="$HOME" bash --noprofile --norc << 'EOF'
 set -euo pipefail
 source "$PROJECT_ROOT/lib/common.sh"
 find_app_files "com.example.TestApp" "TestApp"
@@ -58,7 +59,8 @@ EOF
     dd if=/dev/zero of="$HOME/sized/file1" bs=1024 count=1 > /dev/null 2>&1
     dd if=/dev/zero of="$HOME/sized/file2" bs=1024 count=2 > /dev/null 2>&1

-    result="$(HOME="$HOME" bash --noprofile --norc <<'EOF'
+    result="$(
+        HOME="$HOME" bash --noprofile --norc << 'EOF'
 set -euo pipefail
 source "$PROJECT_ROOT/lib/common.sh"
 files="$(printf '%s\n%s\n' "$HOME/sized/file1" "$HOME/sized/file2")"