mirror of
https://github.com/tw93/Mole.git
synced 2026-03-22 20:15:07 +00:00
refactor(clean): extract shared purge hint helpers
This commit is contained in:
422
bin/clean.sh
422
bin/clean.sh
@@ -17,6 +17,7 @@ source "$SCRIPT_DIR/../lib/clean/caches.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/apps.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/dev.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/app_caches.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/hints.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/system.sh"
|
||||
source "$SCRIPT_DIR/../lib/clean/user.sh"
|
||||
|
||||
@@ -301,427 +302,6 @@ classify_cleanup_risk() {
|
||||
echo "MEDIUM|User data files"
|
||||
}
|
||||
|
||||
# Quick reminder probe for project build artifacts handled by `mo purge`.
|
||||
# Designed to be very fast: shallow directory checks only, no du/find scans.
|
||||
# shellcheck disable=SC2329
|
||||
load_quick_purge_hint_paths() {
|
||||
local config_file="$HOME/.config/mole/purge_paths"
|
||||
local -a paths=()
|
||||
|
||||
if [[ -f "$config_file" ]]; then
|
||||
while IFS= read -r line; do
|
||||
line="${line#"${line%%[![:space:]]*}"}"
|
||||
line="${line%"${line##*[![:space:]]}"}"
|
||||
|
||||
[[ -z "$line" || "$line" =~ ^# ]] && continue
|
||||
[[ "$line" == ~* ]] && line="${line/#~/$HOME}"
|
||||
paths+=("$line")
|
||||
done < "$config_file"
|
||||
fi
|
||||
|
||||
if [[ ${#paths[@]} -eq 0 ]]; then
|
||||
paths=(
|
||||
"$HOME/www"
|
||||
"$HOME/dev"
|
||||
"$HOME/Projects"
|
||||
"$HOME/GitHub"
|
||||
"$HOME/Code"
|
||||
"$HOME/Workspace"
|
||||
"$HOME/Repos"
|
||||
"$HOME/Development"
|
||||
)
|
||||
fi
|
||||
|
||||
if [[ ${#paths[@]} -gt 0 ]]; then
|
||||
printf '%s\n' "${paths[@]}"
|
||||
fi
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
record_project_artifact_hint() {
|
||||
local path="$1"
|
||||
|
||||
((PROJECT_ARTIFACT_HINT_COUNT++))
|
||||
|
||||
if [[ ${#PROJECT_ARTIFACT_HINT_EXAMPLES[@]} -lt 2 ]]; then
|
||||
PROJECT_ARTIFACT_HINT_EXAMPLES+=("${path/#$HOME/~}")
|
||||
fi
|
||||
|
||||
local sample_max=3
|
||||
|
||||
if [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -ge $sample_max ]]; then
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
return 0
|
||||
fi
|
||||
|
||||
local timeout_seconds="0.8"
|
||||
|
||||
local du_tmp
|
||||
du_tmp=$(mktemp)
|
||||
|
||||
local du_status=0
|
||||
if run_with_timeout "$timeout_seconds" du -skP "$path" > "$du_tmp" 2> /dev/null; then
|
||||
du_status=0
|
||||
else
|
||||
du_status=$?
|
||||
fi
|
||||
|
||||
if [[ $du_status -eq 0 ]]; then
|
||||
local size_kb
|
||||
size_kb=$(awk 'NR==1 {print $1; exit}' "$du_tmp")
|
||||
if [[ "$size_kb" =~ ^[0-9]+$ ]]; then
|
||||
((PROJECT_ARTIFACT_HINT_ESTIMATED_KB += size_kb))
|
||||
((PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES++))
|
||||
else
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
fi
|
||||
else
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
fi
|
||||
|
||||
rm -f "$du_tmp"
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
is_quick_purge_project_root() {
|
||||
local dir="$1"
|
||||
local indicator
|
||||
|
||||
# Mirror purge single-project detection so configured project roots like
|
||||
# ~/www/Pake can still surface direct-child artifacts such as target/.
|
||||
local -a indicators=(
|
||||
"lerna.json"
|
||||
"pnpm-workspace.yaml"
|
||||
"nx.json"
|
||||
"rush.json"
|
||||
"package.json"
|
||||
"Cargo.toml"
|
||||
"go.mod"
|
||||
"pyproject.toml"
|
||||
"requirements.txt"
|
||||
"pom.xml"
|
||||
"build.gradle"
|
||||
"Gemfile"
|
||||
"composer.json"
|
||||
"pubspec.yaml"
|
||||
"Makefile"
|
||||
"build.zig"
|
||||
"build.zig.zon"
|
||||
".git"
|
||||
)
|
||||
|
||||
for indicator in "${indicators[@]}"; do
|
||||
if [[ -e "$dir/$indicator" ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
probe_project_artifact_hints() {
|
||||
PROJECT_ARTIFACT_HINT_DETECTED=false
|
||||
PROJECT_ARTIFACT_HINT_COUNT=0
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=false
|
||||
PROJECT_ARTIFACT_HINT_EXAMPLES=()
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATED_KB=0
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES=0
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=false
|
||||
|
||||
local max_projects=200
|
||||
local max_projects_per_root=0
|
||||
local max_nested_per_project=120
|
||||
local max_matches=12
|
||||
|
||||
# Fast hint list tracks most purge targets, but excludes high-noise names
|
||||
# like `bin` and `vendor` that need deeper context checks in purge logic.
|
||||
local -a target_names=(
|
||||
"node_modules"
|
||||
"target"
|
||||
"build"
|
||||
"dist"
|
||||
"venv"
|
||||
".venv"
|
||||
".pytest_cache"
|
||||
".mypy_cache"
|
||||
".tox"
|
||||
".nox"
|
||||
".ruff_cache"
|
||||
".gradle"
|
||||
"__pycache__"
|
||||
".next"
|
||||
".nuxt"
|
||||
".output"
|
||||
"obj"
|
||||
".turbo"
|
||||
".parcel-cache"
|
||||
".dart_tool"
|
||||
".zig-cache"
|
||||
"zig-out"
|
||||
".angular"
|
||||
".svelte-kit"
|
||||
".astro"
|
||||
"coverage"
|
||||
"DerivedData"
|
||||
"Pods"
|
||||
".cxx"
|
||||
".expo"
|
||||
)
|
||||
|
||||
local -a scan_roots=()
|
||||
while IFS= read -r path; do
|
||||
[[ -n "$path" ]] && scan_roots+=("$path")
|
||||
done < <(load_quick_purge_hint_paths)
|
||||
|
||||
[[ ${#scan_roots[@]} -eq 0 ]] && return 0
|
||||
|
||||
# Fairness: avoid one very large root exhausting the entire scan budget.
|
||||
if [[ $max_projects_per_root -le 0 ]]; then
|
||||
max_projects_per_root=$(((max_projects + ${#scan_roots[@]} - 1) / ${#scan_roots[@]}))
|
||||
[[ $max_projects_per_root -lt 25 ]] && max_projects_per_root=25
|
||||
fi
|
||||
[[ $max_projects_per_root -gt $max_projects ]] && max_projects_per_root=$max_projects
|
||||
|
||||
local nullglob_was_set=0
|
||||
if shopt -q nullglob; then
|
||||
nullglob_was_set=1
|
||||
fi
|
||||
shopt -s nullglob
|
||||
|
||||
local scanned_projects=0
|
||||
local stop_scan=false
|
||||
local root project_dir nested_dir target_name candidate
|
||||
|
||||
for root in "${scan_roots[@]}"; do
|
||||
[[ -d "$root" ]] || continue
|
||||
local root_projects_scanned=0
|
||||
|
||||
if is_quick_purge_project_root "$root"; then
|
||||
((scanned_projects++))
|
||||
((root_projects_scanned++))
|
||||
if [[ $scanned_projects -gt $max_projects ]]; then
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=true
|
||||
stop_scan=true
|
||||
break
|
||||
fi
|
||||
|
||||
for target_name in "${target_names[@]}"; do
|
||||
candidate="$root/$target_name"
|
||||
if [[ -d "$candidate" ]]; then
|
||||
record_project_artifact_hint "$candidate"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
[[ "$stop_scan" == "true" ]] && break
|
||||
|
||||
if [[ $root_projects_scanned -ge $max_projects_per_root ]]; then
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=true
|
||||
continue
|
||||
fi
|
||||
|
||||
for project_dir in "$root"/*/; do
|
||||
[[ -d "$project_dir" ]] || continue
|
||||
project_dir="${project_dir%/}"
|
||||
|
||||
local project_name
|
||||
project_name=$(basename "$project_dir")
|
||||
[[ "$project_name" == .* ]] && continue
|
||||
|
||||
if [[ $root_projects_scanned -ge $max_projects_per_root ]]; then
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=true
|
||||
break
|
||||
fi
|
||||
|
||||
((scanned_projects++))
|
||||
((root_projects_scanned++))
|
||||
if [[ $scanned_projects -gt $max_projects ]]; then
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=true
|
||||
stop_scan=true
|
||||
break
|
||||
fi
|
||||
|
||||
for target_name in "${target_names[@]}"; do
|
||||
candidate="$project_dir/$target_name"
|
||||
if [[ -d "$candidate" ]]; then
|
||||
record_project_artifact_hint "$candidate"
|
||||
fi
|
||||
done
|
||||
[[ "$stop_scan" == "true" ]] && break
|
||||
|
||||
local nested_count=0
|
||||
for nested_dir in "$project_dir"/*/; do
|
||||
[[ -d "$nested_dir" ]] || continue
|
||||
nested_dir="${nested_dir%/}"
|
||||
|
||||
local nested_name
|
||||
nested_name=$(basename "$nested_dir")
|
||||
[[ "$nested_name" == .* ]] && continue
|
||||
|
||||
case "$nested_name" in
|
||||
node_modules | target | build | dist | DerivedData | Pods)
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
((nested_count++))
|
||||
if [[ $nested_count -gt $max_nested_per_project ]]; then
|
||||
break
|
||||
fi
|
||||
|
||||
for target_name in "${target_names[@]}"; do
|
||||
candidate="$nested_dir/$target_name"
|
||||
if [[ -d "$candidate" ]]; then
|
||||
record_project_artifact_hint "$candidate"
|
||||
fi
|
||||
done
|
||||
|
||||
[[ "$stop_scan" == "true" ]] && break
|
||||
done
|
||||
|
||||
[[ "$stop_scan" == "true" ]] && break
|
||||
done
|
||||
|
||||
[[ "$stop_scan" == "true" ]] && break
|
||||
done
|
||||
|
||||
if [[ $nullglob_was_set -eq 0 ]]; then
|
||||
shopt -u nullglob
|
||||
fi
|
||||
|
||||
if [[ $PROJECT_ARTIFACT_HINT_COUNT -gt 0 ]]; then
|
||||
PROJECT_ARTIFACT_HINT_DETECTED=true
|
||||
fi
|
||||
|
||||
# Preserve a compact display hint if candidate count is large, but do not
|
||||
# stop scanning early solely because we exceeded this threshold.
|
||||
if [[ $PROJECT_ARTIFACT_HINT_COUNT -gt $max_matches ]]; then
|
||||
PROJECT_ARTIFACT_HINT_TRUNCATED=true
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
show_system_data_hint_notice() {
|
||||
local min_gb=2
|
||||
local timeout_seconds="0.8"
|
||||
local max_hits=3
|
||||
|
||||
local threshold_kb=$((min_gb * 1024 * 1024))
|
||||
local -a clue_labels=()
|
||||
local -a clue_sizes=()
|
||||
local -a clue_paths=()
|
||||
|
||||
local -a labels=(
|
||||
"Xcode DerivedData"
|
||||
"Xcode Archives"
|
||||
"iPhone backups"
|
||||
"Simulator data"
|
||||
"Docker Desktop data"
|
||||
"Mail data"
|
||||
)
|
||||
local -a paths=(
|
||||
"$HOME/Library/Developer/Xcode/DerivedData"
|
||||
"$HOME/Library/Developer/Xcode/Archives"
|
||||
"$HOME/Library/Application Support/MobileSync/Backup"
|
||||
"$HOME/Library/Developer/CoreSimulator/Devices"
|
||||
"$HOME/Library/Containers/com.docker.docker/Data"
|
||||
"$HOME/Library/Mail"
|
||||
)
|
||||
|
||||
local i
|
||||
for i in "${!paths[@]}"; do
|
||||
local path="${paths[$i]}"
|
||||
[[ -d "$path" ]] || continue
|
||||
|
||||
local du_tmp
|
||||
du_tmp=$(mktemp)
|
||||
local du_status=0
|
||||
if run_with_timeout "$timeout_seconds" du -skP "$path" > "$du_tmp" 2> /dev/null; then
|
||||
du_status=0
|
||||
else
|
||||
du_status=$?
|
||||
fi
|
||||
|
||||
if [[ $du_status -eq 0 ]]; then
|
||||
local size_kb
|
||||
size_kb=$(awk 'NR==1 {print $1; exit}' "$du_tmp")
|
||||
if [[ "$size_kb" =~ ^[0-9]+$ ]] && [[ "$size_kb" -ge "$threshold_kb" ]]; then
|
||||
clue_labels+=("${labels[$i]}")
|
||||
clue_sizes+=("$size_kb")
|
||||
clue_paths+=("${path/#$HOME/~}")
|
||||
if [[ ${#clue_labels[@]} -ge $max_hits ]]; then
|
||||
rm -f "$du_tmp"
|
||||
break
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
rm -f "$du_tmp"
|
||||
done
|
||||
|
||||
if [[ ${#clue_labels[@]} -eq 0 ]]; then
|
||||
note_activity
|
||||
echo -e " ${GREEN}${ICON_SUCCESS}${NC} No common System Data clues detected"
|
||||
return 0
|
||||
fi
|
||||
|
||||
note_activity
|
||||
|
||||
for i in "${!clue_labels[@]}"; do
|
||||
local human_size
|
||||
human_size=$(bytes_to_human "$((clue_sizes[$i] * 1024))")
|
||||
echo -e " ${GREEN}${ICON_LIST}${NC} ${clue_labels[$i]}: ${human_size}"
|
||||
echo -e " ${GRAY}${ICON_SUBLIST}${NC} Path: ${GRAY}${clue_paths[$i]}${NC}"
|
||||
done
|
||||
echo -e " ${GRAY}${ICON_REVIEW}${NC} Review: mo analyze, Device backups, docker system df"
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
show_project_artifact_hint_notice() {
|
||||
probe_project_artifact_hints
|
||||
|
||||
if [[ "$PROJECT_ARTIFACT_HINT_DETECTED" != "true" ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
note_activity
|
||||
|
||||
local hint_count_label="$PROJECT_ARTIFACT_HINT_COUNT"
|
||||
[[ "$PROJECT_ARTIFACT_HINT_TRUNCATED" == "true" ]] && hint_count_label="${hint_count_label}+"
|
||||
|
||||
local example_text=""
|
||||
if [[ ${#PROJECT_ARTIFACT_HINT_EXAMPLES[@]} -gt 0 ]]; then
|
||||
example_text="${PROJECT_ARTIFACT_HINT_EXAMPLES[0]}"
|
||||
if [[ ${#PROJECT_ARTIFACT_HINT_EXAMPLES[@]} -gt 1 ]]; then
|
||||
example_text+=", ${PROJECT_ARTIFACT_HINT_EXAMPLES[1]}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -gt 0 ]]; then
|
||||
local estimate_human
|
||||
estimate_human=$(bytes_to_human "$((PROJECT_ARTIFACT_HINT_ESTIMATED_KB * 1024))")
|
||||
|
||||
local estimate_is_partial="$PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL"
|
||||
if [[ "$PROJECT_ARTIFACT_HINT_TRUNCATED" == "true" ]] || [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -lt $PROJECT_ARTIFACT_HINT_COUNT ]]; then
|
||||
estimate_is_partial=true
|
||||
fi
|
||||
|
||||
if [[ "$estimate_is_partial" == "true" ]]; then
|
||||
echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates, at least ${estimate_human} sampled from ${PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES} items"
|
||||
else
|
||||
echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates, sampled ${estimate_human}"
|
||||
fi
|
||||
else
|
||||
echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates"
|
||||
fi
|
||||
|
||||
if [[ -n "$example_text" ]]; then
|
||||
echo -e " ${GRAY}${ICON_SUBLIST}${NC} Examples: ${GRAY}${example_text}${NC}"
|
||||
fi
|
||||
echo -e " ${GRAY}${ICON_REVIEW}${NC} Review: mo purge"
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
safe_clean() {
|
||||
if [[ $# -eq 0 ]]; then
|
||||
|
||||
353
lib/clean/hints.sh
Normal file
353
lib/clean/hints.sh
Normal file
@@ -0,0 +1,353 @@
|
||||
#!/bin/bash
|
||||
# Hint notices used by `mo clean` (non-destructive guidance only).
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
_MOLE_HINTS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
# shellcheck disable=SC1090
|
||||
source "$_MOLE_HINTS_DIR/purge_shared.sh"
|
||||
|
||||
# Emit the purge-hint scan roots, one path per line.
# User-configured paths (~/.config/mole/purge_paths) win; when the config
# yields nothing, fall back to the shared default search paths.
# shellcheck disable=SC2329
load_quick_purge_hint_paths() {
    local config_file="$HOME/.config/mole/purge_paths"
    local -a scan_paths=()
    local entry

    while IFS= read -r entry; do
        [[ -n "$entry" ]] && scan_paths+=("$entry")
    done < <(mole_purge_read_paths_config "$config_file")

    if [[ ${#scan_paths[@]} -eq 0 ]]; then
        scan_paths=("${MOLE_PURGE_DEFAULT_SEARCH_PATHS[@]}")
    fi

    if [[ ${#scan_paths[@]} -gt 0 ]]; then
        printf '%s\n' "${scan_paths[@]}"
    fi
}
|
||||
|
||||
# Print the size of $1 in kilobytes (du -sk) bounded by a timeout
# (default 0.8s). Returns non-zero — printing nothing — when du fails,
# the timeout fires, or the first du field is not a plain integer.
# shellcheck disable=SC2329
hint_get_path_size_kb_with_timeout() {
    local path="$1"
    local timeout_seconds="${2:-0.8}"

    local du_output=""
    if ! du_output=$(run_with_timeout "$timeout_seconds" du -skP "$path" 2> /dev/null); then
        return 1
    fi

    local size_kb
    size_kb=$(printf '%s\n' "$du_output" | awk 'NR==1 {print $1; exit}')

    [[ "$size_kb" =~ ^[0-9]+$ ]] || return 1
    printf '%s\n' "$size_kb"
}
|
||||
|
||||
# shellcheck disable=SC2329
|
||||
record_project_artifact_hint() {
|
||||
local path="$1"
|
||||
|
||||
((PROJECT_ARTIFACT_HINT_COUNT++))
|
||||
|
||||
if [[ ${#PROJECT_ARTIFACT_HINT_EXAMPLES[@]} -lt 2 ]]; then
|
||||
PROJECT_ARTIFACT_HINT_EXAMPLES+=("${path/#$HOME/~}")
|
||||
fi
|
||||
|
||||
local sample_max=3
|
||||
if [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -ge $sample_max ]]; then
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
return 0
|
||||
fi
|
||||
|
||||
local timeout_seconds="0.8"
|
||||
local size_kb=""
|
||||
if size_kb=$(hint_get_path_size_kb_with_timeout "$path" "$timeout_seconds"); then
|
||||
if [[ "$size_kb" =~ ^[0-9]+$ ]]; then
|
||||
((PROJECT_ARTIFACT_HINT_ESTIMATED_KB += size_kb))
|
||||
((PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES++))
|
||||
else
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
fi
|
||||
else
|
||||
PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=true
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Backward-compatible adapter for older hint callers: delegates
# project-root detection to the shared purge helper.
# shellcheck disable=SC2329
is_quick_purge_project_root() {
    local candidate_dir="$1"
    mole_purge_is_project_root "$candidate_dir"
}
|
||||
|
||||
# Scan configured roots for project build artifacts and populate the
# PROJECT_ARTIFACT_HINT_* globals consumed by show_project_artifact_hint_notice.
# Budgeted for speed: shallow glob checks only (root, root/*, root/*/*),
# with global and per-root project limits; no find recursion here.
#
# Fix: counters use assignment-form arithmetic instead of ((var++)) — an
# arithmetic command whose expression evaluates to 0 returns status 1 and
# would abort the script under `set -euo pipefail` (each counter starts at 0,
# so the very first increment would trip it).
# shellcheck disable=SC2329
probe_project_artifact_hints() {
    PROJECT_ARTIFACT_HINT_DETECTED=false
    PROJECT_ARTIFACT_HINT_COUNT=0
    PROJECT_ARTIFACT_HINT_TRUNCATED=false
    PROJECT_ARTIFACT_HINT_EXAMPLES=()
    PROJECT_ARTIFACT_HINT_ESTIMATED_KB=0
    PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES=0
    PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=false

    local max_projects=200
    local max_projects_per_root=0
    local max_nested_per_project=120
    local max_matches=12

    local -a target_names=()
    local target_name
    while IFS= read -r target_name; do
        [[ -n "$target_name" ]] && target_names+=("$target_name")
    done < <(mole_purge_quick_hint_target_names)

    local -a scan_roots=()
    local path
    while IFS= read -r path; do
        [[ -n "$path" ]] && scan_roots+=("$path")
    done < <(load_quick_purge_hint_paths)

    [[ ${#scan_roots[@]} -eq 0 ]] && return 0

    # Fairness: avoid one very large root exhausting the entire scan budget.
    if [[ $max_projects_per_root -le 0 ]]; then
        max_projects_per_root=$(((max_projects + ${#scan_roots[@]} - 1) / ${#scan_roots[@]}))
        [[ $max_projects_per_root -lt 25 ]] && max_projects_per_root=25
    fi
    [[ $max_projects_per_root -gt $max_projects ]] && max_projects_per_root=$max_projects

    # Remember whether nullglob was already on so we can restore it afterward.
    local nullglob_was_set=0
    if shopt -q nullglob; then
        nullglob_was_set=1
    fi
    shopt -s nullglob

    local scanned_projects=0
    local stop_scan=false
    local root project_dir nested_dir candidate

    for root in "${scan_roots[@]}"; do
        [[ -d "$root" ]] || continue
        local root_projects_scanned=0

        # A configured root may itself be a single project (e.g. ~/www/Pake);
        # check its direct children for artifacts before descending.
        if is_quick_purge_project_root "$root"; then
            scanned_projects=$((scanned_projects + 1))
            root_projects_scanned=$((root_projects_scanned + 1))
            if [[ $scanned_projects -gt $max_projects ]]; then
                PROJECT_ARTIFACT_HINT_TRUNCATED=true
                stop_scan=true
                break
            fi

            for target_name in "${target_names[@]}"; do
                candidate="$root/$target_name"
                if [[ -d "$candidate" ]]; then
                    record_project_artifact_hint "$candidate"
                fi
            done
        fi
        [[ "$stop_scan" == "true" ]] && break

        if [[ $root_projects_scanned -ge $max_projects_per_root ]]; then
            PROJECT_ARTIFACT_HINT_TRUNCATED=true
            continue
        fi

        # Depth 1: each visible child of the root is a candidate project.
        for project_dir in "$root"/*/; do
            [[ -d "$project_dir" ]] || continue
            project_dir="${project_dir%/}"

            local project_name
            project_name=$(basename "$project_dir")
            [[ "$project_name" == .* ]] && continue

            if [[ $root_projects_scanned -ge $max_projects_per_root ]]; then
                PROJECT_ARTIFACT_HINT_TRUNCATED=true
                break
            fi

            scanned_projects=$((scanned_projects + 1))
            root_projects_scanned=$((root_projects_scanned + 1))
            if [[ $scanned_projects -gt $max_projects ]]; then
                PROJECT_ARTIFACT_HINT_TRUNCATED=true
                stop_scan=true
                break
            fi

            for target_name in "${target_names[@]}"; do
                candidate="$project_dir/$target_name"
                if [[ -d "$candidate" ]]; then
                    record_project_artifact_hint "$candidate"
                fi
            done
            [[ "$stop_scan" == "true" ]] && break

            # Depth 2: nested sub-projects (monorepo packages etc.).
            local nested_count=0
            for nested_dir in "$project_dir"/*/; do
                [[ -d "$nested_dir" ]] || continue
                nested_dir="${nested_dir%/}"

                local nested_name
                nested_name=$(basename "$nested_dir")
                [[ "$nested_name" == .* ]] && continue

                # Artifact directories themselves never contain sub-projects.
                case "$nested_name" in
                    node_modules | target | build | dist | DerivedData | Pods)
                        continue
                        ;;
                esac

                nested_count=$((nested_count + 1))
                if [[ $nested_count -gt $max_nested_per_project ]]; then
                    break
                fi

                for target_name in "${target_names[@]}"; do
                    candidate="$nested_dir/$target_name"
                    if [[ -d "$candidate" ]]; then
                        record_project_artifact_hint "$candidate"
                    fi
                done

                [[ "$stop_scan" == "true" ]] && break
            done

            [[ "$stop_scan" == "true" ]] && break
        done

        [[ "$stop_scan" == "true" ]] && break
    done

    if [[ $nullglob_was_set -eq 0 ]]; then
        shopt -u nullglob
    fi

    if [[ $PROJECT_ARTIFACT_HINT_COUNT -gt 0 ]]; then
        PROJECT_ARTIFACT_HINT_DETECTED=true
    fi

    # Preserve a compact display hint if candidate count is large, but do not
    # stop scanning early solely because we exceeded this threshold.
    if [[ $PROJECT_ARTIFACT_HINT_COUNT -gt $max_matches ]]; then
        PROJECT_ARTIFACT_HINT_TRUNCATED=true
    fi

    return 0
}
|
||||
|
||||
# Surface large "System Data" contributors (Xcode caches, device backups,
# simulators, Docker, Mail) that mo clean does not remove automatically.
# Only directories of at least min_gb are listed, capped at max_hits.
# shellcheck disable=SC2329
show_system_data_hint_notice() {
    local min_gb=2
    local timeout_seconds="0.8"
    local max_hits=3

    local threshold_kb=$((min_gb * 1024 * 1024))
    local -a clue_labels=()
    local -a clue_sizes=()
    local -a clue_paths=()

    # labels[i] describes paths[i]; the two arrays stay index-aligned.
    local -a labels=(
        "Xcode DerivedData"
        "Xcode Archives"
        "iPhone backups"
        "Simulator data"
        "Docker Desktop data"
        "Mail data"
    )
    local -a paths=(
        "$HOME/Library/Developer/Xcode/DerivedData"
        "$HOME/Library/Developer/Xcode/Archives"
        "$HOME/Library/Application Support/MobileSync/Backup"
        "$HOME/Library/Developer/CoreSimulator/Devices"
        "$HOME/Library/Containers/com.docker.docker/Data"
        "$HOME/Library/Mail"
    )

    local idx
    for idx in "${!paths[@]}"; do
        local candidate="${paths[$idx]}"
        [[ -d "$candidate" ]] || continue

        local size_kb=""
        size_kb=$(hint_get_path_size_kb_with_timeout "$candidate" "$timeout_seconds") || continue
        [[ "$size_kb" -ge "$threshold_kb" ]] || continue

        clue_labels+=("${labels[$idx]}")
        clue_sizes+=("$size_kb")
        clue_paths+=("${candidate/#$HOME/~}")
        [[ ${#clue_labels[@]} -ge $max_hits ]] && break
    done

    if [[ ${#clue_labels[@]} -eq 0 ]]; then
        note_activity
        echo -e " ${GREEN}${ICON_SUCCESS}${NC} No common System Data clues detected"
        return 0
    fi

    note_activity

    for idx in "${!clue_labels[@]}"; do
        local human_size
        human_size=$(bytes_to_human "$((clue_sizes[idx] * 1024))")
        echo -e " ${GREEN}${ICON_LIST}${NC} ${clue_labels[$idx]}: ${human_size}"
        echo -e " ${GRAY}${ICON_SUBLIST}${NC} Path: ${GRAY}${clue_paths[$idx]}${NC}"
    done
    echo -e " ${GRAY}${ICON_REVIEW}${NC} Review: mo analyze, Device backups, docker system df"
}
|
||||
|
||||
# Print the `mo purge` reminder: candidate count, sampled size estimate,
# and up to two example paths gathered by probe_project_artifact_hints.
# Prints nothing when no candidates were detected.
# shellcheck disable=SC2329
show_project_artifact_hint_notice() {
    probe_project_artifact_hints

    [[ "$PROJECT_ARTIFACT_HINT_DETECTED" == "true" ]] || return 0

    note_activity

    # Append "+" when the scan was cut short — the true count may be higher.
    local hint_count_label="$PROJECT_ARTIFACT_HINT_COUNT"
    if [[ "$PROJECT_ARTIFACT_HINT_TRUNCATED" == "true" ]]; then
        hint_count_label+="+"
    fi

    local example_text=""
    case ${#PROJECT_ARTIFACT_HINT_EXAMPLES[@]} in
        0) ;;
        1) example_text="${PROJECT_ARTIFACT_HINT_EXAMPLES[0]}" ;;
        *) example_text="${PROJECT_ARTIFACT_HINT_EXAMPLES[0]}, ${PROJECT_ARTIFACT_HINT_EXAMPLES[1]}" ;;
    esac

    if [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -gt 0 ]]; then
        local estimate_human
        estimate_human=$(bytes_to_human "$((PROJECT_ARTIFACT_HINT_ESTIMATED_KB * 1024))")

        # The estimate is partial when sampling stopped early or the scan
        # itself was truncated.
        local estimate_is_partial="$PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL"
        if [[ "$PROJECT_ARTIFACT_HINT_TRUNCATED" == "true" ]] || [[ $PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES -lt $PROJECT_ARTIFACT_HINT_COUNT ]]; then
            estimate_is_partial=true
        fi

        if [[ "$estimate_is_partial" == "true" ]]; then
            echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates, at least ${estimate_human} sampled from ${PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES} items"
        else
            echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates, sampled ${estimate_human}"
        fi
    else
        echo -e " ${GREEN}${ICON_LIST}${NC} ${GREEN}${hint_count_label}${NC} candidates"
    fi

    if [[ -n "$example_text" ]]; then
        echo -e " ${GRAY}${ICON_SUBLIST}${NC} Examples: ${GRAY}${example_text}${NC}"
    fi
    echo -e " ${GRAY}${ICON_REVIEW}${NC} Review: mo purge"
}
|
||||
@@ -9,58 +9,17 @@ if ! command -v ensure_user_dir > /dev/null 2>&1; then
|
||||
# shellcheck disable=SC1090
|
||||
source "$CORE_LIB_DIR/common.sh"
|
||||
fi
|
||||
# shellcheck disable=SC1090
|
||||
source "$PROJECT_LIB_DIR/purge_shared.sh"
|
||||
|
||||
# Targets to look for (heavy build artifacts).
|
||||
readonly PURGE_TARGETS=(
|
||||
"node_modules"
|
||||
"target" # Rust, Maven
|
||||
"build" # Gradle, various
|
||||
"dist" # JS builds
|
||||
"venv" # Python
|
||||
".venv" # Python
|
||||
".pytest_cache" # Python (pytest)
|
||||
".mypy_cache" # Python (mypy)
|
||||
".tox" # Python (tox virtualenvs)
|
||||
".nox" # Python (nox virtualenvs)
|
||||
".ruff_cache" # Python (ruff)
|
||||
".gradle" # Gradle local
|
||||
"__pycache__" # Python
|
||||
".next" # Next.js
|
||||
".nuxt" # Nuxt.js
|
||||
".output" # Nuxt.js
|
||||
"vendor" # PHP Composer
|
||||
"bin" # .NET build output (guarded; see is_protected_purge_artifact)
|
||||
"obj" # C# / Unity
|
||||
".turbo" # Turborepo cache
|
||||
".parcel-cache" # Parcel bundler
|
||||
".dart_tool" # Flutter/Dart build cache
|
||||
".zig-cache" # Zig
|
||||
"zig-out" # Zig
|
||||
".angular" # Angular
|
||||
".svelte-kit" # SvelteKit
|
||||
".astro" # Astro
|
||||
"coverage" # Code coverage reports
|
||||
"DerivedData" # Xcode
|
||||
"Pods" # CocoaPods
|
||||
".cxx" # React Native Android NDK build cache
|
||||
".expo" # Expo
|
||||
)
|
||||
readonly PURGE_TARGETS=("${MOLE_PURGE_TARGETS[@]}")
|
||||
# Minimum age in days before considering for cleanup.
|
||||
readonly MIN_AGE_DAYS=7
|
||||
# Scan depth defaults (relative to search root).
|
||||
readonly PURGE_MIN_DEPTH_DEFAULT=1
|
||||
readonly PURGE_MAX_DEPTH_DEFAULT=6
|
||||
# Search paths (default, can be overridden via config file).
|
||||
readonly DEFAULT_PURGE_SEARCH_PATHS=(
|
||||
"$HOME/www"
|
||||
"$HOME/dev"
|
||||
"$HOME/Projects"
|
||||
"$HOME/GitHub"
|
||||
"$HOME/Code"
|
||||
"$HOME/Workspace"
|
||||
"$HOME/Repos"
|
||||
"$HOME/Development"
|
||||
)
|
||||
readonly DEFAULT_PURGE_SEARCH_PATHS=("${MOLE_PURGE_DEFAULT_SEARCH_PATHS[@]}")
|
||||
|
||||
# Config file for custom purge paths.
|
||||
readonly PURGE_CONFIG_FILE="$HOME/.config/mole/purge_paths"
|
||||
@@ -70,29 +29,8 @@ PURGE_SEARCH_PATHS=()
|
||||
|
||||
# Project indicators for container detection.
|
||||
# Monorepo indicators (higher priority)
|
||||
readonly MONOREPO_INDICATORS=(
|
||||
"lerna.json"
|
||||
"pnpm-workspace.yaml"
|
||||
"nx.json"
|
||||
"rush.json"
|
||||
)
|
||||
|
||||
readonly PROJECT_INDICATORS=(
|
||||
"package.json"
|
||||
"Cargo.toml"
|
||||
"go.mod"
|
||||
"pyproject.toml"
|
||||
"requirements.txt"
|
||||
"pom.xml"
|
||||
"build.gradle"
|
||||
"Gemfile"
|
||||
"composer.json"
|
||||
"pubspec.yaml"
|
||||
"Makefile"
|
||||
"build.zig"
|
||||
"build.zig.zon"
|
||||
".git"
|
||||
)
|
||||
readonly MONOREPO_INDICATORS=("${MOLE_PURGE_MONOREPO_INDICATORS[@]}")
|
||||
readonly PROJECT_INDICATORS=("${MOLE_PURGE_PROJECT_INDICATORS[@]}")
|
||||
|
||||
# Check if a directory contains projects (directly or in subdirectories).
|
||||
is_project_container() {
|
||||
@@ -187,18 +125,9 @@ EOF
|
||||
load_purge_config() {
|
||||
PURGE_SEARCH_PATHS=()
|
||||
|
||||
if [[ -f "$PURGE_CONFIG_FILE" ]]; then
|
||||
while IFS= read -r line; do
|
||||
line="${line#"${line%%[![:space:]]*}"}"
|
||||
line="${line%"${line##*[![:space:]]}"}"
|
||||
|
||||
[[ -z "$line" || "$line" =~ ^# ]] && continue
|
||||
|
||||
line="${line/#\~/$HOME}"
|
||||
|
||||
PURGE_SEARCH_PATHS+=("$line")
|
||||
done < "$PURGE_CONFIG_FILE"
|
||||
fi
|
||||
while IFS= read -r line; do
|
||||
[[ -n "$line" ]] && PURGE_SEARCH_PATHS+=("$line")
|
||||
done < <(mole_purge_read_paths_config "$PURGE_CONFIG_FILE")
|
||||
|
||||
if [[ ${#PURGE_SEARCH_PATHS[@]} -eq 0 ]]; then
|
||||
if [[ -t 1 ]] && [[ -z "${_PURGE_DISCOVERY_SILENT:-}" ]]; then
|
||||
@@ -231,22 +160,7 @@ load_purge_config
|
||||
# This is used to safely allow cleaning direct-child artifacts when
|
||||
# users configure a single project directory as a purge search path.
|
||||
is_purge_project_root() {
|
||||
local dir="$1"
|
||||
local indicator
|
||||
|
||||
for indicator in "${MONOREPO_INDICATORS[@]}"; do
|
||||
if [[ -e "$dir/$indicator" ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
for indicator in "${PROJECT_INDICATORS[@]}"; do
|
||||
if [[ -e "$dir/$indicator" ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
return 1
|
||||
mole_purge_is_project_root "$1"
|
||||
}
|
||||
|
||||
# Args: $1 - path to check
|
||||
|
||||
137
lib/clean/purge_shared.sh
Normal file
137
lib/clean/purge_shared.sh
Normal file
@@ -0,0 +1,137 @@
|
||||
#!/bin/bash
|
||||
# Shared purge configuration and helpers (side-effect free).
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -n "${MOLE_PURGE_SHARED_LOADED:-}" ]]; then
|
||||
return 0
|
||||
fi
|
||||
readonly MOLE_PURGE_SHARED_LOADED=1
|
||||
|
||||
# Canonical purge targets (heavy project build artifacts).
|
||||
readonly MOLE_PURGE_TARGETS=(
|
||||
"node_modules"
|
||||
"target" # Rust, Maven
|
||||
"build" # Gradle, various
|
||||
"dist" # JS builds
|
||||
"venv" # Python
|
||||
".venv" # Python
|
||||
".pytest_cache" # Python (pytest)
|
||||
".mypy_cache" # Python (mypy)
|
||||
".tox" # Python (tox virtualenvs)
|
||||
".nox" # Python (nox virtualenvs)
|
||||
".ruff_cache" # Python (ruff)
|
||||
".gradle" # Gradle local
|
||||
"__pycache__" # Python
|
||||
".next" # Next.js
|
||||
".nuxt" # Nuxt.js
|
||||
".output" # Nuxt.js
|
||||
"vendor" # PHP Composer
|
||||
"bin" # .NET build output (guarded; see is_protected_purge_artifact)
|
||||
"obj" # C# / Unity
|
||||
".turbo" # Turborepo cache
|
||||
".parcel-cache" # Parcel bundler
|
||||
".dart_tool" # Flutter/Dart build cache
|
||||
".zig-cache" # Zig
|
||||
"zig-out" # Zig
|
||||
".angular" # Angular
|
||||
".svelte-kit" # SvelteKit
|
||||
".astro" # Astro
|
||||
"coverage" # Code coverage reports
|
||||
"DerivedData" # Xcode
|
||||
"Pods" # CocoaPods
|
||||
".cxx" # React Native Android NDK build cache
|
||||
".expo" # Expo
|
||||
)
|
||||
|
||||
readonly MOLE_PURGE_DEFAULT_SEARCH_PATHS=(
|
||||
"$HOME/www"
|
||||
"$HOME/dev"
|
||||
"$HOME/Projects"
|
||||
"$HOME/GitHub"
|
||||
"$HOME/Code"
|
||||
"$HOME/Workspace"
|
||||
"$HOME/Repos"
|
||||
"$HOME/Development"
|
||||
)
|
||||
|
||||
readonly MOLE_PURGE_MONOREPO_INDICATORS=(
|
||||
"lerna.json"
|
||||
"pnpm-workspace.yaml"
|
||||
"nx.json"
|
||||
"rush.json"
|
||||
)
|
||||
|
||||
readonly MOLE_PURGE_PROJECT_INDICATORS=(
|
||||
"package.json"
|
||||
"Cargo.toml"
|
||||
"go.mod"
|
||||
"pyproject.toml"
|
||||
"requirements.txt"
|
||||
"pom.xml"
|
||||
"build.gradle"
|
||||
"Gemfile"
|
||||
"composer.json"
|
||||
"pubspec.yaml"
|
||||
"Makefile"
|
||||
"build.zig"
|
||||
"build.zig.zon"
|
||||
".git"
|
||||
)
|
||||
|
||||
# High-noise targets intentionally excluded from quick hint scans in mo clean.
|
||||
readonly MOLE_PURGE_QUICK_HINT_EXCLUDED_TARGETS=(
|
||||
"bin"
|
||||
"vendor"
|
||||
)
|
||||
|
||||
# Return 0 when $1 looks like a project root — monorepo markers are
# checked first, then the generic project indicators. Returns 1 otherwise.
mole_purge_is_project_root() {
    local dir="$1"
    local marker

    for marker in "${MOLE_PURGE_MONOREPO_INDICATORS[@]}" "${MOLE_PURGE_PROJECT_INDICATORS[@]}"; do
        if [[ -e "$dir/$marker" ]]; then
            return 0
        fi
    done

    return 1
}
|
||||
|
||||
# Emit the purge targets suitable for fast hint scans, one per line,
# skipping high-noise names (e.g. bin, vendor) that require deeper
# context checks in the full purge logic.
mole_purge_quick_hint_target_names() {
    local target excluded

    for target in "${MOLE_PURGE_TARGETS[@]}"; do
        local keep=true
        for excluded in "${MOLE_PURGE_QUICK_HINT_EXCLUDED_TARGETS[@]}"; do
            if [[ "$target" == "$excluded" ]]; then
                keep=false
                break
            fi
        done
        if [[ "$keep" == "true" ]]; then
            printf '%s\n' "$target"
        fi
    done

    return 0
}
|
||||
|
||||
# Read the purge paths config file ($1, default ~/.config/mole/purge_paths)
# and print one usable search path per line: whitespace trimmed, blank
# lines and #-comments dropped, a leading ~ expanded to $HOME.
# A missing config file is not an error (prints nothing, returns 0).
mole_purge_read_paths_config() {
    local config_file="${1:-$HOME/.config/mole/purge_paths}"
    [[ -f "$config_file" ]] || return 0

    local entry
    while IFS= read -r entry; do
        # Strip leading then trailing whitespace.
        entry="${entry#"${entry%%[![:space:]]*}"}"
        entry="${entry%"${entry##*[![:space:]]}"}"
        [[ -z "$entry" || "$entry" =~ ^# ]] && continue
        entry="${entry/#\~/$HOME}"
        printf '%s\n' "$entry"
    done < "$config_file"
}
|
||||
99
tests/clean_hints.bats
Normal file
99
tests/clean_hints.bats
Normal file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# One-time suite setup: resolve the repository root and swap HOME for a
# throwaway sandbox directory so tests never touch the real user profile.
setup_file() {
    export PROJECT_ROOT ORIGINAL_HOME HOME

    PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"

    # Remember the caller's HOME so teardown_file can restore it.
    ORIGINAL_HOME="${HOME:-}"

    HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-hints-home.XXXXXX")"
}
|
||||
|
||||
# One-time suite teardown: delete the sandbox HOME created by setup_file and
# restore the caller's original HOME.
teardown_file() {
    # ${HOME:?} aborts rather than letting an unset/empty HOME expand to
    # `rm -rf ""` (or a path outside the sandbox); `--` guards odd names.
    # Matches the guard style already used in setup().
    rm -rf -- "${HOME:?}"
    if [[ -n "${ORIGINAL_HOME:-}" ]]; then
        export HOME="$ORIGINAL_HOME"
    fi
}
|
||||
|
||||
# Per-test setup: reset the sandbox HOME to a known-empty state.
setup() {
    # The previous `rm -rf "${HOME:?}"/*` glob skipped dotfiles, so state such
    # as ~/.config/mole/purge_paths written by one test leaked into the next.
    # find -mindepth 1 -delete removes every entry, hidden or not.
    find "${HOME:?}" -mindepth 1 -delete
    mkdir -p "$HOME/.config/mole"
}
|
||||
|
||||
# Builds one fake project (package.json marker) containing node_modules,
# vendor, and bin, then runs the probe in a clean subshell with a pass-through
# run_with_timeout stub. Expects exactly one hit (node_modules) and that the
# high-noise names (vendor, bin) are never reported.
@test "probe_project_artifact_hints reuses purge targets and excludes noisy names" {
    local root="$HOME/hints-root"
    mkdir -p "$root/proj/node_modules" "$root/proj/vendor" "$root/proj/bin"
    touch "$root/proj/package.json"
    # Point the purge path config at the fixture root.
    printf '%s\n' "$root" > "$HOME/.config/mole/purge_paths"

    # Quoted heredoc delimiter: $PROJECT_ROOT etc. expand inside the inner
    # bash from the env vars passed via `env`, not in this bats process.
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOT1'
set -euo pipefail
source "$PROJECT_ROOT/lib/core/common.sh"
source "$PROJECT_ROOT/lib/clean/hints.sh"
run_with_timeout() { shift; "$@"; }
probe_project_artifact_hints
printf 'count=%s\n' "$PROJECT_ARTIFACT_HINT_COUNT"
printf 'examples=%s\n' "${PROJECT_ARTIFACT_HINT_EXAMPLES[*]}"
EOT1

    [ "$status" -eq 0 ]
    [[ "$output" == *"count=1"* ]]
    [[ "$output" == *"node_modules"* ]]
    [[ "$output" != *"vendor"* ]]
    [[ "$output" != *"/bin"* ]]
}
|
||||
|
||||
# Overrides probe_project_artifact_hints with canned hint state (5 truncated
# hits, 2 sampled examples, 2048 KB estimate) and stubs bytes_to_human /
# note_activity, then checks the rendered notice mentions the truncated count,
# the sampled-size sentence, the examples header, and the `mo purge` pointer.
@test "show_project_artifact_hint_notice renders sampled summary" {
    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOT2'
set -euo pipefail
source "$PROJECT_ROOT/lib/core/common.sh"
source "$PROJECT_ROOT/lib/clean/hints.sh"
probe_project_artifact_hints() {
    PROJECT_ARTIFACT_HINT_DETECTED=true
    PROJECT_ARTIFACT_HINT_COUNT=5
    PROJECT_ARTIFACT_HINT_TRUNCATED=true
    PROJECT_ARTIFACT_HINT_EXAMPLES=("~/www/demo/node_modules" "~/www/demo/target")
    PROJECT_ARTIFACT_HINT_ESTIMATED_KB=2048
    PROJECT_ARTIFACT_HINT_ESTIMATE_SAMPLES=2
    PROJECT_ARTIFACT_HINT_ESTIMATE_PARTIAL=false
}
bytes_to_human() { echo "2.00MB"; }
note_activity() { :; }
show_project_artifact_hint_notice
EOT2

    [ "$status" -eq 0 ]
    [[ "$output" == *"5+"* ]]
    [[ "$output" == *"at least 2.00MB sampled from 2 items"* ]]
    [[ "$output" == *"Examples:"* ]]
    [[ "$output" == *"Review: mo purge"* ]]
}
|
||||
|
||||
# Creates an empty DerivedData directory, then stubs run_with_timeout so any
# `du` invocation reports 3145728 KB (= 3 GB with the stubbed bytes_to_human).
# Checks the notice names the DerivedData clue, its path, and the review hints.
@test "show_system_data_hint_notice reports large clue paths" {
    mkdir -p "$HOME/Library/Developer/Xcode/DerivedData"

    run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOT3'
set -euo pipefail
source "$PROJECT_ROOT/lib/core/common.sh"
source "$PROJECT_ROOT/lib/clean/hints.sh"
run_with_timeout() {
    shift
    if [[ "${1:-}" == "du" ]]; then
        printf '3145728 %s\n' "${4:-/tmp}"
        return 0
    fi
    "$@"
}
bytes_to_human() { echo "3.00GB"; }
note_activity() { :; }
show_system_data_hint_notice
EOT3

    [ "$status" -eq 0 ]
    [[ "$output" == *"Xcode DerivedData: 3.00GB"* ]]
    [[ "$output" == *"~/Library/Developer/Xcode/DerivedData"* ]]
    [[ "$output" == *"Review: mo analyze, Device backups, docker system df"* ]]
}
|
||||
Reference in New Issue
Block a user