mirror of
https://github.com/tw93/Mole.git
synced 2026-02-04 15:04:42 +00:00
feat: overhaul quality checks and expand test suite for clean and optimize features
This commit is contained in:
@@ -1,4 +1,4 @@
|
|||||||
name: Quality
|
name: Check
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -10,7 +10,7 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
format:
|
format:
|
||||||
name: Auto Format
|
name: Format
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
@@ -37,15 +37,11 @@ jobs:
|
|||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
|
||||||
with:
|
with:
|
||||||
go-version: '1.24'
|
go-version: '1.24.6'
|
||||||
|
|
||||||
- name: Format all code
|
- name: Format all code
|
||||||
run: |
|
run: |
|
||||||
echo "Formatting shell scripts..."
|
./scripts/check.sh --format
|
||||||
./scripts/format.sh
|
|
||||||
echo "Formatting Go code..."
|
|
||||||
gofmt -w ./cmd
|
|
||||||
echo "✓ All code formatted"
|
|
||||||
|
|
||||||
- name: Commit formatting changes
|
- name: Commit formatting changes
|
||||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
|
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
|
||||||
@@ -62,7 +58,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
quality:
|
quality:
|
||||||
name: Code Quality
|
name: Check
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
needs: format
|
needs: format
|
||||||
|
|
||||||
@@ -86,22 +82,5 @@ jobs:
|
|||||||
- name: Install tools
|
- name: Install tools
|
||||||
run: brew install shfmt shellcheck
|
run: brew install shfmt shellcheck
|
||||||
|
|
||||||
- name: ShellCheck
|
- name: Run check script
|
||||||
run: |
|
run: ./scripts/check.sh --no-format
|
||||||
echo "Running ShellCheck on all shell scripts..."
|
|
||||||
shellcheck mole
|
|
||||||
shellcheck bin/*.sh
|
|
||||||
find lib -name "*.sh" -exec shellcheck {} +
|
|
||||||
echo "✓ ShellCheck passed"
|
|
||||||
|
|
||||||
- name: Syntax check
|
|
||||||
run: |
|
|
||||||
echo "Checking Bash syntax..."
|
|
||||||
bash -n mole
|
|
||||||
for script in bin/*.sh; do
|
|
||||||
bash -n "$script"
|
|
||||||
done
|
|
||||||
find lib -name "*.sh" | while read -r script; do
|
|
||||||
bash -n "$script"
|
|
||||||
done
|
|
||||||
echo "✓ All scripts have valid syntax"
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
name: Tests
|
name: Test
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -7,88 +7,22 @@ on:
|
|||||||
branches: [main, dev]
|
branches: [main, dev]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
unit-tests:
|
tests:
|
||||||
name: Unit Tests
|
name: Test
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v4
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v4
|
||||||
|
|
||||||
- name: Install bats
|
- name: Install tools
|
||||||
run: brew install bats-core
|
run: brew install bats-core shellcheck
|
||||||
|
|
||||||
- name: Run all test suites
|
|
||||||
run: |
|
|
||||||
echo "Running all test suites..."
|
|
||||||
bats tests/*.bats --formatter tap
|
|
||||||
echo ""
|
|
||||||
echo "Test summary:"
|
|
||||||
echo " Total test files: $(ls tests/*.bats | wc -l | tr -d ' ')"
|
|
||||||
echo " Total tests: $(grep -c "^@test" tests/*.bats | awk -F: '{sum+=$2} END {print sum}')"
|
|
||||||
echo "✓ All tests passed"
|
|
||||||
|
|
||||||
go-tests:
|
|
||||||
name: Go Tests
|
|
||||||
runs-on: macos-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v4
|
|
||||||
|
|
||||||
- name: Set up Go
|
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
|
|
||||||
with:
|
|
||||||
go-version: '1.24'
|
|
||||||
|
|
||||||
- name: Build Go binaries
|
|
||||||
run: |
|
|
||||||
echo "Building Go binaries..."
|
|
||||||
go build ./...
|
|
||||||
echo "✓ Build successful"
|
|
||||||
|
|
||||||
- name: Run go vet
|
|
||||||
run: |
|
|
||||||
echo "Running go vet..."
|
|
||||||
go vet ./cmd/...
|
|
||||||
echo "✓ Vet passed"
|
|
||||||
|
|
||||||
- name: Run go test
|
|
||||||
run: |
|
|
||||||
echo "Running go test..."
|
|
||||||
go test ./cmd/...
|
|
||||||
echo "✓ Go tests passed"
|
|
||||||
|
|
||||||
integration-tests:
|
|
||||||
name: Integration Tests
|
|
||||||
runs-on: macos-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v4
|
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
|
||||||
with:
|
with:
|
||||||
go-version: "1.24.6"
|
go-version: "1.24.6"
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Run test script
|
||||||
run: brew install coreutils
|
run: ./scripts/test.sh
|
||||||
|
|
||||||
- name: Build binaries
|
|
||||||
run: make build
|
|
||||||
|
|
||||||
- name: Test module loading
|
|
||||||
run: |
|
|
||||||
echo "Testing module loading..."
|
|
||||||
bash -c 'source lib/core/common.sh && echo "✓ Modules loaded successfully"'
|
|
||||||
|
|
||||||
- name: Test clean --dry-run
|
|
||||||
run: |
|
|
||||||
echo "Testing clean --dry-run..."
|
|
||||||
./bin/clean.sh --dry-run
|
|
||||||
echo "✓ Clean dry-run completed"
|
|
||||||
|
|
||||||
- name: Test installation
|
|
||||||
run: |
|
|
||||||
echo "Testing installation script..."
|
|
||||||
./install.sh --prefix /tmp/mole-test
|
|
||||||
test -f /tmp/mole-test/mole
|
|
||||||
echo "✓ Installation successful"
|
|
||||||
|
|
||||||
compatibility:
|
compatibility:
|
||||||
name: macOS Compatibility
|
name: macOS Compatibility
|
||||||
2
AGENT.md
2
AGENT.md
@@ -36,7 +36,7 @@ Use this file as the single source of truth for how to work on Mole.
|
|||||||
|
|
||||||
## Build & Test
|
## Build & Test
|
||||||
|
|
||||||
- `./scripts/run-tests.sh` runs lint/shell/go tests.
|
- `./scripts/test.sh` runs unit/go/integration tests.
|
||||||
- `make build` builds Go binaries for local development.
|
- `make build` builds Go binaries for local development.
|
||||||
- `go run ./cmd/analyze` for dev runs without building.
|
- `go run ./cmd/analyze` for dev runs without building.
|
||||||
|
|
||||||
|
|||||||
@@ -9,26 +9,16 @@ brew install shfmt shellcheck bats-core
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run all quality checks before committing:
|
Run quality checks before committing (auto-formats code):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./scripts/check.sh
|
./scripts/check.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
This command runs:
|
Run tests:
|
||||||
|
|
||||||
- Code formatting check
|
|
||||||
- ShellCheck linting
|
|
||||||
- Unit tests
|
|
||||||
|
|
||||||
Individual commands:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Format code
|
./scripts/test.sh
|
||||||
./scripts/format.sh
|
|
||||||
|
|
||||||
# Run tests only
|
|
||||||
./scripts/run-tests.sh
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Code Style
|
## Code Style
|
||||||
|
|||||||
11
install.sh
11
install.sh
@@ -218,10 +218,19 @@ get_installed_version() {
|
|||||||
parse_args() {
|
parse_args() {
|
||||||
local -a args=("$@")
|
local -a args=("$@")
|
||||||
local version_token=""
|
local version_token=""
|
||||||
local i
|
local i skip_next=false
|
||||||
for i in "${!args[@]}"; do
|
for i in "${!args[@]}"; do
|
||||||
local token="${args[$i]}"
|
local token="${args[$i]}"
|
||||||
[[ -z "$token" ]] && continue
|
[[ -z "$token" ]] && continue
|
||||||
|
# Skip values for options that take arguments
|
||||||
|
if [[ "$skip_next" == "true" ]]; then
|
||||||
|
skip_next=false
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
if [[ "$token" == "--prefix" || "$token" == "--config" ]]; then
|
||||||
|
skip_next=true
|
||||||
|
continue
|
||||||
|
fi
|
||||||
if [[ "$token" == -* ]]; then
|
if [[ "$token" == -* ]]; then
|
||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
|
|||||||
183
scripts/check.sh
183
scripts/check.sh
@@ -1,126 +1,189 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
# Unified check script for Mole project
|
# Code quality checks for Mole.
|
||||||
# Runs all quality checks in one command
|
# Auto-formats code, then runs lint and syntax checks.
|
||||||
|
|
||||||
set -e
|
set -euo pipefail
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||||
|
|
||||||
# Colors
|
MODE="all"
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << 'EOF'
|
||||||
|
Usage: ./scripts/check.sh [--format|--no-format]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--format Apply formatting fixes only (shfmt, gofmt)
|
||||||
|
--no-format Skip formatting and run checks only
|
||||||
|
--help Show this help
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--format)
|
||||||
|
MODE="format"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--no-format)
|
||||||
|
MODE="check"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--help | -h)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
cd "$PROJECT_ROOT"
|
||||||
|
|
||||||
RED='\033[0;31m'
|
RED='\033[0;31m'
|
||||||
GREEN='\033[0;32m'
|
GREEN='\033[0;32m'
|
||||||
YELLOW='\033[1;33m'
|
YELLOW='\033[1;33m'
|
||||||
BLUE='\033[0;34m'
|
BLUE='\033[0;34m'
|
||||||
NC='\033[0m'
|
NC='\033[0m'
|
||||||
|
|
||||||
cd "$PROJECT_ROOT"
|
readonly ICON_SUCCESS="✓"
|
||||||
|
readonly ICON_ERROR="☻"
|
||||||
|
readonly ICON_WARNING="●"
|
||||||
|
readonly ICON_LIST="•"
|
||||||
|
|
||||||
echo -e "${BLUE}=== Running Mole Quality Checks ===${NC}\n"
|
echo -e "${BLUE}=== Mole Check (${MODE}) ===${NC}\n"
|
||||||
|
|
||||||
# 1. Format check
|
SHELL_FILES=$(find . -type f \( -name "*.sh" -o -name "mole" \) \
|
||||||
echo -e "${YELLOW}1. Checking code formatting...${NC}"
|
-not -path "./.git/*" \
|
||||||
if command -v shfmt > /dev/null 2>&1; then
|
-not -path "*/node_modules/*" \
|
||||||
if ./scripts/format.sh --check; then
|
-not -path "*/tests/tmp-*/*" \
|
||||||
echo -e "${GREEN}✓ Formatting check passed${NC}\n"
|
-not -path "*/.*" \
|
||||||
|
2> /dev/null)
|
||||||
|
|
||||||
|
if [[ "$MODE" == "format" ]]; then
|
||||||
|
echo -e "${YELLOW}Formatting shell scripts...${NC}"
|
||||||
|
if command -v shfmt > /dev/null 2>&1; then
|
||||||
|
echo "$SHELL_FILES" | xargs shfmt -i 4 -ci -sr -w
|
||||||
|
echo -e "${GREEN}${ICON_SUCCESS} Shell formatting complete${NC}\n"
|
||||||
else
|
else
|
||||||
echo -e "${RED}✗ Formatting check failed${NC}\n"
|
echo -e "${RED}${ICON_ERROR} shfmt not installed${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
else
|
|
||||||
echo -e "${YELLOW}⚠ shfmt not installed, skipping format check${NC}\n"
|
if command -v go > /dev/null 2>&1; then
|
||||||
|
echo -e "${YELLOW}Formatting Go code...${NC}"
|
||||||
|
gofmt -w ./cmd
|
||||||
|
echo -e "${GREEN}${ICON_SUCCESS} Go formatting complete${NC}\n"
|
||||||
|
else
|
||||||
|
echo -e "${YELLOW}${ICON_WARNING} go not installed, skipping gofmt${NC}\n"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo -e "${GREEN}=== Format Completed ===${NC}"
|
||||||
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# 2. ShellCheck
|
if [[ "$MODE" != "check" ]]; then
|
||||||
echo -e "${YELLOW}2. Running ShellCheck...${NC}"
|
echo -e "${YELLOW}1. Formatting shell scripts...${NC}"
|
||||||
|
if command -v shfmt > /dev/null 2>&1; then
|
||||||
|
echo "$SHELL_FILES" | xargs shfmt -i 4 -ci -sr -w
|
||||||
|
echo -e "${GREEN}${ICON_SUCCESS} Shell formatting applied${NC}\n"
|
||||||
|
else
|
||||||
|
echo -e "${YELLOW}${ICON_WARNING} shfmt not installed, skipping${NC}\n"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if command -v go > /dev/null 2>&1; then
|
||||||
|
echo -e "${YELLOW}2. Formatting Go code...${NC}"
|
||||||
|
gofmt -w ./cmd
|
||||||
|
echo -e "${GREEN}${ICON_SUCCESS} Go formatting applied${NC}\n"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo -e "${YELLOW}3. Running ShellCheck...${NC}"
|
||||||
if command -v shellcheck > /dev/null 2>&1; then
|
if command -v shellcheck > /dev/null 2>&1; then
|
||||||
# Count total files
|
if shellcheck mole bin/*.sh lib/*/*.sh scripts/*.sh; then
|
||||||
SHELL_FILES=$(find . -type f \( -name "*.sh" -o -name "mole" \) -not -path "./tests/*" -not -path "./.git/*")
|
echo -e "${GREEN}${ICON_SUCCESS} ShellCheck passed${NC}\n"
|
||||||
FILE_COUNT=$(echo "$SHELL_FILES" | wc -l | tr -d ' ')
|
|
||||||
|
|
||||||
if shellcheck mole bin/*.sh lib/*/*.sh scripts/*.sh 2>&1 | grep -q "SC[0-9]"; then
|
|
||||||
echo -e "${YELLOW}⚠ ShellCheck found some issues (non-critical):${NC}"
|
|
||||||
shellcheck mole bin/*.sh lib/*/*.sh scripts/*.sh 2>&1 | head -20
|
|
||||||
echo -e "${GREEN}✓ ShellCheck completed (${FILE_COUNT} files checked)${NC}\n"
|
|
||||||
else
|
else
|
||||||
echo -e "${GREEN}✓ ShellCheck passed (${FILE_COUNT} files checked)${NC}\n"
|
echo -e "${RED}${ICON_ERROR} ShellCheck failed${NC}\n"
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo -e "${YELLOW}⚠ shellcheck not installed, skipping${NC}\n"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# 3. Unit tests (if available)
|
|
||||||
echo -e "${YELLOW}3. Running tests...${NC}"
|
|
||||||
if command -v bats > /dev/null 2>&1 && [ -d "tests" ]; then
|
|
||||||
if bats tests/*.bats; then
|
|
||||||
echo -e "${GREEN}✓ Tests passed${NC}\n"
|
|
||||||
else
|
|
||||||
echo -e "${RED}✗ Tests failed (see output above)${NC}\n"
|
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW}⚠ bats not installed or no tests found, skipping${NC}\n"
|
echo -e "${YELLOW}${ICON_WARNING} shellcheck not installed, skipping${NC}\n"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# 4. Code optimization checks
|
echo -e "${YELLOW}4. Running syntax check...${NC}"
|
||||||
echo -e "${YELLOW}4. Checking code optimizations...${NC}"
|
if ! bash -n mole; then
|
||||||
|
echo -e "${RED}${ICON_ERROR} Syntax check failed (mole)${NC}\n"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
for script in bin/*.sh; do
|
||||||
|
if ! bash -n "$script"; then
|
||||||
|
echo -e "${RED}${ICON_ERROR} Syntax check failed ($script)${NC}\n"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
find lib -name "*.sh" | while read -r script; do
|
||||||
|
if ! bash -n "$script"; then
|
||||||
|
echo -e "${RED}${ICON_ERROR} Syntax check failed ($script)${NC}\n"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo -e "${GREEN}${ICON_SUCCESS} Syntax check passed${NC}\n"
|
||||||
|
|
||||||
|
echo -e "${YELLOW}5. Checking optimizations...${NC}"
|
||||||
OPTIMIZATION_SCORE=0
|
OPTIMIZATION_SCORE=0
|
||||||
TOTAL_CHECKS=0
|
TOTAL_CHECKS=0
|
||||||
|
|
||||||
# Check 1: Keyboard input handling (restored to 1s for reliability)
|
|
||||||
((TOTAL_CHECKS++))
|
((TOTAL_CHECKS++))
|
||||||
if grep -q "read -r -s -n 1 -t 1" lib/core/ui.sh; then
|
if grep -q "read -r -s -n 1 -t 1" lib/core/ui.sh; then
|
||||||
echo -e "${GREEN} ✓ Keyboard timeout properly configured (1s)${NC}"
|
echo -e "${GREEN} ${ICON_SUCCESS} Keyboard timeout configured${NC}"
|
||||||
((OPTIMIZATION_SCORE++))
|
((OPTIMIZATION_SCORE++))
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW} ⚠ Keyboard timeout may be misconfigured${NC}"
|
echo -e "${YELLOW} ${ICON_WARNING} Keyboard timeout may be misconfigured${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Check 2: Single-pass drain_pending_input
|
|
||||||
((TOTAL_CHECKS++))
|
((TOTAL_CHECKS++))
|
||||||
DRAIN_PASSES=$(grep -c "while IFS= read -r -s -n 1" lib/core/ui.sh 2> /dev/null || true)
|
DRAIN_PASSES=$(grep -c "while IFS= read -r -s -n 1" lib/core/ui.sh 2> /dev/null || true)
|
||||||
DRAIN_PASSES=${DRAIN_PASSES:-0}
|
DRAIN_PASSES=${DRAIN_PASSES:-0}
|
||||||
if [[ $DRAIN_PASSES -eq 1 ]]; then
|
if [[ $DRAIN_PASSES -eq 1 ]]; then
|
||||||
echo -e "${GREEN} ✓ drain_pending_input optimized (single-pass)${NC}"
|
echo -e "${GREEN} ${ICON_SUCCESS} drain_pending_input optimized${NC}"
|
||||||
((OPTIMIZATION_SCORE++))
|
((OPTIMIZATION_SCORE++))
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW} ⚠ drain_pending_input has multiple passes${NC}"
|
echo -e "${YELLOW} ${ICON_WARNING} drain_pending_input has multiple passes${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Check 3: Log rotation once per session
|
|
||||||
((TOTAL_CHECKS++))
|
((TOTAL_CHECKS++))
|
||||||
if grep -q "rotate_log_once" lib/core/log.sh; then
|
if grep -q "rotate_log_once" lib/core/log.sh; then
|
||||||
echo -e "${GREEN} ✓ Log rotation optimized (once per session)${NC}"
|
echo -e "${GREEN} ${ICON_SUCCESS} Log rotation optimized${NC}"
|
||||||
((OPTIMIZATION_SCORE++))
|
((OPTIMIZATION_SCORE++))
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW} ⚠ Log rotation not optimized${NC}"
|
echo -e "${YELLOW} ${ICON_WARNING} Log rotation not optimized${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Check 4: Simplified cache validation
|
|
||||||
((TOTAL_CHECKS++))
|
((TOTAL_CHECKS++))
|
||||||
if ! grep -q "cache_meta\|cache_dir_mtime" bin/uninstall.sh; then
|
if ! grep -q "cache_meta\|cache_dir_mtime" bin/uninstall.sh; then
|
||||||
echo -e "${GREEN} ✓ Cache validation simplified${NC}"
|
echo -e "${GREEN} ${ICON_SUCCESS} Cache validation simplified${NC}"
|
||||||
((OPTIMIZATION_SCORE++))
|
((OPTIMIZATION_SCORE++))
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW} ⚠ Cache still uses redundant metadata${NC}"
|
echo -e "${YELLOW} ${ICON_WARNING} Cache still uses redundant metadata${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Check 5: Stricter path validation
|
|
||||||
((TOTAL_CHECKS++))
|
((TOTAL_CHECKS++))
|
||||||
if grep -q "Consecutive slashes" bin/clean.sh; then
|
if grep -q "Consecutive slashes" bin/clean.sh; then
|
||||||
echo -e "${GREEN} ✓ Path validation enhanced${NC}"
|
echo -e "${GREEN} ${ICON_SUCCESS} Path validation enhanced${NC}"
|
||||||
((OPTIMIZATION_SCORE++))
|
((OPTIMIZATION_SCORE++))
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW} ⚠ Path validation not enhanced${NC}"
|
echo -e "${YELLOW} ${ICON_WARNING} Path validation not enhanced${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -e "${BLUE} Optimization score: $OPTIMIZATION_SCORE/$TOTAL_CHECKS${NC}\n"
|
echo -e "${BLUE} Optimization score: $OPTIMIZATION_SCORE/$TOTAL_CHECKS${NC}\n"
|
||||||
|
|
||||||
# Summary
|
echo -e "${GREEN}=== Checks Completed ===${NC}"
|
||||||
echo -e "${GREEN}=== All Checks Completed ===${NC}"
|
|
||||||
if [[ $OPTIMIZATION_SCORE -eq $TOTAL_CHECKS ]]; then
|
if [[ $OPTIMIZATION_SCORE -eq $TOTAL_CHECKS ]]; then
|
||||||
echo -e "${GREEN}✓ Code quality checks passed!${NC}"
|
echo -e "${GREEN}${ICON_SUCCESS} All optimizations applied${NC}"
|
||||||
echo -e "${GREEN}✓ All optimizations applied!${NC}"
|
|
||||||
else
|
else
|
||||||
echo -e "${YELLOW}⚠ Code quality checks passed, but some optimizations missing${NC}"
|
echo -e "${YELLOW}${ICON_WARNING} Some optimizations missing${NC}"
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -1,73 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# Format all shell scripts in the Mole project
|
|
||||||
#
|
|
||||||
# Usage:
|
|
||||||
# ./scripts/format.sh # Format all scripts
|
|
||||||
# ./scripts/format.sh --check # Check only, don't modify
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
||||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
|
||||||
|
|
||||||
CHECK_ONLY=false
|
|
||||||
|
|
||||||
# Parse arguments
|
|
||||||
if [[ "${1:-}" == "--check" ]]; then
|
|
||||||
CHECK_ONLY=true
|
|
||||||
elif [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
|
|
||||||
cat << 'EOF'
|
|
||||||
Usage: ./scripts/format.sh [--check]
|
|
||||||
|
|
||||||
Format shell scripts using shfmt.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--check Check formatting without modifying files
|
|
||||||
--help Show this help
|
|
||||||
|
|
||||||
Install: brew install shfmt
|
|
||||||
EOF
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if shfmt is installed
|
|
||||||
if ! command -v shfmt > /dev/null 2>&1; then
|
|
||||||
echo "Error: shfmt not installed"
|
|
||||||
echo "Install: brew install shfmt"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Find all shell scripts (excluding temp directories and build artifacts)
|
|
||||||
cd "$PROJECT_ROOT"
|
|
||||||
|
|
||||||
# Build list of files to format (exclude .git, node_modules, tmp directories)
|
|
||||||
FILES=$(find . -type f \( -name "*.sh" -o -name "mole" \) \
|
|
||||||
-not -path "./.git/*" \
|
|
||||||
-not -path "*/node_modules/*" \
|
|
||||||
-not -path "*/tests/tmp-*/*" \
|
|
||||||
-not -path "*/.*" \
|
|
||||||
2> /dev/null)
|
|
||||||
|
|
||||||
if [[ -z "$FILES" ]]; then
|
|
||||||
echo "No shell scripts found"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# shfmt options: -i 4 (4 spaces), -ci (indent switch cases), -sr (space after redirect)
|
|
||||||
if [[ "$CHECK_ONLY" == "true" ]]; then
|
|
||||||
echo "Checking formatting..."
|
|
||||||
if echo "$FILES" | xargs shfmt -i 4 -ci -sr -d > /dev/null 2>&1; then
|
|
||||||
echo "✓ All scripts properly formatted"
|
|
||||||
exit 0
|
|
||||||
else
|
|
||||||
echo "✗ Some scripts need formatting:"
|
|
||||||
echo "$FILES" | xargs shfmt -i 4 -ci -sr -d
|
|
||||||
echo ""
|
|
||||||
echo "Run './scripts/format.sh' to fix"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "Formatting scripts..."
|
|
||||||
echo "$FILES" | xargs shfmt -i 4 -ci -sr -w
|
|
||||||
echo "✓ Done"
|
|
||||||
fi
|
|
||||||
@@ -1,133 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# Quick test runner script
|
|
||||||
# Runs all tests before committing
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
||||||
cd "$SCRIPT_DIR/.."
|
|
||||||
|
|
||||||
# Colors
|
|
||||||
RED='\033[0;31m'
|
|
||||||
GREEN='\033[0;32m'
|
|
||||||
YELLOW='\033[1;33m'
|
|
||||||
NC='\033[0m'
|
|
||||||
|
|
||||||
echo "==============================="
|
|
||||||
echo "Mole Test Runner"
|
|
||||||
echo "==============================="
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Track failures
|
|
||||||
FAILED=0
|
|
||||||
|
|
||||||
# 1. ShellCheck
|
|
||||||
echo "1. Running ShellCheck..."
|
|
||||||
if command -v shellcheck > /dev/null 2>&1; then
|
|
||||||
# Optimize: Collect all files first, then pass to shellcheck in one call
|
|
||||||
SHELL_FILES=()
|
|
||||||
while IFS= read -r file; do
|
|
||||||
SHELL_FILES+=("$file")
|
|
||||||
done < <(find lib -name "*.sh" -type f)
|
|
||||||
if shellcheck mole bin/*.sh "${SHELL_FILES[@]}" 2> /dev/null; then
|
|
||||||
printf "${GREEN}✓ ShellCheck passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ ShellCheck failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
printf "${YELLOW}⚠ ShellCheck not installed, skipping${NC}\n"
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# 2. Syntax Check
|
|
||||||
echo "2. Running syntax check..."
|
|
||||||
SYNTAX_OK=true
|
|
||||||
|
|
||||||
# Check main file
|
|
||||||
bash -n mole 2> /dev/null || SYNTAX_OK=false
|
|
||||||
|
|
||||||
# Check all shell files without requiring bash 4+
|
|
||||||
# Note: bash -n must check files one-by-one (can't batch process)
|
|
||||||
if [[ "$SYNTAX_OK" == "true" ]]; then
|
|
||||||
while IFS= read -r file; do
|
|
||||||
bash -n "$file" 2> /dev/null || {
|
|
||||||
SYNTAX_OK=false
|
|
||||||
break
|
|
||||||
}
|
|
||||||
done < <(find bin lib -name "*.sh" -type f)
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$SYNTAX_OK" == "true" ]]; then
|
|
||||||
printf "${GREEN}✓ Syntax check passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ Syntax check failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# 3. Unit Tests
|
|
||||||
echo "3. Running unit tests..."
|
|
||||||
if command -v bats > /dev/null 2>&1; then
|
|
||||||
# Note: bats might detect non-TTY and suppress color.
|
|
||||||
# Adding --tap prevents spinner issues in background.
|
|
||||||
if bats tests/*.bats; then
|
|
||||||
printf "${GREEN}✓ Unit tests passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ Unit tests failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
printf "${YELLOW}⚠ Bats not installed, skipping unit tests${NC}\n"
|
|
||||||
echo " Install with: brew install bats-core"
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# 4. Go Tests
|
|
||||||
echo "4. Running Go tests..."
|
|
||||||
if command -v go > /dev/null 2>&1; then
|
|
||||||
if go build ./... && go vet ./cmd/... && go test ./cmd/...; then
|
|
||||||
printf "${GREEN}✓ Go tests passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ Go tests failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
printf "${YELLOW}⚠ Go not installed, skipping Go tests${NC}\n"
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# 5. Module Loading Test
|
|
||||||
echo "5. Testing module loading..."
|
|
||||||
if bash -c 'source lib/core/common.sh && echo "OK"' > /dev/null 2>&1; then
|
|
||||||
printf "${GREEN}✓ Module loading passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ Module loading failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# 6. Integration Tests
|
|
||||||
echo "6. Running integration tests..."
|
|
||||||
export MOLE_MAX_PARALLEL_JOBS=30
|
|
||||||
if ./bin/clean.sh --dry-run > /dev/null 2>&1; then
|
|
||||||
printf "${GREEN}✓ Clean dry-run passed${NC}\n"
|
|
||||||
else
|
|
||||||
printf "${RED}✗ Clean dry-run failed${NC}\n"
|
|
||||||
((FAILED++))
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Summary
|
|
||||||
echo "==============================="
|
|
||||||
if [[ $FAILED -eq 0 ]]; then
|
|
||||||
printf "${GREEN}All tests passed!${NC}\n"
|
|
||||||
echo ""
|
|
||||||
echo "You can now commit your changes."
|
|
||||||
exit 0
|
|
||||||
else
|
|
||||||
printf "${RED}$FAILED test(s) failed!${NC}\n"
|
|
||||||
echo ""
|
|
||||||
echo "Please fix the failing tests before committing."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
135
scripts/test.sh
Executable file
135
scripts/test.sh
Executable file
@@ -0,0 +1,135 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Test runner for Mole.
|
||||||
|
# Runs unit, Go, and integration tests.
|
||||||
|
# Exits non-zero on failures.
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||||
|
|
||||||
|
cd "$PROJECT_ROOT"
|
||||||
|
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m'
|
||||||
|
|
||||||
|
readonly ICON_SUCCESS="✓"
|
||||||
|
readonly ICON_ERROR="☻"
|
||||||
|
readonly ICON_WARNING="●"
|
||||||
|
readonly ICON_LIST="•"
|
||||||
|
|
||||||
|
echo "==============================="
|
||||||
|
echo "Mole Test Runner"
|
||||||
|
echo "==============================="
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
echo "1. Linting test scripts..."
|
||||||
|
if command -v shellcheck > /dev/null 2>&1; then
|
||||||
|
TEST_FILES=()
|
||||||
|
while IFS= read -r file; do
|
||||||
|
TEST_FILES+=("$file")
|
||||||
|
done < <(find tests -type f \( -name '*.bats' -o -name '*.sh' \) | sort)
|
||||||
|
if [[ ${#TEST_FILES[@]} -gt 0 ]]; then
|
||||||
|
if shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${TEST_FILES[@]}"; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Test script lint passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Test script lint failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "${YELLOW}${ICON_WARNING} No test scripts found, skipping${NC}\n"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "${YELLOW}${ICON_WARNING} shellcheck not installed, skipping${NC}\n"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "2. Running unit tests..."
|
||||||
|
if command -v bats > /dev/null 2>&1 && [ -d "tests" ]; then
|
||||||
|
if [[ -z "${TERM:-}" ]]; then
|
||||||
|
export TERM="xterm-256color"
|
||||||
|
fi
|
||||||
|
if [[ $# -eq 0 ]]; then
|
||||||
|
set -- tests
|
||||||
|
fi
|
||||||
|
if [[ -t 1 ]]; then
|
||||||
|
if bats -p "$@"; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Unit tests passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Unit tests failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
if TERM="${TERM:-xterm-256color}" bats --tap "$@"; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Unit tests passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Unit tests failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "${YELLOW}${ICON_WARNING} bats not installed or no tests found, skipping${NC}\n"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "3. Running Go tests..."
|
||||||
|
if command -v go > /dev/null 2>&1; then
|
||||||
|
if go build ./... > /dev/null 2>&1 && go vet ./cmd/... > /dev/null 2>&1 && go test ./cmd/... > /dev/null 2>&1; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Go tests passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Go tests failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "${YELLOW}${ICON_WARNING} Go not installed, skipping Go tests${NC}\n"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "4. Testing module loading..."
|
||||||
|
if bash -c 'source lib/core/common.sh && echo "OK"' > /dev/null 2>&1; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Module loading passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Module loading failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "5. Running integration tests..."
|
||||||
|
# Quick syntax check for main scripts
|
||||||
|
if bash -n mole && bash -n bin/clean.sh && bash -n bin/optimize.sh; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Integration tests passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Integration tests failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "6. Testing installation..."
|
||||||
|
# Skip if Homebrew mole is installed (install.sh will refuse to overwrite)
|
||||||
|
if brew list mole &> /dev/null; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Installation test skipped (Homebrew)${NC}\n"
|
||||||
|
elif ./install.sh --prefix /tmp/mole-test > /dev/null 2>&1; then
|
||||||
|
if [ -f /tmp/mole-test/mole ]; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} Installation test passed${NC}\n"
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Installation test failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "${RED}${ICON_ERROR} Installation test failed${NC}\n"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
rm -rf /tmp/mole-test
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "==============================="
|
||||||
|
if [[ $FAILED -eq 0 ]]; then
|
||||||
|
printf "${GREEN}${ICON_SUCCESS} All tests passed!${NC}\n"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
printf "${RED}${ICON_ERROR} $FAILED test(s) failed!${NC}\n"
|
||||||
|
exit 1
|
||||||
72
tests/app_caches.bats
Normal file
72
tests/app_caches.bats
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-app-caches.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_xcode_tools skips derived data when Xcode running" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
pgrep() { return 0; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_xcode_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Xcode is running"* ]]
|
||||||
|
[[ "$output" != *"derived data"* ]]
|
||||||
|
[[ "$output" != *"archives"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_media_players protects spotify offline cache" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
mkdir -p "$HOME/Library/Application Support/Spotify/PersistentCache/Storage"
|
||||||
|
touch "$HOME/Library/Application Support/Spotify/PersistentCache/Storage/offline.bnk"
|
||||||
|
safe_clean() { echo "CLEAN:$2"; }
|
||||||
|
clean_media_players
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Spotify cache protected"* ]]
|
||||||
|
[[ "$output" != *"CLEAN: Spotify cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_user_gui_applications calls all sections" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { :; }
|
||||||
|
clean_xcode_tools() { echo "xcode"; }
|
||||||
|
clean_code_editors() { echo "editors"; }
|
||||||
|
clean_communication_apps() { echo "comm"; }
|
||||||
|
clean_user_gui_applications
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"xcode"* ]]
|
||||||
|
[[ "$output" == *"editors"* ]]
|
||||||
|
[[ "$output" == *"comm"* ]]
|
||||||
|
}
|
||||||
113
tests/app_caches_more.bats
Normal file
113
tests/app_caches_more.bats
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-app-caches-more.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_ai_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_ai_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"ChatGPT cache"* ]]
|
||||||
|
[[ "$output" == *"Claude desktop cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_design_tools calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_design_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Sketch cache"* ]]
|
||||||
|
[[ "$output" == *"Figma cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_dingtalk calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_dingtalk
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"DingTalk iDingTalk cache"* ]]
|
||||||
|
[[ "$output" == *"DingTalk logs"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_download_managers calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_download_managers
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Aria2 cache"* ]]
|
||||||
|
[[ "$output" == *"qBittorrent cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_productivity_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_productivity_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"MiaoYan cache"* ]]
|
||||||
|
[[ "$output" == *"Flomo cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_screenshot_tools calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_screenshot_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"CleanShot cache"* ]]
|
||||||
|
[[ "$output" == *"Xnip cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_office_applications calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_office_applications
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Microsoft Word cache"* ]]
|
||||||
|
[[ "$output" == *"Apple iWork cache"* ]]
|
||||||
|
}
|
||||||
90
tests/apps_module.bats
Normal file
90
tests/apps_module.bats
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-apps-module.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_ds_store_tree reports dry-run summary" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" DRY_RUN=true /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/apps.sh"
|
||||||
|
start_inline_spinner() { :; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
note_activity() { :; }
|
||||||
|
get_file_size() { echo 10; }
|
||||||
|
bytes_to_human() { echo "0B"; }
|
||||||
|
files_cleaned=0
|
||||||
|
total_size_cleaned=0
|
||||||
|
total_items=0
|
||||||
|
mkdir -p "$HOME/test_ds"
|
||||||
|
touch "$HOME/test_ds/.DS_Store"
|
||||||
|
clean_ds_store_tree "$HOME/test_ds" "DS test"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"DS test"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "scan_installed_apps uses cache when fresh" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/apps.sh"
|
||||||
|
mkdir -p "$HOME/.cache/mole"
|
||||||
|
echo "com.example.App" > "$HOME/.cache/mole/installed_apps_cache"
|
||||||
|
get_file_mtime() { date +%s; }
|
||||||
|
debug_log() { :; }
|
||||||
|
scan_installed_apps "$HOME/installed.txt"
|
||||||
|
cat "$HOME/installed.txt"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"com.example.App"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "is_bundle_orphaned returns true for old uninstalled bundle" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" ORPHAN_AGE_THRESHOLD=60 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/apps.sh"
|
||||||
|
should_protect_data() { return 1; }
|
||||||
|
get_file_mtime() { echo 0; }
|
||||||
|
if is_bundle_orphaned "com.example.Old" "$HOME/old" "$HOME/installed.txt"; then
|
||||||
|
echo "orphan"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"orphan"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_orphaned_app_data skips when no permission" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/apps.sh"
|
||||||
|
ls() { return 1; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
clean_orphaned_app_data
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Skipped: No permission"* ]]
|
||||||
|
}
|
||||||
@@ -86,7 +86,17 @@ EOF
|
|||||||
FINDER_METADATA_SENTINEL
|
FINDER_METADATA_SENTINEL
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
run env HOME="$HOME" "$PROJECT_ROOT/mole" clean --dry-run
|
# Test whitelist logic directly instead of running full clean
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/manage/whitelist.sh"
|
||||||
|
load_whitelist
|
||||||
|
if is_whitelisted "$HOME/Documents/.DS_Store"; then
|
||||||
|
echo "protected by whitelist"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
[[ "$output" == *"protected by whitelist"* ]]
|
[[ "$output" == *"protected by whitelist"* ]]
|
||||||
[ -f "$HOME/Documents/.DS_Store" ]
|
[ -f "$HOME/Documents/.DS_Store" ]
|
||||||
@@ -146,13 +156,10 @@ EOF
|
|||||||
@test "clean_mail_downloads removes old attachments" {
|
@test "clean_mail_downloads removes old attachments" {
|
||||||
mkdir -p "$HOME/Library/Mail Downloads"
|
mkdir -p "$HOME/Library/Mail Downloads"
|
||||||
touch "$HOME/Library/Mail Downloads/old.pdf"
|
touch "$HOME/Library/Mail Downloads/old.pdf"
|
||||||
# Make file old (31+ days)
|
|
||||||
touch -t 202301010000 "$HOME/Library/Mail Downloads/old.pdf"
|
touch -t 202301010000 "$HOME/Library/Mail Downloads/old.pdf"
|
||||||
|
|
||||||
# Create large enough size to trigger cleanup (>5MB threshold)
|
|
||||||
dd if=/dev/zero of="$HOME/Library/Mail Downloads/dummy.dat" bs=1024 count=6000 2>/dev/null
|
dd if=/dev/zero of="$HOME/Library/Mail Downloads/dummy.dat" bs=1024 count=6000 2>/dev/null
|
||||||
|
|
||||||
# Verify file exists before cleanup
|
|
||||||
[ -f "$HOME/Library/Mail Downloads/old.pdf" ]
|
[ -f "$HOME/Library/Mail Downloads/old.pdf" ]
|
||||||
|
|
||||||
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
||||||
@@ -163,17 +170,14 @@ clean_mail_downloads
|
|||||||
EOF
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Verify old file was actually removed
|
|
||||||
[ ! -f "$HOME/Library/Mail Downloads/old.pdf" ]
|
[ ! -f "$HOME/Library/Mail Downloads/old.pdf" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_time_machine_failed_backups detects running backup correctly" {
|
@test "clean_time_machine_failed_backups detects running backup correctly" {
|
||||||
# Skip test if tmutil is not available
|
|
||||||
if ! command -v tmutil > /dev/null 2>&1; then
|
if ! command -v tmutil > /dev/null 2>&1; then
|
||||||
skip "tmutil not available"
|
skip "tmutil not available"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Create a mock tmutil executable
|
|
||||||
local mock_bin="$HOME/bin"
|
local mock_bin="$HOME/bin"
|
||||||
mkdir -p "$mock_bin"
|
mkdir -p "$mock_bin"
|
||||||
|
|
||||||
@@ -205,22 +209,18 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/clean/system.sh"
|
source "$PROJECT_ROOT/lib/clean/system.sh"
|
||||||
|
|
||||||
# Run the function - should NOT skip cleanup when Running = 0
|
|
||||||
clean_time_machine_failed_backups
|
clean_time_machine_failed_backups
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Should NOT output the "backup in progress" message
|
|
||||||
[[ "$output" != *"Time Machine backup in progress, skipping cleanup"* ]]
|
[[ "$output" != *"Time Machine backup in progress, skipping cleanup"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_time_machine_failed_backups skips when backup is actually running" {
|
@test "clean_time_machine_failed_backups skips when backup is actually running" {
|
||||||
# Skip test if tmutil is not available
|
|
||||||
if ! command -v tmutil > /dev/null 2>&1; then
|
if ! command -v tmutil > /dev/null 2>&1; then
|
||||||
skip "tmutil not available"
|
skip "tmutil not available"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Create a mock tmutil executable
|
|
||||||
local mock_bin="$HOME/bin"
|
local mock_bin="$HOME/bin"
|
||||||
mkdir -p "$mock_bin"
|
mkdir -p "$mock_bin"
|
||||||
|
|
||||||
@@ -252,11 +252,9 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/clean/system.sh"
|
source "$PROJECT_ROOT/lib/clean/system.sh"
|
||||||
|
|
||||||
# Run the function - should skip cleanup when Running = 1
|
|
||||||
clean_time_machine_failed_backups
|
clean_time_machine_failed_backups
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Should output the "backup in progress" message
|
|
||||||
[[ "$output" == *"Time Machine backup in progress, skipping cleanup"* ]]
|
[[ "$output" == *"Time Machine backup in progress, skipping cleanup"* ]]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -27,77 +27,84 @@ setup() {
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/clean/caches.sh"
|
source "$PROJECT_ROOT/lib/clean/caches.sh"
|
||||||
|
|
||||||
# Clean permission flag for each test
|
# Mock run_with_timeout to skip timeout overhead in tests
|
||||||
|
# shellcheck disable=SC2329
|
||||||
|
run_with_timeout() {
|
||||||
|
shift # Remove timeout argument
|
||||||
|
"$@"
|
||||||
|
}
|
||||||
|
export -f run_with_timeout
|
||||||
|
|
||||||
rm -f "$HOME/.cache/mole/permissions_granted"
|
rm -f "$HOME/.cache/mole/permissions_granted"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test check_tcc_permissions in non-interactive mode
|
|
||||||
@test "check_tcc_permissions skips in non-interactive mode" {
|
@test "check_tcc_permissions skips in non-interactive mode" {
|
||||||
# Redirect stdin to simulate non-TTY
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; check_tcc_permissions" < /dev/null
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; check_tcc_permissions" < /dev/null
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Should not create permission flag in non-interactive mode
|
|
||||||
[[ ! -f "$HOME/.cache/mole/permissions_granted" ]]
|
[[ ! -f "$HOME/.cache/mole/permissions_granted" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test check_tcc_permissions with existing permission flag
|
|
||||||
@test "check_tcc_permissions skips when permissions already granted" {
|
@test "check_tcc_permissions skips when permissions already granted" {
|
||||||
# Create permission flag
|
|
||||||
mkdir -p "$HOME/.cache/mole"
|
mkdir -p "$HOME/.cache/mole"
|
||||||
touch "$HOME/.cache/mole/permissions_granted"
|
touch "$HOME/.cache/mole/permissions_granted"
|
||||||
|
|
||||||
# Even in TTY mode, should skip if flag exists
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; [[ -t 1 ]] || true; check_tcc_permissions"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; [[ -t 1 ]] || true; check_tcc_permissions"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test check_tcc_permissions directory checks
|
|
||||||
@test "check_tcc_permissions validates protected directories" {
|
@test "check_tcc_permissions validates protected directories" {
|
||||||
# The function checks these directories exist:
|
|
||||||
# - ~/Library/Caches
|
|
||||||
# - ~/Library/Logs
|
|
||||||
# - ~/Library/Application Support
|
|
||||||
# - ~/Library/Containers
|
|
||||||
# - ~/.cache
|
|
||||||
|
|
||||||
# Ensure test environment has these directories
|
|
||||||
[[ -d "$HOME/Library/Caches" ]]
|
[[ -d "$HOME/Library/Caches" ]]
|
||||||
[[ -d "$HOME/Library/Logs" ]]
|
[[ -d "$HOME/Library/Logs" ]]
|
||||||
[[ -d "$HOME/.cache/mole" ]]
|
[[ -d "$HOME/.cache/mole" ]]
|
||||||
|
|
||||||
# Function should handle missing directories gracefully
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; check_tcc_permissions < /dev/null"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; check_tcc_permissions < /dev/null"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_service_worker_cache with non-existent path
|
|
||||||
@test "clean_service_worker_cache returns early when path doesn't exist" {
|
@test "clean_service_worker_cache returns early when path doesn't exist" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; clean_service_worker_cache 'TestBrowser' '/nonexistent/path'"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; clean_service_worker_cache 'TestBrowser' '/nonexistent/path'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_service_worker_cache with empty directory
|
|
||||||
@test "clean_service_worker_cache handles empty cache directory" {
|
@test "clean_service_worker_cache handles empty cache directory" {
|
||||||
local test_cache="$HOME/test_sw_cache"
|
local test_cache="$HOME/test_sw_cache"
|
||||||
mkdir -p "$test_cache"
|
mkdir -p "$test_cache"
|
||||||
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/clean/caches.sh'; clean_service_worker_cache 'TestBrowser' '$test_cache'"
|
run bash -c "
|
||||||
|
run_with_timeout() { shift; \"\$@\"; }
|
||||||
|
export -f run_with_timeout
|
||||||
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
|
source '$PROJECT_ROOT/lib/clean/caches.sh'
|
||||||
|
clean_service_worker_cache 'TestBrowser' '$test_cache'
|
||||||
|
"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
rm -rf "$test_cache"
|
rm -rf "$test_cache"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_service_worker_cache domain protection
|
|
||||||
@test "clean_service_worker_cache protects specified domains" {
|
@test "clean_service_worker_cache protects specified domains" {
|
||||||
local test_cache="$HOME/test_sw_cache"
|
local test_cache="$HOME/test_sw_cache"
|
||||||
mkdir -p "$test_cache/abc123_https_capcut.com_0"
|
mkdir -p "$test_cache/abc123_https_capcut.com_0"
|
||||||
mkdir -p "$test_cache/def456_https_example.com_0"
|
mkdir -p "$test_cache/def456_https_example.com_0"
|
||||||
|
|
||||||
# Mock PROTECTED_SW_DOMAINS
|
|
||||||
export PROTECTED_SW_DOMAINS=("capcut.com" "photopea.com")
|
|
||||||
|
|
||||||
# Dry run to check protection logic
|
|
||||||
run bash -c "
|
run bash -c "
|
||||||
|
run_with_timeout() {
|
||||||
|
local timeout=\"\$1\"
|
||||||
|
shift
|
||||||
|
if [[ \"\$1\" == \"get_path_size_kb\" ]]; then
|
||||||
|
echo 0
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
if [[ \"\$1\" == \"sh\" ]]; then
|
||||||
|
printf '%s\n' \
|
||||||
|
'$test_cache/abc123_https_capcut.com_0' \
|
||||||
|
'$test_cache/def456_https_example.com_0'
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
\"\$@\"
|
||||||
|
}
|
||||||
|
export -f run_with_timeout
|
||||||
export DRY_RUN=true
|
export DRY_RUN=true
|
||||||
export PROTECTED_SW_DOMAINS=(capcut.com photopea.com)
|
export PROTECTED_SW_DOMAINS=(capcut.com photopea.com)
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
@@ -106,19 +113,15 @@ setup() {
|
|||||||
"
|
"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Protected domain directory should still exist
|
|
||||||
[[ -d "$test_cache/abc123_https_capcut.com_0" ]]
|
[[ -d "$test_cache/abc123_https_capcut.com_0" ]]
|
||||||
|
|
||||||
rm -rf "$test_cache"
|
rm -rf "$test_cache"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_project_caches function
|
|
||||||
@test "clean_project_caches completes without errors" {
|
@test "clean_project_caches completes without errors" {
|
||||||
# Create test project structures
|
|
||||||
mkdir -p "$HOME/projects/test-app/.next/cache"
|
mkdir -p "$HOME/projects/test-app/.next/cache"
|
||||||
mkdir -p "$HOME/projects/python-app/__pycache__"
|
mkdir -p "$HOME/projects/python-app/__pycache__"
|
||||||
|
|
||||||
# Create some dummy cache files
|
|
||||||
touch "$HOME/projects/test-app/.next/cache/test.cache"
|
touch "$HOME/projects/test-app/.next/cache/test.cache"
|
||||||
touch "$HOME/projects/python-app/__pycache__/module.pyc"
|
touch "$HOME/projects/python-app/__pycache__/module.pyc"
|
||||||
|
|
||||||
@@ -133,39 +136,30 @@ setup() {
|
|||||||
rm -rf "$HOME/projects"
|
rm -rf "$HOME/projects"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_project_caches timeout protection
|
|
||||||
@test "clean_project_caches handles timeout gracefully" {
|
@test "clean_project_caches handles timeout gracefully" {
|
||||||
# Create a test directory structure
|
|
||||||
mkdir -p "$HOME/test-project/.next"
|
mkdir -p "$HOME/test-project/.next"
|
||||||
|
|
||||||
# Mock find to simulate slow operation
|
|
||||||
function find() {
|
function find() {
|
||||||
sleep 2 # Simulate slow find
|
sleep 2 # Simulate slow find
|
||||||
echo "$HOME/test-project/.next"
|
echo "$HOME/test-project/.next"
|
||||||
}
|
}
|
||||||
export -f find
|
export -f find
|
||||||
|
|
||||||
# Should complete within reasonable time even with slow find
|
|
||||||
run timeout 15 bash -c "
|
run timeout 15 bash -c "
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
source '$PROJECT_ROOT/lib/clean/caches.sh'
|
source '$PROJECT_ROOT/lib/clean/caches.sh'
|
||||||
clean_project_caches
|
clean_project_caches
|
||||||
"
|
"
|
||||||
# Either succeeds or times out gracefully (both acceptable)
|
|
||||||
[ "$status" -eq 0 ] || [ "$status" -eq 124 ]
|
[ "$status" -eq 0 ] || [ "$status" -eq 124 ]
|
||||||
|
|
||||||
rm -rf "$HOME/test-project"
|
rm -rf "$HOME/test-project"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test clean_project_caches exclusions
|
|
||||||
@test "clean_project_caches excludes Library and Trash directories" {
|
@test "clean_project_caches excludes Library and Trash directories" {
|
||||||
# These directories should be excluded from scan
|
|
||||||
mkdir -p "$HOME/Library/.next/cache"
|
mkdir -p "$HOME/Library/.next/cache"
|
||||||
mkdir -p "$HOME/.Trash/.next/cache"
|
mkdir -p "$HOME/.Trash/.next/cache"
|
||||||
mkdir -p "$HOME/projects/.next/cache"
|
mkdir -p "$HOME/projects/.next/cache"
|
||||||
|
|
||||||
# Only non-excluded directories should be scanned
|
|
||||||
# We can't easily test this without mocking, but we can verify no crashes
|
|
||||||
run bash -c "
|
run bash -c "
|
||||||
export DRY_RUN=true
|
export DRY_RUN=true
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
@@ -176,4 +170,3 @@ setup() {
|
|||||||
|
|
||||||
rm -rf "$HOME/projects"
|
rm -rf "$HOME/projects"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
102
tests/clean_extras.bats
Normal file
102
tests/clean_extras.bats
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-extras.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_cloud_storage calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_cloud_storage
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Dropbox cache"* ]]
|
||||||
|
[[ "$output" == *"Google Drive cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_virtualization_tools hits cache paths" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_virtualization_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"VMware Fusion cache"* ]]
|
||||||
|
[[ "$output" == *"Parallels cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_email_clients calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_email_clients
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Spark cache"* ]]
|
||||||
|
[[ "$output" == *"Airmail cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_note_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_note_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Notion cache"* ]]
|
||||||
|
[[ "$output" == *"Obsidian cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_task_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_task_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Todoist cache"* ]]
|
||||||
|
[[ "$output" == *"Any.do cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "scan_external_volumes skips when no volumes" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
run_with_timeout() { return 1; }
|
||||||
|
scan_external_volumes
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
138
tests/clean_extras_more.bats
Normal file
138
tests/clean_extras_more.bats
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-clean-extras-more.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_video_tools calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_video_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"ScreenFlow cache"* ]]
|
||||||
|
[[ "$output" == *"Final Cut Pro cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_video_players calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_video_players
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"IINA cache"* ]]
|
||||||
|
[[ "$output" == *"VLC cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_3d_tools calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_3d_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Blender cache"* ]]
|
||||||
|
[[ "$output" == *"Cinema 4D cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_gaming_platforms calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_gaming_platforms
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Steam cache"* ]]
|
||||||
|
[[ "$output" == *"Epic Games cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_translation_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_translation_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Youdao Dictionary cache"* ]]
|
||||||
|
[[ "$output" == *"Eudict cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_launcher_apps calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_launcher_apps
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Alfred cache"* ]]
|
||||||
|
[[ "$output" == *"The Unarchiver cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_remote_desktop calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_remote_desktop
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"TeamViewer cache"* ]]
|
||||||
|
[[ "$output" == *"AnyDesk cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_system_utils calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_system_utils
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Input Source Pro cache"* ]]
|
||||||
|
[[ "$output" == *"WakaTime cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_shell_utils calls expected caches" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/app_caches.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_shell_utils
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Zsh completion cache"* ]]
|
||||||
|
[[ "$output" == *"wget HSTS cache"* ]]
|
||||||
|
}
|
||||||
@@ -46,24 +46,18 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "touchid status reports current configuration" {
|
@test "touchid status reports current configuration" {
|
||||||
# Don't test actual Touch ID config (system-dependent, may trigger prompts)
|
|
||||||
# Just verify the command exists and can run
|
|
||||||
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status
|
run env HOME="$HOME" "$PROJECT_ROOT/mole" touchid status
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Should output either "enabled" or "not configured" message
|
|
||||||
[[ "$output" == *"Touch ID"* ]]
|
[[ "$output" == *"Touch ID"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "mo optimize command is recognized" {
|
@test "mo optimize command is recognized" {
|
||||||
# Test that optimize command exists without actually running it
|
|
||||||
# Running full optimize in tests is too slow (waits for sudo, runs health checks)
|
|
||||||
run bash -c "grep -q '\"optimize\")' '$PROJECT_ROOT/mole'"
|
run bash -c "grep -q '\"optimize\")' '$PROJECT_ROOT/mole'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "mo analyze binary is valid" {
|
@test "mo analyze binary is valid" {
|
||||||
if [[ -f "$PROJECT_ROOT/bin/analyze-go" ]]; then
|
if [[ -f "$PROJECT_ROOT/bin/analyze-go" ]]; then
|
||||||
# Verify binary is executable and valid Universal Binary
|
|
||||||
[ -x "$PROJECT_ROOT/bin/analyze-go" ]
|
[ -x "$PROJECT_ROOT/bin/analyze-go" ]
|
||||||
run file "$PROJECT_ROOT/bin/analyze-go"
|
run file "$PROJECT_ROOT/bin/analyze-go"
|
||||||
[[ "$output" == *"Mach-O"* ]] || [[ "$output" == *"executable"* ]]
|
[[ "$output" == *"Mach-O"* ]] || [[ "$output" == *"executable"* ]]
|
||||||
|
|||||||
@@ -80,22 +80,18 @@ teardown() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "rotate_log_once only checks log size once per session" {
|
@test "rotate_log_once only checks log size once per session" {
|
||||||
# Create a log file exceeding the max size
|
|
||||||
local log_file="$HOME/.config/mole/mole.log"
|
local log_file="$HOME/.config/mole/mole.log"
|
||||||
mkdir -p "$(dirname "$log_file")"
|
mkdir -p "$(dirname "$log_file")"
|
||||||
dd if=/dev/zero of="$log_file" bs=1024 count=1100 2> /dev/null
|
dd if=/dev/zero of="$log_file" bs=1024 count=1100 2> /dev/null
|
||||||
|
|
||||||
# First call should rotate
|
|
||||||
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'"
|
HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'"
|
||||||
[[ -f "${log_file}.old" ]]
|
[[ -f "${log_file}.old" ]]
|
||||||
|
|
||||||
# Verify MOLE_LOG_ROTATED was set (rotation happened)
|
|
||||||
result=$(HOME="$HOME" MOLE_LOG_ROTATED=1 bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; echo \$MOLE_LOG_ROTATED")
|
result=$(HOME="$HOME" MOLE_LOG_ROTATED=1 bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; echo \$MOLE_LOG_ROTATED")
|
||||||
[[ "$result" == "1" ]]
|
[[ "$result" == "1" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "drain_pending_input clears stdin buffer" {
|
@test "drain_pending_input clears stdin buffer" {
|
||||||
# Test that drain_pending_input doesn't hang (using background job with timeout)
|
|
||||||
result=$(
|
result=$(
|
||||||
(echo -e "test\ninput" | HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; drain_pending_input; echo done") &
|
(echo -e "test\ninput" | HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; drain_pending_input; echo done") &
|
||||||
pid=$!
|
pid=$!
|
||||||
@@ -150,15 +146,12 @@ EOF
|
|||||||
|
|
||||||
|
|
||||||
@test "should_protect_data protects system and critical apps" {
|
@test "should_protect_data protects system and critical apps" {
|
||||||
# System apps should be protected
|
|
||||||
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.apple.Safari' && echo 'protected' || echo 'not-protected'")
|
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.apple.Safari' && echo 'protected' || echo 'not-protected'")
|
||||||
[ "$result" = "protected" ]
|
[ "$result" = "protected" ]
|
||||||
|
|
||||||
# Critical network apps should be protected
|
|
||||||
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.clash.app' && echo 'protected' || echo 'not-protected'")
|
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.clash.app' && echo 'protected' || echo 'not-protected'")
|
||||||
[ "$result" = "protected" ]
|
[ "$result" = "protected" ]
|
||||||
|
|
||||||
# Regular apps should not be protected
|
|
||||||
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.example.RegularApp' && echo 'protected' || echo 'not-protected'")
|
result=$(HOME="$HOME" bash --noprofile --norc -c "source '$PROJECT_ROOT/lib/core/common.sh'; should_protect_data 'com.example.RegularApp' && echo 'protected' || echo 'not-protected'")
|
||||||
[ "$result" = "not-protected" ]
|
[ "$result" = "not-protected" ]
|
||||||
}
|
}
|
||||||
@@ -171,7 +164,6 @@ EOF
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "start_inline_spinner and stop_inline_spinner work in non-TTY" {
|
@test "start_inline_spinner and stop_inline_spinner work in non-TTY" {
|
||||||
# Should not hang in non-interactive mode
|
|
||||||
result=$(HOME="$HOME" bash --noprofile --norc << 'EOF'
|
result=$(HOME="$HOME" bash --noprofile --norc << 'EOF'
|
||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Testing..."
|
MOLE_SPINNER_PREFIX=" " start_inline_spinner "Testing..."
|
||||||
|
|||||||
@@ -31,15 +31,12 @@ setup() {
|
|||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
MOLE_OUTPUT="$output"
|
MOLE_OUTPUT="$output"
|
||||||
|
|
||||||
# Check if log file exists
|
|
||||||
DEBUG_LOG="$HOME/.config/mole/mole_debug_session.log"
|
DEBUG_LOG="$HOME/.config/mole/mole_debug_session.log"
|
||||||
[ -f "$DEBUG_LOG" ]
|
[ -f "$DEBUG_LOG" ]
|
||||||
|
|
||||||
# Validates log content
|
|
||||||
run grep "Mole Debug Session" "$DEBUG_LOG"
|
run grep "Mole Debug Session" "$DEBUG_LOG"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Validates standard output message (ignoring colors)
|
|
||||||
[[ "$MOLE_OUTPUT" =~ "Debug session log saved to" ]]
|
[[ "$MOLE_OUTPUT" =~ "Debug session log saved to" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -56,7 +53,6 @@ setup() {
|
|||||||
|
|
||||||
DEBUG_LOG="$HOME/.config/mole/mole_debug_session.log"
|
DEBUG_LOG="$HOME/.config/mole/mole_debug_session.log"
|
||||||
|
|
||||||
# Check for system info headers
|
|
||||||
run grep "User:" "$DEBUG_LOG"
|
run grep "User:" "$DEBUG_LOG"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
|
|||||||
106
tests/dev_caches.bats
Normal file
106
tests/dev_caches.bats
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-dev-caches.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_dev_npm cleans orphaned pnpm store" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/dev.sh"
|
||||||
|
start_section_spinner() { :; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
clean_tool_cache() { echo "$1"; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
note_activity() { :; }
|
||||||
|
pnpm() {
|
||||||
|
if [[ "$1" == "store" && "$2" == "prune" ]]; then
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
if [[ "$1" == "store" && "$2" == "path" ]]; then
|
||||||
|
echo "/tmp/pnpm-store"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
npm() { return 0; }
|
||||||
|
export -f pnpm npm
|
||||||
|
clean_dev_npm
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Orphaned pnpm store"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_dev_docker skips when daemon not running" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MO_DEBUG=1 DRY_RUN=false bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/dev.sh"
|
||||||
|
start_section_spinner() { :; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
run_with_timeout() { return 1; }
|
||||||
|
clean_tool_cache() { echo "$1"; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
debug_log() { echo "$*"; }
|
||||||
|
docker() { return 1; }
|
||||||
|
export -f docker
|
||||||
|
clean_dev_docker
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Docker daemon not running"* ]]
|
||||||
|
[[ "$output" != *"Docker build cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_developer_tools runs key stages" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/dev.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
clean_sqlite_temp_files() { :; }
|
||||||
|
clean_dev_npm() { echo "npm"; }
|
||||||
|
clean_homebrew() { echo "brew"; }
|
||||||
|
clean_project_caches() { :; }
|
||||||
|
clean_dev_python() { :; }
|
||||||
|
clean_dev_go() { :; }
|
||||||
|
clean_dev_rust() { :; }
|
||||||
|
clean_dev_docker() { :; }
|
||||||
|
clean_dev_cloud() { :; }
|
||||||
|
clean_dev_nix() { :; }
|
||||||
|
clean_dev_shell() { :; }
|
||||||
|
clean_dev_frontend() { :; }
|
||||||
|
clean_dev_mobile() { :; }
|
||||||
|
clean_dev_jvm() { :; }
|
||||||
|
clean_dev_other_langs() { :; }
|
||||||
|
clean_dev_cicd() { :; }
|
||||||
|
clean_dev_database() { :; }
|
||||||
|
clean_dev_api_tools() { :; }
|
||||||
|
clean_dev_network() { :; }
|
||||||
|
clean_dev_misc() { :; }
|
||||||
|
safe_clean() { :; }
|
||||||
|
debug_log() { :; }
|
||||||
|
clean_developer_tools
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"npm"* ]]
|
||||||
|
[[ "$output" == *"brew"* ]]
|
||||||
|
}
|
||||||
100
tests/optimize_core.bats
Normal file
100
tests/optimize_core.bats
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-optimize-core.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "needs_permissions_repair returns true when home not writable" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" USER="tester" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
stat() { echo "root"; }
|
||||||
|
export -f stat
|
||||||
|
if needs_permissions_repair; then
|
||||||
|
echo "needs"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"needs"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "has_bluetooth_hid_connected detects HID" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
system_profiler() {
|
||||||
|
cat << 'OUT'
|
||||||
|
Bluetooth:
|
||||||
|
Apple Magic Mouse:
|
||||||
|
Connected: Yes
|
||||||
|
Type: Mouse
|
||||||
|
OUT
|
||||||
|
}
|
||||||
|
export -f system_profiler
|
||||||
|
if has_bluetooth_hid_connected; then
|
||||||
|
echo "hid"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"hid"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "is_ac_power detects AC power" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
pmset() { echo "AC Power"; }
|
||||||
|
export -f pmset
|
||||||
|
if is_ac_power; then
|
||||||
|
echo "ac"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"ac"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "is_memory_pressure_high detects warning" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
memory_pressure() { echo "warning"; }
|
||||||
|
export -f memory_pressure
|
||||||
|
if is_memory_pressure_high; then
|
||||||
|
echo "high"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"high"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_launch_services_rebuild handles missing lsregister" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
opt_launch_services_rebuild
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"LaunchServices repaired"* ]]
|
||||||
|
}
|
||||||
46
tests/optimize_helpers.bats
Normal file
46
tests/optimize_helpers.bats
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-optimize-helpers.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_msg uses dry-run output" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
opt_msg "dry"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"dry"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "run_launchctl_unload skips in dry-run" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
launchctl() { echo "called"; }
|
||||||
|
export -f launchctl
|
||||||
|
run_launchctl_unload "/tmp/test.plist" false
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" != *"called"* ]]
|
||||||
|
}
|
||||||
116
tests/optimize_tasks.bats
Normal file
116
tests/optimize_tasks.bats
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-optimize.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_system_maintenance reports DNS and Spotlight" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
flush_dns_cache() { return 0; }
|
||||||
|
mdutil() { echo "Indexing enabled."; }
|
||||||
|
opt_system_maintenance
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"DNS cache flushed"* ]]
|
||||||
|
[[ "$output" == *"Spotlight index verified"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_network_optimization refreshes DNS" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
flush_dns_cache() { return 0; }
|
||||||
|
opt_network_optimization
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"DNS cache refreshed"* ]]
|
||||||
|
[[ "$output" == *"mDNSResponder restarted"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_sqlite_vacuum reports sqlite3 unavailable" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
export PATH="/nonexistent"
|
||||||
|
opt_sqlite_vacuum
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"sqlite3 unavailable"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_font_cache_rebuild succeeds in dry-run" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
opt_font_cache_rebuild
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Font cache cleared"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "opt_dock_refresh clears cache files" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_DRY_RUN=1 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
mkdir -p "$HOME/Library/Application Support/Dock"
|
||||||
|
touch "$HOME/Library/Application Support/Dock/test.db"
|
||||||
|
safe_remove() { return 0; }
|
||||||
|
opt_dock_refresh
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Dock cache cleared"* ]]
|
||||||
|
[[ "$output" == *"Dock refreshed"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "execute_optimization dispatches actions" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
opt_dock_refresh() { echo "dock"; }
|
||||||
|
execute_optimization dock_refresh
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"dock"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "execute_optimization rejects unknown action" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
execute_optimization unknown_action
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 1 ]
|
||||||
|
[[ "$output" == *"Unknown action"* ]]
|
||||||
|
}
|
||||||
@@ -1,12 +1,9 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
# Performance benchmark tests for Mole optimizations
|
|
||||||
# Tests the performance improvements introduced in V1.14.0+
|
|
||||||
|
|
||||||
setup_file() {
|
setup_file() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
export PROJECT_ROOT
|
export PROJECT_ROOT
|
||||||
|
|
||||||
# Create test data directory
|
|
||||||
TEST_DATA_DIR="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-perf.XXXXXX")"
|
TEST_DATA_DIR="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-perf.XXXXXX")"
|
||||||
export TEST_DATA_DIR
|
export TEST_DATA_DIR
|
||||||
}
|
}
|
||||||
@@ -19,26 +16,19 @@ setup() {
|
|||||||
source "$PROJECT_ROOT/lib/core/base.sh"
|
source "$PROJECT_ROOT/lib/core/base.sh"
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# bytes_to_human Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "bytes_to_human handles large values efficiently" {
|
@test "bytes_to_human handles large values efficiently" {
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
|
|
||||||
# Warm up
|
|
||||||
bytes_to_human 1073741824 > /dev/null
|
bytes_to_human 1073741824 > /dev/null
|
||||||
|
|
||||||
# Benchmark: 1000 iterations should complete in < 2 seconds (relaxed threshold)
|
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
for i in {1..1000}; do
|
for i in {1..1000}; do
|
||||||
bytes_to_human 1073741824 > /dev/null
|
bytes_to_human 1073741824 > /dev/null
|
||||||
done
|
done
|
||||||
end=$(date +%s%N)
|
end=$(date +%s%N)
|
||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 )) # Convert to milliseconds
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Should complete in less than 2000ms (2 seconds)
|
|
||||||
[ "$elapsed" -lt 2000 ]
|
[ "$elapsed" -lt 2000 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -67,31 +57,22 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "bytes_to_human handles edge cases" {
|
@test "bytes_to_human handles edge cases" {
|
||||||
# Zero bytes
|
|
||||||
result=$(bytes_to_human 0)
|
result=$(bytes_to_human 0)
|
||||||
[ "$result" = "0B" ]
|
[ "$result" = "0B" ]
|
||||||
|
|
||||||
# Invalid input returns 0B (with error code 1)
|
|
||||||
run bytes_to_human "invalid"
|
run bytes_to_human "invalid"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
[ "$output" = "0B" ]
|
[ "$output" = "0B" ]
|
||||||
|
|
||||||
# Negative should also fail validation
|
|
||||||
run bytes_to_human "-100"
|
run bytes_to_human "-100"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
[ "$output" = "0B" ]
|
[ "$output" = "0B" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# BSD Stat Wrapper Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "get_file_size is faster than multiple stat calls" {
|
@test "get_file_size is faster than multiple stat calls" {
|
||||||
# Create test file
|
|
||||||
local test_file="$TEST_DATA_DIR/size_test.txt"
|
local test_file="$TEST_DATA_DIR/size_test.txt"
|
||||||
dd if=/dev/zero of="$test_file" bs=1024 count=100 2> /dev/null
|
dd if=/dev/zero of="$test_file" bs=1024 count=100 2> /dev/null
|
||||||
|
|
||||||
# Benchmark: 100 calls should complete quickly
|
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
for i in {1..100}; do
|
for i in {1..100}; do
|
||||||
@@ -101,7 +82,6 @@ setup() {
|
|||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Should complete in less than 1000ms (relaxed threshold)
|
|
||||||
[ "$elapsed" -lt 1000 ]
|
[ "$elapsed" -lt 1000 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -111,7 +91,6 @@ setup() {
|
|||||||
|
|
||||||
result=$(get_file_mtime "$test_file")
|
result=$(get_file_mtime "$test_file")
|
||||||
|
|
||||||
# Should be a valid epoch timestamp (10 digits)
|
|
||||||
[[ "$result" =~ ^[0-9]{10,}$ ]]
|
[[ "$result" =~ ^[0-9]{10,}$ ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -125,10 +104,6 @@ setup() {
|
|||||||
[ "$result" = "$current_user" ]
|
[ "$result" = "$current_user" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# User Context Detection Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "get_invoking_user executes quickly" {
|
@test "get_invoking_user executes quickly" {
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
|
|
||||||
@@ -140,12 +115,10 @@ setup() {
|
|||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Should complete in less than 200ms
|
|
||||||
[ "$elapsed" -lt 200 ]
|
[ "$elapsed" -lt 200 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "get_darwin_major caches correctly" {
|
@test "get_darwin_major caches correctly" {
|
||||||
# Multiple calls should return same result
|
|
||||||
local first second
|
local first second
|
||||||
first=$(get_darwin_major)
|
first=$(get_darwin_major)
|
||||||
second=$(get_darwin_major)
|
second=$(get_darwin_major)
|
||||||
@@ -154,17 +127,11 @@ setup() {
|
|||||||
[[ "$first" =~ ^[0-9]+$ ]]
|
[[ "$first" =~ ^[0-9]+$ ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Temporary File Management Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "create_temp_file and cleanup_temp_files work efficiently" {
|
@test "create_temp_file and cleanup_temp_files work efficiently" {
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
|
|
||||||
# Ensure MOLE_TEMP_DIRS is initialized (base.sh should do this)
|
|
||||||
declare -a MOLE_TEMP_DIRS=()
|
declare -a MOLE_TEMP_DIRS=()
|
||||||
|
|
||||||
# Create 50 temp files (reduced from 100 for faster testing)
|
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
for i in {1..50}; do
|
for i in {1..50}; do
|
||||||
create_temp_file > /dev/null
|
create_temp_file > /dev/null
|
||||||
@@ -173,22 +140,17 @@ setup() {
|
|||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Should complete in less than 1000ms
|
|
||||||
[ "$elapsed" -lt 1000 ]
|
[ "$elapsed" -lt 1000 ]
|
||||||
|
|
||||||
# Verify temp files were tracked
|
|
||||||
[ "${#MOLE_TEMP_FILES[@]}" -eq 50 ]
|
[ "${#MOLE_TEMP_FILES[@]}" -eq 50 ]
|
||||||
|
|
||||||
# Cleanup should also be reasonably fast
|
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
cleanup_temp_files
|
cleanup_temp_files
|
||||||
end=$(date +%s%N)
|
end=$(date +%s%N)
|
||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
# Relaxed threshold: should complete within 2 seconds
|
|
||||||
[ "$elapsed" -lt 2000 ]
|
[ "$elapsed" -lt 2000 ]
|
||||||
|
|
||||||
# Verify cleanup
|
|
||||||
[ "${#MOLE_TEMP_FILES[@]}" -eq 0 ]
|
[ "${#MOLE_TEMP_FILES[@]}" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -196,27 +158,18 @@ setup() {
|
|||||||
local temp_file
|
local temp_file
|
||||||
temp_file=$(mktemp_file "test_prefix")
|
temp_file=$(mktemp_file "test_prefix")
|
||||||
|
|
||||||
# Should contain prefix
|
|
||||||
[[ "$temp_file" =~ test_prefix ]]
|
[[ "$temp_file" =~ test_prefix ]]
|
||||||
|
|
||||||
# Should exist
|
|
||||||
[ -f "$temp_file" ]
|
[ -f "$temp_file" ]
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
rm -f "$temp_file"
|
rm -f "$temp_file"
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Brand Name Lookup Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "get_brand_name handles common apps efficiently" {
|
@test "get_brand_name handles common apps efficiently" {
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
|
|
||||||
# Warm up (first call includes defaults read which is slow)
|
|
||||||
get_brand_name "wechat" > /dev/null
|
get_brand_name "wechat" > /dev/null
|
||||||
|
|
||||||
# Benchmark: 50 lookups (reduced from 100)
|
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
for i in {1..50}; do
|
for i in {1..50}; do
|
||||||
get_brand_name "wechat" > /dev/null
|
get_brand_name "wechat" > /dev/null
|
||||||
@@ -227,52 +180,36 @@ setup() {
|
|||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Relaxed threshold: defaults read is called multiple times
|
|
||||||
# Should complete within 5 seconds on most systems
|
|
||||||
[ "$elapsed" -lt 5000 ]
|
[ "$elapsed" -lt 5000 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "get_brand_name returns correct localized names" {
|
@test "get_brand_name returns correct localized names" {
|
||||||
# Test should work regardless of system language
|
|
||||||
local result
|
local result
|
||||||
result=$(get_brand_name "wechat")
|
result=$(get_brand_name "wechat")
|
||||||
|
|
||||||
# Should return either "WeChat" or "微信"
|
|
||||||
[[ "$result" == "WeChat" || "$result" == "微信" ]]
|
[[ "$result" == "WeChat" || "$result" == "微信" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Parallel Job Calculation Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "get_optimal_parallel_jobs returns sensible values" {
|
@test "get_optimal_parallel_jobs returns sensible values" {
|
||||||
local result
|
local result
|
||||||
|
|
||||||
# Default mode
|
|
||||||
result=$(get_optimal_parallel_jobs)
|
result=$(get_optimal_parallel_jobs)
|
||||||
[[ "$result" =~ ^[0-9]+$ ]]
|
[[ "$result" =~ ^[0-9]+$ ]]
|
||||||
[ "$result" -gt 0 ]
|
[ "$result" -gt 0 ]
|
||||||
[ "$result" -le 128 ]
|
[ "$result" -le 128 ]
|
||||||
|
|
||||||
# Scan mode (should be higher)
|
|
||||||
local scan_jobs
|
local scan_jobs
|
||||||
scan_jobs=$(get_optimal_parallel_jobs "scan")
|
scan_jobs=$(get_optimal_parallel_jobs "scan")
|
||||||
[ "$scan_jobs" -gt "$result" ]
|
[ "$scan_jobs" -gt "$result" ]
|
||||||
|
|
||||||
# Compute mode (should be lower)
|
|
||||||
local compute_jobs
|
local compute_jobs
|
||||||
compute_jobs=$(get_optimal_parallel_jobs "compute")
|
compute_jobs=$(get_optimal_parallel_jobs "compute")
|
||||||
[ "$compute_jobs" -le "$scan_jobs" ]
|
[ "$compute_jobs" -le "$scan_jobs" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Section Tracking Performance Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "section tracking has minimal overhead" {
|
@test "section tracking has minimal overhead" {
|
||||||
local start end elapsed
|
local start end elapsed
|
||||||
|
|
||||||
# Define note_activity if not already defined (it's in bin/clean.sh)
|
|
||||||
if ! declare -f note_activity > /dev/null 2>&1; then
|
if ! declare -f note_activity > /dev/null 2>&1; then
|
||||||
TRACK_SECTION=0
|
TRACK_SECTION=0
|
||||||
SECTION_ACTIVITY=0
|
SECTION_ACTIVITY=0
|
||||||
@@ -283,7 +220,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Warm up
|
|
||||||
note_activity
|
note_activity
|
||||||
|
|
||||||
start=$(date +%s%N)
|
start=$(date +%s%N)
|
||||||
@@ -294,6 +230,5 @@ setup() {
|
|||||||
|
|
||||||
elapsed=$(( (end - start) / 1000000 ))
|
elapsed=$(( (end - start) / 1000000 ))
|
||||||
|
|
||||||
# Should complete in less than 2000ms (relaxed for CI environments)
|
|
||||||
[ "$elapsed" -lt 2000 ]
|
[ "$elapsed" -lt 2000 ]
|
||||||
}
|
}
|
||||||
|
|||||||
84
tests/project_purge_extra.bats
Normal file
84
tests/project_purge_extra.bats
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-purge-extra.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "is_project_container detects project indicators" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/project.sh"
|
||||||
|
mkdir -p "$HOME/Workspace2/project"
|
||||||
|
touch "$HOME/Workspace2/project/package.json"
|
||||||
|
if is_project_container "$HOME/Workspace2" 2; then
|
||||||
|
echo "yes"
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"yes"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "discover_project_dirs includes detected containers" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/project.sh"
|
||||||
|
mkdir -p "$HOME/CustomProjects/app"
|
||||||
|
touch "$HOME/CustomProjects/app/go.mod"
|
||||||
|
discover_project_dirs | grep -q "$HOME/CustomProjects"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "save_discovered_paths writes config with tilde" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/project.sh"
|
||||||
|
save_discovered_paths "$HOME/Projects"
|
||||||
|
grep -q "^~/" "$HOME/.config/mole/purge_paths"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "scan_purge_targets finds artifacts via find path" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" MOLE_PURGE_MIN_DEPTH=1 MOLE_PURGE_MAX_DEPTH=2 bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
PATH="/usr/bin:/bin"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/project.sh"
|
||||||
|
mkdir -p "$HOME/dev/app/node_modules"
|
||||||
|
scan_purge_targets "$HOME/dev" "$HOME/results.txt"
|
||||||
|
grep -q "node_modules" "$HOME/results.txt"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "select_purge_categories returns failure on empty input" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/clean/project.sh"
|
||||||
|
if select_purge_categories; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
@@ -1,6 +1,4 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
# Tests for project artifact purge functionality
|
|
||||||
# bin/purge.sh and lib/clean/project.sh
|
|
||||||
|
|
||||||
setup_file() {
|
setup_file() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
@@ -23,21 +21,14 @@ teardown_file() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
# Create test project directories
|
|
||||||
mkdir -p "$HOME/www"
|
mkdir -p "$HOME/www"
|
||||||
mkdir -p "$HOME/dev"
|
mkdir -p "$HOME/dev"
|
||||||
mkdir -p "$HOME/.cache/mole"
|
mkdir -p "$HOME/.cache/mole"
|
||||||
|
|
||||||
# Clean any previous test artifacts
|
|
||||||
rm -rf "${HOME:?}/www"/* "${HOME:?}/dev"/*
|
rm -rf "${HOME:?}/www"/* "${HOME:?}/dev"/*
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Safety Checks
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "is_safe_project_artifact: rejects shallow paths (protection against accidents)" {
|
@test "is_safe_project_artifact: rejects shallow paths (protection against accidents)" {
|
||||||
# Should reject ~/www/node_modules (too shallow, depth < 1)
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
if is_safe_project_artifact '$HOME/www/node_modules' '$HOME/www'; then
|
if is_safe_project_artifact '$HOME/www/node_modules' '$HOME/www'; then
|
||||||
@@ -50,7 +41,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "is_safe_project_artifact: allows proper project artifacts" {
|
@test "is_safe_project_artifact: allows proper project artifacts" {
|
||||||
# Should allow ~/www/myproject/node_modules (depth >= 1)
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
if is_safe_project_artifact '$HOME/www/myproject/node_modules' '$HOME/www'; then
|
if is_safe_project_artifact '$HOME/www/myproject/node_modules' '$HOME/www'; then
|
||||||
@@ -63,7 +53,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "is_safe_project_artifact: rejects non-absolute paths" {
|
@test "is_safe_project_artifact: rejects non-absolute paths" {
|
||||||
# Should reject relative paths
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
if is_safe_project_artifact 'relative/path/node_modules' '$HOME/www'; then
|
if is_safe_project_artifact 'relative/path/node_modules' '$HOME/www'; then
|
||||||
@@ -76,7 +65,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "is_safe_project_artifact: validates depth calculation" {
|
@test "is_safe_project_artifact: validates depth calculation" {
|
||||||
# ~/www/project/subdir/node_modules should be allowed (depth = 2)
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
if is_safe_project_artifact '$HOME/www/project/subdir/node_modules' '$HOME/www'; then
|
if is_safe_project_artifact '$HOME/www/project/subdir/node_modules' '$HOME/www'; then
|
||||||
@@ -88,13 +76,7 @@ setup() {
|
|||||||
[[ "$result" == "ALLOWED" ]]
|
[[ "$result" == "ALLOWED" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Nested Artifact Filtering
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "filter_nested_artifacts: removes nested node_modules" {
|
@test "filter_nested_artifacts: removes nested node_modules" {
|
||||||
# Create nested structure:
|
|
||||||
# ~/www/project/node_modules/package/node_modules
|
|
||||||
mkdir -p "$HOME/www/project/node_modules/package/node_modules"
|
mkdir -p "$HOME/www/project/node_modules/package/node_modules"
|
||||||
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
@@ -103,7 +85,6 @@ setup() {
|
|||||||
filter_nested_artifacts | wc -l | tr -d ' '
|
filter_nested_artifacts | wc -l | tr -d ' '
|
||||||
")
|
")
|
||||||
|
|
||||||
# Should only keep the parent node_modules (nested one filtered out)
|
|
||||||
[[ "$result" == "1" ]]
|
[[ "$result" == "1" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -117,17 +98,12 @@ setup() {
|
|||||||
filter_nested_artifacts | wc -l | tr -d ' '
|
filter_nested_artifacts | wc -l | tr -d ' '
|
||||||
")
|
")
|
||||||
|
|
||||||
# Should keep both (they're independent)
|
|
||||||
[[ "$result" == "2" ]]
|
[[ "$result" == "2" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Recently Modified Detection
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "is_recently_modified: detects recent projects" {
|
@test "is_recently_modified: detects recent projects" {
|
||||||
mkdir -p "$HOME/www/project/node_modules"
|
mkdir -p "$HOME/www/project/node_modules"
|
||||||
touch "$HOME/www/project/package.json" # Recently touched
|
touch "$HOME/www/project/package.json"
|
||||||
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
@@ -145,24 +121,16 @@ setup() {
|
|||||||
mkdir -p "$HOME/www/old-project/node_modules"
|
mkdir -p "$HOME/www/old-project/node_modules"
|
||||||
mkdir -p "$HOME/www/old-project"
|
mkdir -p "$HOME/www/old-project"
|
||||||
|
|
||||||
# Simulate old project (modified 30 days ago)
|
|
||||||
# Note: This is hard to test reliably without mocking 'find'
|
|
||||||
# Just verify the function can run without errors
|
|
||||||
bash -c "
|
bash -c "
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
is_recently_modified '$HOME/www/old-project/node_modules' || true
|
is_recently_modified '$HOME/www/old-project/node_modules' || true
|
||||||
"
|
"
|
||||||
local exit_code=$?
|
local exit_code=$?
|
||||||
[ "$exit_code" -eq 0 ] || [ "$exit_code" -eq 1 ] # Allow both true/false, just check no crash
|
[ "$exit_code" -eq 0 ] || [ "$exit_code" -eq 1 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Artifact Detection
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "purge targets are configured correctly" {
|
@test "purge targets are configured correctly" {
|
||||||
# Verify PURGE_TARGETS array exists and contains expected values
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
source '$PROJECT_ROOT/lib/clean/project.sh'
|
source '$PROJECT_ROOT/lib/clean/project.sh'
|
||||||
echo \"\${PURGE_TARGETS[@]}\"
|
echo \"\${PURGE_TARGETS[@]}\"
|
||||||
@@ -171,13 +139,8 @@ setup() {
|
|||||||
[[ "$result" == *"target"* ]]
|
[[ "$result" == *"target"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Size Calculation
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "get_dir_size_kb: calculates directory size" {
|
@test "get_dir_size_kb: calculates directory size" {
|
||||||
mkdir -p "$HOME/www/test-project/node_modules"
|
mkdir -p "$HOME/www/test-project/node_modules"
|
||||||
# Create a file with known size (~1MB)
|
|
||||||
dd if=/dev/zero of="$HOME/www/test-project/node_modules/file.bin" bs=1024 count=1024 2>/dev/null
|
dd if=/dev/zero of="$HOME/www/test-project/node_modules/file.bin" bs=1024 count=1024 2>/dev/null
|
||||||
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
@@ -185,7 +148,6 @@ setup() {
|
|||||||
get_dir_size_kb '$HOME/www/test-project/node_modules'
|
get_dir_size_kb '$HOME/www/test-project/node_modules'
|
||||||
")
|
")
|
||||||
|
|
||||||
# Should be around 1024 KB (allow some filesystem overhead)
|
|
||||||
[[ "$result" -ge 1000 ]] && [[ "$result" -le 1100 ]]
|
[[ "$result" -ge 1000 ]] && [[ "$result" -le 1100 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -197,12 +159,7 @@ setup() {
|
|||||||
[[ "$result" == "0" ]]
|
[[ "$result" == "0" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Integration Tests (Non-Interactive)
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "clean_project_artifacts: handles empty directory gracefully" {
|
@test "clean_project_artifacts: handles empty directory gracefully" {
|
||||||
# No projects, should exit cleanly
|
|
||||||
run bash -c "
|
run bash -c "
|
||||||
export HOME='$HOME'
|
export HOME='$HOME'
|
||||||
source '$PROJECT_ROOT/lib/core/common.sh'
|
source '$PROJECT_ROOT/lib/core/common.sh'
|
||||||
@@ -210,36 +167,26 @@ setup() {
|
|||||||
clean_project_artifacts
|
clean_project_artifacts
|
||||||
" < /dev/null
|
" < /dev/null
|
||||||
|
|
||||||
# Should succeed (exit code 0 or 2 for nothing to clean)
|
|
||||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 2 ]]
|
[[ "$status" -eq 0 ]] || [[ "$status" -eq 2 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_project_artifacts: scans and finds artifacts" {
|
@test "clean_project_artifacts: scans and finds artifacts" {
|
||||||
# Create test project with node_modules (make it big enough to detect)
|
|
||||||
mkdir -p "$HOME/www/test-project/node_modules/package1"
|
mkdir -p "$HOME/www/test-project/node_modules/package1"
|
||||||
echo "test data" > "$HOME/www/test-project/node_modules/package1/index.js"
|
echo "test data" > "$HOME/www/test-project/node_modules/package1/index.js"
|
||||||
|
|
||||||
# Create parent directory timestamp old enough
|
|
||||||
mkdir -p "$HOME/www/test-project"
|
mkdir -p "$HOME/www/test-project"
|
||||||
|
|
||||||
# Run in non-interactive mode (with timeout to avoid hanging)
|
|
||||||
run bash -c "
|
run bash -c "
|
||||||
export HOME='$HOME'
|
export HOME='$HOME'
|
||||||
timeout 5 '$PROJECT_ROOT/bin/purge.sh' 2>&1 < /dev/null || true
|
timeout 5 '$PROJECT_ROOT/bin/purge.sh' 2>&1 < /dev/null || true
|
||||||
"
|
"
|
||||||
|
|
||||||
# Should either scan successfully or exit gracefully
|
|
||||||
# Check for expected outputs (scanning, completion, or nothing found)
|
|
||||||
[[ "$output" =~ "Scanning" ]] ||
|
[[ "$output" =~ "Scanning" ]] ||
|
||||||
[[ "$output" =~ "Purge complete" ]] ||
|
[[ "$output" =~ "Purge complete" ]] ||
|
||||||
[[ "$output" =~ "No old" ]] ||
|
[[ "$output" =~ "No old" ]] ||
|
||||||
[[ "$output" =~ "Great" ]]
|
[[ "$output" =~ "Great" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Command Line Interface
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "mo purge: command exists and is executable" {
|
@test "mo purge: command exists and is executable" {
|
||||||
[ -x "$PROJECT_ROOT/mole" ]
|
[ -x "$PROJECT_ROOT/mole" ]
|
||||||
[ -f "$PROJECT_ROOT/bin/purge.sh" ]
|
[ -f "$PROJECT_ROOT/bin/purge.sh" ]
|
||||||
@@ -252,22 +199,18 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "mo purge: accepts --debug flag" {
|
@test "mo purge: accepts --debug flag" {
|
||||||
# Just verify it doesn't crash with --debug
|
|
||||||
run bash -c "
|
run bash -c "
|
||||||
export HOME='$HOME'
|
export HOME='$HOME'
|
||||||
timeout 2 '$PROJECT_ROOT/mole' purge --debug < /dev/null 2>&1 || true
|
timeout 2 '$PROJECT_ROOT/mole' purge --debug < /dev/null 2>&1 || true
|
||||||
"
|
"
|
||||||
# Should not crash (any exit code is OK, we just want to verify it runs)
|
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "mo purge: creates cache directory for stats" {
|
@test "mo purge: creates cache directory for stats" {
|
||||||
# Run purge (will exit quickly in non-interactive with no projects)
|
|
||||||
bash -c "
|
bash -c "
|
||||||
export HOME='$HOME'
|
export HOME='$HOME'
|
||||||
timeout 2 '$PROJECT_ROOT/mole' purge < /dev/null 2>&1 || true
|
timeout 2 '$PROJECT_ROOT/mole' purge < /dev/null 2>&1 || true
|
||||||
"
|
"
|
||||||
|
|
||||||
# Cache directory should be created
|
|
||||||
[ -d "$HOME/.cache/mole" ] || [ -d "${XDG_CACHE_HOME:-$HOME/.cache}/mole" ]
|
[ -d "$HOME/.cache/mole" ] || [ -d "${XDG_CACHE_HOME:-$HOME/.cache}/mole" ]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,13 +26,10 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "load_purge_config loads default paths when config file is missing" {
|
@test "load_purge_config loads default paths when config file is missing" {
|
||||||
# Source the file in a subshell to avoid polluting test environment variables
|
|
||||||
# We need to export HOME so it's picked up by the script
|
|
||||||
run env HOME="$HOME" bash -c "source '$PROJECT_ROOT/lib/clean/project.sh'; echo \"\${PURGE_SEARCH_PATHS[*]}\""
|
run env HOME="$HOME" bash -c "source '$PROJECT_ROOT/lib/clean/project.sh'; echo \"\${PURGE_SEARCH_PATHS[*]}\""
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Check for a few expected default paths
|
|
||||||
[[ "$output" == *"$HOME/Projects"* ]]
|
[[ "$output" == *"$HOME/Projects"* ]]
|
||||||
[[ "$output" == *"$HOME/GitHub"* ]]
|
[[ "$output" == *"$HOME/GitHub"* ]]
|
||||||
[[ "$output" == *"$HOME/dev"* ]]
|
[[ "$output" == *"$HOME/dev"* ]]
|
||||||
@@ -52,7 +49,6 @@ EOF
|
|||||||
|
|
||||||
[[ "$output" == *"$HOME/custom/projects"* ]]
|
[[ "$output" == *"$HOME/custom/projects"* ]]
|
||||||
[[ "$output" == *"$HOME/work"* ]]
|
[[ "$output" == *"$HOME/work"* ]]
|
||||||
# Should NOT have default paths
|
|
||||||
[[ "$output" != *"$HOME/GitHub"* ]]
|
[[ "$output" != *"$HOME/GitHub"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -77,10 +73,8 @@ EOF
|
|||||||
local config_file="$HOME/.config/mole/purge_paths"
|
local config_file="$HOME/.config/mole/purge_paths"
|
||||||
|
|
||||||
cat > "$config_file" << EOF
|
cat > "$config_file" << EOF
|
||||||
# This is a comment
|
|
||||||
$HOME/valid/path
|
$HOME/valid/path
|
||||||
|
|
||||||
# Indented comment
|
|
||||||
|
|
||||||
$HOME/another/path
|
$HOME/another/path
|
||||||
EOF
|
EOF
|
||||||
@@ -91,7 +85,6 @@ EOF
|
|||||||
|
|
||||||
local lines
|
local lines
|
||||||
read -r -a lines <<< "$output"
|
read -r -a lines <<< "$output"
|
||||||
# First line of output is count
|
|
||||||
local count="${lines[0]}"
|
local count="${lines[0]}"
|
||||||
|
|
||||||
[ "$count" -eq 2 ]
|
[ "$count" -eq 2 ]
|
||||||
@@ -107,7 +100,6 @@ EOF
|
|||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Should have default paths
|
|
||||||
[[ "$output" == *"$HOME/Projects"* ]]
|
[[ "$output" == *"$HOME/Projects"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -119,6 +111,5 @@ EOF
|
|||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Should have default paths
|
|
||||||
[[ "$output" == *"$HOME/Projects"* ]]
|
[[ "$output" == *"$HOME/Projects"* ]]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
# Regression tests for previously fixed bugs
|
|
||||||
# Ensures历史bug不再复现
|
|
||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
@@ -9,12 +7,8 @@ setup() {
|
|||||||
mkdir -p "$HOME/.config/mole"
|
mkdir -p "$HOME/.config/mole"
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# 退出问题回归测试 (bb21bb1, 4b6c436, d75c34d)
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "find with non-existent directory doesn't cause script exit (pipefail bug)" {
|
@test "find with non-existent directory doesn't cause script exit (pipefail bug)" {
|
||||||
# 这个模式曾导致 lib/clean/user.sh 在 pipefail 模式下意外退出
|
|
||||||
result=$(bash -c '
|
result=$(bash -c '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
find /non/existent/dir -name "*.cache" 2>/dev/null || true
|
find /non/existent/dir -name "*.cache" 2>/dev/null || true
|
||||||
@@ -24,7 +18,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "browser directory check pattern is safe when directories don't exist" {
|
@test "browser directory check pattern is safe when directories don't exist" {
|
||||||
# 修复模式:先检查目录是否存在
|
|
||||||
result=$(bash -c '
|
result=$(bash -c '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
search_dirs=()
|
search_dirs=()
|
||||||
@@ -44,7 +37,6 @@ setup() {
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
search_dirs=()
|
search_dirs=()
|
||||||
|
|
||||||
# 这不应该执行且不应该报错
|
|
||||||
if [[ ${#search_dirs[@]} -gt 0 ]]; then
|
if [[ ${#search_dirs[@]} -gt 0 ]]; then
|
||||||
echo "should not reach here"
|
echo "should not reach here"
|
||||||
fi
|
fi
|
||||||
@@ -53,9 +45,6 @@ setup() {
|
|||||||
[[ "$result" == "survived" ]]
|
[[ "$result" == "survived" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ===============================================================
|
|
||||||
# 更新检查回归测试 (260254f, b61b3f7, 2a64cae, 7a9c946)
|
|
||||||
# ===============================================================
|
|
||||||
|
|
||||||
@test "version comparison works correctly" {
|
@test "version comparison works correctly" {
|
||||||
result=$(bash -c '
|
result=$(bash -c '
|
||||||
@@ -92,7 +81,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "network timeout prevents hanging (simulated)" {
|
@test "network timeout prevents hanging (simulated)" {
|
||||||
# curl 超时参数应该生效
|
|
||||||
# shellcheck disable=SC2016
|
# shellcheck disable=SC2016
|
||||||
result=$(timeout 5 bash -c '
|
result=$(timeout 5 bash -c '
|
||||||
result=$(curl -fsSL --connect-timeout 1 --max-time 2 "http://192.0.2.1:12345/test" 2>/dev/null || echo "failed")
|
result=$(curl -fsSL --connect-timeout 1 --max-time 2 "http://192.0.2.1:12345/test" 2>/dev/null || echo "failed")
|
||||||
@@ -113,9 +101,6 @@ setup() {
|
|||||||
[[ "$result" == "handled" ]]
|
[[ "$result" == "handled" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ===============================================================
|
|
||||||
# Pipefail 模式安全模式测试
|
|
||||||
# ===============================================================
|
|
||||||
|
|
||||||
@test "grep with no match doesn't cause exit in pipefail mode" {
|
@test "grep with no match doesn't cause exit in pipefail mode" {
|
||||||
result=$(bash -c '
|
result=$(bash -c '
|
||||||
@@ -145,9 +130,6 @@ setup() {
|
|||||||
[[ "$result" == "1" ]]
|
[[ "$result" == "1" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ===============================================================
|
|
||||||
# 实际场景回归测试
|
|
||||||
# ===============================================================
|
|
||||||
|
|
||||||
@test "safe_remove pattern doesn't fail on non-existent path" {
|
@test "safe_remove pattern doesn't fail on non-existent path" {
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
|
|||||||
44
tests/run.sh
44
tests/run.sh
@@ -1,44 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
||||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
|
||||||
|
|
||||||
if command -v shellcheck > /dev/null 2>&1; then
|
|
||||||
SHELLCHECK_TARGETS=()
|
|
||||||
while IFS= read -r file; do
|
|
||||||
SHELLCHECK_TARGETS+=("$file")
|
|
||||||
done < <(find "$PROJECT_ROOT/tests" -type f \( -name '*.bats' -o -name '*.sh' \) | sort)
|
|
||||||
|
|
||||||
if [[ ${#SHELLCHECK_TARGETS[@]} -gt 0 ]]; then
|
|
||||||
shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${SHELLCHECK_TARGETS[@]}"
|
|
||||||
else
|
|
||||||
echo "No shell files to lint under tests/." >&2
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "shellcheck not found; skipping linting." >&2
|
|
||||||
fi
|
|
||||||
|
|
||||||
if command -v bats > /dev/null 2>&1; then
|
|
||||||
cd "$PROJECT_ROOT"
|
|
||||||
|
|
||||||
if [[ -z "${TERM:-}" ]]; then
|
|
||||||
export TERM="xterm-256color"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ $# -eq 0 ]]; then
|
|
||||||
set -- tests
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -t 1 ]]; then
|
|
||||||
bats -p "$@"
|
|
||||||
else
|
|
||||||
TERM="${TERM:-xterm-256color}" bats --tap "$@"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
cat << 'EOF' >&2
|
|
||||||
bats is required to run Mole's test suite.
|
|
||||||
Install via Homebrew with 'brew install bats-core' or via npm with 'npm install -g bats'.
|
|
||||||
EOF
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
# Tests for safe_* functions in lib/core/common.sh
|
|
||||||
|
|
||||||
setup_file() {
|
setup_file() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
@@ -31,7 +30,6 @@ teardown() {
|
|||||||
rm -rf "$TEST_DIR"
|
rm -rf "$TEST_DIR"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test validate_path_for_deletion
|
|
||||||
@test "validate_path_for_deletion rejects empty path" {
|
@test "validate_path_for_deletion rejects empty path" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; validate_path_for_deletion ''"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; validate_path_for_deletion ''"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
@@ -63,7 +61,6 @@ teardown() {
|
|||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test safe_remove
|
|
||||||
@test "safe_remove validates path before deletion" {
|
@test "safe_remove validates path before deletion" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_remove '/System/test' 2>&1"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_remove '/System/test' 2>&1"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
@@ -94,14 +91,11 @@ teardown() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "safe_remove in silent mode suppresses error output" {
|
@test "safe_remove in silent mode suppresses error output" {
|
||||||
# Try to remove system directory in silent mode
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_remove '/System/test' true 2>&1"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_remove '/System/test' true 2>&1"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
# Should not output error in silent mode
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# Test safe_find_delete
|
|
||||||
@test "safe_find_delete validates base directory" {
|
@test "safe_find_delete validates base directory" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_find_delete '/nonexistent' '*.tmp' 7 'f' 2>&1"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_find_delete '/nonexistent' '*.tmp' 7 'f' 2>&1"
|
||||||
[ "$status" -eq 1 ]
|
[ "$status" -eq 1 ]
|
||||||
@@ -127,21 +121,18 @@ teardown() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "safe_find_delete deletes old files" {
|
@test "safe_find_delete deletes old files" {
|
||||||
# Create test files with different ages
|
|
||||||
local old_file="$TEST_DIR/old.tmp"
|
local old_file="$TEST_DIR/old.tmp"
|
||||||
local new_file="$TEST_DIR/new.tmp"
|
local new_file="$TEST_DIR/new.tmp"
|
||||||
|
|
||||||
touch "$old_file"
|
touch "$old_file"
|
||||||
touch "$new_file"
|
touch "$new_file"
|
||||||
|
|
||||||
# Make old_file 8 days old (requires touch -t)
|
|
||||||
touch -t "$(date -v-8d '+%Y%m%d%H%M.%S' 2>/dev/null || date -d '8 days ago' '+%Y%m%d%H%M.%S')" "$old_file" 2>/dev/null || true
|
touch -t "$(date -v-8d '+%Y%m%d%H%M.%S' 2>/dev/null || date -d '8 days ago' '+%Y%m%d%H%M.%S')" "$old_file" 2>/dev/null || true
|
||||||
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_find_delete '$TEST_DIR' '*.tmp' 7 'f'"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; safe_find_delete '$TEST_DIR' '*.tmp' 7 'f'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test MOLE constants are defined
|
|
||||||
@test "MOLE_* constants are defined" {
|
@test "MOLE_* constants are defined" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; echo \$MOLE_TEMP_FILE_AGE_DAYS"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; echo \$MOLE_TEMP_FILE_AGE_DAYS"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|||||||
@@ -26,66 +26,46 @@ setup() {
|
|||||||
mkdir -p "$HOME"
|
mkdir -p "$HOME"
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "format.sh --check validates script formatting" {
|
@test "check.sh --help shows usage information" {
|
||||||
if ! command -v shfmt > /dev/null 2>&1; then
|
run "$PROJECT_ROOT/scripts/check.sh" --help
|
||||||
skip "shfmt not installed"
|
|
||||||
fi
|
|
||||||
|
|
||||||
run "$PROJECT_ROOT/scripts/format.sh" --check
|
|
||||||
# May pass or fail depending on formatting, but should not error
|
|
||||||
[[ "$status" -eq 0 || "$status" -eq 1 ]]
|
|
||||||
}
|
|
||||||
|
|
||||||
@test "format.sh --help shows usage information" {
|
|
||||||
run "$PROJECT_ROOT/scripts/format.sh" --help
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
[[ "$output" == *"Usage"* ]]
|
[[ "$output" == *"Usage"* ]]
|
||||||
|
[[ "$output" == *"--format"* ]]
|
||||||
|
[[ "$output" == *"--no-format"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "check.sh script exists and is valid" {
|
@test "check.sh script exists and is valid" {
|
||||||
# Don't actually run check.sh in tests - it would recursively run all bats tests!
|
|
||||||
# Just verify the script is valid bash
|
|
||||||
[ -f "$PROJECT_ROOT/scripts/check.sh" ]
|
[ -f "$PROJECT_ROOT/scripts/check.sh" ]
|
||||||
[ -x "$PROJECT_ROOT/scripts/check.sh" ]
|
[ -x "$PROJECT_ROOT/scripts/check.sh" ]
|
||||||
|
|
||||||
# Verify it has the expected structure
|
run bash -c "grep -q 'Mole Check' '$PROJECT_ROOT/scripts/check.sh'"
|
||||||
run bash -c "grep -q 'Quality Checks' '$PROJECT_ROOT/scripts/check.sh'"
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "build-analyze.sh detects missing Go toolchain" {
|
@test "test.sh script exists and is valid" {
|
||||||
if command -v go > /dev/null 2>&1; then
|
[ -f "$PROJECT_ROOT/scripts/test.sh" ]
|
||||||
# Go is installed, verify script doesn't error out
|
[ -x "$PROJECT_ROOT/scripts/test.sh" ]
|
||||||
# (Don't actually build - too slow)
|
|
||||||
run bash -c "grep -q 'go build' '$PROJECT_ROOT/scripts/build-analyze.sh'"
|
run bash -c "grep -q 'Mole Test Runner' '$PROJECT_ROOT/scripts/test.sh'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
else
|
|
||||||
# Go is missing, verify proper error handling
|
|
||||||
run "$PROJECT_ROOT/scripts/build-analyze.sh"
|
|
||||||
[ "$status" -ne 0 ]
|
|
||||||
[[ "$output" == *"Go not installed"* ]]
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "build-analyze.sh has version info support" {
|
@test "test.sh includes test lint step" {
|
||||||
# Don't actually build in tests - too slow (10-30 seconds)
|
run bash -c "grep -q 'Test script lint' '$PROJECT_ROOT/scripts/test.sh'"
|
||||||
# Just verify the script contains version info logic
|
|
||||||
run bash -c "grep -q 'VERSION=' '$PROJECT_ROOT/scripts/build-analyze.sh'"
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
run bash -c "grep -q 'BUILD_TIME=' '$PROJECT_ROOT/scripts/build-analyze.sh'"
|
}
|
||||||
|
|
||||||
|
@test "Makefile has build target for Go binaries" {
|
||||||
|
run bash -c "grep -q 'go build' '$PROJECT_ROOT/Makefile'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "setup-quick-launchers.sh has detect_mo function" {
|
@test "setup-quick-launchers.sh has detect_mo function" {
|
||||||
# Don't actually run the script - it opens Raycast and creates files
|
|
||||||
# Just verify it contains the detection logic
|
|
||||||
run bash -c "grep -q 'detect_mo()' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
run bash -c "grep -q 'detect_mo()' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "setup-quick-launchers.sh has Raycast script generation" {
|
@test "setup-quick-launchers.sh has Raycast script generation" {
|
||||||
# Don't actually run the script - it opens Raycast
|
|
||||||
# Just verify it contains Raycast workflow creation logic
|
|
||||||
run bash -c "grep -q 'create_raycast_commands' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
run bash -c "grep -q 'create_raycast_commands' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
run bash -c "grep -q 'write_raycast_script' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
run bash -c "grep -q 'write_raycast_script' '$PROJECT_ROOT/scripts/setup-quick-launchers.sh'"
|
||||||
|
|||||||
@@ -6,41 +6,31 @@ setup_file() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
# Source common.sh first (required by sudo_manager)
|
|
||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/core/sudo.sh"
|
source "$PROJECT_ROOT/lib/core/sudo.sh"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test sudo session detection
|
|
||||||
@test "has_sudo_session returns 1 when no sudo session" {
|
@test "has_sudo_session returns 1 when no sudo session" {
|
||||||
# Most test environments don't have active sudo
|
# shellcheck disable=SC2329
|
||||||
# This test verifies the function handles no-sudo gracefully
|
sudo() { return 1; }
|
||||||
|
export -f sudo
|
||||||
run has_sudo_session
|
run has_sudo_session
|
||||||
# Either no sudo (status 1) or sudo available (status 0)
|
|
||||||
# Both are valid - we just check it doesn't crash
|
|
||||||
[ "$status" -eq 0 ] || [ "$status" -eq 1 ]
|
[ "$status" -eq 0 ] || [ "$status" -eq 1 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test sudo keepalive lifecycle
|
|
||||||
@test "sudo keepalive functions don't crash" {
|
@test "sudo keepalive functions don't crash" {
|
||||||
# Test that keepalive functions can be called without errors
|
|
||||||
# We can't actually test sudo without prompting, but we can test structure
|
|
||||||
|
|
||||||
# Mock sudo to avoid actual auth
|
|
||||||
# shellcheck disable=SC2329
|
# shellcheck disable=SC2329
|
||||||
function sudo() {
|
function sudo() {
|
||||||
return 1 # Simulate no sudo available
|
return 1 # Simulate no sudo available
|
||||||
}
|
}
|
||||||
export -f sudo
|
export -f sudo
|
||||||
|
|
||||||
# These should not crash even without real sudo
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; has_sudo_session"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; has_sudo_session"
|
||||||
[ "$status" -eq 1 ] # Expected: no sudo session
|
[ "$status" -eq 1 ] # Expected: no sudo session
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test keepalive PID management
|
|
||||||
@test "_start_sudo_keepalive returns a PID" {
|
@test "_start_sudo_keepalive returns a PID" {
|
||||||
# Mock sudo to simulate successful session
|
|
||||||
function sudo() {
|
function sudo() {
|
||||||
case "$1" in
|
case "$1" in
|
||||||
-n) return 0 ;; # Simulate valid sudo session
|
-n) return 0 ;; # Simulate valid sudo session
|
||||||
@@ -50,19 +40,15 @@ setup() {
|
|||||||
}
|
}
|
||||||
export -f sudo
|
export -f sudo
|
||||||
|
|
||||||
# Start keepalive (will run in background)
|
|
||||||
local pid
|
local pid
|
||||||
pid=$(bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; _start_sudo_keepalive")
|
pid=$(bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; _start_sudo_keepalive")
|
||||||
|
|
||||||
# Should return a PID (number)
|
|
||||||
[[ "$pid" =~ ^[0-9]+$ ]]
|
[[ "$pid" =~ ^[0-9]+$ ]]
|
||||||
|
|
||||||
# Clean up background process
|
|
||||||
kill "$pid" 2>/dev/null || true
|
kill "$pid" 2>/dev/null || true
|
||||||
wait "$pid" 2>/dev/null || true
|
wait "$pid" 2>/dev/null || true
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test _stop_sudo_keepalive
|
|
||||||
@test "_stop_sudo_keepalive handles invalid PID gracefully" {
|
@test "_stop_sudo_keepalive handles invalid PID gracefully" {
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; _stop_sudo_keepalive ''"
|
run bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; _stop_sudo_keepalive ''"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
@@ -73,16 +59,13 @@ setup() {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Test stop_sudo_session cleanup
|
|
||||||
@test "stop_sudo_session cleans up keepalive process" {
|
@test "stop_sudo_session cleans up keepalive process" {
|
||||||
# Set a fake PID
|
|
||||||
export MOLE_SUDO_KEEPALIVE_PID="99999"
|
export MOLE_SUDO_KEEPALIVE_PID="99999"
|
||||||
|
|
||||||
run bash -c "export MOLE_SUDO_KEEPALIVE_PID=99999; source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; stop_sudo_session"
|
run bash -c "export MOLE_SUDO_KEEPALIVE_PID=99999; source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; stop_sudo_session"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test global state management
|
|
||||||
@test "sudo manager initializes global state correctly" {
|
@test "sudo manager initializes global state correctly" {
|
||||||
result=$(bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; echo \$MOLE_SUDO_ESTABLISHED")
|
result=$(bash -c "source '$PROJECT_ROOT/lib/core/common.sh'; source '$PROJECT_ROOT/lib/core/sudo.sh'; echo \$MOLE_SUDO_ESTABLISHED")
|
||||||
[[ "$result" == "false" ]] || [[ -z "$result" ]]
|
[[ "$result" == "false" ]] || [[ -z "$result" ]]
|
||||||
|
|||||||
@@ -234,7 +234,6 @@ source "$PROJECT_ROOT/lib/clean/brew.sh"
|
|||||||
mkdir -p "$HOME/.cache/mole"
|
mkdir -p "$HOME/.cache/mole"
|
||||||
rm -f "$HOME/.cache/mole/brew_last_cleanup"
|
rm -f "$HOME/.cache/mole/brew_last_cleanup"
|
||||||
|
|
||||||
# Create a large enough Homebrew cache to pass pre-check (>50MB)
|
|
||||||
mkdir -p "$HOME/Library/Caches/Homebrew"
|
mkdir -p "$HOME/Library/Caches/Homebrew"
|
||||||
dd if=/dev/zero of="$HOME/Library/Caches/Homebrew/test.tar.gz" bs=1024 count=51200 2>/dev/null
|
dd if=/dev/zero of="$HOME/Library/Caches/Homebrew/test.tar.gz" bs=1024 count=51200 2>/dev/null
|
||||||
|
|
||||||
@@ -262,7 +261,6 @@ brew() {
|
|||||||
|
|
||||||
clean_homebrew
|
clean_homebrew
|
||||||
|
|
||||||
# Cleanup test files
|
|
||||||
rm -rf "$HOME/Library/Caches/Homebrew"
|
rm -rf "$HOME/Library/Caches/Homebrew"
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
@@ -549,7 +547,6 @@ EOF
|
|||||||
mkdir -p "$state_dir/com.example.app.savedState"
|
mkdir -p "$state_dir/com.example.app.savedState"
|
||||||
touch "$state_dir/com.example.app.savedState/data.plist"
|
touch "$state_dir/com.example.app.savedState/data.plist"
|
||||||
|
|
||||||
# Make the file old (31+ days) - MOLE_SAVED_STATE_AGE_DAYS now defaults to 30
|
|
||||||
touch -t 202301010000 "$state_dir/com.example.app.savedState/data.plist"
|
touch -t 202301010000 "$state_dir/com.example.app.savedState/data.plist"
|
||||||
|
|
||||||
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
||||||
@@ -584,7 +581,6 @@ EOF
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
# Mock qlmanage and cleanup_path to avoid system calls
|
|
||||||
qlmanage() { return 0; }
|
qlmanage() { return 0; }
|
||||||
cleanup_path() {
|
cleanup_path() {
|
||||||
local path="$1"
|
local path="$1"
|
||||||
@@ -624,7 +620,6 @@ echo "$size"
|
|||||||
EOF
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
# Should be >= 10 KB
|
|
||||||
[ "$output" -ge 10 ]
|
[ "$output" -ge 10 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -647,9 +642,6 @@ EOF
|
|||||||
[[ "$output" == *"Repaired 2 corrupted preference files"* ]]
|
[[ "$output" == *"Repaired 2 corrupted preference files"* ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Tests for new system cleaning features (v1.15.2)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "clean_deep_system cleans memory exception reports" {
|
@test "clean_deep_system cleans memory exception reports" {
|
||||||
run bash --noprofile --norc <<'EOF'
|
run bash --noprofile --norc <<'EOF'
|
||||||
@@ -711,12 +703,9 @@ EOF
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_deep_system validates symbolication cache size before cleaning" {
|
@test "clean_deep_system validates symbolication cache size before cleaning" {
|
||||||
# This test verifies the size threshold logic directly
|
|
||||||
# Testing that sizes > 1GB trigger cleanup
|
|
||||||
run bash --noprofile --norc <<'EOF'
|
run bash --noprofile --norc <<'EOF'
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Simulate size check logic
|
|
||||||
symbolication_size_mb="2048" # 2GB
|
symbolication_size_mb="2048" # 2GB
|
||||||
|
|
||||||
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
||||||
@@ -735,11 +724,9 @@ EOF
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_deep_system skips symbolication cache when small" {
|
@test "clean_deep_system skips symbolication cache when small" {
|
||||||
# This test verifies sizes < 1GB don't trigger cleanup
|
|
||||||
run bash --noprofile --norc <<'EOF'
|
run bash --noprofile --norc <<'EOF'
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Simulate size check logic with small cache
|
|
||||||
symbolication_size_mb="500" # 500MB < 1GB
|
symbolication_size_mb="500" # 500MB < 1GB
|
||||||
|
|
||||||
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
||||||
@@ -758,11 +745,9 @@ EOF
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "clean_deep_system handles symbolication cache size check failure" {
|
@test "clean_deep_system handles symbolication cache size check failure" {
|
||||||
# This test verifies invalid/empty size values don't trigger cleanup
|
|
||||||
run bash --noprofile --norc <<'EOF'
|
run bash --noprofile --norc <<'EOF'
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Simulate size check logic with empty/invalid value
|
|
||||||
symbolication_size_mb="" # Empty - simulates failure
|
symbolication_size_mb="" # Empty - simulates failure
|
||||||
|
|
||||||
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
if [[ -n "$symbolication_size_mb" && "$symbolication_size_mb" =~ ^[0-9]+$ ]]; then
|
||||||
@@ -785,12 +770,7 @@ EOF
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Removed tests for opt_startup_items_cleanup
|
|
||||||
# This optimization was removed due to high risk of deleting legitimate app helpers
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Tests for new system optimizations (v1.16.3+)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "opt_memory_pressure_relief skips when pressure is normal" {
|
@test "opt_memory_pressure_relief skips when pressure is normal" {
|
||||||
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
||||||
@@ -798,7 +778,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock memory_pressure to indicate normal pressure
|
|
||||||
memory_pressure() {
|
memory_pressure() {
|
||||||
echo "System-wide memory free percentage: 50%"
|
echo "System-wide memory free percentage: 50%"
|
||||||
return 0
|
return 0
|
||||||
@@ -818,14 +797,12 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock memory_pressure to indicate high pressure
|
|
||||||
memory_pressure() {
|
memory_pressure() {
|
||||||
echo "System-wide memory free percentage: warning"
|
echo "System-wide memory free percentage: warning"
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
export -f memory_pressure
|
export -f memory_pressure
|
||||||
|
|
||||||
# Mock sudo purge
|
|
||||||
sudo() {
|
sudo() {
|
||||||
if [[ "$1" == "purge" ]]; then
|
if [[ "$1" == "purge" ]]; then
|
||||||
echo "purge:executed"
|
echo "purge:executed"
|
||||||
@@ -849,13 +826,11 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock route to indicate healthy routing
|
|
||||||
route() {
|
route() {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
export -f route
|
export -f route
|
||||||
|
|
||||||
# Mock dscacheutil to indicate healthy DNS
|
|
||||||
dscacheutil() {
|
dscacheutil() {
|
||||||
echo "ip_address: 93.184.216.34"
|
echo "ip_address: 93.184.216.34"
|
||||||
return 0
|
return 0
|
||||||
@@ -875,7 +850,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock route to fail (network issue)
|
|
||||||
route() {
|
route() {
|
||||||
if [[ "$2" == "get" ]]; then
|
if [[ "$2" == "get" ]]; then
|
||||||
return 1
|
return 1
|
||||||
@@ -888,7 +862,6 @@ route() {
|
|||||||
}
|
}
|
||||||
export -f route
|
export -f route
|
||||||
|
|
||||||
# Mock sudo
|
|
||||||
sudo() {
|
sudo() {
|
||||||
if [[ "$1" == "route" || "$1" == "arp" ]]; then
|
if [[ "$1" == "route" || "$1" == "arp" ]]; then
|
||||||
shift
|
shift
|
||||||
@@ -899,14 +872,12 @@ sudo() {
|
|||||||
}
|
}
|
||||||
export -f sudo
|
export -f sudo
|
||||||
|
|
||||||
# Mock arp
|
|
||||||
arp() {
|
arp() {
|
||||||
echo "arp:cleared"
|
echo "arp:cleared"
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
export -f arp
|
export -f arp
|
||||||
|
|
||||||
# Mock dscacheutil
|
|
||||||
dscacheutil() {
|
dscacheutil() {
|
||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
@@ -926,7 +897,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock stat to return correct owner
|
|
||||||
stat() {
|
stat() {
|
||||||
if [[ "$2" == "%Su" ]]; then
|
if [[ "$2" == "%Su" ]]; then
|
||||||
echo "$USER"
|
echo "$USER"
|
||||||
@@ -936,7 +906,6 @@ stat() {
|
|||||||
}
|
}
|
||||||
export -f stat
|
export -f stat
|
||||||
|
|
||||||
# Mock test to indicate directories are writable
|
|
||||||
test() {
|
test() {
|
||||||
if [[ "$1" == "-e" || "$1" == "-w" ]]; then
|
if [[ "$1" == "-e" || "$1" == "-w" ]]; then
|
||||||
return 0
|
return 0
|
||||||
@@ -958,7 +927,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock stat to return wrong owner
|
|
||||||
stat() {
|
stat() {
|
||||||
if [[ "$2" == "%Su" ]]; then
|
if [[ "$2" == "%Su" ]]; then
|
||||||
echo "root"
|
echo "root"
|
||||||
@@ -968,7 +936,6 @@ stat() {
|
|||||||
}
|
}
|
||||||
export -f stat
|
export -f stat
|
||||||
|
|
||||||
# Mock sudo diskutil
|
|
||||||
sudo() {
|
sudo() {
|
||||||
if [[ "$1" == "diskutil" && "$2" == "resetUserPermissions" ]]; then
|
if [[ "$1" == "diskutil" && "$2" == "resetUserPermissions" ]]; then
|
||||||
echo "diskutil:resetUserPermissions"
|
echo "diskutil:resetUserPermissions"
|
||||||
@@ -1000,7 +967,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock system_profiler to indicate Bluetooth keyboard connected
|
|
||||||
system_profiler() {
|
system_profiler() {
|
||||||
cat << 'PROFILER_OUT'
|
cat << 'PROFILER_OUT'
|
||||||
Bluetooth:
|
Bluetooth:
|
||||||
@@ -1025,7 +991,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock system_profiler to indicate Bluetooth headphones (no HID)
|
|
||||||
system_profiler() {
|
system_profiler() {
|
||||||
cat << 'PROFILER_OUT'
|
cat << 'PROFILER_OUT'
|
||||||
Bluetooth:
|
Bluetooth:
|
||||||
@@ -1037,7 +1002,6 @@ PROFILER_OUT
|
|||||||
}
|
}
|
||||||
export -f system_profiler
|
export -f system_profiler
|
||||||
|
|
||||||
# Mock pgrep to indicate Spotify is running
|
|
||||||
pgrep() {
|
pgrep() {
|
||||||
if [[ "$2" == "Spotify" ]]; then
|
if [[ "$2" == "Spotify" ]]; then
|
||||||
echo "12345"
|
echo "12345"
|
||||||
@@ -1060,7 +1024,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock system_profiler to return Bluetooth audio as default output (Method 1)
|
|
||||||
system_profiler() {
|
system_profiler() {
|
||||||
if [[ "$1" == "SPAudioDataType" ]]; then
|
if [[ "$1" == "SPAudioDataType" ]]; then
|
||||||
cat << 'AUDIO_OUT'
|
cat << 'AUDIO_OUT'
|
||||||
@@ -1082,7 +1045,6 @@ AUDIO_OUT
|
|||||||
}
|
}
|
||||||
export -f system_profiler
|
export -f system_profiler
|
||||||
|
|
||||||
# Mock awk to process audio output
|
|
||||||
awk() {
|
awk() {
|
||||||
if [[ "${*}" == *"Default Output Device"* ]]; then
|
if [[ "${*}" == *"Default Output Device"* ]]; then
|
||||||
cat << 'AWK_OUT'
|
cat << 'AWK_OUT'
|
||||||
@@ -1111,7 +1073,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock system_profiler (no HID devices, just audio)
|
|
||||||
system_profiler() {
|
system_profiler() {
|
||||||
cat << 'PROFILER_OUT'
|
cat << 'PROFILER_OUT'
|
||||||
Bluetooth:
|
Bluetooth:
|
||||||
@@ -1123,7 +1084,6 @@ PROFILER_OUT
|
|||||||
}
|
}
|
||||||
export -f system_profiler
|
export -f system_profiler
|
||||||
|
|
||||||
# Mock pgrep (no media apps running)
|
|
||||||
pgrep() {
|
pgrep() {
|
||||||
if [[ "$2" == "bluetoothd" ]]; then
|
if [[ "$2" == "bluetoothd" ]]; then
|
||||||
return 1 # bluetoothd not running after TERM
|
return 1 # bluetoothd not running after TERM
|
||||||
@@ -1132,7 +1092,6 @@ pgrep() {
|
|||||||
}
|
}
|
||||||
export -f pgrep
|
export -f pgrep
|
||||||
|
|
||||||
# Mock sudo pkill
|
|
||||||
sudo() {
|
sudo() {
|
||||||
if [[ "$1" == "pkill" ]]; then
|
if [[ "$1" == "pkill" ]]; then
|
||||||
echo "pkill:bluetoothd:$2"
|
echo "pkill:bluetoothd:$2"
|
||||||
@@ -1158,7 +1117,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
source "$PROJECT_ROOT/lib/optimize/tasks.sh"
|
||||||
|
|
||||||
# Mock mdutil
|
|
||||||
mdutil() {
|
mdutil() {
|
||||||
if [[ "$1" == "-s" ]]; then
|
if [[ "$1" == "-s" ]]; then
|
||||||
echo "Indexing enabled."
|
echo "Indexing enabled."
|
||||||
@@ -1168,13 +1126,11 @@ mdutil() {
|
|||||||
}
|
}
|
||||||
export -f mdutil
|
export -f mdutil
|
||||||
|
|
||||||
# Mock mdfind (fast search)
|
|
||||||
mdfind() {
|
mdfind() {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
export -f mdfind
|
export -f mdfind
|
||||||
|
|
||||||
# Mock date to simulate fast search (< 3 seconds)
|
|
||||||
date() {
|
date() {
|
||||||
echo "1000"
|
echo "1000"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,17 +1,11 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
# Timeout functionality tests
|
|
||||||
# Tests for lib/core/timeout.sh
|
|
||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
export PROJECT_ROOT
|
export PROJECT_ROOT
|
||||||
export MO_DEBUG=0 # Disable debug output for cleaner tests
|
export MO_DEBUG=0
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Basic Timeout Functionality
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: command completes before timeout" {
|
@test "run_with_timeout: command completes before timeout" {
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -48,10 +42,6 @@ setup() {
|
|||||||
[[ "$result" == "no_timeout" ]]
|
[[ "$result" == "no_timeout" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Exit Code Handling
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: preserves command exit code on success" {
|
@test "run_with_timeout: preserves command exit code on success" {
|
||||||
bash -c "
|
bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -65,7 +55,7 @@ setup() {
|
|||||||
@test "run_with_timeout: preserves command exit code on failure" {
|
@test "run_with_timeout: preserves command exit code on failure" {
|
||||||
set +e
|
set +e
|
||||||
bash -c "
|
bash -c "
|
||||||
set +e # Don't exit on error
|
set +e
|
||||||
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
||||||
run_with_timeout 5 false
|
run_with_timeout 5 false
|
||||||
exit \$?
|
exit \$?
|
||||||
@@ -76,8 +66,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "run_with_timeout: returns 124 on timeout (if using gtimeout)" {
|
@test "run_with_timeout: returns 124 on timeout (if using gtimeout)" {
|
||||||
# This test only passes if gtimeout/timeout is available
|
|
||||||
# Skip if using shell fallback (can't guarantee exit code 124 in all cases)
|
|
||||||
if ! command -v gtimeout >/dev/null 2>&1 && ! command -v timeout >/dev/null 2>&1; then
|
if ! command -v gtimeout >/dev/null 2>&1 && ! command -v timeout >/dev/null 2>&1; then
|
||||||
skip "gtimeout/timeout not available"
|
skip "gtimeout/timeout not available"
|
||||||
fi
|
fi
|
||||||
@@ -94,12 +82,7 @@ setup() {
|
|||||||
[[ $exit_code -eq 124 ]]
|
[[ $exit_code -eq 124 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Timeout Behavior
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: kills long-running command" {
|
@test "run_with_timeout: kills long-running command" {
|
||||||
# Command should be killed after 2 seconds
|
|
||||||
start_time=$(date +%s)
|
start_time=$(date +%s)
|
||||||
set +e
|
set +e
|
||||||
bash -c "
|
bash -c "
|
||||||
@@ -111,13 +94,10 @@ setup() {
|
|||||||
end_time=$(date +%s)
|
end_time=$(date +%s)
|
||||||
duration=$((end_time - start_time))
|
duration=$((end_time - start_time))
|
||||||
|
|
||||||
# Should complete in ~2 seconds, not 30
|
|
||||||
# Allow some margin (up to 5 seconds for slow systems)
|
|
||||||
[[ $duration -lt 10 ]]
|
[[ $duration -lt 10 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "run_with_timeout: handles fast-completing commands" {
|
@test "run_with_timeout: handles fast-completing commands" {
|
||||||
# Fast command should complete immediately
|
|
||||||
start_time=$(date +%s)
|
start_time=$(date +%s)
|
||||||
bash -c "
|
bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -127,14 +107,9 @@ setup() {
|
|||||||
end_time=$(date +%s)
|
end_time=$(date +%s)
|
||||||
duration=$((end_time - start_time))
|
duration=$((end_time - start_time))
|
||||||
|
|
||||||
# Should complete in ~0 seconds
|
|
||||||
[[ $duration -lt 3 ]]
|
[[ $duration -lt 3 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Pipefail Compatibility
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: works in pipefail mode" {
|
@test "run_with_timeout: works in pipefail mode" {
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -154,10 +129,6 @@ setup() {
|
|||||||
[[ "$result" == "survived" ]]
|
[[ "$result" == "survived" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Command Arguments
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: handles commands with arguments" {
|
@test "run_with_timeout: handles commands with arguments" {
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -176,10 +147,6 @@ setup() {
|
|||||||
[[ "$result" == "hello world" ]]
|
[[ "$result" == "hello world" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Debug Logging
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "run_with_timeout: debug logging when MO_DEBUG=1" {
|
@test "run_with_timeout: debug logging when MO_DEBUG=1" {
|
||||||
output=$(bash -c "
|
output=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -187,29 +154,20 @@ setup() {
|
|||||||
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
||||||
run_with_timeout 5 echo 'test' 2>&1
|
run_with_timeout 5 echo 'test' 2>&1
|
||||||
")
|
")
|
||||||
# Should contain debug output
|
|
||||||
[[ "$output" =~ TIMEOUT ]]
|
[[ "$output" =~ TIMEOUT ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "run_with_timeout: no debug logging when MO_DEBUG=0" {
|
@test "run_with_timeout: no debug logging when MO_DEBUG=0" {
|
||||||
# When MO_DEBUG=0, no debug messages should appear during function execution
|
|
||||||
# (Initialization messages may appear if module is loaded for first time)
|
|
||||||
output=$(bash -c "
|
output=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
export MO_DEBUG=0
|
export MO_DEBUG=0
|
||||||
unset MO_TIMEOUT_INITIALIZED # Force re-initialization
|
unset MO_TIMEOUT_INITIALIZED
|
||||||
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
source '$PROJECT_ROOT/lib/core/timeout.sh'
|
||||||
# Capture only the function call output, not initialization
|
|
||||||
run_with_timeout 5 echo 'test'
|
run_with_timeout 5 echo 'test'
|
||||||
" 2>/dev/null) # Discard stderr (initialization messages)
|
" 2>/dev/null)
|
||||||
# Should only have command output
|
|
||||||
[[ "$output" == "test" ]]
|
[[ "$output" == "test" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# =================================================================
|
|
||||||
# Module Loading
|
|
||||||
# =================================================================
|
|
||||||
|
|
||||||
@test "timeout.sh: prevents multiple sourcing" {
|
@test "timeout.sh: prevents multiple sourcing" {
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|||||||
2
tests/tmp-update.2FMNHj/fake-script-dir/mole
Executable file
2
tests/tmp-update.2FMNHj/fake-script-dir/mole
Executable file
@@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
echo "Already on latest version"
|
||||||
@@ -24,7 +24,6 @@ create_fake_utils() {
|
|||||||
local dir="$1"
|
local dir="$1"
|
||||||
mkdir -p "$dir"
|
mkdir -p "$dir"
|
||||||
|
|
||||||
# Fake sudo
|
|
||||||
cat > "$dir/sudo" <<'SCRIPT'
|
cat > "$dir/sudo" <<'SCRIPT'
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
if [[ "$1" == "-n" || "$1" == "-v" ]]; then
|
if [[ "$1" == "-n" || "$1" == "-v" ]]; then
|
||||||
@@ -34,7 +33,6 @@ exec "$@"
|
|||||||
SCRIPT
|
SCRIPT
|
||||||
chmod +x "$dir/sudo"
|
chmod +x "$dir/sudo"
|
||||||
|
|
||||||
# Fake bioutil
|
|
||||||
cat > "$dir/bioutil" <<'SCRIPT'
|
cat > "$dir/bioutil" <<'SCRIPT'
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
if [[ "$1" == "-r" ]]; then
|
if [[ "$1" == "-r" ]]; then
|
||||||
@@ -49,7 +47,6 @@ SCRIPT
|
|||||||
@test "touchid status reflects pam file contents" {
|
@test "touchid status reflects pam file contents" {
|
||||||
pam_file="$HOME/pam_test"
|
pam_file="$HOME/pam_test"
|
||||||
cat > "$pam_file" <<'EOF'
|
cat > "$pam_file" <<'EOF'
|
||||||
# comment
|
|
||||||
auth sufficient pam_opendirectory.so
|
auth sufficient pam_opendirectory.so
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
@@ -69,7 +66,6 @@ EOF
|
|||||||
@test "enable_touchid inserts pam_tid line in pam file" {
|
@test "enable_touchid inserts pam_tid line in pam file" {
|
||||||
pam_file="$HOME/pam_enable"
|
pam_file="$HOME/pam_enable"
|
||||||
cat > "$pam_file" <<'EOF'
|
cat > "$pam_file" <<'EOF'
|
||||||
# test pam
|
|
||||||
auth sufficient pam_opendirectory.so
|
auth sufficient pam_opendirectory.so
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
|
|||||||
@@ -7,26 +7,20 @@ setup_file() {
|
|||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
export MOLE_BASE_LOADED=1 # mock base loaded
|
export MOLE_BASE_LOADED=1 # mock base loaded
|
||||||
# Mocking base functions used in ui.sh if any (mostly spinner/logging stuff, unlikely to affect read_key)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "read_key maps j/k/h/l to navigation" {
|
@test "read_key maps j/k/h/l to navigation" {
|
||||||
# Source the UI library
|
|
||||||
source "$PROJECT_ROOT/lib/core/ui.sh"
|
source "$PROJECT_ROOT/lib/core/ui.sh"
|
||||||
|
|
||||||
# Test j -> DOWN
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'j' | read_key"
|
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'j' | read_key"
|
||||||
[ "$output" = "DOWN" ]
|
[ "$output" = "DOWN" ]
|
||||||
|
|
||||||
# Test k -> UP
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'k' | read_key"
|
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'k' | read_key"
|
||||||
[ "$output" = "UP" ]
|
[ "$output" = "UP" ]
|
||||||
|
|
||||||
# Test h -> LEFT
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'h' | read_key"
|
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'h' | read_key"
|
||||||
[ "$output" = "LEFT" ]
|
[ "$output" = "LEFT" ]
|
||||||
|
|
||||||
# Test l -> RIGHT
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'l' | read_key"
|
run bash -c "source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'l' | read_key"
|
||||||
[ "$output" = "RIGHT" ]
|
[ "$output" = "RIGHT" ]
|
||||||
}
|
}
|
||||||
@@ -40,7 +34,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "read_key respects MOLE_READ_KEY_FORCE_CHAR" {
|
@test "read_key respects MOLE_READ_KEY_FORCE_CHAR" {
|
||||||
# When force char is on, j should return CHAR:j
|
|
||||||
run bash -c "export MOLE_READ_KEY_FORCE_CHAR=1; source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'j' | read_key"
|
run bash -c "export MOLE_READ_KEY_FORCE_CHAR=1; source '$PROJECT_ROOT/lib/core/ui.sh'; echo -n 'j' | read_key"
|
||||||
[ "$output" = "CHAR:j" ]
|
[ "$output" = "CHAR:j" ]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -73,7 +73,6 @@ calculate_total_size "$files"
|
|||||||
EOF
|
EOF
|
||||||
)"
|
)"
|
||||||
|
|
||||||
# Result should be >=3 KB (some filesystems allocate slightly more)
|
|
||||||
[ "$result" -ge 3 ]
|
[ "$result" -ge 3 ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,7 +84,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Test stubs
|
|
||||||
request_sudo_access() { return 0; }
|
request_sudo_access() { return 0; }
|
||||||
start_inline_spinner() { :; }
|
start_inline_spinner() { :; }
|
||||||
stop_inline_spinner() { :; }
|
stop_inline_spinner() { :; }
|
||||||
@@ -110,10 +108,8 @@ files_cleaned=0
|
|||||||
total_items=0
|
total_items=0
|
||||||
total_size_cleaned=0
|
total_size_cleaned=0
|
||||||
|
|
||||||
# Use the actual bash function directly, don't pipe printf as that complicates stdin
|
|
||||||
batch_uninstall_applications
|
batch_uninstall_applications
|
||||||
|
|
||||||
# Verify cleanup
|
|
||||||
[[ ! -d "$app_bundle" ]] || exit 1
|
[[ ! -d "$app_bundle" ]] || exit 1
|
||||||
[[ ! -d "$HOME/Library/Application Support/TestApp" ]] || exit 1
|
[[ ! -d "$HOME/Library/Application Support/TestApp" ]] || exit 1
|
||||||
[[ ! -d "$HOME/Library/Caches/TestApp" ]] || exit 1
|
[[ ! -d "$HOME/Library/Caches/TestApp" ]] || exit 1
|
||||||
@@ -144,7 +140,6 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Valid base64 encoded path list
|
|
||||||
valid_data=$(printf '/path/one
|
valid_data=$(printf '/path/one
|
||||||
/path/two' | base64)
|
/path/two' | base64)
|
||||||
result=$(decode_file_list "$valid_data" "TestApp")
|
result=$(decode_file_list "$valid_data" "TestApp")
|
||||||
@@ -160,12 +155,9 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Invalid base64 - function should return empty and fail
|
|
||||||
if result=$(decode_file_list "not-valid-base64!!!" "TestApp" 2>/dev/null); then
|
if result=$(decode_file_list "not-valid-base64!!!" "TestApp" 2>/dev/null); then
|
||||||
# If decode succeeded, result should be empty
|
|
||||||
[[ -z "$result" ]]
|
[[ -z "$result" ]]
|
||||||
else
|
else
|
||||||
# Function returned error, which is expected
|
|
||||||
true
|
true
|
||||||
fi
|
fi
|
||||||
EOF
|
EOF
|
||||||
@@ -179,10 +171,8 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Empty base64
|
|
||||||
empty_data=$(printf '' | base64)
|
empty_data=$(printf '' | base64)
|
||||||
result=$(decode_file_list "$empty_data" "TestApp" 2>/dev/null) || true
|
result=$(decode_file_list "$empty_data" "TestApp" 2>/dev/null) || true
|
||||||
# Empty result is acceptable
|
|
||||||
[[ -z "$result" ]]
|
[[ -z "$result" ]]
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
@@ -195,13 +185,10 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Relative path - function should reject it
|
|
||||||
bad_data=$(printf 'relative/path' | base64)
|
bad_data=$(printf 'relative/path' | base64)
|
||||||
if result=$(decode_file_list "$bad_data" "TestApp" 2>/dev/null); then
|
if result=$(decode_file_list "$bad_data" "TestApp" 2>/dev/null); then
|
||||||
# Should return empty string
|
|
||||||
[[ -z "$result" ]]
|
[[ -z "$result" ]]
|
||||||
else
|
else
|
||||||
# Or return error code
|
|
||||||
true
|
true
|
||||||
fi
|
fi
|
||||||
EOF
|
EOF
|
||||||
@@ -215,22 +202,16 @@ set -euo pipefail
|
|||||||
source "$PROJECT_ROOT/lib/core/common.sh"
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
source "$PROJECT_ROOT/lib/uninstall/batch.sh"
|
||||||
|
|
||||||
# Test data: absolute paths
|
|
||||||
test_paths="/path/to/file1
|
test_paths="/path/to/file1
|
||||||
/path/to/file2"
|
/path/to/file2"
|
||||||
|
|
||||||
# Encode with whatever base64 is available (no flags)
|
|
||||||
encoded_data=$(printf '%s' "$test_paths" | base64 | tr -d '\n')
|
encoded_data=$(printf '%s' "$test_paths" | base64 | tr -d '\n')
|
||||||
|
|
||||||
# decode_file_list should handle it regardless of BSD (-D) or GNU (-d)
|
|
||||||
result=$(decode_file_list "$encoded_data" "TestApp")
|
result=$(decode_file_list "$encoded_data" "TestApp")
|
||||||
|
|
||||||
# Verify result contains expected paths
|
|
||||||
[[ "$result" == *"/path/to/file1"* ]] || exit 1
|
[[ "$result" == *"/path/to/file1"* ]] || exit 1
|
||||||
[[ "$result" == *"/path/to/file2"* ]] || exit 1
|
[[ "$result" == *"/path/to/file2"* ]] || exit 1
|
||||||
|
|
||||||
# Verify the function tries both -D and -d by checking it doesn't fail
|
|
||||||
# This tests the fallback logic in decode_file_list
|
|
||||||
[[ -n "$result" ]] || exit 1
|
[[ -n "$result" ]] || exit 1
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
|
|||||||
@@ -4,12 +4,23 @@ setup_file() {
|
|||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
export PROJECT_ROOT
|
export PROJECT_ROOT
|
||||||
|
|
||||||
# Create a dummy cache directory for tests
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-update-manager.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
mkdir -p "${HOME}/.cache/mole"
|
mkdir -p "${HOME}/.cache/mole"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
setup() {
|
setup() {
|
||||||
# Default values for tests
|
|
||||||
BREW_OUTDATED_COUNT=0
|
BREW_OUTDATED_COUNT=0
|
||||||
BREW_FORMULA_OUTDATED_COUNT=0
|
BREW_FORMULA_OUTDATED_COUNT=0
|
||||||
BREW_CASK_OUTDATED_COUNT=0
|
BREW_CASK_OUTDATED_COUNT=0
|
||||||
@@ -17,7 +28,6 @@ setup() {
|
|||||||
MACOS_UPDATE_AVAILABLE=false
|
MACOS_UPDATE_AVAILABLE=false
|
||||||
MOLE_UPDATE_AVAILABLE=false
|
MOLE_UPDATE_AVAILABLE=false
|
||||||
|
|
||||||
# Create a temporary bin directory for mocks
|
|
||||||
export MOCK_BIN_DIR="$BATS_TMPDIR/mole-mocks-$$"
|
export MOCK_BIN_DIR="$BATS_TMPDIR/mole-mocks-$$"
|
||||||
mkdir -p "$MOCK_BIN_DIR"
|
mkdir -p "$MOCK_BIN_DIR"
|
||||||
export PATH="$MOCK_BIN_DIR:$PATH"
|
export PATH="$MOCK_BIN_DIR:$PATH"
|
||||||
@@ -28,7 +38,6 @@ teardown() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
read_key() {
|
read_key() {
|
||||||
# Default mock: press ESC to cancel
|
|
||||||
echo "ESC"
|
echo "ESC"
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
@@ -147,7 +156,8 @@ perform_updates
|
|||||||
EOF
|
EOF
|
||||||
|
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
[[ "$output" == *"Homebrew formulae updated"* ]]
|
[[ "$output" == *"Updating Mole"* ]]
|
||||||
[[ "$output" == *"Already on latest version"* ]]
|
[[ "$output" == *"Mole updated"* ]]
|
||||||
[[ "$output" == *"MOLE_CACHE_RESET"* ]]
|
[[ "$output" == *"MOLE_CACHE_RESET"* ]]
|
||||||
|
[[ "$output" == *"All updates completed"* ]]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ setup_file() {
|
|||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
export PROJECT_ROOT
|
export PROJECT_ROOT
|
||||||
|
|
||||||
# Read current version from mole file (single source of truth)
|
|
||||||
CURRENT_VERSION="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION="\(.*\)"/\1/')"
|
CURRENT_VERSION="$(grep '^VERSION=' "$PROJECT_ROOT/mole" | head -1 | sed 's/VERSION="\(.*\)"/\1/')"
|
||||||
export CURRENT_VERSION
|
export CURRENT_VERSION
|
||||||
|
|
||||||
|
|||||||
104
tests/user_clean_core.bats
Normal file
104
tests/user_clean_core.bats
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-user-core.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_user_essentials respects Trash whitelist" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
start_section_spinner() { :; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
note_activity() { :; }
|
||||||
|
is_path_whitelisted() { [[ "$1" == "$HOME/.Trash" ]]; }
|
||||||
|
clean_user_essentials
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Trash"* ]]
|
||||||
|
[[ "$output" == *"whitelist"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_macos_system_caches calls safe_clean for core paths" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_macos_system_caches
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Saved application states"* ]]
|
||||||
|
[[ "$output" == *"QuickLook"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_sandboxed_app_caches skips protected containers" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" DRY_RUN=true /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
start_section_spinner() { :; }
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
bytes_to_human() { echo "0B"; }
|
||||||
|
note_activity() { :; }
|
||||||
|
safe_clean() { :; }
|
||||||
|
should_protect_data() { return 0; }
|
||||||
|
is_critical_system_component() { return 0; }
|
||||||
|
files_cleaned=0
|
||||||
|
total_size_cleaned=0
|
||||||
|
total_items=0
|
||||||
|
mkdir -p "$HOME/Library/Containers/com.example.app/Data/Library/Caches"
|
||||||
|
process_container_cache "$HOME/Library/Containers/com.example.app"
|
||||||
|
clean_sandboxed_app_caches
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" != *"Sandboxed app caches"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_finder_metadata respects protection flag" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" PROTECT_FINDER_METADATA=true /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
stop_section_spinner() { :; }
|
||||||
|
note_activity() { :; }
|
||||||
|
clean_finder_metadata
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Finder metadata"* ]]
|
||||||
|
[[ "$output" == *"protected"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "check_ios_device_backups returns when no backup dir" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" /bin/bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
check_ios_device_backups
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
}
|
||||||
61
tests/user_clean_extra.bats
Normal file
61
tests/user_clean_extra.bats
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
setup_file() {
|
||||||
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
|
export PROJECT_ROOT
|
||||||
|
|
||||||
|
ORIGINAL_HOME="${HOME:-}"
|
||||||
|
export ORIGINAL_HOME
|
||||||
|
|
||||||
|
HOME="$(mktemp -d "${BATS_TEST_DIRNAME}/tmp-user-clean.XXXXXX")"
|
||||||
|
export HOME
|
||||||
|
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
}
|
||||||
|
|
||||||
|
teardown_file() {
|
||||||
|
rm -rf "$HOME"
|
||||||
|
if [[ -n "${ORIGINAL_HOME:-}" ]]; then
|
||||||
|
export HOME="$ORIGINAL_HOME"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_browsers calls expected cache paths" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_browsers
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Safari cache"* ]]
|
||||||
|
[[ "$output" == *"Firefox cache"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_application_support_logs skips when no access" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
note_activity() { :; }
|
||||||
|
clean_application_support_logs
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ "$output" == *"Skipped: No permission"* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "clean_apple_silicon_caches exits when not M-series" {
|
||||||
|
run env HOME="$HOME" PROJECT_ROOT="$PROJECT_ROOT" IS_M_SERIES=false bash --noprofile --norc <<'EOF'
|
||||||
|
set -euo pipefail
|
||||||
|
source "$PROJECT_ROOT/lib/core/common.sh"
|
||||||
|
source "$PROJECT_ROOT/lib/clean/user.sh"
|
||||||
|
safe_clean() { echo "$2"; }
|
||||||
|
clean_apple_silicon_caches
|
||||||
|
EOF
|
||||||
|
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[[ -z "$output" ]]
|
||||||
|
}
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
#!/usr/bin/env bats
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
# Tests for user file handling utilities in lib/core/base.sh
|
|
||||||
# Covers: ensure_user_dir, ensure_user_file, get_invoking_user, etc.
|
|
||||||
|
|
||||||
setup_file() {
|
setup_file() {
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
||||||
@@ -28,18 +26,12 @@ setup() {
|
|||||||
mkdir -p "$HOME"
|
mkdir -p "$HOME"
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Darwin Version Detection Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "get_darwin_major returns numeric version on macOS" {
|
@test "get_darwin_major returns numeric version on macOS" {
|
||||||
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; get_darwin_major")
|
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; get_darwin_major")
|
||||||
# Should be a number (e.g., 23, 24, etc.)
|
|
||||||
[[ "$result" =~ ^[0-9]+$ ]]
|
[[ "$result" =~ ^[0-9]+$ ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
@test "get_darwin_major returns 999 on failure (mock uname failure)" {
|
@test "get_darwin_major returns 999 on failure (mock uname failure)" {
|
||||||
# Mock uname to fail and verify fallback behavior
|
|
||||||
result=$(bash -c "
|
result=$(bash -c "
|
||||||
uname() { return 1; }
|
uname() { return 1; }
|
||||||
export -f uname
|
export -f uname
|
||||||
@@ -50,21 +42,13 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "is_darwin_ge correctly compares versions" {
|
@test "is_darwin_ge correctly compares versions" {
|
||||||
# Should return true for minimum <= current
|
|
||||||
run bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_darwin_ge 1"
|
run bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_darwin_ge 1"
|
||||||
[ "$status" -eq 0 ]
|
[ "$status" -eq 0 ]
|
||||||
|
|
||||||
# Should return false for very high version requirement (unless on futuristic macOS)
|
|
||||||
# Note: With our 999 fallback, this will actually succeed on error, which is correct behavior
|
|
||||||
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_darwin_ge 100 && echo 'yes' || echo 'no'")
|
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_darwin_ge 100 && echo 'yes' || echo 'no'")
|
||||||
# Just verify command runs without error
|
|
||||||
[[ -n "$result" ]]
|
[[ -n "$result" ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# User Context Detection Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "is_root_user detects non-root correctly" {
|
@test "is_root_user detects non-root correctly" {
|
||||||
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_root_user && echo 'root' || echo 'not-root'")
|
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; is_root_user && echo 'root' || echo 'not-root'")
|
||||||
[ "$result" = "not-root" ]
|
[ "$result" = "not-root" ]
|
||||||
@@ -73,7 +57,6 @@ setup() {
|
|||||||
@test "get_invoking_user returns current user when not sudo" {
|
@test "get_invoking_user returns current user when not sudo" {
|
||||||
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; get_invoking_user")
|
result=$(bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; get_invoking_user")
|
||||||
[ -n "$result" ]
|
[ -n "$result" ]
|
||||||
# Should be current user
|
|
||||||
[ "$result" = "${USER:-$(whoami)}" ]
|
[ "$result" = "${USER:-$(whoami)}" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -105,10 +88,6 @@ setup() {
|
|||||||
[ -z "$result" ] || [ "$result" = "~nonexistent_user_12345" ]
|
[ -z "$result" ] || [ "$result" = "~nonexistent_user_12345" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Directory Creation Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "ensure_user_dir creates simple directory" {
|
@test "ensure_user_dir creates simple directory" {
|
||||||
test_dir="$HOME/.cache/test"
|
test_dir="$HOME/.cache/test"
|
||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
||||||
@@ -147,9 +126,6 @@ setup() {
|
|||||||
[ "$dir_uid" = "$current_uid" ]
|
[ "$dir_uid" = "$current_uid" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# File Creation Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "ensure_user_file creates file and parent directories" {
|
@test "ensure_user_file creates file and parent directories" {
|
||||||
test_file="$HOME/.config/mole/test.log"
|
test_file="$HOME/.config/mole/test.log"
|
||||||
@@ -168,7 +144,6 @@ setup() {
|
|||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_file '$test_file'"
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_file '$test_file'"
|
||||||
echo "content" > "$test_file"
|
echo "content" > "$test_file"
|
||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_file '$test_file'"
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_file '$test_file'"
|
||||||
# Should preserve existing content
|
|
||||||
[ -f "$test_file" ]
|
[ -f "$test_file" ]
|
||||||
[ "$(cat "$test_file")" = "content" ]
|
[ "$(cat "$test_file")" = "content" ]
|
||||||
}
|
}
|
||||||
@@ -193,30 +168,18 @@ setup() {
|
|||||||
[ "$file_uid" = "$current_uid" ]
|
[ "$file_uid" = "$current_uid" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Performance Tests (Early Stop Optimization)
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "ensure_user_dir early stop optimization works" {
|
@test "ensure_user_dir early stop optimization works" {
|
||||||
# Create a nested structure
|
|
||||||
test_dir="$HOME/.cache/perf/test/nested"
|
test_dir="$HOME/.cache/perf/test/nested"
|
||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
||||||
|
|
||||||
# Call again - should detect correct ownership and stop early
|
|
||||||
# This is a behavioral test; we verify it doesn't fail
|
|
||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'; ensure_user_dir '$test_dir'"
|
||||||
[ -d "$test_dir" ]
|
[ -d "$test_dir" ]
|
||||||
|
|
||||||
# Verify ownership is still correct
|
|
||||||
current_uid=$(id -u)
|
current_uid=$(id -u)
|
||||||
dir_uid=$(/usr/bin/stat -f%u "$test_dir")
|
dir_uid=$(/usr/bin/stat -f%u "$test_dir")
|
||||||
[ "$dir_uid" = "$current_uid" ]
|
[ "$dir_uid" = "$current_uid" ]
|
||||||
}
|
}
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Integration Tests
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
@test "ensure_user_dir and ensure_user_file work together" {
|
@test "ensure_user_dir and ensure_user_file work together" {
|
||||||
cache_dir="$HOME/.cache/mole"
|
cache_dir="$HOME/.cache/mole"
|
||||||
cache_file="$cache_dir/integration_test.log"
|
cache_file="$cache_dir/integration_test.log"
|
||||||
@@ -241,7 +204,6 @@ setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@test "ensure functions handle concurrent calls safely" {
|
@test "ensure functions handle concurrent calls safely" {
|
||||||
# Simulate concurrent directory creation
|
|
||||||
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'
|
bash -c "source '$PROJECT_ROOT/lib/core/base.sh'
|
||||||
ensure_user_dir '$HOME/.cache/concurrent' &
|
ensure_user_dir '$HOME/.cache/concurrent' &
|
||||||
ensure_user_dir '$HOME/.cache/concurrent' &
|
ensure_user_dir '$HOME/.cache/concurrent' &
|
||||||
@@ -249,4 +211,4 @@ setup() {
|
|||||||
"
|
"
|
||||||
|
|
||||||
[ -d "$HOME/.cache/concurrent" ]
|
[ -d "$HOME/.cache/concurrent" ]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -55,9 +55,7 @@ setup() {
|
|||||||
while IFS= read -r line; do
|
while IFS= read -r line; do
|
||||||
lines+=("$line")
|
lines+=("$line")
|
||||||
done < "$WHITELIST_PATH"
|
done < "$WHITELIST_PATH"
|
||||||
# Header is at least two lines (comments), plus two unique patterns
|
|
||||||
[ "${#lines[@]}" -ge 4 ]
|
[ "${#lines[@]}" -ge 4 ]
|
||||||
# Ensure duplicate was not written twice
|
|
||||||
occurrences=$(grep -c "$HOME/.cache/foo" "$WHITELIST_PATH")
|
occurrences=$(grep -c "$HOME/.cache/foo" "$WHITELIST_PATH")
|
||||||
[ "$occurrences" -eq 1 ]
|
[ "$occurrences" -eq 1 ]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,29 +0,0 @@
|
|||||||
#!/usr/bin/env bats
|
|
||||||
|
|
||||||
setup_file() {
|
|
||||||
PROJECT_ROOT="$(cd "${BATS_TEST_DIRNAME}/.." && pwd)"
|
|
||||||
export PROJECT_ROOT
|
|
||||||
}
|
|
||||||
|
|
||||||
@test "shellcheck passes for test scripts" {
|
|
||||||
if ! command -v shellcheck > /dev/null 2>&1; then
|
|
||||||
skip "shellcheck not installed"
|
|
||||||
fi
|
|
||||||
|
|
||||||
run env PROJECT_ROOT="$PROJECT_ROOT" bash --noprofile --norc << 'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
cd "$PROJECT_ROOT"
|
|
||||||
targets=()
|
|
||||||
while IFS= read -r file; do
|
|
||||||
targets+=("$file")
|
|
||||||
done < <(find "$PROJECT_ROOT/tests" -type f \( -name '*.bats' -o -name '*.sh' \) | sort)
|
|
||||||
if [[ ${#targets[@]} -eq 0 ]]; then
|
|
||||||
echo "No test shell files found"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
shellcheck --rcfile "$PROJECT_ROOT/.shellcheckrc" "${targets[@]}"
|
|
||||||
EOF
|
|
||||||
|
|
||||||
printf '%s\n' "$output" >&3
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user