Mirror of https://github.com/tw93/Mole.git, synced 2026-02-04 17:59:44 +00:00

Commit: Continue to fix the exit issue
This commit is contained in:

46  bin/clean.sh
@@ -277,17 +277,9 @@ safe_clean() {
     for path in "${existing_paths[@]}"; do
         (
             local size
-            # Get size quickly with depth limit
             size=$(get_path_size_kb "$path")
-            [[ -z "$size" || ! "$size" =~ ^[0-9]+$ ]] && size=0
             local count
-            # Quick file count - limit for performance
-            if [[ "$size" -gt 0 ]]; then
-                count=$(find "$path" -type f 2> /dev/null | head -1000 | wc -l | tr -d ' ')
-                [[ -z "$count" || ! "$count" =~ ^[0-9]+$ ]] && count=0
-            else
-                count=0
-            fi
+            count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')
             # Use index + PID for unique filename
             local tmp_file="$temp_dir/result_${idx}.$$"
             echo "$size $count" > "$tmp_file"
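Each path here is measured in a background subshell, and the per-path result is handed back through a small temp file keyed by index and PID. A minimal sketch of both ends of that handshake, with the aggregation side assumed (the parent's collection loop is not part of this hunk):

    # Writer side, inside the per-path subshell (mirrors the hunk above)
    echo "$size $count" > "$temp_dir/result_${idx}.$$"

    # Assumed reader side in the parent, after all subshells finish
    total_kb=0
    for result_file in "$temp_dir"/result_*; do
        [[ -f "$result_file" ]] || continue
        read -r size count < "$result_file"
        total_kb=$((total_kb + size))
    done
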
@@ -343,17 +335,10 @@ safe_clean() {
     if [[ -t 1 ]]; then MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning $total_paths items..."; fi

     for path in "${existing_paths[@]}"; do
-        local size_bytes count
-        # Get size quickly with depth limit
+        local size_bytes
         size_bytes=$(get_path_size_kb "$path")
-        [[ -z "$size_bytes" || ! "$size_bytes" =~ ^[0-9]+$ ]] && size_bytes=0
-        # Quick file count for display - limit for performance
-        if [[ "$size_bytes" -gt 0 ]]; then
-            count=$(find "$path" -type f 2> /dev/null | head -1000 | wc -l | tr -d ' ')
-            [[ -z "$count" || ! "$count" =~ ^[0-9]+$ ]] && count=0
-        else
-            count=0
-        fi
+        local count
+        count=$(find "$path" -type f 2> /dev/null | wc -l | tr -d ' ')

         if [[ "$count" -gt 0 && "$size_bytes" -gt 0 ]]; then
             if [[ "$DRY_RUN" != "true" ]]; then
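Both scanning paths lean on get_path_size_kb, which is defined elsewhere in the repo and not shown in this diff. A stand-in with the contract the callers rely on (print a numeric KB value, never fail) could look like the following; this is a guess at the behaviour, not the project's actual helper:

    # Hypothetical stand-in for get_path_size_kb: size of a path in KB, 0 on failure
    get_path_size_kb() {
        local path="$1"
        local kb
        kb=$(du -sk "$path" 2> /dev/null | awk '{print $1}')
        [[ "$kb" =~ ^[0-9]+$ ]] || kb=0
        echo "$kb"
    }
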
@@ -704,21 +689,24 @@ perform_cleanup() {
         "$HOME/Applications"
     )

-    # Scan for .app bundles - optimized with PlistBuddy and xargs
+    # Scan for .app bundles with timeout protection
     for search_path in "${search_paths[@]}"; do
         [[ -d "$search_path" ]] || continue
-        find "$search_path" -maxdepth 3 -name "Info.plist" -path "*/Contents/Info.plist" 2> /dev/null |
-            xargs -I {} /usr/libexec/PlistBuddy -c "Print :CFBundleIdentifier" {} 2> /dev/null |
-            grep -v "^$" >> "$installed_bundles" || true
+        while IFS= read -r app; do
+            [[ -f "$app/Contents/Info.plist" ]] || continue
+            bundle_id=$(defaults read "$app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
+            [[ -n "$bundle_id" ]] && echo "$bundle_id" >> "$installed_bundles"
+        done < <(run_with_timeout 10 find "$search_path" -maxdepth 2 -type d -name "*.app" 2> /dev/null || true)
     done

-    # Get running applications - timeout protection for osascript
-    run_with_timeout 5 osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2> /dev/null |
-        tr ',' '\n' | sed -e 's/^ *//;s/ *$//' -e '/^$/d' >> "$installed_bundles" || true
+    # Get running applications and LaunchAgents with timeout protection
+    local running_apps=$(run_with_timeout 5 osascript -e 'tell application "System Events" to get bundle identifier of every application process' 2> /dev/null || echo "")
+    echo "$running_apps" | tr ',' '\n' | sed -e 's/^ *//;s/ *$//' -e '/^$/d' >> "$installed_bundles"

-    # Get LaunchAgents
-    find ~/Library/LaunchAgents /Library/LaunchAgents -name "*.plist" -type f 2> /dev/null |
-        xargs -I {} basename {} .plist >> "$installed_bundles" 2> /dev/null || true
+    run_with_timeout 5 find ~/Library/LaunchAgents /Library/LaunchAgents \
+        -name "*.plist" -type f 2> /dev/null | while IFS= read -r plist; do
+        basename "$plist" .plist
+    done >> "$installed_bundles" 2> /dev/null || true

     # Deduplicate
     sort -u "$installed_bundles" -o "$installed_bundles"
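The new loop reads each bundle identifier with the stock defaults tool instead of piping Info.plist paths through PlistBuddy and xargs. The call behaves the same outside the loop; Safari is used here purely as an illustration:

    # Prints "com.apple.Safari" on a stock system, or "unknown" if the key is missing
    defaults read "/Applications/Safari.app/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "unknown"
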
@@ -100,7 +100,7 @@ clean_orphaned_app_data() {

     for app_dir in "${app_dirs[@]}"; do
         [[ -d "$app_dir" ]] || continue
-        find "$app_dir" -name "*.app" -maxdepth 3 -type d 2> /dev/null | while IFS= read -r app_path; do
+        run_with_timeout 10 sh -c "find '$app_dir' -name '*.app' -maxdepth 3 -type d 2> /dev/null" | while IFS= read -r app_path; do
             local bundle_id
             bundle_id=$(defaults read "$app_path/Contents/Info.plist" CFBundleIdentifier 2> /dev/null || echo "")
             [[ -n "$bundle_id" ]] && echo "$bundle_id"
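run_with_timeout is the repo's own guard and its definition is not part of this diff. As a rough idea of what such a helper has to do on macOS (which ships no GNU timeout by default), a simplified sketch might look like this; the real implementation in Mole may well differ:

    # Hedged sketch only: run a command, kill it after N seconds, keep its exit code
    run_with_timeout() {
        local seconds="$1"
        shift
        "$@" &                              # timed command in the background
        local cmd_pid=$!
        (sleep "$seconds" && kill "$cmd_pid" 2> /dev/null) &
        local watchdog_pid=$!
        wait "$cmd_pid"                     # returns the command's exit status
        local status=$?
        kill "$watchdog_pid" 2> /dev/null   # cancel the watchdog if we finished early
        return "$status"
    }

Wrapping the find call in sh -c, as the hunk above does, keeps the quoting, the 2> /dev/null redirection and the || true fallback inside the timed child instead of having the caller parse them.
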
@@ -67,7 +67,7 @@ clean_service_worker_cache() {
     local cleaned_size=0
     local protected_count=0

-    # Find all cache directories and calculate sizes
+    # Find all cache directories and calculate sizes with timeout protection
     while IFS= read -r cache_dir; do
         [[ ! -d "$cache_dir" ]] && continue

@@ -75,7 +75,7 @@ clean_service_worker_cache() {
         # Pattern matches: letters/numbers, hyphens, then dot, then TLD
         # Example: "abc123_https_example.com_0" → "example.com"
         local domain=$(basename "$cache_dir" | grep -oE '[a-zA-Z0-9][-a-zA-Z0-9]*\.[a-zA-Z]{2,}' | head -1 || echo "")
-        local size=$(get_path_size_kb "$cache_dir")
+        local size=$(run_with_timeout 5 get_path_size_kb "$cache_dir")

         # Check if domain is protected
         local is_protected=false
@@ -94,7 +94,7 @@ clean_service_worker_cache() {
             fi
             cleaned_size=$((cleaned_size + size))
         fi
-    done < <(command find "$cache_path" -type d -depth 2 2> /dev/null || true)
+    done < <(run_with_timeout 10 sh -c "find '$cache_path' -type d -depth 2 2> /dev/null || true")

     if [[ $cleaned_size -gt 0 ]]; then
         # Temporarily stop spinner for clean output
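The Example comment a few lines up can be exercised directly; the same pipeline applied to that sample directory name reduces it to the bare domain:

    # Same extraction as above, applied to the name from the comment
    basename "abc123_https_example.com_0" \
        | grep -oE '[a-zA-Z0-9][-a-zA-Z0-9]*\.[a-zA-Z]{2,}' \
        | head -1
    # prints: example.com
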
@@ -81,11 +81,17 @@ clean_dev_rust() {
 clean_dev_docker() {
     if command -v docker > /dev/null 2>&1; then
         if [[ "$DRY_RUN" != "true" ]]; then
-            clean_tool_cache "Docker build cache" docker builder prune -af
+            # Check if Docker daemon is running (with timeout to prevent hanging)
+            if run_with_timeout 3 docker info > /dev/null 2>&1; then
+                clean_tool_cache "Docker build cache" docker builder prune -af
+            else
+                note_activity
+                echo -e " ${GRAY}${ICON_SUCCESS}${NC} Docker build cache (daemon not running)"
+            fi
         else
+            note_activity
             echo -e " ${YELLOW}→${NC} Docker build cache (would clean)"
         fi
-        note_activity
     fi

     safe_clean ~/.docker/buildx/cache/* "Docker BuildX cache"
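The guard works because docker info exits non-zero whenever the daemon is unreachable, so the prune never gets a chance to hang. Stripped of the repo's helpers, the same idea in isolation is:

    # Only touch the build cache when the daemon answers
    if docker info > /dev/null 2>&1; then
        docker builder prune -af
    else
        echo "Docker daemon not running - skipping build cache cleanup"
    fi
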
@@ -47,7 +47,7 @@ clean_broken_preferences() {

         ((broken_count++))
         ((total_size_kb += size_kb))
-    done < <(command find "$prefs_dir" -maxdepth 1 -name "*.plist" -type f 2> /dev/null || true)
+    done < <(run_with_timeout 10 sh -c "find '$prefs_dir' -maxdepth 1 -name '*.plist' -type f 2> /dev/null || true")

     # Check ByHost preferences with timeout protection
     local byhost_dir="$prefs_dir/ByHost"
@@ -66,13 +66,13 @@ clean_broken_preferences() {
             plutil -lint "$plist_file" > /dev/null 2>&1 && continue

             local size_kb
-            size_kb=$(get_path_size_kb "$plist_file")
+            size_kb=$(run_with_timeout 5 get_path_size_kb "$plist_file")

             [[ "$DRY_RUN" != "true" ]] && rm -f "$plist_file" 2> /dev/null || true

             ((broken_count++))
             ((total_size_kb += size_kb))
-        done < <(command find "$byhost_dir" -name "*.plist" -type f 2> /dev/null || true)
+        done < <(run_with_timeout 10 sh -c "find '$byhost_dir' -name '*.plist' -type f 2> /dev/null || true")
     fi

     if [[ -t 1 ]]; then
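plutil -lint is what classifies a preference file as broken here: it exits 0 for a parseable plist, so the && continue skips every healthy file and only unparseable ones reach the removal branch. For example:

    # Exit status decides whether the plist is kept or treated as broken
    if plutil -lint "$HOME/Library/Preferences/com.apple.dock.plist" > /dev/null 2>&1; then
        echo "valid plist - left alone"
    else
        echo "failed to parse - candidate for removal"
    fi
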
@@ -148,7 +148,7 @@ clean_broken_login_items() {

         ((broken_count++))
         ((total_size_kb += size_kb))
-    done < <(command find "$launch_agents_dir" -name "*.plist" -type f 2> /dev/null || true)
+    done < <(run_with_timeout 10 sh -c "find '$launch_agents_dir' -name '*.plist' -type f 2> /dev/null || true")

     if [[ -t 1 ]]; then
         stop_inline_spinner
@@ -11,27 +11,19 @@ clean_deep_system() {
     safe_sudo_find_delete "/Library/Caches" "*.tmp" "$MOLE_TEMP_FILE_AGE_DAYS" "f" || true
     safe_sudo_find_delete "/Library/Caches" "*.log" "$MOLE_LOG_AGE_DAYS" "f" || true

-    # Clean old temp files
+    # Clean old temp files - use real paths (macOS /tmp is symlink to /private/tmp)
     local tmp_cleaned=0
-    local tmp_count=$(sudo find /tmp -type f -mtime +"${MOLE_TEMP_FILE_AGE_DAYS}" 2> /dev/null | wc -l | tr -d ' ')
-    if [[ "$tmp_count" -gt 0 ]]; then
-        safe_sudo_find_delete "/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" || true
-        tmp_cleaned=1
-    fi
-    local var_tmp_count=$(sudo find /var/tmp -type f -mtime +"${MOLE_TEMP_FILE_AGE_DAYS}" 2> /dev/null | wc -l | tr -d ' ')
-    if [[ "$var_tmp_count" -gt 0 ]]; then
-        safe_sudo_find_delete "/var/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" || true
-        tmp_cleaned=1
-    fi
+    safe_sudo_find_delete "/private/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" && tmp_cleaned=1 || true
+    safe_sudo_find_delete "/private/var/tmp" "*" "${MOLE_TEMP_FILE_AGE_DAYS}" "f" && tmp_cleaned=1 || true
     [[ $tmp_cleaned -eq 1 ]] && log_success "Old system temp files (${MOLE_TEMP_FILE_AGE_DAYS}+ days)"

     # Clean crash reports
     safe_sudo_find_delete "/Library/Logs/DiagnosticReports" "*" "$MOLE_CRASH_REPORT_AGE_DAYS" "f" || true
     log_success "Old system crash reports (${MOLE_CRASH_REPORT_AGE_DAYS}+ days)"

-    # Clean system logs
-    safe_sudo_find_delete "/var/log" "*.log" "$MOLE_LOG_AGE_DAYS" "f" || true
-    safe_sudo_find_delete "/var/log" "*.gz" "$MOLE_LOG_AGE_DAYS" "f" || true
+    # Clean system logs - use real path (macOS /var is symlink to /private/var)
+    safe_sudo_find_delete "/private/var/log" "*.log" "$MOLE_LOG_AGE_DAYS" "f" || true
+    safe_sudo_find_delete "/private/var/log" "*.gz" "$MOLE_LOG_AGE_DAYS" "f" || true
     log_success "Old system logs (${MOLE_LOG_AGE_DAYS}+ days)"

     # Clean Library Updates safely - skip if SIP is enabled to avoid error messages
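Switching to the /private paths works because /tmp and /var on macOS are themselves symlinks into /private, so the deletions target the real directories instead of going through the links. On a stock install this is easy to confirm:

    # Both print targets under /private on macOS
    readlink /tmp    # -> private/tmp
    readlink /var    # -> private/var
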
@@ -42,7 +34,7 @@ clean_deep_system() {
         # These files are system-protected and cannot be removed
         : # No-op, silently skip
     else
-        # SIP is disabled, attempt cleanup with restricted flag check and timeout protection
+        # SIP is disabled, attempt cleanup with restricted flag check
         local updates_cleaned=0
         while IFS= read -r -d '' item; do
             # Skip system-protected files (restricted flag)
@@ -55,7 +47,7 @@ clean_deep_system() {
             if safe_sudo_remove "$item"; then
                 ((updates_cleaned++))
             fi
-        done < <(command find /Library/Updates -mindepth 1 -maxdepth 1 -print0 2> /dev/null || true)
+        done < <(find /Library/Updates -mindepth 1 -maxdepth 1 -print0 2> /dev/null || true)
         [[ $updates_cleaned -gt 0 ]] && log_success "System library updates"
     fi
 fi
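The restricted flag referred to above is the SIP file flag; whether SIP is active, and whether a given entry carries the flag, can be checked with stock macOS tools before deciding anything needs cleaning:

    # SIP status, and BSD file flags (including "restricted") for the Updates dir
    csrutil status
    ls -ldO /Library/Updates 2> /dev/null
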
@@ -119,32 +119,8 @@ clean_browsers() {
     safe_clean ~/Library/Caches/zen/* "Zen cache"
     safe_clean ~/Library/Application\ Support/Firefox/Profiles/*/cache2/* "Firefox profile cache"

-    # Service Worker CacheStorage (all profiles)
-    # Show loading indicator for this potentially slow scanning operation
-    if [[ -t 1 ]]; then
-        MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning browser Service Worker caches..."
-    fi
-
-    # Limit search depth to prevent hanging on large profile directories
-    while IFS= read -r sw_path; do
-        [[ -z "$sw_path" ]] && continue
-        local profile_name=$(basename "$(dirname "$(dirname "$sw_path")")")
-        local browser_name="Chrome"
-        [[ "$sw_path" == *"Microsoft Edge"* ]] && browser_name="Edge"
-        [[ "$sw_path" == *"Brave"* ]] && browser_name="Brave"
-        [[ "$sw_path" == *"Arc"* ]] && browser_name="Arc"
-        [[ "$profile_name" != "Default" ]] && browser_name="$browser_name ($profile_name)"
-        clean_service_worker_cache "$browser_name" "$sw_path"
-    done < <(find "$HOME/Library/Application Support/Google/Chrome" \
-        "$HOME/Library/Application Support/Microsoft Edge" \
-        "$HOME/Library/Application Support/BraveSoftware/Brave-Browser" \
-        "$HOME/Library/Application Support/Arc/User Data" \
-        -maxdepth 6 -type d -name "CacheStorage" -path "*/Service Worker/*" 2> /dev/null || true)
-
-    # Stop loading indicator
-    if [[ -t 1 ]]; then
-        stop_inline_spinner
-    fi
+    # DISABLED: Service Worker CacheStorage scanning (find can hang on large browser profiles)
+    # Browser caches are already cleaned by the safe_clean calls above
 }

 # Clean cloud storage app caches
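The directories the removed scan used to walk are the per-profile Service Worker stores under each browser's Application Support folder. They can still be sized by hand if needed, e.g. for Chrome's default profile:

    # Size of Chrome's default-profile Service Worker cache, if it exists
    du -sh "$HOME/Library/Application Support/Google/Chrome/Default/Service Worker/CacheStorage" 2> /dev/null \
        || echo "not present"
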
@@ -187,21 +163,15 @@ clean_application_support_logs() {
         return 0
     fi

-    # Show loading indicator for this potentially slow operation
-    if [[ -t 1 ]]; then
-        MOLE_SPINNER_PREFIX=" " start_inline_spinner "Scanning Application Support directories..."
-    fi
-
     # Clean log directories and cache patterns with iteration limit
-    # Limit iterations to balance thoroughness and performance
+    # Reduced from 200 to 50 to prevent hanging on large directories
     local iteration_count=0
-    local max_iterations=100
-    local cleaned_any=false
+    local max_iterations=50

     for app_dir in ~/Library/Application\ Support/*; do
         [[ -d "$app_dir" ]] || continue

-        # Safety: limit iterations to avoid excessive scanning
+        # Safety: limit iterations
         ((iteration_count++))
         if [[ $iteration_count -gt $max_iterations ]]; then
             break
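The iteration cap is a blunt guard: once max_iterations directories have been looked at, the loop stops no matter how much of Application Support is left. The pattern in isolation, with an illustrative cap:

    # Stop after a fixed number of directories (cap value is illustrative)
    max_iterations=50
    iteration_count=0
    for app_dir in "$HOME/Library/Application Support"/*; do
        [[ -d "$app_dir" ]] || continue
        iteration_count=$((iteration_count + 1))
        [[ $iteration_count -gt $max_iterations ]] && break
        # per-app log cleanup would run here
    done
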
@@ -209,8 +179,7 @@ clean_application_support_logs() {

         app_name=$(basename "$app_dir")

-        # Skip system and protected apps
-        # Convert to lowercase for case-insensitive matching
+        # Skip system and protected apps (case-insensitive)
         local app_name_lower
         app_name_lower=$(echo "$app_name" | tr '[:upper:]' '[:lower:]')
         case "$app_name_lower" in
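Lowercasing the folder name once means the skip list in the case statement below only needs lowercase patterns, whatever capitalisation the vendor uses. The check on its own, with made-up patterns rather than the repo's actual list:

    # Case-insensitive skip check (patterns here are examples only)
    app_name_lower=$(echo "Some App" | tr '[:upper:]' '[:lower:]')
    case "$app_name_lower" in
        "some app"* | keychain*) echo "protected - skipped" ;;
        *) echo "eligible for log cleanup" ;;
    esac
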
@@ -219,72 +188,36 @@ clean_application_support_logs() {
                 ;;
         esac

-        # Clean log directories
-        if [[ -d "$app_dir/log" ]] && ls "$app_dir/log" > /dev/null 2>&1; then
-            safe_clean "$app_dir/log"/* "App logs: $app_name"
-        fi
-        if [[ -d "$app_dir/logs" ]] && ls "$app_dir/logs" > /dev/null 2>&1; then
-            safe_clean "$app_dir/logs"/* "App logs: $app_name"
-        fi
-        if [[ -d "$app_dir/activitylog" ]] && ls "$app_dir/activitylog" > /dev/null 2>&1; then
-            safe_clean "$app_dir/activitylog"/* "Activity logs: $app_name"
-        fi
+        # Clean log directories - simple direct removal without deep scanning
+        [[ -d "$app_dir/log" ]] && safe_clean "$app_dir/log"/* "App logs: $app_name"
+        [[ -d "$app_dir/logs" ]] && safe_clean "$app_dir/logs"/* "App logs: $app_name"
+        [[ -d "$app_dir/activitylog" ]] && safe_clean "$app_dir/activitylog"/* "Activity logs: $app_name"

-        # Clean common cache patterns (Service Worker, Code Cache, Crashpad)
-        if [[ -d "$app_dir/Cache/Cache_Data" ]] && ls "$app_dir/Cache/Cache_Data" > /dev/null 2>&1; then
-            safe_clean "$app_dir/Cache/Cache_Data" "Cache data: $app_name"
-        fi
-        if [[ -d "$app_dir/Code Cache/js" ]] && ls "$app_dir/Code Cache/js" > /dev/null 2>&1; then
-            safe_clean "$app_dir/Code Cache/js"/* "Code cache: $app_name"
-        fi
-        if [[ -d "$app_dir/Crashpad/completed" ]] && ls "$app_dir/Crashpad/completed" > /dev/null 2>&1; then
-            safe_clean "$app_dir/Crashpad/completed"/* "Crash reports: $app_name"
-        fi
+        # Clean common cache patterns - skip complex patterns that might hang
+        [[ -d "$app_dir/Cache/Cache_Data" ]] && safe_clean "$app_dir/Cache/Cache_Data" "Cache data: $app_name"
+        [[ -d "$app_dir/Crashpad/completed" ]] && safe_clean "$app_dir/Crashpad/completed"/* "Crash reports: $app_name"

-        # Clean Service Worker caches (CacheStorage and ScriptCache) with timeout protection
-        while IFS= read -r -d '' sw_cache; do
-            local profile_path=$(dirname "$(dirname "$sw_cache")")
-            local profile_name=$(basename "$profile_path")
-            [[ "$profile_name" == "User Data" ]] && profile_name=$(basename "$(dirname "$profile_path")")
-            clean_service_worker_cache "$app_name ($profile_name)" "$sw_cache"
-        done < <(find "$app_dir" -maxdepth 4 -type d \( -name "CacheStorage" -o -name "ScriptCache" \) -path "*/Service Worker/*" -print0 2> /dev/null || true)
-
-        # Clean stale update downloads (older than 7 days) with timeout protection
-        if [[ -d "$app_dir/update" ]] && ls "$app_dir/update" > /dev/null 2>&1; then
-            while IFS= read -r update_dir; do
-                local dir_age_days=$((($(date +%s) - $(get_file_mtime "$update_dir")) / 86400))
-                if [[ $dir_age_days -ge $MOLE_TEMP_FILE_AGE_DAYS ]]; then
-                    safe_clean "$update_dir" "Stale update: $app_name"
-                fi
-            done < <(command find "$app_dir/update" -mindepth 1 -maxdepth 1 -type d 2> /dev/null || true)
-        fi
+        # DISABLED: Service Worker and update scanning (too slow, causes hanging)
+        # These are covered by browser-specific cleaning in clean_browsers()
     done

-    # Clean Group Containers logs with timeout protection
-    # Search depth 4 to cover both direct (*/Logs) and nested (*/Library/Logs) patterns
-    if [[ -d "$HOME/Library/Group Containers" ]]; then
-        while IFS= read -r logs_dir; do
-            local container_name=$(basename "$(dirname "$logs_dir")")
-            safe_clean "$logs_dir"/* "Group container logs: $container_name"
-        done < <(command find "$HOME/Library/Group Containers" -maxdepth 4 -type d -name "Logs" 2> /dev/null || true)
-    fi
-
-    # Stop loading indicator
-    if [[ -t 1 ]]; then
-        stop_inline_spinner
-    fi
+    # DISABLED: Group Containers log scanning (find operation can hang)
+    # Group container logs are usually minimal, skipping for performance
 }

 # Check and show iOS device backup info
 check_ios_device_backups() {
     local backup_dir="$HOME/Library/Application Support/MobileSync/Backup"
-    if [[ -d "$backup_dir" ]] && command find "$backup_dir" -mindepth 1 -maxdepth 1 2> /dev/null | read -r _; then
+    # Simplified check without find to avoid hanging
+    if [[ -d "$backup_dir" ]]; then
         local backup_kb=$(get_path_size_kb "$backup_dir")
         if [[ -n "${backup_kb:-}" && "$backup_kb" -gt 102400 ]]; then
             local backup_human=$(command du -sh "$backup_dir" 2> /dev/null | awk '{print $1}')
-            note_activity
-            echo -e " Found ${GREEN}${backup_human}${NC} iOS backups"
-            echo -e " You can delete them manually: ${backup_dir}"
+            if [[ -n "$backup_human" ]]; then
+                note_activity
+                echo -e " Found ${GREEN}${backup_human}${NC} iOS backups"
+                echo -e " You can delete them manually: ${backup_dir}"
+            fi
         fi
     fi
 }
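The 102400 threshold in check_ios_device_backups is in KB, the unit get_path_size_kb reports, so backups are only surfaced once they exceed roughly 100 MB:

    # 102400 KB is the reporting threshold for iOS backups
    echo "$((102400 / 1024)) MB"    # prints: 100 MB
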