Try wget https://bash.commongrounds.cc/uploads/1752732884_consgram.sh from the console
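For example, a typical download-and-run sequence (the saved filename comes from the URL, and /path/to/source stands in for your own directory) might look like:

wget https://bash.commongrounds.cc/uploads/1752732884_consgram.sh
bash 1752732884_consgram.sh /path/to/source

The full script: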
#!/bin/bash
# Consoligram v1.0 by AmFile.org
# Consolidates files based on top-level directory structure similarities
# Offers option to delete redundant directories with fewer/older files
# Skips on all errors, including input/output errors
# Places consolidated files in SOURCE_DIR/Consolidated_Files
# Ensures immediate progress display and comprehensive logging
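# Usage: bash consoligram.sh /path/to/source (dry-run by default; see DRY_RUN below)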
# Configuration
SCRIPT_NAME="Consoligram v1.0 by AmFile.org"
SOURCE_DIR="${1%/}" # Source directory, trailing slash stripped (pass as argument, e.g., /path/to/source)
TARGET_DIR="$SOURCE_DIR/Consolidated_Files" # Destination for consolidated files
LOG_FILE="$SOURCE_DIR/consoligram.log"
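# Note: the log file sits inside SOURCE_DIR, so the file scan below will pick it up as an ordinary file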
DRY_RUN=true # Set to false to actually move/delete files
# Announce program name
echo "$SCRIPT_NAME"
# Check if source directory is provided and exists
if [ -z "$SOURCE_DIR" ] || [ ! -d "$SOURCE_DIR" ]; then
echo "Usage: $0 <source_directory>"
echo "Error: Source directory does not exist or was not provided."
exit 1
fi
# Initialize the log file first so that later error handlers can log to it
echo "$SCRIPT_NAME - Consolidation Log - $(date)" > "$LOG_FILE" 2>/dev/null || {
echo "Warning: Cannot write to log file $LOG_FILE, continuing without logging."
LOG_FILE="/dev/null"
}
# Function to log messages
log_message() {
local message="$1"
echo "$(date): $message" >> "$LOG_FILE" 2>/dev/null || true
}
# Create target directory for consolidated files
mkdir -p "$TARGET_DIR" 2>>"$LOG_FILE" || {
echo "Warning: Cannot create target directory $TARGET_DIR, continuing."
log_message "Error creating target directory $TARGET_DIR, continuing."
}
# Function to display progress bar
show_progress() {
local current=$1
local total=$2
local label=$3
local width=50
local percent=$((current * 100 / total))
local filled=$((width * current / total))
local empty=$((width - filled))
local bar=""
for ((i=0; i<filled; i++)); do bar+="#"; done
for ((i=0; i<empty; i++)); do bar+="-"; done
printf "\r%s: [%s] %d%% (%d/%d files)" "$label" "$bar" "$percent" "$current" "$total"
}
# Step 1: Count total files for indexing progress
log_message "Counting files in $SOURCE_DIR..."
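# Count the NUL separators emitted by -print0, so filenames containing newlines are still counted correctly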
total_files=$(find "$SOURCE_DIR" -type f ! -path '*/\.*' -print0 2>>"$LOG_FILE" | tr -dc '\0' | wc -c)
if [ $total_files -eq 0 ]; then
log_message "No files found in $SOURCE_DIR."
echo "No files to process."
exit 0
fi
log_message "Found $total_files files to index."
current_file=0
# Initialize progress bar
show_progress $current_file $total_files "Indexing"
# Step 2: Collect all files and group by top-level directory
log_message "Indexing files in $SOURCE_DIR..."
declare -A file_checksums # checksum -> file path
declare -A latest_files # checksum -> mtime
declare -A dir_files # top_dir -> list of files
declare -A dir_checksums # top_dir -> list of checksums
while IFS= read -r -d '' file; do
# Work with the path relative to SOURCE_DIR and skip hidden files/directories
rel_path="${file#"$SOURCE_DIR"/}"
[[ "$rel_path" != .* && "$rel_path" != */.* ]] || continue
# Update indexing progress
((current_file++))
show_progress $current_file $total_files "Indexing"
# Log file being processed
log_message "Processing file: $file"
# Get top-level directory; files directly under SOURCE_DIR go into "root"
if [[ "$rel_path" == */* ]]; then
top_dir="${rel_path%%/*}"
else
top_dir="root"
fi
# Compute MD5 checksum ("continue" has no effect inside a command substitution,
# so check for an empty result instead)
checksum=$(md5sum "$file" 2>>"$LOG_FILE" | awk '{print $1}')
if [ -z "$checksum" ]; then
log_message "Error computing checksum for $file, skipping."
continue
fi
# Get modification time; skip the file if stat fails
mtime=$(stat -c %Y "$file" 2>>"$LOG_FILE")
if [ -z "$mtime" ]; then
log_message "Error getting mtime for $file, skipping."
continue
fi
# Store file info, keeping only the newest copy for each checksum
if [[ -z "${latest_files[$checksum]}" || $mtime -gt ${latest_files[$checksum]} ]]; then
latest_files[$checksum]=$mtime
file_checksums[$checksum]="$file"
fi
# Group files by top-level directory (space-separated lists; filenames are assumed to contain no whitespace)
dir_files[$top_dir]="${dir_files[$top_dir]} $file"
dir_checksums[$top_dir]="${dir_checksums[$top_dir]} $checksum"
done < <(find "$SOURCE_DIR" -type f ! -path '*/\.*' -print0 2>>"$LOG_FILE")
# Complete indexing progress
echo ""
log_message "Indexing complete. Processed $current_file files."
# Step 3: Consolidate files
log_message "Consolidating files..."
total_unique_files=${#file_checksums[@]}
if [ $total_unique_files -eq 0 ]; then
log_message "No unique files to consolidate."
echo "No unique files to process."
exit 0
fi
current_file=0
show_progress $current_file $total_unique_files "Overall Progress"
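# After indexing, each checksum maps to its newest copy; move that copy into TARGET_DIR/<top_dir>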
for checksum in "${!file_checksums[@]}"; do
((current_file++))
show_progress $current_file $total_unique_files "Overall Progress"
file="${file_checksums[$checksum]}"
mtime=$(stat -c %Y "$file" 2>>"$LOG_FILE")
if [ -z "$mtime" ]; then
log_message "Error getting mtime for $file during consolidation, skipping."
continue
fi
# Skip if not the latest version
if [[ $mtime -lt ${latest_files[$checksum]} ]]; then
log_message "Skipping older file: $file"
continue
fi
# Get top-level directory for target path; files directly under SOURCE_DIR go into "root"
rel_path="${file#"$SOURCE_DIR"/}"
if [[ "$rel_path" == */* ]]; then
top_dir="${rel_path%%/*}"
else
top_dir="root"
fi
filename=$(basename "$file")
target_path="$TARGET_DIR/$top_dir"
# Create target directory
mkdir -p "$target_path" 2>>"$LOG_FILE" || {
log_message "Error creating target directory $target_path, skipping file $file."
continue
}
# Handle naming conflicts by appending a counter before the extension
target_file="$target_path/$filename"
counter=1
base_name="${filename%.*}"
ext="${filename##*.}"
while [[ -f "$target_file" ]]; do
if [[ "$filename" == *.* ]]; then
target_file="$target_path/${base_name}_${counter}.${ext}"
else
# No extension: just append the counter
target_file="$target_path/${filename}_${counter}"
fi
((counter++))
done
# Move or simulate moving
if [ "$DRY_RUN" = true ]; then
log_message "[DRY RUN] Would move $file to $target_file"
else
mv "$file" "$target_file" 2>>"$LOG_FILE" || {
log_message "Error moving $file to $target_file, skipping."
continue
}
log_message "Moved $file to $target_file"
fi
done
echo "" # New line after progress bar
log_message "Consolidation complete. Processed $current_file unique files."
# Step 4: Identify redundant directories
log_message "Analyzing directory structures for redundancy..."
declare -A dir_decisions # Store deletion decisions
redundant_dirs=()
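# Compare every pair of top-level directories; a directory is a deletion candidate when another
# directory holds the same checksums with at least as many files and a newer latest modification time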
for top_dir in "${!dir_files[@]}"; do
for other_dir in "${!dir_files[@]}"; do
[[ "$top_dir" != "$other_dir" ]] || continue
# Compare sorted checksum lists so the result does not depend on find's traversal order
top_sums=$(tr ' ' '\n' <<< "${dir_checksums[$top_dir]}" | sort)
other_sums=$(tr ' ' '\n' <<< "${dir_checksums[$other_dir]}" | sort)
if [[ "$top_sums" == "$other_sums" ]]; then
# Count files and get latest mtime
top_files=(${dir_files[$top_dir]})
other_files=(${dir_files[$other_dir]})
top_count=${#top_files[@]}
other_count=${#other_files[@]}
top_mtime=0
other_mtime=0
for file in "${top_files[@]}"; do
mtime=$(stat -c %Y "$file" 2>>"$LOG_FILE" || continue)
[[ $mtime -gt $top_mtime ]] && top_mtime=$mtime
done
for file in "${other_files[@]}"; do
mtime=$(stat -c %Y "$file" 2>>"$LOG_FILE" || continue)
[[ $mtime -gt $other_mtime ]] && other_mtime=$mtime
done
# Mark directory with fewer/older files as redundant (without listing the same directory twice)
if [[ $top_count -le $other_count && $top_mtime -lt $other_mtime ]]; then
if [[ " ${redundant_dirs[*]} " != *" $SOURCE_DIR/$top_dir "* ]]; then
redundant_dirs+=("$SOURCE_DIR/$top_dir")
fi
fi
fi
done
done
# Step 5: Prompt for deletion of redundant directories
if [ ${#redundant_dirs[@]} -gt 0 ]; then
log_message "Found redundant directories: ${redundant_dirs[*]}"
global_decision=""
for dir in "${redundant_dirs[@]}"; do
# Skip if decision already made globally
if [[ "$global_decision" == "yes" || "$global_decision" == "no" ]]; then
decision="$global_decision"
else
echo "Directory '$dir' has fewer/older files and is redundant."
echo "Delete this directory? Options: [Y]es, [N]o, [A]ll (delete all redundant), [S]kip (keep all)"
read -p "Enter choice (Y/N/A/S): " choice
case "$choice" in
[Yy]*)
decision="yes"
;;
[Nn]*)
decision="no"
;;
[Aa]*)
decision="yes"
global_decision="yes"
;;
[Ss]*)
decision="no"
global_decision="no"
;;
*)
log_message "Invalid choice for $dir, keeping directory."
decision="no"
;;
esac
fi
# Delete or simulate deletion
if [[ "$decision" == "yes" ]]; then
if [ "$DRY_RUN" = true ]; then
log_message "[DRY RUN] Would delete directory $dir"
else
rm -rf "$dir" 2>>"$LOG_FILE" || {
log_message "Error deleting directory $dir, skipping."
continue
}
log_message "Deleted directory $dir"
fi
else
log_message "Keeping directory $dir"
fi
done
else
log_message "No redundant directories found."
fi
log_message "Consolidation complete. Check $LOG_FILE for details."
echo "$SCRIPT_NAME: Done. Dry run: $DRY_RUN. Set DRY_RUN=false to move/delete files."