script enhancement

parent: bb2ebaaa5d
commit: d799a2e8bd
@@ -60,8 +60,9 @@ CHECK_TRANSFER_INTEGRITY="true"
 # Optionally, set USE_SYSLOG="true" to also log messages to syslog.
 USE_SYSLOG="false"
 
-# Auto-create directories
-mkdir -p "${DIR_GAMES_DST}" "${DIR_APPS_DST}" \
-  "${DIR_MOVIES_DST}" "${DIR_BOOKS_DST}" \
-  "${DIR_TV_DST}" "${DIR_MUSIC_DST}" \
-  "${DEFAULT_DST}" 2>/dev/null || true
+# Auto-create directories - commented out from config file
+# These should be created in a script, not in the config file
+# mkdir -p "${DIR_GAMES_DST}" "${DIR_APPS_DST}" \
+#   "${DIR_MOVIES_DST}" "${DIR_BOOKS_DST}" \
+#   "${DIR_TV_DST}" "${DIR_MUSIC_DST}" \
+#   "${DEFAULT_DST}" 2>/dev/null || true
install.sh · 13 · Normal file → Executable file
@@ -15,7 +15,7 @@ fi
 echo "Checking dependencies..."
 declare -A PKGS=(
   [transmission-cli]="transmission-remote"
-  [unrar]="unrar"
+  [unrar-free]="unrar-free"
   [unzip]="unzip"
   [p7zip-full]="7z"
   [parallel]="parallel"
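
For context, an associative array that maps package names to the command each package provides is normally consumed by a check-and-install loop along these lines. This is only a sketch of the idea, not the actual loop in install.sh:

    # Sketch only - not the real install.sh logic - of how a package=>command
    # map like PKGS is typically used.
    for pkg in "${!PKGS[@]}"; do
      if ! command -v "${PKGS[$pkg]}" >/dev/null 2>&1; then
        apt-get install -y "$pkg"
      fi
    done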
@@ -83,9 +83,20 @@ chown $TORRENT_USER:$TORRENT_GROUP /etc/torrent/backups
 # If this is a first-time install, copy the default config
 if [ ! -f "/etc/torrent/mover.conf" ]; then
   mv /etc/torrent/mover.conf.new /etc/torrent/mover.conf
+  echo "Config file installed at /etc/torrent/mover.conf"
+  echo "Please run 'torrent-config edit' to set up your configuration"
 else
   echo "Existing configuration found at /etc/torrent/mover.conf"
   echo "New configuration is at /etc/torrent/mover.conf.new"
+  echo "You can compare them with: diff /etc/torrent/mover.conf /etc/torrent/mover.conf.new"
+fi
+
+# Run torrent-config to validate the configuration
+echo "Validating configuration..."
+if /usr/local/bin/torrent-config validate 2>/dev/null; then
+  echo "Configuration validation passed."
+else
+  echo "Configuration requires setup. Please run 'torrent-config edit' to configure."
 fi
 
 # Create log rotation configuration
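
Taken together, the install flow above leaves an upgraded system with both mover.conf and mover.conf.new in place. The expected admin follow-up, using only the commands already referenced in this hunk, is roughly:

    # Illustrative post-install session.
    diff /etc/torrent/mover.conf /etc/torrent/mover.conf.new   # review new defaults
    torrent-config edit                                        # adjust the live config
    torrent-config validate && echo "Configuration OK"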
@@ -83,6 +83,24 @@ main() {
   [[ -n "${DIR_TV_DST}" ]] && REQUIRED_DIRS+=("${DIR_TV_DST}")
   [[ -n "${DIR_MUSIC_DST}" ]] && REQUIRED_DIRS+=("${DIR_MUSIC_DST}")
 
+  # Create required directories if they don't exist
+  log_info "Creating required directories if they don't exist..."
+  for dir in "${REQUIRED_DIRS[@]}"; do
+    if [[ -n "$dir" ]]; then
+      if [[ ! -d "$dir" ]]; then
+        log_info "Creating directory: $dir"
+        if mkdir -p "$dir"; then
+          chmod 775 "$dir"
+          chown ${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission} "$dir"
+          log_info "Created directory: $dir"
+        else
+          log_error "Failed to create directory: $dir"
+        fi
+      fi
+    fi
+  done
+
+  # Now validate that all required directories exist and are writable
   validate_directories "${REQUIRED_DIRS[@]}" || exit 1
 
   init_checksum_db
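
The loop above repeats the same mkdir/chmod/chown sequence used in install.sh and later in process_copy(). A minimal sketch of how that could be factored into one helper; ensure_dir is a hypothetical name, not something this commit adds:

    # Hypothetical helper (not part of this commit): create a directory with the
    # ownership and mode used throughout these scripts.
    ensure_dir() {
      local dir="$1"
      [[ -z "${dir}" ]] && return 1
      if [[ ! -d "${dir}" ]]; then
        mkdir -p "${dir}" || { log_error "Failed to create directory: ${dir}"; return 1; }
        chmod 775 "${dir}"
        chown "${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission}" "${dir}"
        log_info "Created directory: ${dir}"
      fi
    }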
@@ -1,45 +1,134 @@
 #!/bin/bash
 # Archive extraction handler for torrent-mover
 
-# Improved Archive Extraction Handler
-# For each archive found in the source directory, create a subdirectory in the destination
-# named after the archive (without its extension) and extract into that subdirectory.
-# The archive is retained in the source, so it will remain until the ratio
-# limits are reached and Transmission removes the torrent data.
-handle_archives() {
-  local src="$1" dst="$2"
-  find "${src}" -type f \( -iname "*.rar" -o -iname "*.zip" -o -iname "*.7z" \) | while read -r arch; do
-    log_info "Extracting archive: ${arch}"
-    local base
-    base=$(basename "${arch}")
-    local subdir="${dst}/${base%.*}"
-    mkdir -p "${subdir}" || { log_error "Failed to create subdirectory ${subdir}"; continue; }
+# extract_single_archive: Extract a single archive with proper error handling
+extract_single_archive() {
+  local archive="$1"
+  local target_dir="$2"
+  local archive_type="${archive##*.}"
+  local extract_success=1
+  local tmp_marker="${target_dir}/.extraction_in_progress"
 
-    # Apply proper permissions to the extraction directory
-    chmod 775 "${subdir}"
-    chown ${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission} "${subdir}"
+  # Create extraction marker to indicate incomplete extraction
+  touch "${tmp_marker}"
 
-    local extract_success=0
-    case "${arch##*.}" in
+  # Ensure proper permissions for extraction directory
+  chmod 775 "${target_dir}"
+  chown ${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission} "${target_dir}"
+
+  # Extract based on archive type
+  case "${archive_type,,}" in # Use lowercase comparison
     rar)
-      retry_command "unrar x -o- \"${arch}\" \"${subdir}\"" 3 10
+      log_debug "Extracting RAR archive: ${archive}"
+      # Check which unrar variant is available
+      if command -v unrar-free &>/dev/null; then
+        # unrar-free has different syntax
+        retry_command "unrar-free x \"${archive}\" \"${target_dir}\"" 3 10
+      else
+        retry_command "unrar x -o- \"${archive}\" \"${target_dir}\"" 3 10
+      fi
       extract_success=$?
       ;;
     zip)
-      retry_command "unzip -o \"${arch}\" -d \"${subdir}\"" 3 10
+      log_debug "Extracting ZIP archive: ${archive}"
+      retry_command "unzip -o \"${archive}\" -d \"${target_dir}\"" 3 10
       extract_success=$?
       ;;
-    7z)
-      retry_command "7z x \"${arch}\" -o\"${subdir}\"" 3 10
+    7z|7zip)
+      log_debug "Extracting 7Z archive: ${archive}"
+      retry_command "7z x \"${archive}\" -o\"${target_dir}\"" 3 10
       extract_success=$?
       ;;
+    *)
+      log_error "Unknown archive type: ${archive_type}"
+      extract_success=1
+      ;;
   esac
 
-    if [ $extract_success -eq 0 ]; then
+  # Apply consistent permissions to all extracted files and directories
+  if [[ ${extract_success} -eq 0 ]]; then
+    log_debug "Setting permissions for extracted files in ${target_dir}"
+    find "${target_dir}" -type d -exec chmod 775 {} \;
+    find "${target_dir}" -type f -exec chmod 664 {} \;
+    find "${target_dir}" -exec chown ${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission} {} \;
+
+    # Remove the extraction marker to indicate successful completion
+    rm -f "${tmp_marker}"
+    return 0
+  else
+    log_error "Extraction failed for ${archive}"
+    # Keep marker to indicate failed extraction
+    return 1
+  fi
+}
+
+# handle_archives: Process all archives in a source directory
+# Returns: 0 if all archives extracted successfully or no archives found, 1 if any failed
+handle_archives() {
+  local src="$1" dst="$2"
+  local overall_success=0
+  local archive_found=0
+  local extraction_errors=0
+
+  # Check if source and destination are valid
+  if [[ ! -d "${src}" ]]; then
+    log_error "Source directory missing: ${src}"
+    return 1
+  fi
+
+  if [[ ! -d "${dst}" ]]; then
+    log_error "Destination directory missing: ${dst}"
+    return 1
+  fi
+
+  # Find all archives and extract them
+  find "${src}" -type f \( -iname "*.rar" -o -iname "*.zip" -o -iname "*.7z" \) | while read -r arch; do
+    archive_found=1
+    log_info "Processing archive: ${arch}"
+
+    # Create extraction subdirectory
+    local base
+    base=$(basename "${arch}")
+    local subdir="${dst}/${base%.*}"
+
+    if ! mkdir -p "${subdir}"; then
+      log_error "Failed to create subdirectory ${subdir} for archive extraction"
+      extraction_errors=$((extraction_errors + 1))
+      continue
+    fi
+
+    # Extract the archive
+    if ! extract_single_archive "${arch}" "${subdir}"; then
+      log_error "Extraction failed for ${arch}"
+      extraction_errors=$((extraction_errors + 1))
+    else
       log_info "Archive ${arch} extracted successfully to ${subdir}"
       log_info "Archive ${arch} retained in source until ratio limits are reached."
-    else
-      log_error "Failed to extract archive ${arch}"
     fi
   done
+
+  # Check for cleanup of any incomplete extractions from previous runs
+  find "${dst}" -name ".extraction_in_progress" | while read -r marker; do
+    local problem_dir=$(dirname "${marker}")
+    log_warn "Found incomplete extraction in ${problem_dir} from previous run"
+
+    # Option 1: Remove incomplete directory
+    # rm -rf "${problem_dir}"
+
+    # Option 2: Mark as incomplete but leave content
+    touch "${problem_dir}/.incomplete_extraction"
+    rm -f "${marker}"
+  done
+
+  # Return success if no archives found or all extracted successfully
+  if [[ ${archive_found} -eq 0 ]]; then
+    log_debug "No archives found in ${src}"
+    return 0
+  elif [[ ${extraction_errors} -eq 0 ]]; then
+    log_info "All archives extracted successfully"
+    return 0
+  else
+    log_warn "${extraction_errors} archives failed to extract properly"
+    return 1
+  fi
 }
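
A rough usage sketch of the reworked handler, assuming this library is sourced by the main script and the log_* helpers are defined; the paths are placeholders:

    # Illustrative only.
    src="/var/lib/transmission-daemon/downloads/Some.Release"
    dst="/srv/media/movies"
    if handle_archives "${src}" "${dst}"; then
      echo "All archives extracted (or none were found)"
    else
      echo "At least one archive failed; its directory keeps an .extraction_in_progress marker"
    fi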
@@ -71,10 +71,22 @@ parse_args() {
 
 # check_dependencies: Ensures required commands are available.
 check_dependencies() {
-  local deps=("transmission-remote" "unrar" "unzip" "7z" "parallel" "bc")
+  local deps=("transmission-remote" "unzip" "7z" "parallel" "bc")
   for dep in "${deps[@]}"; do
     command -v "${dep}" >/dev/null 2>&1 || { log_error "Missing dependency: ${dep}"; exit 1; }
   done
+
+  # Check for unrar or unrar-free
+  if command -v unrar &>/dev/null; then
+    log_debug "Found unrar command"
+  elif command -v unrar-free &>/dev/null; then
+    log_debug "Found unrar-free command"
+    # Create an alias for unrar to point to unrar-free
+    alias unrar="unrar-free"
+  else
+    log_error "Missing dependency: unrar or unrar-free"
+    exit 1
+  fi
 }
 
 # check_disk_usage: Warn if disk usage is over 90%.
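
One caveat on the fallback above: alias unrar="unrar-free" only takes effect in non-interactive bash if expand_aliases is enabled, so a small function wrapper is the more dependable way to paper over the name difference. A hedged sketch, not part of this commit:

    # Sketch: function-based fallback instead of an alias.
    if ! command -v unrar &>/dev/null && command -v unrar-free &>/dev/null; then
      unrar() {
        # unrar-free accepts only a subset of unrar's options
        unrar-free "$@"
      }
    fi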
@@ -98,30 +110,108 @@ check_disk_usage() {
   fi
 }
 
+# run_command_safely: Safer version of command execution that prevents injection
+run_command_safely() {
+  # Instead of using eval with a command string, this function accepts the command and arguments separately
+  # This prevents command injection vulnerabilities
+  if [[ $# -eq 0 ]]; then
+    log_error "No command provided to run_command_safely"
+    return 1
+  fi
+
+  log_debug "Running command: $*"
+  "$@"
+  return $?
+}
+
 # retry_command: Execute a command with retries
 retry_command() {
   local cmd="$1"
   local max_attempts="${2:-3}" # Default to 3 attempts
   local wait_time="${3:-10}" # Default to 10 seconds wait between attempts
   local attempt=1
+  local exit_code=0
+  local error_output=""
+
+  # Create a temporary file for capturing error output
+  local error_file
+  error_file=$(mktemp)
 
   while (( attempt <= max_attempts )); do
     log_debug "Attempt $attempt of $max_attempts: $cmd"
-    if eval "$cmd"; then
+
+    # Execute command and capture both exit code and stderr
+    error_output=$( { eval "$cmd"; exit_code=$?; } 2>&1 > >(tee /dev/stderr) )
+
+    if [[ ${exit_code} -eq 0 ]]; then
+      log_debug "Command succeeded on attempt $attempt"
+      rm -f "${error_file}"
       return 0
     else
-      log_warn "Command failed (attempt $attempt): $cmd"
+      # Log detailed error information
+      echo "${error_output}" > "${error_file}"
+      log_warn "Command failed (attempt $attempt, exit code: ${exit_code})"
+      log_debug "Error details: $(head -n 5 "${error_file}")"
+
       if (( attempt == max_attempts )); then
-        log_error "Maximum attempts reached for: $cmd"
-        return 1
+        log_error "Maximum attempts reached for command, last exit code: ${exit_code}"
+        log_error "Last error output: $(head -n 10 "${error_file}")"
+        rm -f "${error_file}"
+        return ${exit_code}
       fi
-      sleep "$wait_time"
+
+      # Exponential backoff - wait longer for each successive attempt
+      local adjusted_wait=$((wait_time * attempt))
+      log_debug "Waiting ${adjusted_wait} seconds before retry"
+      sleep ${adjusted_wait}
       (( attempt++ ))
     fi
   done
+
+  rm -f "${error_file}"
   return 1
 }
 
+# run_in_transaction: Runs commands with an atomic operation guarantee
+# If any command fails, attempts to roll back changes
+run_in_transaction() {
+  local action_desc="$1"
+  local cleanup_cmd="$2"
+  local main_cmd="$3"
+
+  log_debug "Starting transaction: ${action_desc}"
+
+  # Create marker file to indicate transaction in progress
+  local transaction_id
+  transaction_id=$(date +%s)-$$
+  local transaction_marker="/tmp/torrent-mover-transaction-${transaction_id}"
+  echo "${action_desc}" > "${transaction_marker}"
+
+  # Execute the main command
+  if ! eval "${main_cmd}"; then
+    log_error "Transaction failed: ${action_desc}"
+
+    # Only run cleanup if it exists
+    if [[ -n "${cleanup_cmd}" ]]; then
+      log_info "Attempting transaction rollback"
+      if ! eval "${cleanup_cmd}"; then
+        log_error "Rollback failed, manual intervention may be required"
+      else
+        log_info "Rollback completed successfully"
+      fi
+    fi
+
+    # Clean up marker
+    rm -f "${transaction_marker}"
+    return 1
+  fi
+
+  # Clean up marker on success
+  rm -f "${transaction_marker}"
+  log_debug "Transaction completed successfully: ${action_desc}"
+  return 0
+}
+
 # validate_directories: Ensure required directories exist and are writable
 validate_directories() {
   local directories=("$@")
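
For reference, the calling convention of the two helpers added above, shown with placeholder commands and paths; these calls are illustrative and do not appear in the commit:

    # retry_command "<command string>" <max_attempts> <wait_seconds>
    retry_command "rsync -a \"${src}/\" \"${dst}/\"" 5 20

    # run_in_transaction "<description>" "<rollback command>" "<main command>"
    run_in_transaction "stage completed torrent" \
      "rm -rf \"${dst}/staging\"" \
      "cp -a \"${src}\" \"${dst}/staging\""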
@@ -16,9 +16,44 @@ record_checksums() {
   mv "${CHECKSUM_DB}.tmp" "${CHECKSUM_DB}"
 }
 
+# generate_checksums: Common function to generate checksums efficiently
+generate_checksums() {
+  local dir="$1"
+  local cache_file="${CHECKSUM_DB}.$(echo "$dir" | md5sum | cut -d' ' -f1)"
+  local last_modified_file
+
+  # Skip if directory doesn't exist
+  if [[ ! -d "${dir}" ]]; then
+    return 1
+  fi
+
+  # Get the most recently modified file in the directory
+  last_modified_file=$(find "${dir}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec stat -c "%Y %n" {} \; | sort -nr | head -n1 | cut -d' ' -f2-)
+
+  # If cache exists and no files were modified since last cache, use cache
+  if [[ -f "${cache_file}" ]] && [[ -n "${last_modified_file}" ]]; then
+    local cache_time file_time
+    cache_time=$(stat -c "%Y" "${cache_file}")
+    file_time=$(stat -c "%Y" "${last_modified_file}")
+
+    if (( cache_time >= file_time )); then
+      log_debug "Using cached checksums for ${dir}"
+      cat "${cache_file}"
+      return 0
+    fi
+  fi
+
+  # Generate new checksums with parallel processing
+  log_debug "Generating fresh checksums for ${dir}"
+  find "${dir}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -print0 | \
+    parallel -0 -j ${PARALLEL_THREADS:-$(nproc)} md5sum | sort | tee "${cache_file}"
+
+  return 0
+}
+
 # file_metadata: Returns an md5 hash for file metadata.
 file_metadata() {
-  find "$1" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec md5sum {} \; | sort | awk '{print $1}'
+  generate_checksums "$1" | awk '{print $1}'
 }
 
 # files_need_processing: Checks if the source files need processing.
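
A short sketch of how generate_checksums can be used to compare two trees, assuming CHECKSUM_DB, PARALLEL_THREADS and the log_* helpers are already set by the calling script; the directory names are placeholders:

    # Illustrative comparison of a source and a destination tree (hashes only,
    # since the paths in the two trees differ).
    src_sums=$(generate_checksums "/srv/downloads/Some.Release" | awk '{print $1}')
    dst_sums=$(generate_checksums "/srv/media/movies/Some.Release" | awk '{print $1}')
    if diff <(echo "${src_sums}") <(echo "${dst_sums}") >/dev/null; then
      echo "Trees match"
    fi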
@@ -67,7 +102,7 @@ files_need_processing() {
 
   log_info "Generating source checksums..."
   local src_checksums
-  src_checksums=$(find "${src}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec md5sum {} \; | sort)
+  src_checksums=$(generate_checksums "${src}")
   log_info "First 5 source checksums:"
   echo "${src_checksums}" | head -n 5 | while read -r line; do
     log_info " ${line}"
@@ -78,7 +113,7 @@ files_need_processing() {
   log_info "Checking against target: ${target}"
   log_info "Generating target checksums..."
   local target_checksums
-  target_checksums=$(find "${target}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec md5sum {} \; | sort)
+  target_checksums=$(generate_checksums "${target}")
   log_info "First 5 target checksums:"
   echo "${target_checksums}" | head -n 5 | while read -r line; do
     log_info " ${line}"
@@ -133,47 +168,107 @@ copy_files() {
   fi
 }
 
+# check_seeding_status: Check if torrent is still seeding
+check_seeding_status() {
+  local id="$1"
+  local status
+
+  # Get torrent status from transmission
+  status=$(transmission-remote --auth "${TRANSMISSION_USER}:${TRANSMISSION_PASS}" --torrent "${id}" --info | grep "State:" | awk '{print $2}')
+
+  # Return 0 if seeding (meaning it's active), 1 if it's not seeding
+  if [[ "$status" == "Seeding" ]]; then
+    log_info "Torrent ${id} is actively seeding"
+    return 0
+  else
+    log_info "Torrent ${id} is not seeding (status: ${status})"
+    return 1
+  fi
+}
+
+# safe_move_files: Either move files or create hardlinks depending on seeding status
+safe_move_files() {
+  local dst="$1" src="$2" id="$3"
+
+  # If torrent is seeding, use hardlinks instead of moving
+  if check_seeding_status "${id}"; then
+    log_info "Using hardlinks for seeding torrent ${id}"
+    if (( PARALLEL_PROCESSING )); then
+      # Using cp with --link to create hardlinks instead of copying
+      retry_command "find \"${src}\" -type f -print0 | parallel -0 -j ${PARALLEL_THREADS:-$(nproc)} cp --link {} \"${dst}/\" 2>/dev/null || cp {} \"${dst}/\"" 3 15
+      # Handle directories separately - we need to create them first
+      retry_command "find \"${src}\" -type d -print0 | parallel -0 -j ${PARALLEL_THREADS:-$(nproc)} mkdir -p \"${dst}/{}\"" 3 15
+    else
+      # Non-parallel hardlink creation
+      retry_command "find \"${src}\" -type f -exec cp --link {} \"${dst}/\" \; 2>/dev/null || cp {} \"${dst}/\"" 3 15
+      retry_command "find \"${src}\" -type d -exec mkdir -p \"${dst}/{}\" \;" 3 15
+    fi
+  else
+    # If not seeding, proceed with normal move operation
+    move_files "${dst}" "${src}"
+  fi
+}
+
 # process_copy: Validates directories, then copies/moves files from source to destination.
 # Optionally verifies integrity after transfer if CHECK_TRANSFER_INTEGRITY is "true".
 process_copy() {
   local id="$1" hash="$2" src="$3" dst="$4"
+  local operation_result=0
+
   if [[ ! -d "${src}" ]]; then
     log_error "Source directory missing: ${src}"
     return 1
   fi
+
+  # Create destination with proper error handling
   if [[ ! -d "${dst}" ]]; then
     log_info "Creating destination directory: ${dst}"
-    mkdir -p "${dst}" || { log_error "Failed to create directory: ${dst}"; return 1; }
+    if ! mkdir -p "${dst}"; then
+      log_error "Failed to create directory: ${dst}"
+      return 1
+    fi
     chmod 775 "${dst}"
     chown ${TORRENT_USER:-debian-transmission}:${TORRENT_GROUP:-debian-transmission} "${dst}"
   fi
+
   if [[ ! -w "${dst}" ]]; then
     log_error "No write permissions for: ${dst}"
     return 1
   fi
+
   if (( DRY_RUN )); then
     log_info "[DRY RUN] Would process torrent ${id}:"
     log_info " - Copy files from ${src} to ${dst}"
     log_info " - File count: $(find "${src}" -maxdepth 1 -type f | wc -l)"
-    return
+    return 0
   fi
-  handle_archives "${src}" "${dst}"
+
+  # Extract archives first
+  if ! handle_archives "${src}" "${dst}"; then
+    log_warn "Archive extraction had issues for ${src}, continuing with regular files"
+  fi
+
+  # Process files atomically
   case "${COPY_MODE}" in
     move)
       log_info "Moving files from ${src} to ${dst}"
-      move_files "${dst}" "${src}"
+      safe_move_files "${dst}" "${src}" "${id}"
+      operation_result=$?
       ;;
     copy)
       log_info "Copying files from ${src} to ${dst}"
       copy_files "${dst}" "${src}"
+      operation_result=$?
       ;;
   esac
-  if [ $? -eq 0 ]; then
+
+  if [[ ${operation_result} -eq 0 ]]; then
     if [[ "${CHECK_TRANSFER_INTEGRITY}" == "true" ]]; then
       log_info "Verifying integrity of transferred files..."
       local src_checksum target_checksum
-      src_checksum=$(find "${src}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec md5sum {} \; | sort)
-      target_checksum=$(find "${dst}" -type f ! \( -iname "*.nfo" -o -iname "*.sfv" \) -exec md5sum {} \; | sort)
+      src_checksum=$(generate_checksums "${src}")
+      target_checksum=$(generate_checksums "${dst}")
+
       if diff <(echo "${src_checksum}") <(echo "${target_checksum}") >/dev/null; then
         log_info "Integrity check passed."
       else
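
A minimal sketch of the seeding-aware move in isolation; the torrent id and directories are placeholders, and TRANSMISSION_USER/TRANSMISSION_PASS are assumed to be set by the configuration:

    # Illustrative only: hardlink while torrent 42 is still seeding, move otherwise.
    safe_move_files "/srv/media/tv/Some.Show.S01" \
                    "/var/lib/transmission-daemon/downloads/Some.Show.S01" \
                    42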
@@ -181,11 +276,13 @@ process_copy() {
         return 1
       fi
     fi
+
     log_info "Transfer completed successfully"
     mark_processed "${hash}"
   else
     log_error "Transfer failed for ${src}"
     return 1
   fi
+
   return 0
 }