massive improvement
This commit is contained in:
@@ -4,9 +4,38 @@
|
||||
# Configuration variables with defaults
|
||||
INSTALL_DIR="/opt/transmission-rss-manager"
|
||||
SERVICE_NAME="transmission-rss-manager"
|
||||
USER=$(logname || echo $SUDO_USER)
|
||||
PORT=3000
|
||||
|
||||
# Get default user safely - avoid using root
|
||||
get_default_user() {
|
||||
local default_user
|
||||
|
||||
# Try logname first to get the user who invoked sudo
|
||||
if command -v logname &> /dev/null; then
|
||||
default_user=$(logname 2>/dev/null)
|
||||
fi
|
||||
|
||||
# If logname failed, try SUDO_USER
|
||||
if [ -z "$default_user" ] && [ -n "$SUDO_USER" ]; then
|
||||
default_user="$SUDO_USER"
|
||||
fi
|
||||
|
||||
# Fallback to current user if both methods failed
|
||||
if [ -z "$default_user" ]; then
|
||||
default_user="$(whoami)"
|
||||
fi
|
||||
|
||||
# Ensure the user is not root
|
||||
if [ "$default_user" = "root" ]; then
|
||||
echo "nobody"
|
||||
else
|
||||
echo "$default_user"
|
||||
fi
|
||||
}
|
||||
|
||||
# Initialize default user
|
||||
USER=$(get_default_user)
|
||||
|
||||
# Transmission configuration variables
|
||||
TRANSMISSION_REMOTE=false
|
||||
TRANSMISSION_HOST="localhost"
|
||||
@@ -21,43 +50,124 @@ TRANSMISSION_DIR_MAPPING="{}"
|
||||
MEDIA_DIR="/mnt/media"
|
||||
ENABLE_BOOK_SORTING=true
|
||||
|
||||
# Helper function to validate port number
|
||||
validate_port() {
|
||||
local port="$1"
|
||||
if [[ "$port" =~ ^[0-9]+$ ]] && [ "$port" -ge 1 ] && [ "$port" -le 65535 ]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Helper function to validate URL hostname
|
||||
validate_hostname() {
|
||||
local hostname="$1"
|
||||
if [[ "$hostname" =~ ^[a-zA-Z0-9]([a-zA-Z0-9\-\.]+[a-zA-Z0-9])?$ ]]; then
|
||||
return 0
|
||||
elif [[ "$hostname" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function gather_configuration() {
|
||||
log "INFO" "Starting configuration gathering"
|
||||
echo -e "${BOLD}Installation Configuration:${NC}"
|
||||
echo -e "Please provide the following configuration parameters:"
|
||||
echo
|
||||
|
||||
read -p "Installation directory [$INSTALL_DIR]: " input_install_dir
|
||||
INSTALL_DIR=${input_install_dir:-$INSTALL_DIR}
|
||||
if [ -n "$input_install_dir" ]; then
|
||||
# Validate installation directory
|
||||
if [[ ! "$input_install_dir" =~ ^/ ]]; then
|
||||
log "WARN" "Installation directory must be an absolute path. Using default."
|
||||
else
|
||||
INSTALL_DIR="$input_install_dir"
|
||||
fi
|
||||
fi
|
||||
|
||||
read -p "Web interface port [$PORT]: " input_port
|
||||
PORT=${input_port:-$PORT}
|
||||
# Get and validate port
|
||||
while true; do
|
||||
read -p "Web interface port [$PORT]: " input_port
|
||||
if [ -z "$input_port" ]; then
|
||||
break
|
||||
elif validate_port "$input_port"; then
|
||||
PORT="$input_port"
|
||||
break
|
||||
else
|
||||
log "WARN" "Invalid port number. Port must be between 1 and 65535."
|
||||
fi
|
||||
done
|
||||
|
||||
# Get user
|
||||
read -p "Run as user [$USER]: " input_user
|
||||
USER=${input_user:-$USER}
|
||||
if [ -n "$input_user" ]; then
|
||||
# Check if user exists
|
||||
if id "$input_user" &>/dev/null; then
|
||||
USER="$input_user"
|
||||
else
|
||||
log "WARN" "User $input_user does not exist. Using $USER instead."
|
||||
fi
|
||||
fi
|
||||
|
||||
echo
|
||||
echo -e "${BOLD}Transmission Configuration:${NC}"
|
||||
echo -e "Configure connection to your Transmission client:"
|
||||
echo
|
||||
|
||||
# Ask if Transmission is remote
|
||||
read -p "Is Transmission running on a remote server? (y/n) [n]: " input_remote
|
||||
if [[ $input_remote =~ ^[Yy]$ ]]; then
|
||||
TRANSMISSION_REMOTE=true
|
||||
|
||||
read -p "Remote Transmission host [localhost]: " input_trans_host
|
||||
TRANSMISSION_HOST=${input_trans_host:-$TRANSMISSION_HOST}
|
||||
# Get and validate hostname
|
||||
while true; do
|
||||
read -p "Remote Transmission host [localhost]: " input_trans_host
|
||||
if [ -z "$input_trans_host" ]; then
|
||||
break
|
||||
elif validate_hostname "$input_trans_host"; then
|
||||
TRANSMISSION_HOST="$input_trans_host"
|
||||
break
|
||||
else
|
||||
log "WARN" "Invalid hostname format."
|
||||
fi
|
||||
done
|
||||
|
||||
read -p "Remote Transmission port [9091]: " input_trans_port
|
||||
TRANSMISSION_PORT=${input_trans_port:-$TRANSMISSION_PORT}
|
||||
# Get and validate port
|
||||
while true; do
|
||||
read -p "Remote Transmission port [9091]: " input_trans_port
|
||||
if [ -z "$input_trans_port" ]; then
|
||||
break
|
||||
elif validate_port "$input_trans_port"; then
|
||||
TRANSMISSION_PORT="$input_trans_port"
|
||||
break
|
||||
else
|
||||
log "WARN" "Invalid port number. Port must be between 1 and 65535."
|
||||
fi
|
||||
done
|
||||
|
||||
# Get credentials
|
||||
read -p "Remote Transmission username []: " input_trans_user
|
||||
TRANSMISSION_USER=${input_trans_user:-$TRANSMISSION_USER}
|
||||
|
||||
read -p "Remote Transmission password []: " input_trans_pass
|
||||
TRANSMISSION_PASS=${input_trans_pass:-$TRANSMISSION_PASS}
|
||||
# Use read -s for password to avoid showing it on screen
|
||||
read -s -p "Remote Transmission password []: " input_trans_pass
|
||||
echo # Add a newline after the password input
|
||||
if [ -n "$input_trans_pass" ]; then
|
||||
# TODO: In a production environment, consider encrypting this password
|
||||
TRANSMISSION_PASS="$input_trans_pass"
|
||||
fi
|
||||
|
||||
read -p "Remote Transmission RPC path [/transmission/rpc]: " input_trans_path
|
||||
TRANSMISSION_RPC_PATH=${input_trans_path:-$TRANSMISSION_RPC_PATH}
|
||||
if [ -n "$input_trans_path" ]; then
|
||||
# Ensure path starts with / for consistency
|
||||
if [[ ! "$input_trans_path" =~ ^/ ]]; then
|
||||
input_trans_path="/$input_trans_path"
|
||||
fi
|
||||
TRANSMISSION_RPC_PATH="$input_trans_path"
|
||||
fi
|
||||
|
||||
# Configure directory mapping for remote setup
|
||||
echo
|
||||
@@ -74,17 +184,20 @@ function gather_configuration() {
|
||||
read -p "Local directory that corresponds to the remote download directory: " LOCAL_DOWNLOAD_DIR
|
||||
LOCAL_DOWNLOAD_DIR=${LOCAL_DOWNLOAD_DIR:-"/mnt/transmission-downloads"}
|
||||
|
||||
# Create mapping JSON
|
||||
TRANSMISSION_DIR_MAPPING=$(cat <<EOF
|
||||
{
|
||||
"$REMOTE_DOWNLOAD_DIR": "$LOCAL_DOWNLOAD_DIR"
|
||||
}
|
||||
EOF
|
||||
)
|
||||
# Create mapping JSON - use proper JSON escaping for directory paths
|
||||
REMOTE_DOWNLOAD_DIR_ESCAPED=$(echo "$REMOTE_DOWNLOAD_DIR" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
|
||||
LOCAL_DOWNLOAD_DIR_ESCAPED=$(echo "$LOCAL_DOWNLOAD_DIR" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
|
||||
|
||||
TRANSMISSION_DIR_MAPPING="{\"$REMOTE_DOWNLOAD_DIR_ESCAPED\": \"$LOCAL_DOWNLOAD_DIR_ESCAPED\"}"
|
||||
|
||||
# Create the local directory
|
||||
mkdir -p "$LOCAL_DOWNLOAD_DIR"
|
||||
chown -R $USER:$USER "$LOCAL_DOWNLOAD_DIR"
|
||||
if ! mkdir -p "$LOCAL_DOWNLOAD_DIR"; then
|
||||
log "ERROR" "Failed to create local download directory: $LOCAL_DOWNLOAD_DIR"
|
||||
else
|
||||
if ! chown -R "$USER:$USER" "$LOCAL_DOWNLOAD_DIR"; then
|
||||
log "ERROR" "Failed to set permissions on local download directory: $LOCAL_DOWNLOAD_DIR"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ask if want to add more mappings
|
||||
while true; do
|
||||
@@ -97,14 +210,23 @@ EOF
|
||||
read -p "Corresponding local directory path: " local_dir
|
||||
|
||||
if [ -n "$remote_dir" ] && [ -n "$local_dir" ]; then
|
||||
# Update mapping JSON (remove the last "}" and add the new mapping)
|
||||
TRANSMISSION_DIR_MAPPING="${TRANSMISSION_DIR_MAPPING%\}}, \"$remote_dir\": \"$local_dir\" }"
|
||||
# Escape directory paths for JSON
|
||||
remote_dir_escaped=$(echo "$remote_dir" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
|
||||
local_dir_escaped=$(echo "$local_dir" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
|
||||
|
||||
# Update mapping JSON (proper JSON manipulation)
|
||||
# Remove the closing brace, add a comma and the new mapping, then close with brace
|
||||
TRANSMISSION_DIR_MAPPING="${TRANSMISSION_DIR_MAPPING%\}}, \"$remote_dir_escaped\": \"$local_dir_escaped\"}"
|
||||
|
||||
# Create the local directory
|
||||
mkdir -p "$local_dir"
|
||||
chown -R $USER:$USER "$local_dir"
|
||||
|
||||
echo -e "${GREEN}Mapping added: $remote_dir → $local_dir${NC}"
|
||||
if ! mkdir -p "$local_dir"; then
|
||||
log "ERROR" "Failed to create directory: $local_dir"
|
||||
else
|
||||
if ! chown -R "$USER:$USER" "$local_dir"; then
|
||||
log "WARN" "Failed to set permissions on directory: $local_dir"
|
||||
fi
|
||||
log "INFO" "Mapping added: $remote_dir → $local_dir"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
@@ -112,25 +234,112 @@ EOF
|
||||
TRANSMISSION_DOWNLOAD_DIR=$REMOTE_DOWNLOAD_DIR
|
||||
else
|
||||
read -p "Transmission download directory [/var/lib/transmission-daemon/downloads]: " input_trans_dir
|
||||
TRANSMISSION_DOWNLOAD_DIR=${input_trans_dir:-$TRANSMISSION_DOWNLOAD_DIR}
|
||||
if [ -n "$input_trans_dir" ]; then
|
||||
if [[ ! "$input_trans_dir" =~ ^/ ]]; then
|
||||
log "WARN" "Download directory must be an absolute path. Using default."
|
||||
else
|
||||
TRANSMISSION_DOWNLOAD_DIR="$input_trans_dir"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo
|
||||
echo -e "${BOLD}Media Destination Configuration:${NC}"
|
||||
|
||||
read -p "Media destination base directory [/mnt/media]: " input_media_dir
|
||||
MEDIA_DIR=${input_media_dir:-$MEDIA_DIR}
|
||||
if [ -n "$input_media_dir" ]; then
|
||||
if [[ ! "$input_media_dir" =~ ^/ ]]; then
|
||||
log "WARN" "Media directory must be an absolute path. Using default."
|
||||
else
|
||||
MEDIA_DIR="$input_media_dir"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ask about enabling book/magazine sorting
|
||||
echo
|
||||
echo -e "${BOLD}Content Type Configuration:${NC}"
|
||||
read -p "Enable book and magazine sorting? (y/n) [y]: " input_book_sorting
|
||||
ENABLE_BOOK_SORTING=true
|
||||
if [[ $input_book_sorting =~ ^[Nn]$ ]]; then
|
||||
ENABLE_BOOK_SORTING=false
|
||||
else
|
||||
ENABLE_BOOK_SORTING=true
|
||||
fi
|
||||
|
||||
# Security configuration
|
||||
echo
|
||||
echo -e "${BOLD}Security Configuration:${NC}"
|
||||
|
||||
# Ask about enabling authentication
|
||||
read -p "Enable authentication? (y/n) [n]: " input_auth_enabled
|
||||
AUTH_ENABLED=false
|
||||
ADMIN_USERNAME=""
|
||||
ADMIN_PASSWORD=""
|
||||
|
||||
if [[ $input_auth_enabled =~ ^[Yy]$ ]]; then
|
||||
AUTH_ENABLED=true
|
||||
|
||||
# Get admin username and password
|
||||
read -p "Admin username [admin]: " input_admin_username
|
||||
ADMIN_USERNAME=${input_admin_username:-"admin"}
|
||||
|
||||
# Use read -s for password to avoid showing it on screen
|
||||
read -s -p "Admin password: " input_admin_password
|
||||
echo # Add a newline after the password input
|
||||
|
||||
if [ -z "$input_admin_password" ]; then
|
||||
# Generate a random password if none provided
|
||||
ADMIN_PASSWORD=$(openssl rand -base64 12)
|
||||
echo -e "${YELLOW}Generated random admin password: $ADMIN_PASSWORD${NC}"
|
||||
echo -e "${YELLOW}Please save this password somewhere safe!${NC}"
|
||||
else
|
||||
ADMIN_PASSWORD="$input_admin_password"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ask about enabling HTTPS
|
||||
read -p "Enable HTTPS? (requires SSL certificate) (y/n) [n]: " input_https_enabled
|
||||
HTTPS_ENABLED=false
|
||||
SSL_CERT_PATH=""
|
||||
SSL_KEY_PATH=""
|
||||
|
||||
if [[ $input_https_enabled =~ ^[Yy]$ ]]; then
|
||||
HTTPS_ENABLED=true
|
||||
|
||||
# Get SSL certificate paths
|
||||
read -p "SSL certificate path: " input_ssl_cert_path
|
||||
if [ -n "$input_ssl_cert_path" ]; then
|
||||
# Check if file exists
|
||||
if [ -f "$input_ssl_cert_path" ]; then
|
||||
SSL_CERT_PATH="$input_ssl_cert_path"
|
||||
else
|
||||
log "WARN" "SSL certificate file not found. HTTPS will be disabled."
|
||||
HTTPS_ENABLED=false
|
||||
fi
|
||||
else
|
||||
log "WARN" "SSL certificate path not provided. HTTPS will be disabled."
|
||||
HTTPS_ENABLED=false
|
||||
fi
|
||||
|
||||
# Only ask for key if cert was found
|
||||
if [ "$HTTPS_ENABLED" = true ]; then
|
||||
read -p "SSL key path: " input_ssl_key_path
|
||||
if [ -n "$input_ssl_key_path" ]; then
|
||||
# Check if file exists
|
||||
if [ -f "$input_ssl_key_path" ]; then
|
||||
SSL_KEY_PATH="$input_ssl_key_path"
|
||||
else
|
||||
log "WARN" "SSL key file not found. HTTPS will be disabled."
|
||||
HTTPS_ENABLED=false
|
||||
fi
|
||||
else
|
||||
log "WARN" "SSL key path not provided. HTTPS will be disabled."
|
||||
HTTPS_ENABLED=false
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo
|
||||
log "INFO" "Configuration gathering complete"
|
||||
echo -e "${GREEN}Configuration complete!${NC}"
|
||||
echo
|
||||
}
|
||||
|
||||
@@ -2,27 +2,53 @@
|
||||
# Dependencies module for Transmission RSS Manager Installation
|
||||
|
||||
function install_dependencies() {
|
||||
echo -e "${YELLOW}Installing dependencies...${NC}"
|
||||
log "INFO" "Installing dependencies..."
|
||||
|
||||
# Update package index
|
||||
apt-get update
|
||||
|
||||
# Install Node.js and npm if not already installed
|
||||
if ! command_exists node; then
|
||||
echo "Installing Node.js and npm..."
|
||||
apt-get install -y ca-certificates curl gnupg
|
||||
mkdir -p /etc/apt/keyrings
|
||||
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
|
||||
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" > /etc/apt/sources.list.d/nodesource.list
|
||||
# Check for package manager
|
||||
if command -v apt-get &> /dev/null; then
|
||||
# Update package index
|
||||
apt-get update
|
||||
apt-get install -y nodejs
|
||||
else
|
||||
echo "Node.js is already installed."
|
||||
fi
|
||||
|
||||
# Install additional dependencies
|
||||
echo "Installing additional dependencies..."
|
||||
apt-get install -y unrar unzip p7zip-full nginx
|
||||
# Install Node.js and npm if not already installed
|
||||
if ! command_exists node; then
|
||||
log "INFO" "Installing Node.js and npm..."
|
||||
apt-get install -y ca-certificates curl gnupg
|
||||
mkdir -p /etc/apt/keyrings
|
||||
|
||||
# Check if download succeeds
|
||||
if ! curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg; then
|
||||
log "ERROR" "Failed to download Node.js GPG key"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" > /etc/apt/sources.list.d/nodesource.list
|
||||
|
||||
# Update again after adding repo
|
||||
apt-get update
|
||||
|
||||
# Install nodejs
|
||||
if ! apt-get install -y nodejs; then
|
||||
log "ERROR" "Failed to install Node.js"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
log "INFO" "Node.js is already installed."
|
||||
fi
|
||||
|
||||
# Install additional dependencies
|
||||
log "INFO" "Installing additional dependencies..."
|
||||
apt-get install -y unrar unzip p7zip-full nginx
|
||||
else
|
||||
log "ERROR" "This installer requires apt-get package manager"
|
||||
log "INFO" "Please install the following dependencies manually:"
|
||||
log "INFO" "- Node.js (v18.x)"
|
||||
log "INFO" "- npm"
|
||||
log "INFO" "- unrar"
|
||||
log "INFO" "- unzip"
|
||||
log "INFO" "- p7zip-full"
|
||||
log "INFO" "- nginx"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if all dependencies were installed successfully
|
||||
local dependencies=("node" "npm" "unrar" "unzip" "7z" "nginx")
|
||||
@@ -35,26 +61,49 @@ function install_dependencies() {
|
||||
done
|
||||
|
||||
if [ ${#missing_deps[@]} -eq 0 ]; then
|
||||
echo -e "${GREEN}All dependencies installed successfully.${NC}"
|
||||
log "INFO" "All dependencies installed successfully."
|
||||
else
|
||||
echo -e "${RED}Failed to install some dependencies: ${missing_deps[*]}${NC}"
|
||||
echo -e "${YELLOW}Please install them manually and rerun this script.${NC}"
|
||||
log "ERROR" "Failed to install some dependencies: ${missing_deps[*]}"
|
||||
log "WARN" "Please install them manually and rerun this script."
|
||||
|
||||
# More helpful information based on which deps are missing
|
||||
if [[ " ${missing_deps[*]} " =~ " node " ]]; then
|
||||
log "INFO" "To install Node.js manually, visit: https://nodejs.org/en/download/"
|
||||
fi
|
||||
|
||||
if [[ " ${missing_deps[*]} " =~ " nginx " ]]; then
|
||||
log "INFO" "To install nginx manually: sudo apt-get install nginx"
|
||||
fi
|
||||
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function create_directories() {
|
||||
echo -e "${YELLOW}Creating installation directories...${NC}"
|
||||
log "INFO" "Creating installation directories..."
|
||||
|
||||
# Create main installation directory
|
||||
mkdir -p $INSTALL_DIR
|
||||
mkdir -p $INSTALL_DIR/logs
|
||||
mkdir -p $INSTALL_DIR/public/js
|
||||
mkdir -p $INSTALL_DIR/public/css
|
||||
mkdir -p $INSTALL_DIR/modules
|
||||
# Check if INSTALL_DIR is defined
|
||||
if [ -z "$INSTALL_DIR" ]; then
|
||||
log "ERROR" "INSTALL_DIR is not defined"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create directory for file storage
|
||||
mkdir -p $INSTALL_DIR/data
|
||||
# Create directories and check for errors
|
||||
DIRECTORIES=(
|
||||
"$INSTALL_DIR"
|
||||
"$INSTALL_DIR/logs"
|
||||
"$INSTALL_DIR/public/js"
|
||||
"$INSTALL_DIR/public/css"
|
||||
"$INSTALL_DIR/modules"
|
||||
"$INSTALL_DIR/data"
|
||||
)
|
||||
|
||||
echo -e "${GREEN}Directories created successfully.${NC}"
|
||||
for dir in "${DIRECTORIES[@]}"; do
|
||||
if ! mkdir -p "$dir"; then
|
||||
log "ERROR" "Failed to create directory: $dir"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
log "INFO" "Directories created successfully."
|
||||
}
|
||||
|
||||
@@ -18,11 +18,14 @@ function create_config_files() {
|
||||
"dependencies": {
|
||||
"express": "^4.18.2",
|
||||
"body-parser": "^1.20.2",
|
||||
"transmission": "^0.4.10",
|
||||
"transmission-promise": "^1.1.5",
|
||||
"adm-zip": "^0.5.10",
|
||||
"node-fetch": "^2.6.9",
|
||||
"xml2js": "^0.5.0",
|
||||
"cors": "^2.8.5"
|
||||
"cors": "^2.8.5",
|
||||
"bcrypt": "^5.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"morgan": "^1.10.0"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
517
modules/post-processor.js
Normal file
517
modules/post-processor.js
Normal file
@@ -0,0 +1,517 @@
|
||||
/**
|
||||
* Post-Processor Module
|
||||
* Handles the organization and processing of completed downloads
|
||||
*/
|
||||
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const exec = util.promisify(require('child_process').exec);
|
||||
const crypto = require('crypto');
|
||||
|
||||
class PostProcessor {
|
||||
constructor(config, transmissionClient) {
|
||||
if (!config) {
|
||||
throw new Error('Configuration is required for Post Processor');
|
||||
}
|
||||
|
||||
if (!transmissionClient) {
|
||||
throw new Error('Transmission client is required for Post Processor');
|
||||
}
|
||||
|
||||
this.config = config;
|
||||
this.transmissionClient = transmissionClient;
|
||||
this.isProcessing = false;
|
||||
this.processingQueue = [];
|
||||
this.processIntervalId = null;
|
||||
this.checkIntervalSeconds = config.seedingRequirements?.checkIntervalSeconds || 300;
|
||||
this.destinationPaths = config.destinationPaths || {};
|
||||
this.processingOptions = config.processingOptions || {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the post-processor
|
||||
* @returns {boolean} Whether the processor started successfully
|
||||
*/
|
||||
start() {
|
||||
if (this.processIntervalId) {
|
||||
console.log('Post-processor is already running');
|
||||
return false;
|
||||
}
|
||||
|
||||
console.log(`Starting post-processor, check interval: ${this.checkIntervalSeconds} seconds`);
|
||||
|
||||
// Run immediately
|
||||
this.checkCompletedDownloads();
|
||||
|
||||
// Then set up interval
|
||||
this.processIntervalId = setInterval(() => {
|
||||
this.checkCompletedDownloads();
|
||||
}, this.checkIntervalSeconds * 1000);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the post-processor
|
||||
* @returns {boolean} Whether the processor stopped successfully
|
||||
*/
|
||||
stop() {
|
||||
if (!this.processIntervalId) {
|
||||
console.log('Post-processor is not running');
|
||||
return false;
|
||||
}
|
||||
|
||||
clearInterval(this.processIntervalId);
|
||||
this.processIntervalId = null;
|
||||
console.log('Post-processor stopped');
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for completed downloads that meet seeding requirements
|
||||
*/
|
||||
async checkCompletedDownloads() {
|
||||
if (this.isProcessing) {
|
||||
console.log('Post-processor is already running a processing cycle, skipping');
|
||||
return;
|
||||
}
|
||||
|
||||
this.isProcessing = true;
|
||||
|
||||
try {
|
||||
console.log('Checking for completed downloads...');
|
||||
|
||||
// Get all torrents
|
||||
const torrentsResult = await this.transmissionClient.getTorrents();
|
||||
|
||||
if (!torrentsResult.success) {
|
||||
console.error('Failed to get torrents from Transmission:', torrentsResult.error);
|
||||
this.isProcessing = false;
|
||||
return;
|
||||
}
|
||||
|
||||
const torrents = torrentsResult.torrents;
|
||||
|
||||
// Filter completed torrents
|
||||
const completedTorrents = torrents.filter(torrent =>
|
||||
torrent.percentDone === 1 && // Fully downloaded
|
||||
torrent.status !== 0 && // Not stopped
|
||||
torrent.doneDate > 0 // Has a completion date
|
||||
);
|
||||
|
||||
console.log(`Found ${completedTorrents.length} completed torrents`);
|
||||
|
||||
// Check each completed torrent against requirements
|
||||
for (const torrent of completedTorrents) {
|
||||
// Skip already processed torrents
|
||||
if (this.processingQueue.includes(torrent.id)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if it meets seeding requirements
|
||||
const reqResult = await this.transmissionClient.verifyTorrentSeedingRequirements(
|
||||
torrent.id,
|
||||
this.config.seedingRequirements || {}
|
||||
);
|
||||
|
||||
if (!reqResult.success) {
|
||||
console.error(`Error checking requirements for ${torrent.name}:`, reqResult.error);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (reqResult.requirementsMet) {
|
||||
console.log(`Torrent ${torrent.name} has met seeding requirements, queuing for processing`);
|
||||
|
||||
// Add to processing queue
|
||||
this.processingQueue.push(torrent.id);
|
||||
|
||||
// Process the torrent
|
||||
await this.processTorrent(reqResult.torrent);
|
||||
|
||||
// Remove from queue after processing
|
||||
this.processingQueue = this.processingQueue.filter(id => id !== torrent.id);
|
||||
} else {
|
||||
const { currentRatio, currentSeedingTimeMinutes } = reqResult;
|
||||
const { minRatio, minTimeMinutes } = this.config.seedingRequirements || { minRatio: 1.0, minTimeMinutes: 60 };
|
||||
|
||||
console.log(`Torrent ${torrent.name} has not met seeding requirements yet:`);
|
||||
console.log(`- Ratio: ${currentRatio.toFixed(2)} / ${minRatio} (${reqResult.ratioMet ? 'Met' : 'Not Met'})`);
|
||||
console.log(`- Time: ${Math.floor(currentSeedingTimeMinutes)} / ${minTimeMinutes} minutes (${reqResult.timeMet ? 'Met' : 'Not Met'})`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error in post-processor cycle:', error);
|
||||
} finally {
|
||||
this.isProcessing = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a completed torrent
|
||||
* @param {Object} torrent - Torrent object
|
||||
*/
|
||||
async processTorrent(torrent) {
|
||||
console.log(`Processing torrent: ${torrent.name}`);
|
||||
|
||||
try {
|
||||
// Get detailed info with file analysis
|
||||
const details = await this.transmissionClient.getTorrentDetails(torrent.id);
|
||||
|
||||
if (!details.success) {
|
||||
console.error(`Failed to get details for torrent ${torrent.name}:`, details.error);
|
||||
return;
|
||||
}
|
||||
|
||||
torrent = details.torrent;
|
||||
const mediaInfo = torrent.mediaInfo || { type: 'unknown' };
|
||||
|
||||
console.log(`Detected media type: ${mediaInfo.type}`);
|
||||
|
||||
// Determine destination path based on content type
|
||||
let destinationDir = this.getDestinationPath(mediaInfo.type);
|
||||
|
||||
if (!destinationDir) {
|
||||
console.error(`No destination directory configured for media type: ${mediaInfo.type}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create the destination directory if it doesn't exist
|
||||
await this.createDirectoryIfNotExists(destinationDir);
|
||||
|
||||
// If we're creating category folders, add category-specific subdirectory
|
||||
if (this.processingOptions.createCategoryFolders) {
|
||||
const categoryFolder = this.getCategoryFolder(torrent, mediaInfo);
|
||||
if (categoryFolder) {
|
||||
destinationDir = path.join(destinationDir, categoryFolder);
|
||||
await this.createDirectoryIfNotExists(destinationDir);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Processing to destination: ${destinationDir}`);
|
||||
|
||||
// Process files based on content type
|
||||
if (mediaInfo.type === 'archive' && this.processingOptions.extractArchives) {
|
||||
await this.processArchives(torrent, mediaInfo, destinationDir);
|
||||
} else {
|
||||
await this.processStandardFiles(torrent, mediaInfo, destinationDir);
|
||||
}
|
||||
|
||||
console.log(`Finished processing torrent: ${torrent.name}`);
|
||||
} catch (error) {
|
||||
console.error(`Error processing torrent ${torrent.name}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the appropriate destination path for a media type
|
||||
* @param {string} mediaType - Type of media
|
||||
* @returns {string} Destination path
|
||||
*/
|
||||
getDestinationPath(mediaType) {
|
||||
switch (mediaType) {
|
||||
case 'movie':
|
||||
return this.destinationPaths.movies;
|
||||
case 'tvshow':
|
||||
return this.destinationPaths.tvShows;
|
||||
case 'audio':
|
||||
return this.destinationPaths.music;
|
||||
case 'book':
|
||||
return this.destinationPaths.books;
|
||||
case 'magazine':
|
||||
return this.destinationPaths.magazines;
|
||||
default:
|
||||
return this.destinationPaths.software;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a category folder name based on the content
|
||||
* @param {Object} torrent - Torrent object
|
||||
* @param {Object} mediaInfo - Media information
|
||||
* @returns {string} Folder name
|
||||
*/
|
||||
getCategoryFolder(torrent, mediaInfo) {
|
||||
const name = torrent.name;
|
||||
|
||||
switch (mediaInfo.type) {
|
||||
case 'movie': {
|
||||
// For movies, use the first letter of the title
|
||||
const firstLetter = name.replace(/^[^a-zA-Z0-9]+/, '').charAt(0).toUpperCase();
|
||||
return firstLetter || '#';
|
||||
}
|
||||
case 'tvshow': {
|
||||
// For TV shows, extract the show name
|
||||
const showName = name.replace(/[sS]\d{2}[eE]\d{2}.*$/, '').trim();
|
||||
return showName;
|
||||
}
|
||||
case 'audio': {
|
||||
// For music, try to extract artist name
|
||||
const artistMatch = name.match(/^(.*?)\s*-\s*/);
|
||||
return artistMatch ? artistMatch[1].trim() : 'Unsorted';
|
||||
}
|
||||
case 'book': {
|
||||
// For books, use the first letter of title or author names
|
||||
const firstLetter = name.replace(/^[^a-zA-Z0-9]+/, '').charAt(0).toUpperCase();
|
||||
return firstLetter || '#';
|
||||
}
|
||||
case 'magazine': {
|
||||
// For magazines, use the magazine name if possible
|
||||
const magazineMatch = name.match(/^(.*?)\s*(?:Issue|Vol|Volume)/i);
|
||||
return magazineMatch ? magazineMatch[1].trim() : 'Unsorted';
|
||||
}
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process archive files (extract them)
|
||||
* @param {Object} torrent - Torrent object
|
||||
* @param {Object} mediaInfo - Media information
|
||||
* @param {string} destinationDir - Destination directory
|
||||
*/
|
||||
async processArchives(torrent, mediaInfo, destinationDir) {
|
||||
console.log(`Processing archives in ${torrent.name}`);
|
||||
|
||||
const archiveFiles = mediaInfo.archiveFiles;
|
||||
const torrentDir = torrent.downloadDir;
|
||||
|
||||
for (const file of archiveFiles) {
|
||||
const filePath = path.join(torrentDir, file.name);
|
||||
|
||||
try {
|
||||
// Create a unique extraction directory
|
||||
const extractionDirName = path.basename(file.name, path.extname(file.name));
|
||||
const extractionDir = path.join(destinationDir, extractionDirName);
|
||||
|
||||
await this.createDirectoryIfNotExists(extractionDir);
|
||||
|
||||
console.log(`Extracting ${filePath} to ${extractionDir}`);
|
||||
|
||||
// Extract the archive based on type
|
||||
if (/\.zip$/i.test(file.name)) {
|
||||
await exec(`unzip -o "${filePath}" -d "${extractionDir}"`);
|
||||
} else if (/\.rar$/i.test(file.name)) {
|
||||
await exec(`unrar x -o+ "${filePath}" "${extractionDir}"`);
|
||||
} else if (/\.7z$/i.test(file.name)) {
|
||||
await exec(`7z x "${filePath}" -o"${extractionDir}"`);
|
||||
} else if (/\.tar(\.(gz|bz2|xz))?$/i.test(file.name)) {
|
||||
await exec(`tar -xf "${filePath}" -C "${extractionDir}"`);
|
||||
} else {
|
||||
console.log(`Unknown archive format for ${file.name}, skipping extraction`);
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(`Successfully extracted ${file.name}`);
|
||||
|
||||
// Delete archive if option is enabled
|
||||
if (this.processingOptions.deleteArchives) {
|
||||
try {
|
||||
console.log(`Deleting archive after extraction: ${filePath}`);
|
||||
await fs.unlink(filePath);
|
||||
} catch (deleteError) {
|
||||
console.error(`Failed to delete archive ${filePath}:`, deleteError);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error extracting archive ${filePath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process standard (non-archive) files
|
||||
* @param {Object} torrent - Torrent object
|
||||
* @param {Object} mediaInfo - Media information
|
||||
* @param {string} destinationDir - Destination directory
|
||||
*/
|
||||
async processStandardFiles(torrent, mediaInfo, destinationDir) {
|
||||
console.log(`Processing standard files in ${torrent.name}`);
|
||||
|
||||
const torrentDir = torrent.downloadDir;
|
||||
const allFiles = [];
|
||||
|
||||
// Collect all files based on media type
|
||||
switch (mediaInfo.type) {
|
||||
case 'movie':
|
||||
case 'tvshow':
|
||||
allFiles.push(...mediaInfo.videoFiles);
|
||||
break;
|
||||
case 'audio':
|
||||
allFiles.push(...mediaInfo.audioFiles);
|
||||
break;
|
||||
case 'book':
|
||||
case 'magazine':
|
||||
allFiles.push(...mediaInfo.documentFiles);
|
||||
break;
|
||||
default:
|
||||
// For unknown/software, add all files except samples if enabled
|
||||
for (const type of Object.keys(mediaInfo)) {
|
||||
if (Array.isArray(mediaInfo[type])) {
|
||||
allFiles.push(...mediaInfo[type]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out sample files if option is enabled
|
||||
let filesToProcess = allFiles;
|
||||
if (this.processingOptions.ignoreSample) {
|
||||
filesToProcess = allFiles.filter(file => !file.isSample);
|
||||
console.log(`Filtered out ${allFiles.length - filesToProcess.length} sample files`);
|
||||
}
|
||||
|
||||
// Process each file
|
||||
for (const file of filesToProcess) {
|
||||
const sourceFilePath = path.join(torrentDir, file.name);
|
||||
let destFileName = file.name;
|
||||
|
||||
// Generate a better filename if rename option is enabled
|
||||
if (this.processingOptions.renameFiles) {
|
||||
destFileName = this.generateBetterFilename(file.name, mediaInfo.type);
|
||||
}
|
||||
|
||||
const destFilePath = path.join(destinationDir, destFileName);
|
||||
|
||||
try {
|
||||
// Check if destination file already exists with the same name
|
||||
const fileExists = await this.fileExists(destFilePath);
|
||||
|
||||
if (fileExists) {
|
||||
if (this.processingOptions.autoReplaceUpgrades) {
|
||||
// Compare file sizes to see if the new one is larger (potentially higher quality)
|
||||
const existingStats = await fs.stat(destFilePath);
|
||||
|
||||
if (file.size > existingStats.size) {
|
||||
console.log(`Replacing existing file with larger version: ${destFilePath}`);
|
||||
await fs.copyFile(sourceFilePath, destFilePath);
|
||||
} else {
|
||||
console.log(`Skipping ${file.name}, existing file is same or better quality`);
|
||||
}
|
||||
} else {
|
||||
// Generate a unique filename
|
||||
const uniqueDestFilePath = this.makeFilenameUnique(destFilePath);
|
||||
console.log(`Copying ${file.name} to ${uniqueDestFilePath}`);
|
||||
await fs.copyFile(sourceFilePath, uniqueDestFilePath);
|
||||
}
|
||||
} else {
|
||||
// File doesn't exist, simple copy
|
||||
console.log(`Copying ${file.name} to ${destFilePath}`);
|
||||
await fs.copyFile(sourceFilePath, destFilePath);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error processing file ${file.name}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a better filename based on content type
|
||||
* @param {string} originalFilename - Original filename
|
||||
* @param {string} mediaType - Media type
|
||||
* @returns {string} Improved filename
|
||||
*/
|
||||
generateBetterFilename(originalFilename, mediaType) {
|
||||
// Get the file extension
|
||||
const ext = path.extname(originalFilename);
|
||||
const basename = path.basename(originalFilename, ext);
|
||||
|
||||
// Clean up common issues in filenames
|
||||
let cleanName = basename
|
||||
.replace(/\[.*?\]|\(.*?\)/g, '') // Remove content in brackets/parentheses
|
||||
.replace(/\._/g, '.') // Remove underscore after dots
|
||||
.replace(/\./g, ' ') // Replace dots with spaces
|
||||
.replace(/_/g, ' ') // Replace underscores with spaces
|
||||
.replace(/\s{2,}/g, ' ') // Replace multiple spaces with a single one
|
||||
.trim();
|
||||
|
||||
// Media type specific formatting
|
||||
switch (mediaType) {
|
||||
case 'movie':
|
||||
// Keep (year) format for movies if present
|
||||
const yearMatch = basename.match(/\(*(19|20)\d{2}\)*$/);
|
||||
if (yearMatch) {
|
||||
const year = yearMatch[0].replace(/[()]/g, '');
|
||||
// Remove any year that might have been part of the clean name already
|
||||
cleanName = cleanName.replace(/(19|20)\d{2}/g, '').trim();
|
||||
// Add the year in a consistent format
|
||||
cleanName = `${cleanName} (${year})`;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'tvshow':
|
||||
// Keep season and episode info for TV shows
|
||||
const episodeMatch = basename.match(/[sS](\d{1,2})[eE](\d{1,2})/);
|
||||
if (episodeMatch) {
|
||||
const seasonNum = parseInt(episodeMatch[1], 10);
|
||||
const episodeNum = parseInt(episodeMatch[2], 10);
|
||||
|
||||
// First, remove any existing season/episode info from clean name
|
||||
cleanName = cleanName.replace(/[sS]\d{1,2}[eE]\d{1,2}/g, '').trim();
|
||||
|
||||
// Add back the season/episode in a consistent format
|
||||
cleanName = `${cleanName} S${seasonNum.toString().padStart(2, '0')}E${episodeNum.toString().padStart(2, '0')}`;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'audio':
|
||||
// Try to organize as "Artist - Title" for music
|
||||
const musicMatch = basename.match(/^(.*?)\s*-\s*(.*?)$/);
|
||||
if (musicMatch && musicMatch[1] && musicMatch[2]) {
|
||||
const artist = musicMatch[1].trim();
|
||||
const title = musicMatch[2].trim();
|
||||
cleanName = `${artist} - ${title}`;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return cleanName + ext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a filename unique by adding a suffix
|
||||
* @param {string} filepath - Original filepath
|
||||
* @returns {string} Unique filepath
|
||||
*/
|
||||
makeFilenameUnique(filepath) {
|
||||
const ext = path.extname(filepath);
|
||||
const basename = path.basename(filepath, ext);
|
||||
const dirname = path.dirname(filepath);
|
||||
|
||||
// Add a timestamp to make it unique
|
||||
const timestamp = new Date().toISOString().replace(/[-:]/g, '').replace('T', '_').substring(0, 15);
|
||||
return path.join(dirname, `${basename}_${timestamp}${ext}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a directory if it doesn't exist
|
||||
* @param {string} dirPath - Directory path
|
||||
*/
|
||||
async createDirectoryIfNotExists(dirPath) {
|
||||
try {
|
||||
await fs.mkdir(dirPath, { recursive: true });
|
||||
} catch (error) {
|
||||
// Ignore error if directory already exists
|
||||
if (error.code !== 'EEXIST') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file exists
|
||||
* @param {string} filePath - File path
|
||||
* @returns {Promise<boolean>} Whether the file exists
|
||||
*/
|
||||
async fileExists(filePath) {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PostProcessor;
|
||||
@@ -1,4 +1,4 @@
|
||||
// rssFeedManager.js
|
||||
// rss-feed-manager.js - Handles RSS feed fetching, parsing, and torrent management
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const fetch = require('node-fetch');
|
||||
@@ -7,13 +7,22 @@ const crypto = require('crypto');
|
||||
|
||||
class RssFeedManager {
|
||||
constructor(config) {
|
||||
if (!config) {
|
||||
throw new Error('Configuration is required');
|
||||
}
|
||||
|
||||
this.config = config;
|
||||
this.feeds = config.feeds || [];
|
||||
this.items = [];
|
||||
this.updateIntervalId = null;
|
||||
this.updateIntervalMinutes = config.updateIntervalMinutes || 60;
|
||||
this.parser = new xml2js.Parser({ explicitArray: false });
|
||||
|
||||
// Ensure dataPath is properly defined
|
||||
this.dataPath = path.join(__dirname, '..', 'data');
|
||||
|
||||
// Maximum items to keep in memory to prevent memory leaks
|
||||
this.maxItemsInMemory = config.maxItemsInMemory || 5000;
|
||||
}
|
||||
|
||||
async start() {
|
||||
@@ -21,15 +30,28 @@ class RssFeedManager {
|
||||
return;
|
||||
}
|
||||
|
||||
// Run update immediately
|
||||
await this.updateAllFeeds();
|
||||
|
||||
// Then set up interval
|
||||
this.updateIntervalId = setInterval(async () => {
|
||||
await this.updateAllFeeds();
|
||||
}, this.updateIntervalMinutes * 60 * 1000);
|
||||
|
||||
console.log(`RSS feed manager started, interval: ${this.updateIntervalMinutes} minutes`);
|
||||
try {
|
||||
// Load existing feeds and items
|
||||
await this.loadFeeds();
|
||||
await this.loadItems();
|
||||
|
||||
// Run update immediately
|
||||
await this.updateAllFeeds().catch(error => {
|
||||
console.error('Error in initial feed update:', error);
|
||||
});
|
||||
|
||||
// Then set up interval
|
||||
this.updateIntervalId = setInterval(async () => {
|
||||
await this.updateAllFeeds().catch(error => {
|
||||
console.error('Error in scheduled feed update:', error);
|
||||
});
|
||||
}, this.updateIntervalMinutes * 60 * 1000);
|
||||
|
||||
console.log(`RSS feed manager started, interval: ${this.updateIntervalMinutes} minutes`);
|
||||
} catch (error) {
|
||||
console.error('Failed to start RSS feed manager:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
stop() {
|
||||
@@ -47,7 +69,19 @@ class RssFeedManager {
|
||||
|
||||
const results = [];
|
||||
|
||||
// Check if feeds array is valid
|
||||
if (!Array.isArray(this.feeds)) {
|
||||
console.error('Feeds is not an array:', this.feeds);
|
||||
this.feeds = [];
|
||||
return results;
|
||||
}
|
||||
|
||||
for (const feed of this.feeds) {
|
||||
if (!feed || !feed.id || !feed.url) {
|
||||
console.error('Invalid feed object:', feed);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await this.updateFeed(feed);
|
||||
results.push({
|
||||
@@ -65,30 +99,65 @@ class RssFeedManager {
|
||||
}
|
||||
}
|
||||
|
||||
// Save updated items
|
||||
await this.saveItems();
|
||||
try {
|
||||
// Save updated items and truncate if necessary
|
||||
this.trimItemsIfNeeded();
|
||||
await this.saveItems();
|
||||
await this.saveFeeds();
|
||||
} catch (error) {
|
||||
console.error('Error saving data after feed update:', error);
|
||||
}
|
||||
|
||||
console.log('RSS feed update completed');
|
||||
return results;
|
||||
}
|
||||
|
||||
// Trim items to prevent memory bloat
|
||||
trimItemsIfNeeded() {
|
||||
if (this.items.length > this.maxItemsInMemory) {
|
||||
console.log(`Trimming items from ${this.items.length} to ${this.maxItemsInMemory}`);
|
||||
|
||||
// Sort by date (newest first) and keep only the newest maxItemsInMemory items
|
||||
this.items.sort((a, b) => new Date(b.added) - new Date(a.added));
|
||||
this.items = this.items.slice(0, this.maxItemsInMemory);
|
||||
}
|
||||
}
|
||||
|
||||
async updateFeed(feed) {
|
||||
console.log(`Updating feed: ${feed.name} (${feed.url})`);
|
||||
if (!feed || !feed.url) {
|
||||
throw new Error('Invalid feed configuration');
|
||||
}
|
||||
|
||||
console.log(`Updating feed: ${feed.name || 'Unnamed'} (${feed.url})`);
|
||||
|
||||
try {
|
||||
const response = await fetch(feed.url);
|
||||
const response = await fetch(feed.url, {
|
||||
timeout: 30000, // 30 second timeout
|
||||
headers: {
|
||||
'User-Agent': 'Transmission-RSS-Manager/1.2.0'
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const xml = await response.text();
|
||||
|
||||
if (!xml || xml.trim() === '') {
|
||||
throw new Error('Empty feed content');
|
||||
}
|
||||
|
||||
const result = await this.parseXml(xml);
|
||||
|
||||
if (!result) {
|
||||
throw new Error('Failed to parse XML feed');
|
||||
}
|
||||
|
||||
const rssItems = this.extractItems(result, feed);
|
||||
const newItems = this.processNewItems(rssItems, feed);
|
||||
|
||||
console.log(`Found ${rssItems.length} items, ${newItems.length} new items in feed: ${feed.name}`);
|
||||
console.log(`Found ${rssItems.length} items, ${newItems.length} new items in feed: ${feed.name || 'Unnamed'}`);
|
||||
|
||||
return {
|
||||
totalItems: rssItems.length,
|
||||
@@ -101,6 +170,10 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
parseXml(xml) {
|
||||
if (!xml || typeof xml !== 'string') {
|
||||
return Promise.reject(new Error('Invalid XML input'));
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
this.parser.parseString(xml, (error, result) => {
|
||||
if (error) {
|
||||
@@ -113,17 +186,33 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
extractItems(parsedXml, feed) {
|
||||
if (!parsedXml || !feed) {
|
||||
console.error('Invalid parsed XML or feed');
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
// Handle standard RSS 2.0
|
||||
if (parsedXml.rss && parsedXml.rss.channel) {
|
||||
const channel = parsedXml.rss.channel;
|
||||
const items = Array.isArray(channel.item) ? channel.item : [channel.item].filter(Boolean);
|
||||
|
||||
if (!channel.item) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const items = Array.isArray(channel.item)
|
||||
? channel.item.filter(Boolean)
|
||||
: (channel.item ? [channel.item] : []);
|
||||
|
||||
return items.map(item => this.normalizeRssItem(item, feed));
|
||||
}
|
||||
|
||||
// Handle Atom
|
||||
if (parsedXml.feed && parsedXml.feed.entry) {
|
||||
const entries = Array.isArray(parsedXml.feed.entry) ? parsedXml.feed.entry : [parsedXml.feed.entry].filter(Boolean);
|
||||
const entries = Array.isArray(parsedXml.feed.entry)
|
||||
? parsedXml.feed.entry.filter(Boolean)
|
||||
: (parsedXml.feed.entry ? [parsedXml.feed.entry] : []);
|
||||
|
||||
return entries.map(entry => this.normalizeAtomItem(entry, feed));
|
||||
}
|
||||
|
||||
@@ -135,88 +224,155 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
normalizeRssItem(item, feed) {
|
||||
// Create a unique ID for the item
|
||||
const idContent = `${feed.id}:${item.title}:${item.pubDate || ''}:${item.link || ''}`;
|
||||
const id = crypto.createHash('md5').update(idContent).digest('hex');
|
||||
|
||||
// Extract enclosure (torrent link)
|
||||
let torrentLink = item.link || '';
|
||||
let fileSize = 0;
|
||||
|
||||
if (item.enclosure) {
|
||||
torrentLink = item.enclosure.$ ? item.enclosure.$.url : item.enclosure.url || torrentLink;
|
||||
fileSize = item.enclosure.$ ? parseInt(item.enclosure.$.length || 0, 10) : parseInt(item.enclosure.length || 0, 10);
|
||||
if (!item || !feed) {
|
||||
console.error('Invalid RSS item or feed');
|
||||
return null;
|
||||
}
|
||||
|
||||
// Handle custom namespaces (common in torrent feeds)
|
||||
let category = '';
|
||||
let size = fileSize;
|
||||
|
||||
if (item.category) {
|
||||
category = Array.isArray(item.category) ? item.category[0] : item.category;
|
||||
try {
|
||||
// Create a unique ID for the item
|
||||
const title = item.title || 'Untitled';
|
||||
const pubDate = item.pubDate || '';
|
||||
const link = item.link || '';
|
||||
const idContent = `${feed.id}:${title}:${pubDate}:${link}`;
|
||||
const id = crypto.createHash('md5').update(idContent).digest('hex');
|
||||
|
||||
// Extract enclosure (torrent link)
|
||||
let torrentLink = link;
|
||||
let fileSize = 0;
|
||||
|
||||
if (item.enclosure) {
|
||||
if (item.enclosure.$) {
|
||||
torrentLink = item.enclosure.$.url || torrentLink;
|
||||
fileSize = parseInt(item.enclosure.$.length || 0, 10);
|
||||
} else if (typeof item.enclosure === 'object') {
|
||||
torrentLink = item.enclosure.url || torrentLink;
|
||||
fileSize = parseInt(item.enclosure.length || 0, 10);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle custom namespaces (common in torrent feeds)
|
||||
let category = '';
|
||||
let size = fileSize;
|
||||
|
||||
if (item.category) {
|
||||
category = Array.isArray(item.category) ? item.category[0] : item.category;
|
||||
// Handle if category is an object with a value property
|
||||
if (typeof category === 'object' && category._) {
|
||||
category = category._;
|
||||
}
|
||||
}
|
||||
|
||||
// Some feeds use torrent:contentLength
|
||||
if (item['torrent:contentLength']) {
|
||||
const contentLength = parseInt(item['torrent:contentLength'], 10);
|
||||
if (!isNaN(contentLength)) {
|
||||
size = contentLength;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
feedId: feed.id,
|
||||
title,
|
||||
link,
|
||||
torrentLink,
|
||||
pubDate: pubDate || new Date().toISOString(),
|
||||
category: category || '',
|
||||
description: item.description || '',
|
||||
size: !isNaN(size) ? size : 0,
|
||||
downloaded: false,
|
||||
ignored: false,
|
||||
added: new Date().toISOString()
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error normalizing RSS item:', error);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Some feeds use torrent:contentLength
|
||||
if (item['torrent:contentLength']) {
|
||||
size = parseInt(item['torrent:contentLength'], 10);
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
feedId: feed.id,
|
||||
title: item.title || 'Untitled',
|
||||
link: item.link || '',
|
||||
torrentLink: torrentLink,
|
||||
pubDate: item.pubDate || new Date().toISOString(),
|
||||
category: category,
|
||||
description: item.description || '',
|
||||
size: size || 0,
|
||||
downloaded: false,
|
||||
ignored: false,
|
||||
added: new Date().toISOString()
|
||||
};
|
||||
}
|
||||
|
||||
normalizeAtomItem(entry, feed) {
|
||||
// Create a unique ID for the item
|
||||
const idContent = `${feed.id}:${entry.title}:${entry.updated || ''}:${entry.id || ''}`;
|
||||
const id = crypto.createHash('md5').update(idContent).digest('hex');
|
||||
|
||||
// Extract link
|
||||
let link = '';
|
||||
let torrentLink = '';
|
||||
|
||||
if (entry.link) {
|
||||
if (Array.isArray(entry.link)) {
|
||||
const links = entry.link;
|
||||
link = links.find(l => l.$.rel === 'alternate')?.$.href || links[0]?.$.href || '';
|
||||
torrentLink = links.find(l => l.$.type && l.$.type.includes('torrent'))?.$.href || link;
|
||||
} else {
|
||||
link = entry.link.$.href || '';
|
||||
torrentLink = link;
|
||||
}
|
||||
if (!entry || !feed) {
|
||||
console.error('Invalid Atom entry or feed');
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
feedId: feed.id,
|
||||
title: entry.title || 'Untitled',
|
||||
link: link,
|
||||
torrentLink: torrentLink,
|
||||
pubDate: entry.updated || entry.published || new Date().toISOString(),
|
||||
category: entry.category?.$.term || '',
|
||||
description: entry.summary || entry.content || '',
|
||||
size: 0, // Atom doesn't typically include file size
|
||||
downloaded: false,
|
||||
ignored: false,
|
||||
added: new Date().toISOString()
|
||||
};
|
||||
try {
|
||||
// Create a unique ID for the item
|
||||
const title = entry.title || 'Untitled';
|
||||
const updated = entry.updated || '';
|
||||
const entryId = entry.id || '';
|
||||
const idContent = `${feed.id}:${title}:${updated}:${entryId}`;
|
||||
const id = crypto.createHash('md5').update(idContent).digest('hex');
|
||||
|
||||
// Extract link
|
||||
let link = '';
|
||||
let torrentLink = '';
|
||||
|
||||
if (entry.link) {
|
||||
if (Array.isArray(entry.link)) {
|
||||
const links = entry.link.filter(l => l && l.$);
|
||||
const alternateLink = links.find(l => l.$ && l.$.rel === 'alternate');
|
||||
const torrentTypeLink = links.find(l => l.$ && l.$.type && l.$.type.includes('torrent'));
|
||||
|
||||
link = alternateLink && alternateLink.$ && alternateLink.$.href ?
|
||||
alternateLink.$.href :
|
||||
(links[0] && links[0].$ && links[0].$.href ? links[0].$.href : '');
|
||||
|
||||
torrentLink = torrentTypeLink && torrentTypeLink.$ && torrentTypeLink.$.href ?
|
||||
torrentTypeLink.$.href : link;
|
||||
} else if (entry.link.$ && entry.link.$.href) {
|
||||
link = entry.link.$.href;
|
||||
torrentLink = link;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract category
|
||||
let category = '';
|
||||
if (entry.category && entry.category.$ && entry.category.$.term) {
|
||||
category = entry.category.$.term;
|
||||
}
|
||||
|
||||
// Extract content
|
||||
let description = '';
|
||||
if (entry.summary) {
|
||||
description = entry.summary;
|
||||
} else if (entry.content) {
|
||||
description = entry.content;
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
feedId: feed.id,
|
||||
title,
|
||||
link,
|
||||
torrentLink,
|
||||
pubDate: entry.updated || entry.published || new Date().toISOString(),
|
||||
category,
|
||||
description,
|
||||
size: 0, // Atom doesn't typically include file size
|
||||
downloaded: false,
|
||||
ignored: false,
|
||||
added: new Date().toISOString()
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error normalizing Atom item:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
processNewItems(rssItems, feed) {
|
||||
if (!Array.isArray(rssItems) || !feed) {
|
||||
console.error('Invalid RSS items array or feed');
|
||||
return [];
|
||||
}
|
||||
|
||||
const newItems = [];
|
||||
|
||||
for (const item of rssItems) {
|
||||
// Filter out null items
|
||||
const validItems = rssItems.filter(item => item !== null);
|
||||
|
||||
for (const item of validItems) {
|
||||
// Check if item already exists in our list
|
||||
const existingItem = this.items.find(i => i.id === item.id);
|
||||
|
||||
@@ -236,28 +392,34 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
matchesFilters(item, filters) {
|
||||
if (!filters || filters.length === 0) {
|
||||
if (!item) return false;
|
||||
|
||||
if (!filters || !Array.isArray(filters) || filters.length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if the item matches any of the filters
|
||||
return filters.some(filter => {
|
||||
if (!filter) return true;
|
||||
|
||||
// Title check
|
||||
if (filter.title && !item.title.toLowerCase().includes(filter.title.toLowerCase())) {
|
||||
if (filter.title && typeof item.title === 'string' &&
|
||||
!item.title.toLowerCase().includes(filter.title.toLowerCase())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Category check
|
||||
if (filter.category && !item.category.toLowerCase().includes(filter.category.toLowerCase())) {
|
||||
if (filter.category && typeof item.category === 'string' &&
|
||||
!item.category.toLowerCase().includes(filter.category.toLowerCase())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Size check
|
||||
if (filter.minSize && item.size < filter.minSize) {
|
||||
// Size checks
|
||||
if (filter.minSize && typeof item.size === 'number' && item.size < filter.minSize) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (filter.maxSize && item.size > filter.maxSize) {
|
||||
if (filter.maxSize && typeof item.size === 'number' && item.size > filter.maxSize) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -267,6 +429,8 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
queueItemForDownload(item) {
|
||||
if (!item) return;
|
||||
|
||||
// Mark the item as queued for download
|
||||
console.log(`Auto-downloading item: ${item.title}`);
|
||||
|
||||
@@ -278,8 +442,8 @@ class RssFeedManager {
|
||||
|
||||
async saveItems() {
|
||||
try {
|
||||
// Create data directory if it doesn't exist
|
||||
await fs.mkdir(this.dataPath, { recursive: true });
|
||||
// Ensure data directory exists
|
||||
await this.ensureDataDirectory();
|
||||
|
||||
// Save items to file
|
||||
await fs.writeFile(
|
||||
@@ -296,10 +460,10 @@ class RssFeedManager {
|
||||
}
|
||||
}
|
||||
|
||||
async saveConfig() {
|
||||
async saveFeeds() {
|
||||
try {
|
||||
// Create data directory if it doesn't exist
|
||||
await fs.mkdir(this.dataPath, { recursive: true });
|
||||
// Ensure data directory exists
|
||||
await this.ensureDataDirectory();
|
||||
|
||||
// Save feeds to file
|
||||
await fs.writeFile(
|
||||
@@ -316,6 +480,15 @@ class RssFeedManager {
|
||||
}
|
||||
}
|
||||
|
||||
async ensureDataDirectory() {
|
||||
try {
|
||||
await fs.mkdir(this.dataPath, { recursive: true });
|
||||
} catch (error) {
|
||||
console.error('Error creating data directory:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async loadItems() {
|
||||
try {
|
||||
const filePath = path.join(this.dataPath, 'rss-items.json');
|
||||
@@ -325,17 +498,80 @@ class RssFeedManager {
|
||||
await fs.access(filePath);
|
||||
} catch (error) {
|
||||
console.log('No saved RSS items found');
|
||||
this.items = [];
|
||||
return false;
|
||||
}
|
||||
|
||||
// Load items from file
|
||||
const data = await fs.readFile(filePath, 'utf8');
|
||||
this.items = JSON.parse(data);
|
||||
|
||||
console.log(`Loaded ${this.items.length} RSS items from disk`);
|
||||
return true;
|
||||
if (!data || data.trim() === '') {
|
||||
console.log('Empty RSS items file');
|
||||
this.items = [];
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const items = JSON.parse(data);
|
||||
|
||||
if (Array.isArray(items)) {
|
||||
this.items = items;
|
||||
console.log(`Loaded ${this.items.length} RSS items from disk`);
|
||||
return true;
|
||||
} else {
|
||||
console.error('RSS items file does not contain an array');
|
||||
this.items = [];
|
||||
return false;
|
||||
}
|
||||
} catch (parseError) {
|
||||
console.error('Error parsing RSS items JSON:', parseError);
|
||||
this.items = [];
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error loading RSS items:', error);
|
||||
this.items = [];
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async loadFeeds() {
|
||||
try {
|
||||
const filePath = path.join(this.dataPath, 'rss-feeds.json');
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
} catch (error) {
|
||||
console.log('No saved RSS feeds found, using config feeds');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Load feeds from file
|
||||
const data = await fs.readFile(filePath, 'utf8');
|
||||
|
||||
if (!data || data.trim() === '') {
|
||||
console.log('Empty RSS feeds file, using config feeds');
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const feeds = JSON.parse(data);
|
||||
|
||||
if (Array.isArray(feeds)) {
|
||||
this.feeds = feeds;
|
||||
console.log(`Loaded ${this.feeds.length} RSS feeds from disk`);
|
||||
return true;
|
||||
} else {
|
||||
console.error('RSS feeds file does not contain an array');
|
||||
return false;
|
||||
}
|
||||
} catch (parseError) {
|
||||
console.error('Error parsing RSS feeds JSON:', parseError);
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error loading RSS feeds:', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -343,33 +579,56 @@ class RssFeedManager {
|
||||
// Public API methods
|
||||
|
||||
getAllFeeds() {
|
||||
return this.feeds;
|
||||
return Array.isArray(this.feeds) ? this.feeds : [];
|
||||
}
|
||||
|
||||
addFeed(feedData) {
|
||||
if (!feedData || !feedData.url) {
|
||||
throw new Error('Feed URL is required');
|
||||
}
|
||||
|
||||
// Generate an ID for the feed
|
||||
const id = crypto.randomBytes(8).toString('hex');
|
||||
|
||||
const newFeed = {
|
||||
id,
|
||||
name: feedData.name,
|
||||
name: feedData.name || 'Unnamed Feed',
|
||||
url: feedData.url,
|
||||
autoDownload: feedData.autoDownload || false,
|
||||
filters: feedData.filters || [],
|
||||
autoDownload: !!feedData.autoDownload,
|
||||
filters: Array.isArray(feedData.filters) ? feedData.filters : [],
|
||||
added: new Date().toISOString()
|
||||
};
|
||||
|
||||
if (!Array.isArray(this.feeds)) {
|
||||
this.feeds = [];
|
||||
}
|
||||
|
||||
this.feeds.push(newFeed);
|
||||
|
||||
// Save the updated feeds
|
||||
this.saveFeeds().catch(err => {
|
||||
console.error('Error saving feeds after adding new feed:', err);
|
||||
});
|
||||
|
||||
console.log(`Added new feed: ${newFeed.name} (${newFeed.url})`);
|
||||
|
||||
return newFeed;
|
||||
}
|
||||
|
||||
updateFeedConfig(feedId, updates) {
|
||||
const feedIndex = this.feeds.findIndex(f => f.id === feedId);
|
||||
if (!feedId || !updates) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Array.isArray(this.feeds)) {
|
||||
console.error('Feeds is not an array');
|
||||
return false;
|
||||
}
|
||||
|
||||
const feedIndex = this.feeds.findIndex(f => f && f.id === feedId);
|
||||
|
||||
if (feedIndex === -1) {
|
||||
console.error(`Feed with ID ${feedId} not found`);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -381,28 +640,52 @@ class RssFeedManager {
|
||||
added: this.feeds[feedIndex].added
|
||||
};
|
||||
|
||||
// Save the updated feeds
|
||||
this.saveFeeds().catch(err => {
|
||||
console.error('Error saving feeds after updating feed:', err);
|
||||
});
|
||||
|
||||
console.log(`Updated feed: ${this.feeds[feedIndex].name}`);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
removeFeed(feedId) {
|
||||
const initialLength = this.feeds.length;
|
||||
this.feeds = this.feeds.filter(f => f.id !== feedId);
|
||||
if (!feedId || !Array.isArray(this.feeds)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.feeds.length !== initialLength;
|
||||
const initialLength = this.feeds.length;
|
||||
this.feeds = this.feeds.filter(f => f && f.id !== feedId);
|
||||
|
||||
if (this.feeds.length !== initialLength) {
|
||||
// Save the updated feeds
|
||||
this.saveFeeds().catch(err => {
|
||||
console.error('Error saving feeds after removing feed:', err);
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
getAllItems() {
|
||||
return this.items;
|
||||
return Array.isArray(this.items) ? this.items : [];
|
||||
}
|
||||
|
||||
getUndownloadedItems() {
|
||||
return this.items.filter(item => !item.downloaded && !item.ignored);
|
||||
if (!Array.isArray(this.items)) {
|
||||
return [];
|
||||
}
|
||||
return this.items.filter(item => item && !item.downloaded && !item.ignored);
|
||||
}
|
||||
|
||||
filterItems(filters) {
|
||||
return this.items.filter(item => this.matchesFilters(item, [filters]));
|
||||
if (!filters || !Array.isArray(this.items)) {
|
||||
return [];
|
||||
}
|
||||
return this.items.filter(item => item && this.matchesFilters(item, [filters]));
|
||||
}
|
||||
|
||||
async downloadItem(item, transmissionClient) {
|
||||
@@ -421,7 +704,7 @@ class RssFeedManager {
|
||||
}
|
||||
|
||||
return new Promise((resolve) => {
|
||||
transmissionClient.addUrl(item.torrentLink, (err, result) => {
|
||||
transmissionClient.addUrl(item.torrentLink, async (err, result) => {
|
||||
if (err) {
|
||||
console.error(`Error adding torrent for ${item.title}:`, err);
|
||||
resolve({
|
||||
@@ -437,9 +720,11 @@ class RssFeedManager {
|
||||
item.downloadDate = new Date().toISOString();
|
||||
|
||||
// Save the updated items
|
||||
this.saveItems().catch(err => {
|
||||
try {
|
||||
await this.saveItems();
|
||||
} catch (err) {
|
||||
console.error('Error saving items after download:', err);
|
||||
});
|
||||
}
|
||||
|
||||
console.log(`Successfully added torrent for item: ${item.title}`);
|
||||
|
||||
|
||||
@@ -3,13 +3,47 @@
|
||||
|
||||
# Setup systemd service
|
||||
function setup_service() {
|
||||
echo -e "${YELLOW}Setting up systemd service...${NC}"
|
||||
log "INFO" "Setting up systemd service..."
|
||||
|
||||
# Ensure required variables are set
|
||||
if [ -z "$SERVICE_NAME" ]; then
|
||||
log "ERROR" "SERVICE_NAME variable is not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$USER" ]; then
|
||||
log "ERROR" "USER variable is not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$INSTALL_DIR" ]; then
|
||||
log "ERROR" "INSTALL_DIR variable is not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$PORT" ]; then
|
||||
log "ERROR" "PORT variable is not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if systemd is available
|
||||
if ! command -v systemctl &> /dev/null; then
|
||||
log "ERROR" "systemd is not available on this system"
|
||||
log "INFO" "Please set up the service manually using your system's service manager"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Create backup of existing service file if it exists
|
||||
if [ -f "/etc/systemd/system/$SERVICE_NAME.service" ]; then
|
||||
backup_file "/etc/systemd/system/$SERVICE_NAME.service"
|
||||
fi
|
||||
|
||||
# Create systemd service file
|
||||
cat > /etc/systemd/system/$SERVICE_NAME.service << EOF
|
||||
SERVICE_FILE="/etc/systemd/system/$SERVICE_NAME.service"
|
||||
cat > "$SERVICE_FILE" << EOF
|
||||
[Unit]
|
||||
Description=Transmission RSS Manager
|
||||
After=network.target
|
||||
After=network.target transmission-daemon.service
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
@@ -23,22 +57,77 @@ StandardOutput=journal
|
||||
StandardError=journal
|
||||
Environment=PORT=$PORT
|
||||
Environment=NODE_ENV=production
|
||||
Environment=DEBUG_ENABLED=false
|
||||
Environment=LOG_FILE=$INSTALL_DIR/logs/transmission-rss-manager.log
|
||||
# Generate a random JWT secret for security
|
||||
Environment=JWT_SECRET=$(openssl rand -hex 32)
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
# Create nginx configuration for proxy
|
||||
echo -e "${YELLOW}Setting up Nginx reverse proxy...${NC}"
|
||||
|
||||
# Check if default nginx file exists, back it up if it does
|
||||
if [ -f /etc/nginx/sites-enabled/default ]; then
|
||||
mv /etc/nginx/sites-enabled/default /etc/nginx/sites-enabled/default.bak
|
||||
echo "Backed up default nginx configuration."
|
||||
# Create logs directory
|
||||
mkdir -p "$INSTALL_DIR/logs"
|
||||
chown -R $USER:$USER "$INSTALL_DIR/logs"
|
||||
|
||||
# Check if file was created successfully
|
||||
if [ ! -f "$SERVICE_FILE" ]; then
|
||||
log "ERROR" "Failed to create systemd service file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Create nginx configuration
|
||||
cat > /etc/nginx/sites-available/$SERVICE_NAME << EOF
|
||||
log "INFO" "Setting up Nginx reverse proxy..."
|
||||
|
||||
# Check if nginx is installed
|
||||
if ! command -v nginx &> /dev/null; then
|
||||
log "ERROR" "Nginx is not installed"
|
||||
log "INFO" "Skipping Nginx configuration. Please configure your web server manually."
|
||||
|
||||
# Reload systemd and enable service
|
||||
systemctl daemon-reload
|
||||
systemctl enable "$SERVICE_NAME"
|
||||
|
||||
log "INFO" "Systemd service has been created and enabled."
|
||||
log "INFO" "The service will start automatically after installation."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Detect nginx configuration directory
|
||||
NGINX_AVAILABLE_DIR=""
|
||||
NGINX_ENABLED_DIR=""
|
||||
|
||||
if [ -d "/etc/nginx/sites-available" ] && [ -d "/etc/nginx/sites-enabled" ]; then
|
||||
# Debian/Ubuntu style
|
||||
NGINX_AVAILABLE_DIR="/etc/nginx/sites-available"
|
||||
NGINX_ENABLED_DIR="/etc/nginx/sites-enabled"
|
||||
elif [ -d "/etc/nginx/conf.d" ]; then
|
||||
# CentOS/RHEL style
|
||||
NGINX_AVAILABLE_DIR="/etc/nginx/conf.d"
|
||||
NGINX_ENABLED_DIR="/etc/nginx/conf.d"
|
||||
else
|
||||
log "WARN" "Unable to determine Nginx configuration directory"
|
||||
log "INFO" "Please configure Nginx manually"
|
||||
|
||||
# Reload systemd and enable service
|
||||
systemctl daemon-reload
|
||||
systemctl enable "$SERVICE_NAME"
|
||||
|
||||
log "INFO" "Systemd service has been created and enabled."
|
||||
log "INFO" "The service will start automatically after installation."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Check if default nginx file exists, back it up if it does
|
||||
if [ -f "$NGINX_ENABLED_DIR/default" ]; then
|
||||
backup_file "$NGINX_ENABLED_DIR/default"
|
||||
if [ -f "$NGINX_ENABLED_DIR/default.bak" ]; then
|
||||
log "INFO" "Backed up default nginx configuration."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create nginx configuration file
|
||||
NGINX_CONFIG_FILE="$NGINX_AVAILABLE_DIR/$SERVICE_NAME.conf"
|
||||
cat > "$NGINX_CONFIG_FILE" << EOF
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
@@ -57,27 +146,36 @@ server {
|
||||
}
|
||||
EOF
|
||||
|
||||
# Create symbolic link to enable the site
|
||||
ln -sf /etc/nginx/sites-available/$SERVICE_NAME /etc/nginx/sites-enabled/
|
||||
# Check if Debian/Ubuntu style (need symlink between available and enabled)
|
||||
if [ "$NGINX_AVAILABLE_DIR" != "$NGINX_ENABLED_DIR" ]; then
|
||||
# Create symbolic link to enable the site (if it doesn't already exist)
|
||||
if [ ! -h "$NGINX_ENABLED_DIR/$SERVICE_NAME.conf" ]; then
|
||||
ln -sf "$NGINX_CONFIG_FILE" "$NGINX_ENABLED_DIR/"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Test nginx configuration
|
||||
nginx -t
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
if nginx -t; then
|
||||
# Reload nginx
|
||||
systemctl reload nginx
|
||||
echo -e "${GREEN}Nginx configuration has been set up successfully.${NC}"
|
||||
log "INFO" "Nginx configuration has been set up successfully."
|
||||
else
|
||||
echo -e "${RED}Nginx configuration test failed. Please check the configuration manually.${NC}"
|
||||
echo -e "${YELLOW}You may need to correct the configuration before the web interface will be accessible.${NC}"
|
||||
log "ERROR" "Nginx configuration test failed. Please check the configuration manually."
|
||||
log "WARN" "You may need to correct the configuration before the web interface will be accessible."
|
||||
fi
|
||||
|
||||
# Check for port conflicts
|
||||
if ss -lnt | grep ":$PORT " &> /dev/null; then
|
||||
log "WARN" "Port $PORT is already in use. This may cause conflicts with the service."
|
||||
log "WARN" "Consider changing the port if you encounter issues."
|
||||
fi
|
||||
|
||||
# Reload systemd
|
||||
systemctl daemon-reload
|
||||
|
||||
# Enable the service to start on boot
|
||||
systemctl enable $SERVICE_NAME
|
||||
systemctl enable "$SERVICE_NAME"
|
||||
|
||||
echo -e "${GREEN}Systemd service has been created and enabled.${NC}"
|
||||
echo -e "${YELLOW}The service will start automatically after installation.${NC}"
|
||||
log "INFO" "Systemd service has been created and enabled."
|
||||
log "INFO" "The service will start automatically after installation."
|
||||
}
|
||||
517
modules/transmission-client.js
Normal file
517
modules/transmission-client.js
Normal file
@@ -0,0 +1,517 @@
|
||||
/**
|
||||
* Transmission Client Module
|
||||
* Enhanced integration with Transmission BitTorrent client
|
||||
*/
|
||||
|
||||
const Transmission = require('transmission-promise');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const exec = util.promisify(require('child_process').exec);
|
||||
|
||||
class TransmissionClient {
|
||||
  /**
   * Create a Transmission client wrapper.
   *
   * Validates the configuration, seeds the retry bookkeeping used by
   * getStatus(), records any remote->local directory mappings, and opens
   * the initial RPC connection.
   *
   * @param {Object} config - Application configuration; must contain
   *   `transmissionConfig` (host/port/credentials/path) and may contain
   *   `remoteConfig` with `isRemote` and `directoryMapping`.
   * @throws {Error} If no configuration object is provided, or if
   *   initializeConnection() fails.
   */
  constructor(config) {
    if (!config) {
      throw new Error('Configuration is required for Transmission client');
    }

    this.config = config;
    this.client = null;        // set by initializeConnection()
    this.dirMappings = null;   // remote->local path prefix map, when remote
    this.lastSessionId = null; // NOTE(review): never written elsewhere in this file — confirm still needed
    this.connectRetries = 0;   // consecutive reconnect attempts (see getStatus)
    this.maxRetries = 5;       // cap on reconnect attempts
    this.retryDelay = 5000; // 5 seconds

    // Initialize directory mappings if remote
    if (config.remoteConfig && config.remoteConfig.isRemote && config.remoteConfig.directoryMapping) {
      this.dirMappings = config.remoteConfig.directoryMapping;
    }

    // Initialize the connection
    this.initializeConnection();
  }
|
||||
|
||||
/**
|
||||
* Initialize the connection to Transmission
|
||||
*/
|
||||
initializeConnection() {
|
||||
const { host, port, username, password, path: rpcPath } = this.config.transmissionConfig;
|
||||
|
||||
try {
|
||||
this.client = new Transmission({
|
||||
host: host || 'localhost',
|
||||
port: port || 9091,
|
||||
username: username || '',
|
||||
password: password || '',
|
||||
path: rpcPath || '/transmission/rpc',
|
||||
timeout: 30000 // 30 seconds
|
||||
});
|
||||
|
||||
console.log(`Initialized Transmission client connection to ${host}:${port}${rpcPath}`);
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize Transmission client:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Get client status and session information.
   *
   * On a "Connection refused" failure this retries up to maxRetries times,
   * re-initializing the RPC client before each attempt and waiting
   * retryDelay ms between attempts. The retry branch never rejects: it
   * always resolves, either with live stats or with
   * `{ connected: false, error }`.
   *
   * @returns {Promise<Object>} Status information
   */
  async getStatus() {
    try {
      const sessionInfo = await this.client.sessionStats();
      const version = await this.client.sessionGet();

      return {
        connected: true,
        version: version.version,
        rpcVersion: version['rpc-version'],
        downloadSpeed: sessionInfo.downloadSpeed,
        uploadSpeed: sessionInfo.uploadSpeed,
        torrentCount: sessionInfo.torrentCount,
        activeTorrentCount: sessionInfo.activeTorrentCount
      };
    } catch (error) {
      console.error('Error getting Transmission status:', error);

      // Only a refused connection is treated as transient and retried;
      // every other failure falls straight through to the error result.
      if (error.message.includes('Connection refused') && this.connectRetries < this.maxRetries) {
        this.connectRetries++;
        console.log(`Retrying connection (${this.connectRetries}/${this.maxRetries})...`);

        return new Promise((resolve) => {
          setTimeout(async () => {
            this.initializeConnection();
            try {
              // Recursive retry; connectRetries bounds the recursion depth.
              const status = await this.getStatus();
              this.connectRetries = 0; // Reset retries on success
              resolve(status);
            } catch (retryError) {
              resolve({
                connected: false,
                error: retryError.message
              });
            }
          }, this.retryDelay);
        });
      }

      return {
        connected: false,
        error: error.message
      };
    }
  }
|
||||
|
||||
/**
|
||||
* Add a torrent from a URL or magnet link
|
||||
* @param {string} url - Torrent URL or magnet link
|
||||
* @param {Object} options - Additional options
|
||||
* @returns {Promise<Object>} Result with torrent ID
|
||||
*/
|
||||
async addTorrent(url, options = {}) {
|
||||
try {
|
||||
const downloadDir = options.downloadDir || null;
|
||||
const result = await this.client.addUrl(url, {
|
||||
"download-dir": downloadDir,
|
||||
paused: options.paused || false
|
||||
});
|
||||
|
||||
console.log(`Added torrent from ${url}, ID: ${result.id}`);
|
||||
return {
|
||||
success: true,
|
||||
id: result.id,
|
||||
name: result.name,
|
||||
hashString: result.hashString
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error adding torrent from ${url}:`, error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all torrents with detailed information
|
||||
* @param {Array} ids - Optional array of torrent IDs to filter
|
||||
* @returns {Promise<Array>} Array of torrent objects
|
||||
*/
|
||||
async getTorrents(ids = null) {
|
||||
try {
|
||||
const torrents = await this.client.get(ids);
|
||||
|
||||
// Map remote paths to local paths if needed
|
||||
if (this.dirMappings && Object.keys(this.dirMappings).length > 0) {
|
||||
torrents.torrents = torrents.torrents.map(torrent => {
|
||||
torrent.downloadDir = this.mapRemotePathToLocal(torrent.downloadDir);
|
||||
return torrent;
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
torrents: torrents.torrents
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error getting torrents:', error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message,
|
||||
torrents: []
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop torrents by IDs
|
||||
* @param {Array|number} ids - Torrent ID(s) to stop
|
||||
* @returns {Promise<Object>} Result
|
||||
*/
|
||||
async stopTorrents(ids) {
|
||||
try {
|
||||
await this.client.stop(ids);
|
||||
return {
|
||||
success: true,
|
||||
message: 'Torrents stopped successfully'
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error stopping torrents ${ids}:`, error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start torrents by IDs
|
||||
* @param {Array|number} ids - Torrent ID(s) to start
|
||||
* @returns {Promise<Object>} Result
|
||||
*/
|
||||
async startTorrents(ids) {
|
||||
try {
|
||||
await this.client.start(ids);
|
||||
return {
|
||||
success: true,
|
||||
message: 'Torrents started successfully'
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error starting torrents ${ids}:`, error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove torrents by IDs
|
||||
* @param {Array|number} ids - Torrent ID(s) to remove
|
||||
* @param {boolean} deleteLocalData - Whether to delete local data
|
||||
* @returns {Promise<Object>} Result
|
||||
*/
|
||||
async removeTorrents(ids, deleteLocalData = false) {
|
||||
try {
|
||||
await this.client.remove(ids, deleteLocalData);
|
||||
return {
|
||||
success: true,
|
||||
message: `Torrents removed successfully${deleteLocalData ? ' with data' : ''}`
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error removing torrents ${ids}:`, error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Get detailed information for a specific torrent.
   *
   * @param {number} id - Torrent ID
   * @returns {Promise<Object>} `{ success: true, torrent }` on success —
   *   `torrent.downloadDir` is mapped to a local path when directory
   *   mappings are configured, and `torrent.mediaInfo` is attached when the
   *   torrent has files. `{ success: false, error }` when the ID is unknown
   *   or the RPC call fails.
   */
  async getTorrentDetails(id) {
    try {
      // Request a wide field set in a single RPC round-trip: transfer
      // stats, per-file data, peers and trackers.
      const fields = [
        'id', 'name', 'status', 'hashString', 'downloadDir', 'totalSize',
        'percentDone', 'addedDate', 'doneDate', 'uploadRatio', 'rateDownload',
        'rateUpload', 'downloadedEver', 'uploadedEver', 'seedRatioLimit',
        'error', 'errorString', 'files', 'fileStats', 'peers', 'peersFrom',
        'pieces', 'trackers', 'trackerStats', 'labels'
      ];

      const result = await this.client.get(id, fields);

      // The RPC returns an array even for a single ID; empty means not found.
      if (!result.torrents || result.torrents.length === 0) {
        return {
          success: false,
          error: 'Torrent not found'
        };
      }

      let torrent = result.torrents[0];

      // Map download directory if needed (remote Transmission daemon)
      if (this.dirMappings) {
        torrent.downloadDir = this.mapRemotePathToLocal(torrent.downloadDir);
      }

      // Process files for extra information if available
      if (torrent.files && torrent.files.length > 0) {
        torrent.mediaInfo = await this.analyzeMediaFiles(torrent.files, torrent.downloadDir);
      }

      return {
        success: true,
        torrent
      };
    } catch (error) {
      console.error(`Error getting torrent details for ID ${id}:`, error);
      return {
        success: false,
        error: error.message
      };
    }
  }
|
||||
|
||||
/**
|
||||
* Map a remote path to a local path
|
||||
* @param {string} remotePath - Path on the remote server
|
||||
* @returns {string} Local path
|
||||
*/
|
||||
mapRemotePathToLocal(remotePath) {
|
||||
if (!this.dirMappings || !remotePath) {
|
||||
return remotePath;
|
||||
}
|
||||
|
||||
for (const [remote, local] of Object.entries(this.dirMappings)) {
|
||||
if (remotePath.startsWith(remote)) {
|
||||
return remotePath.replace(remote, local);
|
||||
}
|
||||
}
|
||||
|
||||
return remotePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze media files in a torrent
|
||||
* @param {Array} files - Torrent files
|
||||
* @param {string} baseDir - Base directory of the torrent
|
||||
* @returns {Promise<Object>} Media info
|
||||
*/
|
||||
async analyzeMediaFiles(files, baseDir) {
|
||||
try {
|
||||
const mediaInfo = {
|
||||
type: 'unknown',
|
||||
videoFiles: [],
|
||||
audioFiles: [],
|
||||
imageFiles: [],
|
||||
documentFiles: [],
|
||||
archiveFiles: [],
|
||||
otherFiles: [],
|
||||
totalVideoSize: 0,
|
||||
totalAudioSize: 0,
|
||||
totalImageSize: 0,
|
||||
totalDocumentSize: 0,
|
||||
totalArchiveSize: 0,
|
||||
totalOtherSize: 0
|
||||
};
|
||||
|
||||
// File type patterns
|
||||
const videoPattern = /\.(mp4|mkv|avi|mov|wmv|flv|webm|m4v|mpg|mpeg|3gp|ts)$/i;
|
||||
const audioPattern = /\.(mp3|flac|wav|aac|ogg|m4a|wma|opus)$/i;
|
||||
const imagePattern = /\.(jpg|jpeg|png|gif|bmp|tiff|webp|svg)$/i;
|
||||
const documentPattern = /\.(pdf|doc|docx|xls|xlsx|ppt|pptx|txt|rtf|odt|ods|odp|epub|mobi|azw3)$/i;
|
||||
const archivePattern = /\.(zip|rar|7z|tar|gz|bz2|xz|iso)$/i;
|
||||
const subtitlePattern = /\.(srt|sub|sbv|vtt|ass|ssa)$/i;
|
||||
const samplePattern = /sample|trailer/i;
|
||||
|
||||
// Count files by category
|
||||
for (const file of files) {
|
||||
const fileName = path.basename(file.name).toLowerCase();
|
||||
const fileSize = file.length;
|
||||
|
||||
const fileInfo = {
|
||||
name: file.name,
|
||||
size: fileSize,
|
||||
extension: path.extname(file.name).substr(1).toLowerCase(),
|
||||
isSample: samplePattern.test(fileName)
|
||||
};
|
||||
|
||||
if (videoPattern.test(fileName)) {
|
||||
mediaInfo.videoFiles.push(fileInfo);
|
||||
mediaInfo.totalVideoSize += fileSize;
|
||||
} else if (audioPattern.test(fileName)) {
|
||||
mediaInfo.audioFiles.push(fileInfo);
|
||||
mediaInfo.totalAudioSize += fileSize;
|
||||
} else if (imagePattern.test(fileName)) {
|
||||
mediaInfo.imageFiles.push(fileInfo);
|
||||
mediaInfo.totalImageSize += fileSize;
|
||||
} else if (documentPattern.test(fileName)) {
|
||||
mediaInfo.documentFiles.push(fileInfo);
|
||||
mediaInfo.totalDocumentSize += fileSize;
|
||||
} else if (archivePattern.test(fileName)) {
|
||||
mediaInfo.archiveFiles.push(fileInfo);
|
||||
mediaInfo.totalArchiveSize += fileSize;
|
||||
} else if (!subtitlePattern.test(fileName)) {
|
||||
mediaInfo.otherFiles.push(fileInfo);
|
||||
mediaInfo.totalOtherSize += fileSize;
|
||||
}
|
||||
}
|
||||
|
||||
// Determine content type based on file distribution
|
||||
if (mediaInfo.videoFiles.length > 0 &&
|
||||
mediaInfo.totalVideoSize > (mediaInfo.totalAudioSize + mediaInfo.totalDocumentSize)) {
|
||||
mediaInfo.type = 'video';
|
||||
|
||||
// Determine if it's a movie or TV show
|
||||
const tvEpisodePattern = /(s\d{1,2}e\d{1,2}|\d{1,2}x\d{1,2})/i;
|
||||
const movieYearPattern = /\(?(19|20)\d{2}\)?/;
|
||||
|
||||
let tvShowMatch = false;
|
||||
|
||||
for (const file of mediaInfo.videoFiles) {
|
||||
if (tvEpisodePattern.test(file.name)) {
|
||||
tvShowMatch = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (tvShowMatch) {
|
||||
mediaInfo.type = 'tvshow';
|
||||
} else if (movieYearPattern.test(files[0].name)) {
|
||||
mediaInfo.type = 'movie';
|
||||
}
|
||||
} else if (mediaInfo.audioFiles.length > 0 &&
|
||||
mediaInfo.totalAudioSize > (mediaInfo.totalVideoSize + mediaInfo.totalDocumentSize)) {
|
||||
mediaInfo.type = 'audio';
|
||||
} else if (mediaInfo.documentFiles.length > 0 &&
|
||||
mediaInfo.totalDocumentSize > (mediaInfo.totalVideoSize + mediaInfo.totalAudioSize)) {
|
||||
// Check if it's a book or magazine
|
||||
const magazinePattern = /(magazine|issue|volume|vol\.)\s*\d+/i;
|
||||
|
||||
let isMagazine = false;
|
||||
for (const file of mediaInfo.documentFiles) {
|
||||
if (magazinePattern.test(file.name)) {
|
||||
isMagazine = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
mediaInfo.type = isMagazine ? 'magazine' : 'book';
|
||||
} else if (mediaInfo.archiveFiles.length > 0 &&
|
||||
mediaInfo.totalArchiveSize > (mediaInfo.totalVideoSize + mediaInfo.totalAudioSize + mediaInfo.totalDocumentSize)) {
|
||||
// If archives dominate, we need to check their content
|
||||
mediaInfo.type = 'archive';
|
||||
}
|
||||
|
||||
return mediaInfo;
|
||||
} catch (error) {
|
||||
console.error('Error analyzing media files:', error);
|
||||
return { type: 'unknown', error: error.message };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session stats from Transmission
|
||||
* @returns {Promise<Object>} Stats
|
||||
*/
|
||||
async getSessionStats() {
|
||||
try {
|
||||
const stats = await this.client.sessionStats();
|
||||
return {
|
||||
success: true,
|
||||
stats
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error getting session stats:', error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set session parameters
|
||||
* @param {Object} params - Session parameters
|
||||
* @returns {Promise<Object>} Result
|
||||
*/
|
||||
async setSessionParams(params) {
|
||||
try {
|
||||
await this.client.sessionSet(params);
|
||||
return {
|
||||
success: true,
|
||||
message: 'Session parameters updated successfully'
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error setting session parameters:', error);
|
||||
return {
|
||||
success: false,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Verify if a torrent has met seeding requirements.
   *
   * @param {number} id - Torrent ID
   * @param {Object} requirements - Seeding requirements: `minRatio`
   *   (default 1.0) and `minTimeMinutes` (default 60). The object itself is
   *   required — destructuring throws on undefined (caught below and
   *   returned as a failure result).
   * @returns {Promise<Object>} `{ success: true, requirementsMet, ratioMet,
   *   timeMet, currentRatio, currentSeedingTimeMinutes, torrent }`, or a
   *   `reason` of 'Download not complete' for unfinished torrents, or
   *   `{ success: false, error }` when the torrent cannot be fetched.
   */
  async verifyTorrentSeedingRequirements(id, requirements) {
    try {
      const { minRatio = 1.0, minTimeMinutes = 60 } = requirements;

      const details = await this.getTorrentDetails(id);

      if (!details.success) {
        return {
          success: false,
          error: details.error
        };
      }

      const torrent = details.torrent;

      // Check if download is complete — seeding requirements only apply
      // once the download has finished.
      if (torrent.percentDone < 1.0) {
        return {
          success: true,
          requirementsMet: false,
          reason: 'Download not complete',
          torrent
        };
      }

      // Check ratio requirement
      const ratioMet = torrent.uploadRatio >= minRatio;

      // Check time requirement (doneDate is unix timestamp in seconds)
      const seedingTimeMinutes = (Date.now() / 1000 - torrent.doneDate) / 60;
      const timeMet = seedingTimeMinutes >= minTimeMinutes;

      return {
        success: true,
        requirementsMet: ratioMet && timeMet,
        ratioMet,
        timeMet,
        currentRatio: torrent.uploadRatio,
        currentSeedingTimeMinutes: seedingTimeMinutes,
        torrent
      };
    } catch (error) {
      console.error(`Error checking torrent seeding requirements for ID ${id}:`, error);
      return {
        success: false,
        error: error.message
      };
    }
  }
|
||||
}
|
||||
|
||||
module.exports = TransmissionClient;
|
||||
@@ -17,10 +17,20 @@ function log() {
|
||||
"ERROR")
|
||||
echo -e "${timestamp} ${RED}[ERROR]${NC} $message"
|
||||
;;
|
||||
"DEBUG")
|
||||
if [ "${DEBUG_ENABLED}" = "true" ]; then
|
||||
echo -e "${timestamp} ${BOLD}[DEBUG]${NC} $message"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo -e "${timestamp} [LOG] $message"
|
||||
;;
|
||||
esac
|
||||
|
||||
# If log file is specified, also write to log file
|
||||
if [ -n "${LOG_FILE}" ]; then
|
||||
echo "${timestamp} [${level}] ${message}" >> "${LOG_FILE}"
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to check if a command exists
|
||||
@@ -35,6 +45,38 @@ function backup_file() {
|
||||
local backup="${file}.bak.$(date +%Y%m%d%H%M%S)"
|
||||
cp "$file" "$backup"
|
||||
log "INFO" "Created backup of $file at $backup"
|
||||
echo "$backup"
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to manage config file updates.
#
# Arguments:
#   $1 - path to the config.json file
#   $2 - "true" when this run updates an existing installation
# Outputs:
#   Log lines via log(); backup path side effect via backup_file().
# Returns:
#   0 when an existing config was preserved (version bumped if stale),
#   1 when a fresh config should be created by finalize_setup.
function update_config_file() {
  local config_file=$1
  local is_update=$2

  if [ "$is_update" = true ] && [ -f "$config_file" ]; then
    # Backup the existing config file. Declaration is split from the
    # assignment so a failure in backup_file is not masked by 'local',
    # and the variable no longer shadows the backup_file function name.
    local backup_path
    backup_path=$(backup_file "$config_file")
    log "INFO" "Existing configuration backed up to $backup_path"

    # We'll let the server.js handle merging the config
    log "INFO" "Existing configuration will be preserved"

    # Update the config version if needed
    local current_version
    current_version=$(grep -o '"version": "[^"]*"' "$config_file" | cut -d'"' -f4)
    if [ -n "$current_version" ]; then
      local new_version="1.2.0"
      if [ "$current_version" != "$new_version" ]; then
        log "INFO" "Updating config version from $current_version to $new_version"
        sed -i "s/\"version\": \"$current_version\"/\"version\": \"$new_version\"/" "$config_file"
      fi
    fi

    return 0
  else
    # New installation, config file will be created by finalize_setup
    log "INFO" "New configuration will be created"
    return 1
  fi
}
|
||||
|
||||
@@ -58,6 +100,10 @@ function create_dir_if_not_exists() {
|
||||
function finalize_setup() {
|
||||
log "INFO" "Setting up final permissions and configurations..."
|
||||
|
||||
# Ensure logs directory exists
|
||||
mkdir -p "$INSTALL_DIR/logs"
|
||||
log "INFO" "Created logs directory: $INSTALL_DIR/logs"
|
||||
|
||||
# Set proper ownership for the installation directory
|
||||
chown -R $USER:$USER $INSTALL_DIR
|
||||
|
||||
@@ -77,25 +123,19 @@ function finalize_setup() {
|
||||
log "INFO" "Installing NPM packages..."
|
||||
cd $INSTALL_DIR && npm install
|
||||
|
||||
# Start the service
|
||||
log "INFO" "Starting the service..."
|
||||
systemctl daemon-reload
|
||||
systemctl enable $SERVICE_NAME
|
||||
systemctl start $SERVICE_NAME
|
||||
|
||||
# Check if service started successfully
|
||||
sleep 2
|
||||
if systemctl is-active --quiet $SERVICE_NAME; then
|
||||
log "INFO" "Service started successfully!"
|
||||
else
|
||||
log "ERROR" "Service failed to start. Check logs with: journalctl -u $SERVICE_NAME"
|
||||
fi
|
||||
|
||||
# Create default configuration if it doesn't exist
|
||||
if [ ! -f "$INSTALL_DIR/config.json" ]; then
|
||||
# Handle configuration file
|
||||
if ! update_config_file "$INSTALL_DIR/config.json" "$IS_UPDATE"; then
|
||||
log "INFO" "Creating default configuration file..."
|
||||
|
||||
# Create the users array content for JSON
|
||||
USER_JSON=""
|
||||
if [ "${AUTH_ENABLED}" = "true" ] && [ -n "${ADMIN_USERNAME}" ]; then
|
||||
USER_JSON="{ \"username\": \"${ADMIN_USERNAME}\", \"password\": \"${ADMIN_PASSWORD}\", \"role\": \"admin\" }"
|
||||
fi
|
||||
|
||||
cat > $INSTALL_DIR/config.json << EOF
|
||||
{
|
||||
"version": "1.2.0",
|
||||
"transmissionConfig": {
|
||||
"host": "${TRANSMISSION_HOST}",
|
||||
"port": ${TRANSMISSION_PORT},
|
||||
@@ -132,12 +172,38 @@ function finalize_setup() {
|
||||
"removeDuplicates": true,
|
||||
"keepOnlyBestVersion": true
|
||||
},
|
||||
"securitySettings": {
|
||||
"authEnabled": ${AUTH_ENABLED:-false},
|
||||
"httpsEnabled": ${HTTPS_ENABLED:-false},
|
||||
"sslCertPath": "${SSL_CERT_PATH:-""}",
|
||||
"sslKeyPath": "${SSL_KEY_PATH:-""}",
|
||||
"users": [
|
||||
${USER_JSON}
|
||||
]
|
||||
},
|
||||
"rssFeeds": [],
|
||||
"rssUpdateIntervalMinutes": 60,
|
||||
"autoProcessing": false
|
||||
"autoProcessing": false,
|
||||
"port": ${PORT},
|
||||
"logLevel": "info"
|
||||
}
|
||||
EOF
|
||||
chown $USER:$USER $INSTALL_DIR/config.json
|
||||
log "INFO" "Default configuration created successfully"
|
||||
fi
|
||||
|
||||
# Start the service
|
||||
log "INFO" "Starting the service..."
|
||||
systemctl daemon-reload
|
||||
systemctl enable $SERVICE_NAME
|
||||
systemctl start $SERVICE_NAME
|
||||
|
||||
# Check if service started successfully
|
||||
sleep 2
|
||||
if systemctl is-active --quiet $SERVICE_NAME; then
|
||||
log "INFO" "Service started successfully!"
|
||||
else
|
||||
log "ERROR" "Service failed to start. Check logs with: journalctl -u $SERVICE_NAME"
|
||||
fi
|
||||
|
||||
log "INFO" "Setup finalized!"
|
||||
|
||||
Reference in New Issue
Block a user