Compare commits


1 Commit

Author | SHA1 | Message | Date
github-actions[bot] | ee2ae608de | chore: add VERSION v0.5.3 | 2026-01-07 19:52:00 +00:00
7 changed files with 626 additions and 654 deletions

View File

@@ -4,7 +4,7 @@
## 🔗 Related PR / Issue
Fixes: #
Link: #
## ✅ Prerequisites (**X** in brackets)

View File

@@ -1 +1 @@
0.5.5
0.5.3

package-lock.json (generated, 841 changed lines)

File diff suppressed because it is too large.

View File

@@ -25,33 +25,33 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@prisma/adapter-better-sqlite3": "^7.2.0",
"@prisma/client": "^7.2.0",
"@prisma/adapter-better-sqlite3": "^7.1.0",
"@prisma/client": "^7.1.0",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-slot": "^1.2.4",
"@t3-oss/env-nextjs": "^0.13.10",
"@tailwindcss/typography": "^0.5.19",
"@tanstack/react-query": "^5.90.16",
"@trpc/client": "^11.8.1",
"@tanstack/react-query": "^5.90.12",
"@trpc/client": "^11.8.0",
"@trpc/react-query": "^11.8.1",
"@trpc/server": "^11.8.1",
"@trpc/server": "^11.8.0",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/ws": "^8.18.1",
"@xterm/addon-fit": "^0.11.0",
"@xterm/addon-web-links": "^0.12.0",
"@xterm/xterm": "^6.0.0",
"@xterm/addon-fit": "^0.10.0",
"@xterm/addon-web-links": "^0.11.0",
"@xterm/xterm": "^5.5.0",
"axios": "^1.13.2",
"bcryptjs": "^3.0.3",
"better-sqlite3": "^12.6.0",
"better-sqlite3": "^12.5.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cron-validator": "^1.4.0",
"dotenv": "^17.2.3",
"jsonwebtoken": "^9.0.3",
"lucide-react": "^0.562.0",
"next": "^16.1.1",
"lucide-react": "^0.561.0",
"next": "^16.0.10",
"node-cron": "^4.2.1",
"node-pty": "^1.1.0",
"node-pty": "^1.0.0",
"react": "^19.2.3",
"react-dom": "^19.2.3",
"react-markdown": "^10.1.0",
@@ -62,37 +62,37 @@
"strip-ansi": "^7.1.2",
"superjson": "^2.2.6",
"tailwind-merge": "^3.4.0",
"ws": "^8.19.0",
"zod": "^4.3.5"
"ws": "^8.18.3",
"zod": "^4.1.13"
},
"devDependencies": {
"@tailwindcss/postcss": "^4.1.18",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.1",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@types/bcryptjs": "^3.0.0",
"@types/better-sqlite3": "^7.6.13",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "^24.10.4",
"@types/node-cron": "^3.0.11",
"@types/react": "^19.2.8",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.2",
"@vitest/coverage-v8": "^4.0.17",
"@vitest/ui": "^4.0.17",
"baseline-browser-mapping": "^2.9.14",
"eslint": "^9.39.2",
"eslint-config-next": "^16.1.1",
"jsdom": "^27.4.0",
"@vitest/coverage-v8": "^4.0.15",
"@vitest/ui": "^4.0.14",
"baseline-browser-mapping": "^2.9.3",
"eslint": "^9.39.1",
"eslint-config-next": "^16.1.0",
"jsdom": "^27.3.0",
"postcss": "^8.5.6",
"prettier": "^3.7.4",
"prettier-plugin-tailwindcss": "^0.7.2",
"prisma": "^7.2.0",
"prisma": "^7.1.0",
"tailwindcss": "^4.1.18",
"tsx": "^4.21.0",
"typescript": "^5.9.3",
"typescript-eslint": "^8.53.0",
"vitest": "^4.0.17"
"typescript-eslint": "^8.48.1",
"vitest": "^4.0.14"
},
"ct3aMetadata": {
"initVersion": "7.39.3"
@@ -104,4 +104,4 @@
"overrides": {
"prismjs": "^1.30.0"
}
}
}

View File

@@ -1610,7 +1610,6 @@ class ScriptExecutionHandler {
// TerminalHandler removed - not used by current application
app.prepare().then(() => {
console.log('> Next.js app prepared successfully');
const httpServer = createServer(async (req, res) => {
try {
// Be sure to pass `true` as the second argument to `url.parse`.
@@ -1716,9 +1715,4 @@ app.prepare().then(() => {
autoSyncModule.setupGracefulShutdown();
}
});
}).catch((err) => {
console.error('> Failed to start server:', err.message);
console.error('> If you see "Could not find a production build", run: npm run build');
console.error('> Full error:', err);
process.exit(1);
});
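For reference, the startup code this hunk modifies follows the standard Next.js custom-server pattern, sketched below in TypeScript. The port, log messages, and handler details are illustrative assumptions rather than the project's exact code; the `parse(req.url, true)` call is what the "pass `true` as the second argument to `url.parse`" comment refers to.

```ts
import { createServer } from 'http';
import { parse } from 'url';
import next from 'next';

const app = next({ dev: process.env.NODE_ENV !== 'production' });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  const httpServer = createServer(async (req, res) => {
    try {
      // `true` makes url.parse return the query string as an object,
      // which Next's request handler expects.
      const parsedUrl = parse(req.url ?? '/', true);
      await handle(req, res, parsedUrl);
    } catch (err) {
      console.error('Error handling request:', err);
      res.statusCode = 500;
      res.end('Internal Server Error');
    }
  });
  httpServer.listen(3000, () => console.log('> Ready on http://localhost:3000'));
}).catch((err) => {
  // Mirrors the startup error handling shown in the hunk above.
  console.error('> Failed to start server:', err instanceof Error ? err.message : err);
  process.exit(1);
});
```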

View File

@@ -238,27 +238,6 @@ export const versionRouter = createTRPCRouter({
// Clear/create the log file
await writeFile(logPath, '', 'utf-8');
// Always fetch the latest update.sh from GitHub before running
// This ensures we always use the newest update script, avoiding
// the "chicken-and-egg" problem where old scripts can't update properly
const updateScriptUrl = 'https://raw.githubusercontent.com/community-scripts/ProxmoxVE-Local/main/update.sh';
try {
const response = await fetch(updateScriptUrl);
if (response.ok) {
const latestScript = await response.text();
await writeFile(updateScriptPath, latestScript, { mode: 0o755 });
// Log that we fetched the latest script
await writeFile(logPath, '[INFO] Fetched latest update.sh from GitHub\n', { flag: 'a' });
} else {
// If fetch fails, log warning but continue with local script
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh (HTTP ${response.status}), using local version\n`, { flag: 'a' });
}
} catch (fetchError) {
// If fetch fails, log warning but continue with local script
const errorMsg = fetchError instanceof Error ? fetchError.message : 'Unknown error';
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh: ${errorMsg}, using local version\n`, { flag: 'a' });
}
// Spawn the update script as a detached process using nohup
// This allows it to run independently and kill the parent Node.js process
// Redirect output to log file
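The comments above describe launching update.sh as a detached process so it can keep running after (and even terminate) the parent Node.js server, with its output redirected to the log file. A minimal TypeScript sketch of that pattern follows; the script path, log path, and spawn arguments are assumptions for illustration, not the router's exact invocation.

```ts
import { spawn } from 'child_process';
import { openSync } from 'fs';

// Append the child's output to a log file instead of inheriting the parent's stdio.
const logFd = openSync('/tmp/update.log', 'a');

const child = spawn('bash', ['./update.sh'], {
  detached: true,                  // run in its own process group so it survives the parent
  stdio: ['ignore', logFd, logFd], // redirect stdout and stderr to the log file
});

child.unref(); // let the parent exit (or be killed) without waiting for the child
```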

update.sh (356 changed lines)

@@ -4,7 +4,7 @@
# Enhanced update script for ProxmoxVE-Local
# Fetches latest release from GitHub and backs up data directory
set -euo pipefail # Exit on error, undefined vars, pipe failures
set -euo pipefail # Exit on error, undefined vars, pipe failures
# Add error trap for debugging
trap 'echo "Error occurred at line $LINENO, command: $BASH_COMMAND"' ERR
@@ -38,7 +38,7 @@ load_github_token() {
log "Using GitHub token from environment variable"
return 0
fi
# Try .env file
if [ -f ".env" ]; then
local env_token
@@ -49,21 +49,21 @@ load_github_token() {
return 0
fi
fi
# Try .github_token file
if [ -f ".github_token" ]; then
GITHUB_TOKEN=$(cat .github_token | tr -d '\n\r')
log "Using GitHub token from .github_token file"
return 0
fi
# Try ~/.github_token file
if [ -f "$HOME/.github_token" ]; then
GITHUB_TOKEN=$(cat "$HOME/.github_token" | tr -d '\n\r')
log "Using GitHub token from ~/.github_token file"
return 0
fi
log_warning "No GitHub token found. Using unauthenticated requests (lower rate limits)"
log_warning "To use a token, add GITHUB_TOKEN=your_token to .env file or set GITHUB_TOKEN environment variable"
return 1
@@ -72,7 +72,7 @@ load_github_token() {
# Initialize log file
init_log() {
# Clear/create log file
>"$LOG_FILE"
> "$LOG_FILE"
log "Starting ProxmoxVE-Local update process..."
log "Log file: $LOG_FILE"
}
@@ -97,40 +97,40 @@ log_warning() {
# Check if required tools are available
check_dependencies() {
log "Checking dependencies..."
local missing_deps=()
if ! command -v curl &>/dev/null; then
if ! command -v curl &> /dev/null; then
missing_deps+=("curl")
fi
if ! command -v jq &>/dev/null; then
if ! command -v jq &> /dev/null; then
missing_deps+=("jq")
fi
if ! command -v npm &>/dev/null; then
if ! command -v npm &> /dev/null; then
missing_deps+=("npm")
fi
if ! command -v node &>/dev/null; then
if ! command -v node &> /dev/null; then
missing_deps+=("node")
fi
if [ ${#missing_deps[@]} -ne 0 ]; then
log_error "Missing dependencies: ${missing_deps[*]}"
log_error "Please install the missing dependencies and try again."
exit 1
fi
log_success "All dependencies are available"
}
# Get latest release info from GitHub API
get_latest_release() {
log "Fetching latest release information from GitHub..."
local curl_opts="-s --connect-timeout 15 --max-time 60 --retry 2 --retry-delay 3"
# Add authentication header if token is available
if [ -n "$GITHUB_TOKEN" ]; then
curl_opts="$curl_opts -H \"Authorization: token $GITHUB_TOKEN\""
@@ -138,35 +138,35 @@ get_latest_release() {
else
log "Using unauthenticated GitHub API request (lower rate limits)"
fi
local release_info
if ! release_info=$(eval "curl $curl_opts \"$GITHUB_API/releases/latest\""); then
log_error "Failed to fetch release information from GitHub API (timeout or network error)"
exit 1
fi
# Check if response is valid JSON
if ! echo "$release_info" | jq empty 2>/dev/null; then
log_error "Invalid JSON response from GitHub API"
log "Response: $release_info"
exit 1
fi
local tag_name
local download_url
local published_at
tag_name=$(echo "$release_info" | jq -r '.tag_name')
download_url=$(echo "$release_info" | jq -r '.tarball_url')
published_at=$(echo "$release_info" | jq -r '.published_at')
if [ "$tag_name" = "null" ] || [ "$download_url" = "null" ] || [ -z "$tag_name" ] || [ -z "$download_url" ]; then
log_error "Failed to parse release information from API response"
log "Tag name: $tag_name"
log "Download URL: $download_url"
exit 1
fi
log_success "Latest release: $tag_name (published: $published_at)"
echo "$tag_name|$download_url"
}
@@ -174,16 +174,16 @@ get_latest_release() {
# Backup data directory, .env file, and scripts directories
backup_data() {
log "Creating backup directory at $BACKUP_DIR..."
if ! mkdir -p "$BACKUP_DIR"; then
log_error "Failed to create backup directory"
exit 1
fi
# Backup data directory
if [ -d "$DATA_DIR" ]; then
log "Backing up data directory..."
if ! cp -r "$DATA_DIR" "$BACKUP_DIR/data"; then
log_error "Failed to backup data directory"
exit 1
@@ -193,7 +193,7 @@ backup_data() {
else
log_warning "Data directory not found, skipping backup"
fi
# Backup .env file
if [ -f ".env" ]; then
log "Backing up .env file..."
@@ -206,7 +206,7 @@ backup_data() {
else
log_warning ".env file not found, skipping backup"
fi
# Backup scripts directories
local scripts_dirs=("scripts/ct" "scripts/install" "scripts/tools" "scripts/vm")
for scripts_dir in "${scripts_dirs[@]}"; do
@@ -230,60 +230,60 @@ download_release() {
local release_info="$1"
local tag_name="${release_info%|*}"
local download_url="${release_info#*|}"
log "Downloading release $tag_name..."
local temp_dir="/tmp/pve-update-$$"
local archive_file="$temp_dir/release.tar.gz"
# Create temporary directory
if ! mkdir -p "$temp_dir"; then
log_error "Failed to create temporary directory"
exit 1
fi
# Download release with timeout and progress
if ! curl -L --connect-timeout 30 --max-time 300 --retry 3 --retry-delay 5 -o "$archive_file" "$download_url" 2>/dev/null; then
log_error "Failed to download release from GitHub"
rm -rf "$temp_dir"
exit 1
fi
# Verify download
if [ ! -f "$archive_file" ] || [ ! -s "$archive_file" ]; then
log_error "Downloaded file is empty or missing"
rm -rf "$temp_dir"
exit 1
fi
log_success "Downloaded release"
# Extract release
if ! tar -xzf "$archive_file" -C "$temp_dir" 2>/dev/null; then
log_error "Failed to extract release"
rm -rf "$temp_dir"
exit 1
fi
# Find the extracted directory (GitHub tarballs have a root directory)
local extracted_dir
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" 2>/dev/null | head -1)
# Try alternative patterns if not found
if [ -z "$extracted_dir" ]; then
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "${REPO_NAME}-*" 2>/dev/null | head -1)
fi
if [ -z "$extracted_dir" ]; then
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d ! -name "$temp_dir" 2>/dev/null | head -1)
fi
if [ -z "$extracted_dir" ]; then
log_error "Could not find extracted directory"
rm -rf "$temp_dir"
exit 1
fi
log_success "Release extracted successfully"
echo "$extracted_dir"
}
@@ -291,11 +291,11 @@ download_release() {
# Clear the original directory before updating
clear_original_directory() {
log "Clearing original directory..."
# Remove old lock files and node_modules before update
rm -f package-lock.json 2>/dev/null
rm -rf node_modules 2>/dev/null
# List of files/directories to preserve (already backed up)
local preserve_patterns=(
"data"
@@ -308,48 +308,48 @@ clear_original_directory() {
".git"
"scripts"
)
# Remove all files except preserved ones
while IFS= read -r file; do
local should_preserve=false
local filename=$(basename "$file")
for pattern in "${preserve_patterns[@]}"; do
if [[ "$filename" == $pattern ]]; then
should_preserve=true
break
fi
done
if [ "$should_preserve" = false ]; then
rm -f "$file"
fi
done < <(find . -maxdepth 1 -type f ! -name ".*")
# Remove all directories except preserved ones
while IFS= read -r dir; do
local should_preserve=false
local dirname=$(basename "$dir")
for pattern in "${preserve_patterns[@]}"; do
if [[ "$dirname" == $pattern ]]; then
should_preserve=true
break
fi
done
if [ "$should_preserve" = false ]; then
rm -rf "$dir"
fi
done < <(find . -maxdepth 1 -type d ! -name "." ! -name "..")
log_success "Original directory cleared"
}
# Restore backup files before building
restore_backup_files() {
log "Restoring .env, data directory, and scripts directories from backup..."
if [ -d "$BACKUP_DIR" ]; then
# Restore .env file
if [ -f "$BACKUP_DIR/.env" ]; then
@@ -365,7 +365,7 @@ restore_backup_files() {
else
log_warning "No .env file backup found"
fi
# Restore data directory
if [ -d "$BACKUP_DIR/data" ]; then
if [ -d "data" ]; then
@@ -380,24 +380,24 @@ restore_backup_files() {
else
log_warning "No data directory backup found"
fi
# Restore scripts directories
local scripts_dirs=("ct" "install" "tools" "vm")
for backup_name in "${scripts_dirs[@]}"; do
if [ -d "$BACKUP_DIR/$backup_name" ]; then
local target_dir="scripts/$backup_name"
log "Restoring $target_dir directory from backup..."
# Ensure scripts directory exists
if [ ! -d "scripts" ]; then
mkdir -p "scripts"
fi
# Remove existing directory if it exists
if [ -d "$target_dir" ]; then
rm -rf "$target_dir"
fi
if cp -r "$BACKUP_DIR/$backup_name" "$target_dir"; then
log_success "$target_dir directory restored from backup"
else
@@ -417,13 +417,7 @@ restore_backup_files() {
# Verify database was restored correctly
verify_database_restored() {
log "Verifying database was restored correctly..."
# Ensure data directory exists (will be auto-created by app if needed)
if [ ! -d "data" ]; then
log "Creating data directory..."
mkdir -p data
fi
# Check for both possible database filenames
local db_file=""
if [ -f "data/database.sqlite" ]; then
@@ -431,25 +425,23 @@ verify_database_restored() {
elif [ -f "data/settings.db" ]; then
db_file="data/settings.db"
else
# Database doesn't exist yet - this is OK for new installations
# The app will create it automatically via Prisma migrations
log_warning "No existing database file found - will be created automatically on first start"
return 0
log_error "Database file not found after restore! (checked database.sqlite and settings.db)"
return 1
fi
local db_size=$(stat -f%z "$db_file" 2>/dev/null || stat -c%s "$db_file" 2>/dev/null)
if [ "$db_size" -eq 0 ]; then
log_warning "Database file is empty - will be recreated by Prisma migrations"
return 0 # Don't fail the update, let Prisma recreate the database
return 0 # Don't fail the update, let Prisma recreate the database
fi
log_success "Database verified (file: $db_file, size: $db_size bytes)"
}
# Ensure DATABASE_URL is set in .env file for Prisma
ensure_database_url() {
log "Ensuring DATABASE_URL is set in .env file..."
# Check if .env file exists
if [ ! -f ".env" ]; then
log_warning ".env file not found, creating from .env.example..."
@@ -460,19 +452,19 @@ ensure_database_url() {
return 1
fi
fi
# Check if DATABASE_URL is already set
if grep -q "^DATABASE_URL=" .env; then
log "DATABASE_URL already exists in .env file"
return 0
fi
# Add DATABASE_URL to .env file
log "Adding DATABASE_URL to .env file..."
echo "" >>.env
echo "# Database" >>.env
echo "DATABASE_URL=\"file:./data/settings.db\"" >>.env
echo "" >> .env
echo "# Database" >> .env
echo "DATABASE_URL=\"file:./data/settings.db\"" >> .env
log_success "DATABASE_URL added to .env file"
}
@@ -489,9 +481,11 @@ check_service() {
fi
}
# Stop the application before updating
stop_application() {
# Change to the application directory if we're not already there
local app_dir
if [ -f "package.json" ] && [ -f "server.js" ]; then
@@ -509,9 +503,9 @@ stop_application() {
return 1
fi
fi
log "Working from application directory: $(pwd)"
# Check if systemd service is running and disable it temporarily
if check_service && systemctl is-active --quiet pvescriptslocal.service; then
log "Disabling systemd service temporarily to prevent auto-restart..."
@@ -524,7 +518,7 @@ stop_application() {
else
log "No running systemd service found"
fi
# Kill any remaining npm/node processes
log "Killing any remaining npm/node processes..."
local pids
@@ -543,9 +537,9 @@ stop_application() {
# Update application files
update_files() {
local source_dir="$1"
log "Updating application files..."
# List of files/directories to exclude from update
local exclude_patterns=(
"data"
@@ -561,48 +555,48 @@ update_files() {
"scripts/tools"
"scripts/vm"
)
# Find the actual source directory (strip the top-level directory)
local actual_source_dir
actual_source_dir=$(find "$source_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" | head -1)
if [ -z "$actual_source_dir" ]; then
log_error "Could not find the actual source directory in $source_dir"
return 1
fi
# Verify critical files exist in source
if [ ! -f "$actual_source_dir/package.json" ]; then
log_error "package.json not found in source directory!"
return 1
fi
# Use process substitution instead of pipe to avoid subshell issues
local files_copied=0
local files_excluded=0
# Create a temporary file list to avoid process substitution issues
local file_list="/tmp/file_list_$$.txt"
find "$actual_source_dir" -type f >"$file_list"
find "$actual_source_dir" -type f > "$file_list"
while IFS= read -r file; do
local rel_path="${file#$actual_source_dir/}"
local should_exclude=false
for pattern in "${exclude_patterns[@]}"; do
if [[ "$rel_path" == $pattern ]] || [[ "$rel_path" == $pattern/* ]]; then
should_exclude=true
break
fi
done
if [ "$should_exclude" = false ]; then
local target_dir
target_dir=$(dirname "$rel_path")
if [ "$target_dir" != "." ]; then
mkdir -p "$target_dir"
fi
if ! cp "$file" "$rel_path"; then
log_error "Failed to copy $rel_path"
rm -f "$file_list"
@@ -612,47 +606,48 @@ update_files() {
else
files_excluded=$((files_excluded + 1))
fi
done <"$file_list"
done < "$file_list"
# Clean up temporary file
rm -f "$file_list"
# Verify critical files were copied
if [ ! -f "package.json" ]; then
log_error "package.json was not copied to target directory!"
return 1
fi
if [ ! -f "package-lock.json" ]; then
log_warning "package-lock.json was not copied!"
fi
log_success "Application files updated successfully ($files_copied files)"
}
# Install dependencies and build
install_and_build() {
log "Installing dependencies..."
# Verify package.json exists
if [ ! -f "package.json" ]; then
log_error "package.json not found! Cannot install dependencies."
return 1
fi
if [ ! -f "package-lock.json" ]; then
log_warning "No package-lock.json found, npm will generate one"
fi
# Create temporary file for npm output
local npm_log="/tmp/npm_install_$$.log"
# Ensure NODE_ENV is not set to production during install (we need devDependencies for build)
local old_node_env="${NODE_ENV:-}"
export NODE_ENV=development
# Run npm install to get ALL dependencies including devDependencies
if ! npm install --include=dev >"$npm_log" 2>&1; then
if ! npm install --include=dev > "$npm_log" 2>&1; then
log_error "Failed to install dependencies"
log_error "npm install output (last 30 lines):"
tail -30 "$npm_log" | while read -r line; do
@@ -661,20 +656,20 @@ install_and_build() {
rm -f "$npm_log"
return 1
fi
# Restore NODE_ENV
if [ -n "$old_node_env" ]; then
export NODE_ENV="$old_node_env"
else
unset NODE_ENV
fi
log_success "Dependencies installed successfully"
rm -f "$npm_log"
# Generate Prisma client
log "Generating Prisma client..."
if ! npx prisma generate >"$npm_log" 2>&1; then
if ! npx prisma generate > "$npm_log" 2>&1; then
log_error "Failed to generate Prisma client"
log_error "Prisma generate output:"
cat "$npm_log" | while read -r line; do
@@ -684,7 +679,7 @@ install_and_build() {
return 1
fi
log_success "Prisma client generated successfully"
# Check if Prisma migrations exist and are compatible
if [ -d "prisma/migrations" ]; then
log "Existing migration history detected"
@@ -693,10 +688,10 @@ install_and_build() {
else
log_warning "No existing migration history found - this may be a fresh install"
fi
# Run Prisma migrations
log "Running Prisma migrations..."
if ! npx prisma migrate deploy >"$npm_log" 2>&1; then
if ! npx prisma migrate deploy > "$npm_log" 2>&1; then
log_warning "Prisma migrations failed or no migrations to run"
log "Prisma migrate output:"
cat "$npm_log" | while read -r line; do
@@ -706,18 +701,15 @@ install_and_build() {
log_success "Prisma migrations completed successfully"
fi
rm -f "$npm_log"
log "Building application..."
# Set NODE_ENV to production for build
export NODE_ENV=production
# Unset TURBOPACK to prevent "Multiple bundler flags" error with --webpack
unset TURBOPACK 2>/dev/null || true
export TURBOPACK=''
# Create temporary file for npm build output
local build_log="/tmp/npm_build_$$.log"
if ! TURBOPACK='' npm run build >"$build_log" 2>&1; then
if ! npm run build > "$build_log" 2>&1; then
log_error "Failed to build application"
log_error "npm run build output:"
cat "$build_log" | while read -r line; do
@@ -726,18 +718,18 @@ install_and_build() {
rm -f "$build_log"
return 1
fi
# Log success and clean up
log_success "Application built successfully"
rm -f "$build_log"
log_success "Dependencies installed and application built successfully"
}
# Start the application after updating
start_application() {
log "Starting application..."
# Use the global variable to determine how to start
if [ "$SERVICE_WAS_RUNNING" = true ] && check_service; then
log "Service was running before update, re-enabling and starting systemd service..."
@@ -769,11 +761,11 @@ start_application() {
# Start application with npm
start_with_npm() {
log "Starting application with npm start..."
# Start in background
nohup npm start >server.log 2>&1 &
nohup npm start > server.log 2>&1 &
local npm_pid=$!
# Wait a moment and check if it started
sleep 3
if kill -0 $npm_pid 2>/dev/null; then
@@ -784,30 +776,13 @@ start_with_npm() {
fi
}
# Re-enable the systemd service on failure to prevent users from being locked out
re_enable_service_on_failure() {
if check_service; then
log "Re-enabling systemd service after failure..."
if systemctl enable pvescriptslocal.service 2>/dev/null; then
log_success "Service re-enabled"
if systemctl start pvescriptslocal.service 2>/dev/null; then
log_success "Service started"
else
log_warning "Failed to start service - manual intervention may be required"
fi
else
log_warning "Failed to re-enable service - manual intervention may be required"
fi
fi
}
# Rollback function
rollback() {
log_warning "Rolling back to previous version..."
if [ -d "$BACKUP_DIR" ]; then
log "Restoring from backup directory: $BACKUP_DIR"
# Restore data directory
if [ -d "$BACKUP_DIR/data" ]; then
log "Restoring data directory..."
@@ -822,7 +797,7 @@ rollback() {
else
log_warning "No data directory backup found"
fi
# Restore .env file
if [ -f "$BACKUP_DIR/.env" ]; then
log "Restoring .env file..."
@@ -837,24 +812,24 @@ rollback() {
else
log_warning "No .env file backup found"
fi
# Restore scripts directories
local scripts_dirs=("ct" "install" "tools" "vm")
for backup_name in "${scripts_dirs[@]}"; do
if [ -d "$BACKUP_DIR/$backup_name" ]; then
local target_dir="scripts/$backup_name"
log "Restoring $target_dir directory from backup..."
# Ensure scripts directory exists
if [ ! -d "scripts" ]; then
mkdir -p "scripts"
fi
# Remove existing directory if it exists
if [ -d "$target_dir" ]; then
rm -rf "$target_dir"
fi
if mv "$BACKUP_DIR/$backup_name" "$target_dir"; then
log_success "$target_dir directory restored from backup"
else
@@ -864,17 +839,14 @@ rollback() {
log_warning "No $backup_name directory backup found"
fi
done
# Clean up backup directory
log "Cleaning up backup directory..."
rm -rf "$BACKUP_DIR"
else
log_error "No backup directory found for rollback"
fi
# Re-enable the service so users aren't locked out
re_enable_service_on_failure
log_error "Update failed. Please check the logs and try again."
exit 1
}
@@ -893,14 +865,14 @@ check_node_version() {
log "Detected Node.js version: $current"
if ((major_version == 24)); then
log_success "Node.js 24 already installed"
elif ((major_version < 24)); then
if (( major_version < 24 )); then
log_warning "Node.js < 24 detected → upgrading to Node.js 24 LTS..."
upgrade_node_to_24
else
elif (( major_version > 24 )); then
log_warning "Node.js > 24 detected → script tested only up to Node 24"
log "Continuing anyway…"
else
log_success "Node.js 24 already installed"
fi
}
@@ -908,39 +880,22 @@ check_node_version() {
upgrade_node_to_24() {
log "Preparing Node.js 24 upgrade…"
# Remove old nodesource repo files if they exist
# Remove old nodesource repo if it exists
if [ -f /etc/apt/sources.list.d/nodesource.list ]; then
log "Removing old nodesource.list file..."
rm -f /etc/apt/sources.list.d/nodesource.list
fi
if [ -f /etc/apt/sources.list.d/nodesource.sources ]; then
log "Removing old nodesource.sources file..."
rm -f /etc/apt/sources.list.d/nodesource.sources
fi
# Update apt cache first
log "Updating apt cache..."
apt-get update >>"$LOG_FILE" 2>&1 || true
# Install NodeSource repo for Node.js 24
log "Downloading Node.js 24 setup script..."
if ! curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh; then
log_error "Failed to download Node.js 24 setup script"
re_enable_service_on_failure
exit 1
fi
if ! bash /tmp/node24_setup.sh >/tmp/node24_setup.log 2>&1; then
curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh
if ! bash /tmp/node24_setup.sh > /tmp/node24_setup.log 2>&1; then
log_error "Failed to configure Node.js 24 repository"
tail -20 /tmp/node24_setup.log | while read -r line; do log_error "$line"; done
re_enable_service_on_failure
exit 1
fi
log "Installing Node.js 24…"
if ! apt-get install -y nodejs >>"$LOG_FILE" 2>&1; then
if ! apt-get install -y nodejs >> "$LOG_FILE" 2>&1; then
log_error "Failed to install Node.js 24"
re_enable_service_on_failure
exit 1
fi
@@ -957,21 +912,21 @@ main() {
init_log
log "Running as detached process"
sleep 3
else
init_log
fi
# Check if we're running from the application directory and not already relocated
if [ -z "${PVE_UPDATE_RELOCATED:-}" ] && [ -f "package.json" ] && [ -f "server.js" ]; then
log "Detected running from application directory"
bash "$0" --relocated
exit $?
fi
# Ensure we're in the application directory
local app_dir
# First check if we're already in the right directory
if [ -f "package.json" ] && [ -f "server.js" ]; then
app_dir="$(pwd)"
@@ -988,76 +943,79 @@ main() {
exit 1
fi
fi
# Check dependencies
check_dependencies
# Load GitHub token for higher rate limits
load_github_token
# Check if service was running before update
if check_service && systemctl is-active --quiet pvescriptslocal.service; then
SERVICE_WAS_RUNNING=true
else
SERVICE_WAS_RUNNING=false
fi
# Get latest release info
local release_info
release_info=$(get_latest_release)
# Backup data directory
backup_data
# Stop the application before updating
stop_application
# Check Node.js version
check_node_version
#Update Node.js to 24
upgrade_node_to_24
# Download and extract release
local source_dir
source_dir=$(download_release "$release_info")
# Clear the original directory before updating
clear_original_directory
# Update files
if ! update_files "$source_dir"; then
log_error "File update failed, rolling back..."
rollback
fi
# Restore .env and data directory before building
restore_backup_files
# Verify database was restored correctly
if ! verify_database_restored; then
log_error "Database verification failed, rolling back..."
rollback
fi
# Ensure DATABASE_URL is set for Prisma
ensure_database_url
# Install dependencies and build
if ! install_and_build; then
log_error "Install and build failed, rolling back..."
rollback
fi
# Start the application
if ! start_application; then
log_error "Failed to start application after update"
rollback
fi
# Cleanup only after successful start
rm -rf "$source_dir"
rm -rf "/tmp/pve-update-$$"
rm -rf "$BACKUP_DIR"
log "Backup directory cleaned up"
log_success "Update completed successfully!"
}
@@ -1065,4 +1023,4 @@ main() {
if ! main "$@"; then
log_error "Update script failed with exit code $?"
exit 1
fi
fi