Compare commits
feat/use_n ... fix/398 (78 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 886c3e37ff | |
| | 38deb09aa9 | |
| | 6d326dce1f | |
| | 6c8e177d3e | |
| | 879a548345 | |
| | 64cd81d5ba | |
| | 61e75949c8 | |
| | a5d24bfad7 | |
| | 04595c0093 | |
| | 06fdb4889d | |
| | 38d4f9f918 | |
| | 63dc7c6983 | |
| | d57c6059fc | |
| | eb152f9fae | |
| | 1a8e98fec0 | |
| | 83a1c7ea31 | |
| | 79c63a7d3d | |
| | 753721eee0 | |
| | 09607296af | |
| | c88040084a | |
| | 2573eb7314 | |
| | 414c356446 | |
| | c38ded7a39 | |
| | 0cfed84cd0 | |
| | 9611bc9bcf | |
| | 6fe2a790fd | |
| | 5ea71837e7 | |
| | bf5ebc72b6 | |
| | a32c7bcbba | |
| | 98c6e79db6 | |
| | c962a9cd5a | |
| | 5d20a6d694 | |
| | cb4e8c543a | |
| | 2ba213de49 | |
| | 849aabb575 | |
| | dd33df2033 | |
| | 94eb2820fd | |
| | e49708770c | |
| | 5eafa01843 | |
| | 0c1477e087 | |
| | ef73d98873 | |
| | ec92c0ea6d | |
| | ee14b89868 | |
| | be68160cd9 | |
| | dbc15b1bc3 | |
| | dc6ce16e5a | |
| | 0c9d4ad6e2 | |
| | 13d57b77d4 | |
| | f9e5bd5bf0 | |
| | adf2b06efa | |
| | 80e3966e4e | |
| | 3662a057dc | |
| | bdf336f9bf | |
| | f6c310fa22 | |
| | d658894b7f | |
| | 783744b497 | |
| | de9ac41f76 | |
| | 060202e557 | |
| | 8d45ac14cc | |
| | 47ee2247c8 | |
| | c16c8d54db | |
| | 3e669a0739 | |
| | 02e175c8a0 | |
| | b4e98e7624 | |
| | 2392529092 | |
| | f9f5772d92 | |
| | 4267d7340e | |
| | dcf923551b | |
| | 580b623939 | |
| | ac21fbb181 | |
| | 588ae65dfd | |
| | 30acba39a5 | |
| | 3a5bb3dc45 | |
| | f42c0d956e | |
| | afc87910e6 | |
| | 8f0ae3a341 | |
| | b5450bd221 | |
| | 88dbe4ea85 | |
2 .github/pull_request_template.md vendored
@@ -4,7 +4,7 @@
## 🔗 Related PR / Issue
Link: #
Fixes: #

## ✅ Prerequisites (**X** in brackets)
@@ -100,7 +100,7 @@ apt install -y nodejs
```bash
# Clone the repository
git clone https://github.com/community-scripts/ProxmoxVE-Local.git /opt/PVESciptslocal
cd PVESciptslocal
cd /opt/PVESciptslocal

# Install dependencies and build
npm install
1149 package-lock.json generated
File diff suppressed because it is too large
72 package.json
@@ -25,35 +25,35 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@prisma/adapter-better-sqlite3": "^7.0.1",
"@prisma/client": "^7.0.1",
"@prisma/adapter-better-sqlite3": "^7.2.0",
"@prisma/client": "^7.2.0",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-slot": "^1.2.4",
"@t3-oss/env-nextjs": "^0.13.8",
"@t3-oss/env-nextjs": "^0.13.10",
"@tailwindcss/typography": "^0.5.19",
"@tanstack/react-query": "^5.90.11",
"@trpc/client": "^11.7.2",
"@trpc/react-query": "^11.7.2",
"@trpc/server": "^11.7.2",
"@tanstack/react-query": "^5.90.18",
"@trpc/client": "^11.8.1",
"@trpc/react-query": "^11.8.1",
"@trpc/server": "^11.8.1",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/ws": "^8.18.1",
"@xterm/addon-fit": "^0.10.0",
"@xterm/addon-web-links": "^0.11.0",
"@xterm/xterm": "^5.5.0",
"@xterm/addon-fit": "^0.11.0",
"@xterm/addon-web-links": "^0.12.0",
"@xterm/xterm": "^6.0.0",
"axios": "^1.13.2",
"bcryptjs": "^3.0.3",
"better-sqlite3": "^12.5.0",
"better-sqlite3": "^12.6.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cron-validator": "^1.4.0",
"dotenv": "^17.2.3",
"jsonwebtoken": "^9.0.2",
"lucide-react": "^0.555.0",
"next": "^16.0.6",
"jsonwebtoken": "^9.0.3",
"lucide-react": "^0.562.0",
"next": "^16.1.3",
"node-cron": "^4.2.1",
"node-pty": "^1.0.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"node-pty": "^1.1.0",
"react": "^19.2.3",
"react-dom": "^19.2.3",
"react-markdown": "^10.1.0",
"react-syntax-highlighter": "^16.1.0",
"refractor": "^5.0.0",
@@ -62,37 +62,37 @@
"strip-ansi": "^7.1.2",
"superjson": "^2.2.6",
"tailwind-merge": "^3.4.0",
"ws": "^8.18.3",
"zod": "^4.1.13"
"ws": "^8.19.0",
"zod": "^4.3.5"
},
"devDependencies": {
"@tailwindcss/postcss": "^4.1.17",
"@tailwindcss/postcss": "^4.1.18",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.0",
"@testing-library/react": "^16.3.1",
"@testing-library/user-event": "^14.6.1",
"@types/bcryptjs": "^3.0.0",
"@types/better-sqlite3": "^7.6.13",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "^24.10.1",
"@types/node": "^24.10.9",
"@types/node-cron": "^3.0.11",
"@types/react": "^19.2.7",
"@types/react": "^19.2.8",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"@vitest/coverage-v8": "^4.0.15",
"@vitest/ui": "^4.0.14",
"baseline-browser-mapping": "^2.8.32",
"eslint": "^9.39.1",
"eslint-config-next": "^16.0.6",
"jsdom": "^27.2.0",
"@vitejs/plugin-react": "^5.1.2",
"@vitest/coverage-v8": "^4.0.17",
"@vitest/ui": "^4.0.17",
"baseline-browser-mapping": "^2.9.15",
"eslint": "^9.39.2",
"eslint-config-next": "^16.1.3",
"jsdom": "^27.4.0",
"postcss": "^8.5.6",
"prettier": "^3.7.3",
"prettier": "^3.8.0",
"prettier-plugin-tailwindcss": "^0.7.2",
"prisma": "^7.0.1",
"tailwindcss": "^4.1.17",
"prisma": "^7.2.0",
"tailwindcss": "^4.1.18",
"tsx": "^4.21.0",
"typescript": "^5.9.3",
"typescript-eslint": "^8.48.1",
"vitest": "^4.0.14"
"typescript-eslint": "^8.53.0",
"vitest": "^4.0.17"
},
"ct3aMetadata": {
"initVersion": "7.39.3"
@@ -104,4 +104,4 @@
"overrides": {
"prismjs": "^1.30.0"
}
}
}
@@ -1,4 +1,4 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# Author: tteck (tteckster)
# Co-Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
@@ -1,4 +1,4 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# Author: michelroegl-brunner
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE

File diff suppressed because it is too large
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# Author: community-scripts ORG
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/branch/main/LICENSE
# Revision: 1
@@ -502,4 +502,4 @@ if validate_ip_cidr "192.168.1.100/24"; then
echo "Valid IP/CIDR"
fi

EXAMPLES
EXAMPLES
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE

# ==============================================================================
@@ -123,6 +123,7 @@ icons() {
CREATING="${TAB}🚀${TAB}${CL}"
ADVANCED="${TAB}🧩${TAB}${CL}"
FUSE="${TAB}🗂️${TAB}${CL}"
GPU="${TAB}🎮${TAB}${CL}"
HOURGLASS="${TAB}⏳${TAB}"
}
@@ -808,18 +809,15 @@ cleanup_lxc() {
find /tmp /var/tmp -type f -name 'tmp*' -delete 2>/dev/null || true
find /tmp /var/tmp -type f -name 'tempfile*' -delete 2>/dev/null || true

# Truncate writable log files silently (permission errors ignored)
if command -v truncate >/dev/null 2>&1; then
find /var/log -type f -writable -print0 2>/dev/null |
xargs -0 -n1 truncate -s 0 2>/dev/null || true
# Node.js npm - directly remove cache directory
# npm cache clean/verify can fail with ENOTEMPTY errors, so we skip them
if command -v npm &>/dev/null; then
rm -rf /root/.npm/_cacache /root/.npm/_logs 2>/dev/null || true
fi

# Node.js npm
if command -v npm &>/dev/null; then $STD npm cache clean --force || true; fi
# Node.js yarn
if command -v yarn &>/dev/null; then $STD yarn cache clean || true; fi
if command -v yarn &>/dev/null; then yarn cache clean &>/dev/null || true; fi
# Node.js pnpm
if command -v pnpm &>/dev/null; then $STD pnpm store prune || true; fi
if command -v pnpm &>/dev/null; then pnpm store prune &>/dev/null || true; fi
# Go
if command -v go &>/dev/null; then $STD go clean -cache -modcache || true; fi
# Rust cargo
@@ -827,11 +825,8 @@ cleanup_lxc() {
# Ruby gem
if command -v gem &>/dev/null; then $STD gem cleanup || true; fi
# Composer (PHP)
if command -v composer &>/dev/null; then $STD composer clear-cache || true; fi
if command -v composer &>/dev/null; then COMPOSER_ALLOW_SUPERUSER=1 $STD composer clear-cache || true; fi

if command -v journalctl &>/dev/null; then
$STD journalctl --vacuum-time=10m || true
fi
msg_ok "Cleaned"
}

@@ -887,4 +882,4 @@ check_or_create_swap() {
# SIGNAL TRAPS
# ==============================================================================

trap 'stop_spinner' EXIT INT TERM
trap 'stop_spinner' EXIT INT TERM
@@ -2,7 +2,7 @@
# ------------------------------------------------------------------------------
# ERROR HANDLER - ERROR & SIGNAL MANAGEMENT
# ------------------------------------------------------------------------------
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# ------------------------------------------------------------------------------
@@ -34,9 +34,9 @@
# * Node.js/npm errors (243-249, 254)
# * Python/pip/uv errors (210-212)
# * PostgreSQL errors (231-234)
# * MySQL/MariaDB errors (260-263)
# * MongoDB errors (251-253)
# * Proxmox custom codes (200-209, 213-223, 225)
# * MySQL/MariaDB errors (241-244)
# * MongoDB errors (251-254)
# * Proxmox custom codes (200-231)
# - Returns description string for given exit code
# ------------------------------------------------------------------------------
explain_exit_code() {
@@ -319,4 +319,4 @@ catch_errors() {
trap on_exit EXIT
trap on_interrupt INT
trap on_terminate TERM
}
}
@@ -1,4 +1,4 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Copyright (c) 2021-2026 community-scripts ORG
# Author: tteck (tteckster)
# Co-Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
@@ -222,21 +222,12 @@ motd_ssh() {
# Set terminal to 256-color mode
grep -qxF "export TERM='xterm-256color'" /root/.bashrc || echo "export TERM='xterm-256color'" >>/root/.bashrc

# Get OS information (Debian / Ubuntu)
if [ -f "/etc/os-release" ]; then
OS_NAME=$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"')
OS_VERSION=$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')
elif [ -f "/etc/debian_version" ]; then
OS_NAME="Debian"
OS_VERSION=$(cat /etc/debian_version)
fi

PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
echo "echo -e \"\"" >"$PROFILE_FILE"
echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
echo "echo \"\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}${OS_NAME} - Version: ${OS_VERSION}${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(hostname -I | awk '{print \$1}')${CL}\"" >>"$PROFILE_FILE"

File diff suppressed because it is too large
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
SCRIPT_DIR="$(dirname "$0")"
source "$SCRIPT_DIR/../core/build.func"
# Copyright (c) 2021-2025 tteck
# Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.debian.org/
@@ -40,5 +40,5 @@ start
build_container
description

msg_ok "Completed Successfully!\n"
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2025 tteck
# Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.debian.org/
@@ -1610,6 +1610,7 @@ class ScriptExecutionHandler {
// TerminalHandler removed - not used by current application

app.prepare().then(() => {
console.log('> Next.js app prepared successfully');
const httpServer = createServer(async (req, res) => {
try {
// Be sure to pass `true` as the second argument to `url.parse`.
@@ -1715,4 +1716,9 @@ app.prepare().then(() => {
autoSyncModule.setupGracefulShutdown();
}
});
}).catch((err) => {
console.error('> Failed to start server:', err.message);
console.error('> If you see "Could not find a production build", run: npm run build');
console.error('> Full error:', err);
process.exit(1);
});
@@ -71,6 +71,7 @@ export function ConfigurationModal({
} else {
// Advanced mode: all vars with defaults
const defaults: EnvVars = {
var_ctid: '', // Empty = use next available ID
// Resources from JSON
var_cpu: resources?.cpu ?? 1,
var_ram: resources?.ram ?? 1024,
@@ -87,6 +88,7 @@ export function ConfigurationModal({
var_mtu: 1500,
var_mac: '',
var_ns: '',
var_searchdomain: '',

// Identity
var_hostname: slug,
@@ -211,6 +213,14 @@ export function ConfigurationModal({
if (advancedVars.var_vlan && !validatePositiveInt(advancedVars.var_vlan as string | number | undefined)) {
newErrors.var_vlan = 'Must be a positive integer';
}
// Container ID (CTID): if set, must be integer >= 100
const ctidVal = advancedVars.var_ctid;
if (ctidVal !== '' && ctidVal !== undefined && typeof ctidVal !== 'boolean') {
const ctidNum = typeof ctidVal === 'string' ? parseInt(ctidVal, 10) : ctidVal;
if (isNaN(ctidNum) || ctidNum < 100) {
newErrors.var_ctid = 'Must be 100 or greater';
}
}
}

setErrors(newErrors);
@@ -281,7 +291,12 @@ export function ConfigurationModal({
const cleaned: EnvVars = {};
for (const [key, value] of Object.entries(envVars)) {
if (value !== '' && value !== undefined) {
cleaned[key] = value;
// Send var_ctid as number so the script receives a numeric ID
if (key === 'var_ctid') {
cleaned[key] = Number(value);
} else {
cleaned[key] = value;
}
}
}
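For reference, a minimal standalone sketch of the cleaning step shown in the hunk above (hypothetical helper and simplified `EnvVars` type, not part of the diff): empty values are dropped and `var_ctid` is coerced to a number before the payload is sent to the script.

```typescript
// Illustrative sketch only; the real EnvVars type and surrounding component differ.
type EnvVars = Record<string, string | number | boolean>;

function cleanEnvVars(envVars: EnvVars): EnvVars {
  const cleaned: EnvVars = {};
  for (const [key, value] of Object.entries(envVars)) {
    if (value !== '' && value !== undefined) {
      // var_ctid is sent as a number so the script receives a numeric container ID
      cleaned[key] = key === 'var_ctid' ? Number(value) : value;
    }
  }
  return cleaned;
}

// Example: empty strings are dropped, the CTID string becomes a number
console.log(cleanEnvVars({ var_ctid: '105', var_cpu: 2, var_mac: '' }));
// -> { var_ctid: 105, var_cpu: 2 }
```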
@@ -374,6 +389,35 @@ export function ConfigurationModal({
) : (
/* Advanced Mode */
<div className="space-y-6">
{/* Container ID (CTID) - at top so user can set a specific ID */}
<div>
<h3 className="text-lg font-medium text-foreground mb-4">Container ID (CTID)</h3>
<div className="grid grid-cols-2 gap-4">
<div>
<label className="block text-sm font-medium text-foreground mb-2">
Container ID (CTID)
</label>
<Input
type="number"
min="100"
value={typeof advancedVars.var_ctid === 'boolean' ? '' : (advancedVars.var_ctid ?? '')}
onChange={(e) => {
const v = e.target.value;
updateAdvancedVar('var_ctid', v === '' ? '' : parseInt(v, 10) || '');
}}
placeholder="Auto (next available)"
className={errors.var_ctid ? 'border-destructive' : ''}
/>
{errors.var_ctid && (
<p className="mt-1 text-xs text-destructive">{errors.var_ctid}</p>
)}
<p className="mt-1 text-xs text-muted-foreground">
Leave empty to use the next available ID. Must be 100 or greater.
</p>
</div>
</div>
</div>

{/* Resources */}
<div>
<h3 className="text-lg font-medium text-foreground mb-4">Resources</h3>
@@ -613,6 +657,17 @@ export function ConfigurationModal({
<p className="mt-1 text-xs text-destructive">{errors.var_ns}</p>
)}
</div>
<div>
<label className="block text-sm font-medium text-foreground mb-2">
DNS Search Domain
</label>
<Input
type="text"
value={typeof advancedVars.var_searchdomain === 'boolean' ? '' : String(advancedVars.var_searchdomain ?? '')}
onChange={(e) => updateAdvancedVar('var_searchdomain', e.target.value)}
placeholder="e.g. local, home.lan"
/>
</div>
</div>
</div>
@@ -16,7 +16,7 @@ export function Footer({ onOpenReleaseNotes }: FooterProps) {
<div className="container mx-auto px-4">
<div className="flex flex-col sm:flex-row items-center justify-between gap-2 text-sm text-muted-foreground">
<div className="flex items-center gap-2">
<span>© 2024 PVE Scripts Local</span>
<span>© 2026 PVE Scripts Local</span>
{versionData?.success && versionData.version && (
<Button
variant="ghost"
@@ -416,11 +416,20 @@ export function VersionDisplay({ onOpenReleaseNotes }: VersionDisplayProps = {})
setShowUpdateConfirmation(true);
};

// Helper to generate secure random string
function getSecureRandomString(length: number): string {
const array = new Uint8Array(length);
window.crypto.getRandomValues(array);
// Convert to base36 string (alphanumeric)
return Array.from(array, b => b.toString(36)).join('').substr(0, length);
}

const handleConfirmUpdate = () => {
// Close the confirmation modal
setShowUpdateConfirmation(false);
// Start the actual update process
const sessionId = `update_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
const randomSuffix = getSecureRandomString(9);
const sessionId = `update_${Date.now()}_${randomSuffix}`;
const startTime = Date.now();

setIsUpdating(true);
@@ -238,6 +238,27 @@ export const versionRouter = createTRPCRouter({
// Clear/create the log file
await writeFile(logPath, '', 'utf-8');

// Always fetch the latest update.sh from GitHub before running
// This ensures we always use the newest update script, avoiding
// the "chicken-and-egg" problem where old scripts can't update properly
const updateScriptUrl = 'https://raw.githubusercontent.com/community-scripts/ProxmoxVE-Local/main/update.sh';
try {
const response = await fetch(updateScriptUrl);
if (response.ok) {
const latestScript = await response.text();
await writeFile(updateScriptPath, latestScript, { mode: 0o755 });
// Log that we fetched the latest script
await writeFile(logPath, '[INFO] Fetched latest update.sh from GitHub\n', { flag: 'a' });
} else {
// If fetch fails, log warning but continue with local script
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh (HTTP ${response.status}), using local version\n`, { flag: 'a' });
}
} catch (fetchError) {
// If fetch fails, log warning but continue with local script
const errorMsg = fetchError instanceof Error ? fetchError.message : 'Unknown error';
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh: ${errorMsg}, using local version\n`, { flag: 'a' });
}

// Spawn the update script as a detached process using nohup
// This allows it to run independently and kill the parent Node.js process
// Redirect output to log file
@@ -1,9 +1,22 @@
import 'dotenv/config'
import { PrismaClient } from '../../prisma/generated/prisma/client.ts'
import { PrismaBetterSqlite3 } from '@prisma/adapter-better-sqlite3'
import { existsSync, mkdirSync } from 'fs'
import { dirname } from 'path'

const globalForPrisma = globalThis;

// Ensure database directory exists before initializing Prisma
// DATABASE_URL format: file:/path/to/database.db
const dbUrl = process.env.DATABASE_URL || 'file:./data/settings.db';
const dbPath = dbUrl.replace(/^file:/, '');
const dbDir = dirname(dbPath);

if (!existsSync(dbDir)) {
console.log(`Creating database directory: ${dbDir}`);
mkdirSync(dbDir, { recursive: true });
}

const adapter = new PrismaBetterSqlite3({ url: process.env.DATABASE_URL });

export const prisma = globalForPrisma.prisma ?? new PrismaClient({ adapter });
@@ -1,9 +1,22 @@
import 'dotenv/config'
import { PrismaClient } from '../../prisma/generated/prisma/client'
import { PrismaBetterSqlite3 } from '@prisma/adapter-better-sqlite3'
import { existsSync, mkdirSync } from 'fs'
import { dirname } from 'path'

const globalForPrisma = globalThis as { prisma?: PrismaClient };

// Ensure database directory exists before initializing Prisma
// DATABASE_URL format: file:/path/to/database.db
const dbUrl = process.env.DATABASE_URL || 'file:./data/settings.db';
const dbPath = dbUrl.replace(/^file:/, '');
const dbDir = dirname(dbPath);

if (!existsSync(dbDir)) {
console.log(`Creating database directory: ${dbDir}`);
mkdirSync(dbDir, { recursive: true });
}

const adapter = new PrismaBetterSqlite3({ url: process.env.DATABASE_URL! });

export const prisma: PrismaClient = globalForPrisma.prisma ?? new PrismaClient({
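As a usage illustration (assumed example values, not from the diff), the directory-creation logic above resolves the SQLite path and its parent directory from DATABASE_URL like this:

```typescript
import { dirname } from 'path';

// Illustrative only: shows what the snippet above computes for two plausible
// DATABASE_URL values; actual paths in a deployment may differ.
for (const dbUrl of ['file:./data/settings.db', 'file:/opt/PVESciptslocal/data/settings.db']) {
  const dbPath = dbUrl.replace(/^file:/, ''); // strip the "file:" scheme prefix
  const dbDir = dirname(dbPath);              // directory that must exist before Prisma starts
  console.log(`${dbUrl} -> path: ${dbPath}, dir to create: ${dbDir}`);
}
// file:./data/settings.db -> path: ./data/settings.db, dir to create: ./data
// file:/opt/PVESciptslocal/data/settings.db -> path: /opt/PVESciptslocal/data/settings.db, dir to create: /opt/PVESciptslocal/data
```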
356 update.sh
@@ -4,7 +4,7 @@
# Enhanced update script for ProxmoxVE-Local
# Fetches latest release from GitHub and backs up data directory

set -euo pipefail  # Exit on error, undefined vars, pipe failures
set -euo pipefail # Exit on error, undefined vars, pipe failures

# Add error trap for debugging
trap 'echo "Error occurred at line $LINENO, command: $BASH_COMMAND"' ERR
@@ -38,7 +38,7 @@ load_github_token() {
log "Using GitHub token from environment variable"
return 0
fi

# Try .env file
if [ -f ".env" ]; then
local env_token
@@ -49,21 +49,21 @@ load_github_token() {
return 0
fi
fi

# Try .github_token file
if [ -f ".github_token" ]; then
GITHUB_TOKEN=$(cat .github_token | tr -d '\n\r')
log "Using GitHub token from .github_token file"
return 0
fi

# Try ~/.github_token file
if [ -f "$HOME/.github_token" ]; then
GITHUB_TOKEN=$(cat "$HOME/.github_token" | tr -d '\n\r')
log "Using GitHub token from ~/.github_token file"
return 0
fi

log_warning "No GitHub token found. Using unauthenticated requests (lower rate limits)"
log_warning "To use a token, add GITHUB_TOKEN=your_token to .env file or set GITHUB_TOKEN environment variable"
return 1
@@ -72,7 +72,7 @@ load_github_token() {
# Initialize log file
init_log() {
# Clear/create log file
> "$LOG_FILE"
>"$LOG_FILE"
log "Starting ProxmoxVE-Local update process..."
log "Log file: $LOG_FILE"
}
@@ -97,40 +97,40 @@ log_warning() {
# Check if required tools are available
check_dependencies() {
log "Checking dependencies..."

local missing_deps=()

if ! command -v curl &> /dev/null; then
if ! command -v curl &>/dev/null; then
missing_deps+=("curl")
fi

if ! command -v jq &> /dev/null; then
if ! command -v jq &>/dev/null; then
missing_deps+=("jq")
fi

if ! command -v npm &> /dev/null; then
if ! command -v npm &>/dev/null; then
missing_deps+=("npm")
fi

if ! command -v node &> /dev/null; then
if ! command -v node &>/dev/null; then
missing_deps+=("node")
fi

if [ ${#missing_deps[@]} -ne 0 ]; then
log_error "Missing dependencies: ${missing_deps[*]}"
log_error "Please install the missing dependencies and try again."
exit 1
fi

log_success "All dependencies are available"
}

# Get latest release info from GitHub API
get_latest_release() {
log "Fetching latest release information from GitHub..."

local curl_opts="-s --connect-timeout 15 --max-time 60 --retry 2 --retry-delay 3"

# Add authentication header if token is available
if [ -n "$GITHUB_TOKEN" ]; then
curl_opts="$curl_opts -H \"Authorization: token $GITHUB_TOKEN\""
@@ -138,35 +138,35 @@ get_latest_release() {
else
log "Using unauthenticated GitHub API request (lower rate limits)"
fi

local release_info
if ! release_info=$(eval "curl $curl_opts \"$GITHUB_API/releases/latest\""); then
log_error "Failed to fetch release information from GitHub API (timeout or network error)"
exit 1
fi

# Check if response is valid JSON
if ! echo "$release_info" | jq empty 2>/dev/null; then
log_error "Invalid JSON response from GitHub API"
log "Response: $release_info"
exit 1
fi

local tag_name
local download_url
local published_at

tag_name=$(echo "$release_info" | jq -r '.tag_name')
download_url=$(echo "$release_info" | jq -r '.tarball_url')
published_at=$(echo "$release_info" | jq -r '.published_at')

if [ "$tag_name" = "null" ] || [ "$download_url" = "null" ] || [ -z "$tag_name" ] || [ -z "$download_url" ]; then
log_error "Failed to parse release information from API response"
log "Tag name: $tag_name"
log "Download URL: $download_url"
exit 1
fi

log_success "Latest release: $tag_name (published: $published_at)"
echo "$tag_name|$download_url"
}
@@ -174,16 +174,16 @@ get_latest_release() {
# Backup data directory, .env file, and scripts directories
backup_data() {
log "Creating backup directory at $BACKUP_DIR..."

if ! mkdir -p "$BACKUP_DIR"; then
log_error "Failed to create backup directory"
exit 1
fi

# Backup data directory
if [ -d "$DATA_DIR" ]; then
log "Backing up data directory..."

if ! cp -r "$DATA_DIR" "$BACKUP_DIR/data"; then
log_error "Failed to backup data directory"
exit 1
@@ -193,7 +193,7 @@ backup_data() {
else
log_warning "Data directory not found, skipping backup"
fi

# Backup .env file
if [ -f ".env" ]; then
log "Backing up .env file..."
@@ -206,7 +206,7 @@ backup_data() {
else
log_warning ".env file not found, skipping backup"
fi

# Backup scripts directories
local scripts_dirs=("scripts/ct" "scripts/install" "scripts/tools" "scripts/vm")
for scripts_dir in "${scripts_dirs[@]}"; do
@@ -230,60 +230,60 @@ download_release() {
local release_info="$1"
local tag_name="${release_info%|*}"
local download_url="${release_info#*|}"

log "Downloading release $tag_name..."

local temp_dir="/tmp/pve-update-$$"
local archive_file="$temp_dir/release.tar.gz"

# Create temporary directory
if ! mkdir -p "$temp_dir"; then
log_error "Failed to create temporary directory"
exit 1
fi

# Download release with timeout and progress
if ! curl -L --connect-timeout 30 --max-time 300 --retry 3 --retry-delay 5 -o "$archive_file" "$download_url" 2>/dev/null; then
log_error "Failed to download release from GitHub"
rm -rf "$temp_dir"
exit 1
fi

# Verify download
if [ ! -f "$archive_file" ] || [ ! -s "$archive_file" ]; then
log_error "Downloaded file is empty or missing"
rm -rf "$temp_dir"
exit 1
fi

log_success "Downloaded release"

# Extract release
if ! tar -xzf "$archive_file" -C "$temp_dir" 2>/dev/null; then
log_error "Failed to extract release"
rm -rf "$temp_dir"
exit 1
fi

# Find the extracted directory (GitHub tarballs have a root directory)
local extracted_dir
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" 2>/dev/null | head -1)

# Try alternative patterns if not found
if [ -z "$extracted_dir" ]; then
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "${REPO_NAME}-*" 2>/dev/null | head -1)
fi

if [ -z "$extracted_dir" ]; then
extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d ! -name "$temp_dir" 2>/dev/null | head -1)
fi

if [ -z "$extracted_dir" ]; then
log_error "Could not find extracted directory"
rm -rf "$temp_dir"
exit 1
fi

log_success "Release extracted successfully"
echo "$extracted_dir"
}
@@ -291,11 +291,11 @@ download_release() {
# Clear the original directory before updating
clear_original_directory() {
log "Clearing original directory..."

# Remove old lock files and node_modules before update
rm -f package-lock.json 2>/dev/null
rm -rf node_modules 2>/dev/null

# List of files/directories to preserve (already backed up)
local preserve_patterns=(
"data"
@@ -308,48 +308,48 @@ clear_original_directory() {
".git"
"scripts"
)

# Remove all files except preserved ones
while IFS= read -r file; do
local should_preserve=false
local filename=$(basename "$file")

for pattern in "${preserve_patterns[@]}"; do
if [[ "$filename" == $pattern ]]; then
should_preserve=true
break
fi
done

if [ "$should_preserve" = false ]; then
rm -f "$file"
fi
done < <(find . -maxdepth 1 -type f ! -name ".*")

# Remove all directories except preserved ones
while IFS= read -r dir; do
local should_preserve=false
local dirname=$(basename "$dir")

for pattern in "${preserve_patterns[@]}"; do
if [[ "$dirname" == $pattern ]]; then
should_preserve=true
break
fi
done

if [ "$should_preserve" = false ]; then
rm -rf "$dir"
fi
done < <(find . -maxdepth 1 -type d ! -name "." ! -name "..")

log_success "Original directory cleared"
}

# Restore backup files before building
restore_backup_files() {
log "Restoring .env, data directory, and scripts directories from backup..."

if [ -d "$BACKUP_DIR" ]; then
# Restore .env file
if [ -f "$BACKUP_DIR/.env" ]; then
@@ -365,7 +365,7 @@ restore_backup_files() {
else
log_warning "No .env file backup found"
fi

# Restore data directory
if [ -d "$BACKUP_DIR/data" ]; then
if [ -d "data" ]; then
@@ -380,24 +380,24 @@ restore_backup_files() {
else
log_warning "No data directory backup found"
fi

# Restore scripts directories
local scripts_dirs=("ct" "install" "tools" "vm")
for backup_name in "${scripts_dirs[@]}"; do
if [ -d "$BACKUP_DIR/$backup_name" ]; then
local target_dir="scripts/$backup_name"
log "Restoring $target_dir directory from backup..."

# Ensure scripts directory exists
if [ ! -d "scripts" ]; then
mkdir -p "scripts"
fi

# Remove existing directory if it exists
if [ -d "$target_dir" ]; then
rm -rf "$target_dir"
fi

if cp -r "$BACKUP_DIR/$backup_name" "$target_dir"; then
log_success "$target_dir directory restored from backup"
else
@@ -417,7 +417,13 @@ restore_backup_files() {
# Verify database was restored correctly
verify_database_restored() {
log "Verifying database was restored correctly..."

# Ensure data directory exists (will be auto-created by app if needed)
if [ ! -d "data" ]; then
log "Creating data directory..."
mkdir -p data
fi

# Check for both possible database filenames
local db_file=""
if [ -f "data/database.sqlite" ]; then
@@ -425,23 +431,25 @@ verify_database_restored() {
elif [ -f "data/settings.db" ]; then
db_file="data/settings.db"
else
log_error "Database file not found after restore! (checked database.sqlite and settings.db)"
return 1
# Database doesn't exist yet - this is OK for new installations
# The app will create it automatically via Prisma migrations
log_warning "No existing database file found - will be created automatically on first start"
return 0
fi

local db_size=$(stat -f%z "$db_file" 2>/dev/null || stat -c%s "$db_file" 2>/dev/null)
if [ "$db_size" -eq 0 ]; then
log_warning "Database file is empty - will be recreated by Prisma migrations"
return 0 # Don't fail the update, let Prisma recreate the database
return 0 # Don't fail the update, let Prisma recreate the database
fi

log_success "Database verified (file: $db_file, size: $db_size bytes)"
}

# Ensure DATABASE_URL is set in .env file for Prisma
ensure_database_url() {
log "Ensuring DATABASE_URL is set in .env file..."

# Check if .env file exists
if [ ! -f ".env" ]; then
log_warning ".env file not found, creating from .env.example..."
@@ -452,19 +460,19 @@ ensure_database_url() {
return 1
fi
fi

# Check if DATABASE_URL is already set
if grep -q "^DATABASE_URL=" .env; then
log "DATABASE_URL already exists in .env file"
return 0
fi

# Add DATABASE_URL to .env file
log "Adding DATABASE_URL to .env file..."
echo "" >> .env
echo "# Database" >> .env
echo "DATABASE_URL=\"file:./data/settings.db\"" >> .env

echo "" >>.env
echo "# Database" >>.env
echo "DATABASE_URL=\"file:./data/settings.db\"" >>.env

log_success "DATABASE_URL added to .env file"
}

@@ -481,11 +489,9 @@ check_service() {
fi
}

# Stop the application before updating
stop_application() {

# Change to the application directory if we're not already there
local app_dir
if [ -f "package.json" ] && [ -f "server.js" ]; then
@@ -503,9 +509,9 @@ stop_application() {
return 1
fi
fi

log "Working from application directory: $(pwd)"

# Check if systemd service is running and disable it temporarily
if check_service && systemctl is-active --quiet pvescriptslocal.service; then
log "Disabling systemd service temporarily to prevent auto-restart..."
@@ -518,7 +524,7 @@ stop_application() {
else
log "No running systemd service found"
fi

# Kill any remaining npm/node processes
log "Killing any remaining npm/node processes..."
local pids
@@ -537,9 +543,9 @@ stop_application() {
# Update application files
update_files() {
local source_dir="$1"

log "Updating application files..."

# List of files/directories to exclude from update
local exclude_patterns=(
"data"
@@ -555,48 +561,48 @@ update_files() {
"scripts/tools"
"scripts/vm"
)

# Find the actual source directory (strip the top-level directory)
local actual_source_dir
actual_source_dir=$(find "$source_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" | head -1)

if [ -z "$actual_source_dir" ]; then
log_error "Could not find the actual source directory in $source_dir"
return 1
fi

# Verify critical files exist in source
if [ ! -f "$actual_source_dir/package.json" ]; then
log_error "package.json not found in source directory!"
return 1
fi

# Use process substitution instead of pipe to avoid subshell issues
local files_copied=0
local files_excluded=0

# Create a temporary file list to avoid process substitution issues
local file_list="/tmp/file_list_$$.txt"
find "$actual_source_dir" -type f > "$file_list"

find "$actual_source_dir" -type f >"$file_list"

while IFS= read -r file; do
local rel_path="${file#$actual_source_dir/}"
local should_exclude=false

for pattern in "${exclude_patterns[@]}"; do
if [[ "$rel_path" == $pattern ]] || [[ "$rel_path" == $pattern/* ]]; then
should_exclude=true
break
fi
done

if [ "$should_exclude" = false ]; then
local target_dir
target_dir=$(dirname "$rel_path")
if [ "$target_dir" != "." ]; then
mkdir -p "$target_dir"
fi

if ! cp "$file" "$rel_path"; then
log_error "Failed to copy $rel_path"
rm -f "$file_list"
@@ -606,48 +612,47 @@ update_files() {
else
files_excluded=$((files_excluded + 1))
fi
done < "$file_list"

done <"$file_list"

# Clean up temporary file
rm -f "$file_list"

# Verify critical files were copied
if [ ! -f "package.json" ]; then
log_error "package.json was not copied to target directory!"
return 1
fi

if [ ! -f "package-lock.json" ]; then
log_warning "package-lock.json was not copied!"
fi

log_success "Application files updated successfully ($files_copied files)"
}

# Install dependencies and build
install_and_build() {
log "Installing dependencies..."

# Verify package.json exists
if [ ! -f "package.json" ]; then
log_error "package.json not found! Cannot install dependencies."
return 1
fi

if [ ! -f "package-lock.json" ]; then
log_warning "No package-lock.json found, npm will generate one"
fi

# Create temporary file for npm output
local npm_log="/tmp/npm_install_$$.log"

# Ensure NODE_ENV is not set to production during install (we need devDependencies for build)
local old_node_env="${NODE_ENV:-}"
export NODE_ENV=development

# Run npm install to get ALL dependencies including devDependencies
if ! npm install --include=dev > "$npm_log" 2>&1; then
if ! npm install --include=dev >"$npm_log" 2>&1; then
log_error "Failed to install dependencies"
log_error "npm install output (last 30 lines):"
tail -30 "$npm_log" | while read -r line; do
@@ -656,20 +661,20 @@ install_and_build() {
rm -f "$npm_log"
return 1
fi

# Restore NODE_ENV
if [ -n "$old_node_env" ]; then
export NODE_ENV="$old_node_env"
else
unset NODE_ENV
fi

log_success "Dependencies installed successfully"
rm -f "$npm_log"

# Generate Prisma client
log "Generating Prisma client..."
if ! npx prisma generate > "$npm_log" 2>&1; then
if ! npx prisma generate >"$npm_log" 2>&1; then
log_error "Failed to generate Prisma client"
log_error "Prisma generate output:"
cat "$npm_log" | while read -r line; do
@@ -679,7 +684,7 @@ install_and_build() {
return 1
fi
log_success "Prisma client generated successfully"

# Check if Prisma migrations exist and are compatible
if [ -d "prisma/migrations" ]; then
log "Existing migration history detected"
@@ -688,10 +693,10 @@ install_and_build() {
else
log_warning "No existing migration history found - this may be a fresh install"
fi

# Run Prisma migrations
log "Running Prisma migrations..."
if ! npx prisma migrate deploy > "$npm_log" 2>&1; then
if ! npx prisma migrate deploy >"$npm_log" 2>&1; then
log_warning "Prisma migrations failed or no migrations to run"
log "Prisma migrate output:"
cat "$npm_log" | while read -r line; do
@@ -701,15 +706,18 @@ install_and_build() {
log_success "Prisma migrations completed successfully"
fi
rm -f "$npm_log"

log "Building application..."
# Set NODE_ENV to production for build
export NODE_ENV=production

# Unset TURBOPACK to prevent "Multiple bundler flags" error with --webpack
unset TURBOPACK 2>/dev/null || true
export TURBOPACK=''

# Create temporary file for npm build output
local build_log="/tmp/npm_build_$$.log"

if ! npm run build > "$build_log" 2>&1; then

if ! TURBOPACK='' npm run build >"$build_log" 2>&1; then
log_error "Failed to build application"
log_error "npm run build output:"
cat "$build_log" | while read -r line; do
@@ -718,18 +726,18 @@ install_and_build() {
rm -f "$build_log"
return 1
fi

# Log success and clean up
log_success "Application built successfully"
rm -f "$build_log"

log_success "Dependencies installed and application built successfully"
}

# Start the application after updating
start_application() {
log "Starting application..."

# Use the global variable to determine how to start
if [ "$SERVICE_WAS_RUNNING" = true ] && check_service; then
log "Service was running before update, re-enabling and starting systemd service..."
@@ -761,11 +769,11 @@ start_application() {
# Start application with npm
start_with_npm() {
log "Starting application with npm start..."

# Start in background
nohup npm start > server.log 2>&1 &
nohup npm start >server.log 2>&1 &
local npm_pid=$!

# Wait a moment and check if it started
sleep 3
if kill -0 $npm_pid 2>/dev/null; then
@@ -776,13 +784,30 @@ start_with_npm() {
fi
}

# Re-enable the systemd service on failure to prevent users from being locked out
re_enable_service_on_failure() {
if check_service; then
log "Re-enabling systemd service after failure..."
if systemctl enable pvescriptslocal.service 2>/dev/null; then
log_success "Service re-enabled"
if systemctl start pvescriptslocal.service 2>/dev/null; then
log_success "Service started"
else
log_warning "Failed to start service - manual intervention may be required"
fi
else
log_warning "Failed to re-enable service - manual intervention may be required"
fi
fi
}

# Rollback function
rollback() {
log_warning "Rolling back to previous version..."

if [ -d "$BACKUP_DIR" ]; then
log "Restoring from backup directory: $BACKUP_DIR"

# Restore data directory
if [ -d "$BACKUP_DIR/data" ]; then
log "Restoring data directory..."
@@ -797,7 +822,7 @@ rollback() {
else
log_warning "No data directory backup found"
fi

# Restore .env file
if [ -f "$BACKUP_DIR/.env" ]; then
log "Restoring .env file..."
@@ -812,24 +837,24 @@ rollback() {
else
log_warning "No .env file backup found"
fi

# Restore scripts directories
local scripts_dirs=("ct" "install" "tools" "vm")
for backup_name in "${scripts_dirs[@]}"; do
if [ -d "$BACKUP_DIR/$backup_name" ]; then
local target_dir="scripts/$backup_name"
log "Restoring $target_dir directory from backup..."

# Ensure scripts directory exists
if [ ! -d "scripts" ]; then
mkdir -p "scripts"
fi

# Remove existing directory if it exists
if [ -d "$target_dir" ]; then
rm -rf "$target_dir"
fi

if mv "$BACKUP_DIR/$backup_name" "$target_dir"; then
log_success "$target_dir directory restored from backup"
else
@@ -839,14 +864,17 @@ rollback() {
log_warning "No $backup_name directory backup found"
fi
done

# Clean up backup directory
log "Cleaning up backup directory..."
rm -rf "$BACKUP_DIR"
else
log_error "No backup directory found for rollback"
fi

# Re-enable the service so users aren't locked out
re_enable_service_on_failure

log_error "Update failed. Please check the logs and try again."
exit 1
}
@@ -865,14 +893,14 @@ check_node_version() {

log "Detected Node.js version: $current"

if (( major_version < 24 )); then
if ((major_version == 24)); then
log_success "Node.js 24 already installed"
elif ((major_version < 24)); then
log_warning "Node.js < 24 detected → upgrading to Node.js 24 LTS..."
upgrade_node_to_24
elif (( major_version > 24 )); then
else
log_warning "Node.js > 24 detected → script tested only up to Node 24"
log "Continuing anyway…"
else
log_success "Node.js 24 already installed"
fi
}

@@ -880,22 +908,39 @@ check_node_version() {
upgrade_node_to_24() {
log "Preparing Node.js 24 upgrade…"

# Remove old nodesource repo if it exists
# Remove old nodesource repo files if they exist
if [ -f /etc/apt/sources.list.d/nodesource.list ]; then
log "Removing old nodesource.list file..."
rm -f /etc/apt/sources.list.d/nodesource.list
fi
if [ -f /etc/apt/sources.list.d/nodesource.sources ]; then
log "Removing old nodesource.sources file..."
rm -f /etc/apt/sources.list.d/nodesource.sources
fi

# Update apt cache first
log "Updating apt cache..."
apt-get update >>"$LOG_FILE" 2>&1 || true

# Install NodeSource repo for Node.js 24
curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh
if ! bash /tmp/node24_setup.sh > /tmp/node24_setup.log 2>&1; then
log "Downloading Node.js 24 setup script..."
if ! curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh; then
log_error "Failed to download Node.js 24 setup script"
re_enable_service_on_failure
exit 1
fi

if ! bash /tmp/node24_setup.sh >/tmp/node24_setup.log 2>&1; then
log_error "Failed to configure Node.js 24 repository"
tail -20 /tmp/node24_setup.log | while read -r line; do log_error "$line"; done
re_enable_service_on_failure
exit 1
fi

log "Installing Node.js 24…"
if ! apt-get install -y nodejs >> "$LOG_FILE" 2>&1; then
if ! apt-get install -y nodejs >>"$LOG_FILE" 2>&1; then
log_error "Failed to install Node.js 24"
re_enable_service_on_failure
exit 1
fi
@@ -912,21 +957,21 @@ main() {
init_log
log "Running as detached process"
sleep 3

else
init_log
fi

# Check if we're running from the application directory and not already relocated
if [ -z "${PVE_UPDATE_RELOCATED:-}" ] && [ -f "package.json" ] && [ -f "server.js" ]; then
log "Detected running from application directory"
bash "$0" --relocated
exit $?
fi

# Ensure we're in the application directory
local app_dir

# First check if we're already in the right directory
if [ -f "package.json" ] && [ -f "server.js" ]; then
app_dir="$(pwd)"
@@ -943,79 +988,76 @@ main() {
exit 1
fi
fi

# Check dependencies
check_dependencies

# Load GitHub token for higher rate limits
load_github_token

# Check if service was running before update
if check_service && systemctl is-active --quiet pvescriptslocal.service; then
SERVICE_WAS_RUNNING=true
else
SERVICE_WAS_RUNNING=false
fi

# Get latest release info
local release_info
release_info=$(get_latest_release)

# Backup data directory
backup_data

# Stop the application before updating
stop_application

# Check Node.js version
check_node_version

#Update Node.js to 24
upgrade_node_to_24

# Download and extract release
local source_dir
source_dir=$(download_release "$release_info")

# Clear the original directory before updating
clear_original_directory

# Update files
if ! update_files "$source_dir"; then
log_error "File update failed, rolling back..."
rollback
fi

# Restore .env and data directory before building
restore_backup_files

# Verify database was restored correctly
if ! verify_database_restored; then
log_error "Database verification failed, rolling back..."
rollback
fi

# Ensure DATABASE_URL is set for Prisma
ensure_database_url

# Install dependencies and build
if ! install_and_build; then
log_error "Install and build failed, rolling back..."
rollback
fi

# Start the application
if ! start_application; then
log_error "Failed to start application after update"
rollback
fi

# Cleanup only after successful start
rm -rf "$source_dir"
rm -rf "/tmp/pve-update-$$"
rm -rf "$BACKUP_DIR"
log "Backup directory cleaned up"

log_success "Update completed successfully!"
}

@@ -1023,4 +1065,4 @@ main() {
if ! main "$@"; then
log_error "Update script failed with exit code $?"
exit 1
fi
fi