Compare commits: fix/source...fix/466 (124 commits)
| SHA1 |
|---|
| e1d270d52c |
| 20dbcae42a |
| 8e8c724392 |
| 201b33ec84 |
| 6d2df9929c |
| f33504baf5 |
| 4bc5f4d6ad |
| a52a897346 |
| 1d585d4d3f |
| d4b8ceb581 |
| 7079c236ab |
| 0678aba911 |
| ffdd742aa0 |
| f4de214a83 |
| 3b0da19cd1 |
| 08bc4ab37b |
| d2e7477898 |
| b5c6beafff |
| a34566651a |
| 4628e67e5c |
| 578fa28461 |
| 9e6154b0de |
| d29f71a92f |
| aea14cda7e |
| 4893ccda6e |
| a56c625b4f |
| 54b2187f98 |
| 2f4e8606ed |
| ff5478dd72 |
| 944a527972 |
| c4479c1932 |
| 9998e48621 |
| 34eade3971 |
| 82be47b959 |
| 9b77fc7ddb |
| db12ac4219 |
| c06b8e6731 |
| 14e01513e3 |
| f66d1db861 |
| 886c3e37ff |
| 38deb09aa9 |
| 2e4634ca25 |
| a82bc02b15 |
| 2ea44e6b24 |
| 6d326dce1f |
| 6c8e177d3e |
| 879a548345 |
| 64cd81d5ba |
| 61e75949c8 |
| a5d24bfad7 |
| 04595c0093 |
| 06fdb4889d |
| 38d4f9f918 |
| 63dc7c6983 |
| d57c6059fc |
| eb152f9fae |
| 1a8e98fec0 |
| 83a1c7ea31 |
| 79c63a7d3d |
| 753721eee0 |
| 09607296af |
| c88040084a |
| 2573eb7314 |
| 414c356446 |
| c38ded7a39 |
| 0cfed84cd0 |
| 9611bc9bcf |
| 6fe2a790fd |
| 5ea71837e7 |
| bf5ebc72b6 |
| a32c7bcbba |
| 98c6e79db6 |
| c962a9cd5a |
| 5d20a6d694 |
| cb4e8c543a |
| 2ba213de49 |
| 849aabb575 |
| dd33df2033 |
| 94eb2820fd |
| e49708770c |
| 5eafa01843 |
| 0c1477e087 |
| ef73d98873 |
| ec92c0ea6d |
| ee14b89868 |
| be68160cd9 |
| dbc15b1bc3 |
| dc6ce16e5a |
| 0c9d4ad6e2 |
| 13d57b77d4 |
| f9e5bd5bf0 |
| adf2b06efa |
| 80e3966e4e |
| 3662a057dc |
| bdf336f9bf |
| f6c310fa22 |
| d658894b7f |
| 783744b497 |
| de9ac41f76 |
| 060202e557 |
| 8d45ac14cc |
| 47ee2247c8 |
| c16c8d54db |
| 3e669a0739 |
| 02e175c8a0 |
| b4e98e7624 |
| 2392529092 |
| f9f5772d92 |
| 4267d7340e |
| dcf923551b |
| 69a5ac3a56 |
| 7b8c1ebdf1 |
| 580b623939 |
| ac21fbb181 |
| 588ae65dfd |
| 30acba39a5 |
| 3a5bb3dc45 |
| f42c0d956e |
| 0ed13fcf0f |
| afc87910e6 |
| b97eca9620 |
| 8f0ae3a341 |
| b5450bd221 |
| 88dbe4ea85 |
@@ -18,7 +18,12 @@ ALLOWED_SCRIPT_PATHS="scripts/"
 WEBSOCKET_PORT="3001"
 
 # User settings
+# Optional tokens for private repos: GITHUB_TOKEN (GitHub), GITLAB_TOKEN (GitLab),
+# BITBUCKET_APP_PASSWORD or BITBUCKET_TOKEN (Bitbucket). REPO_URL and added repos
+# can be GitHub, GitLab, Bitbucket, or custom Git servers.
 GITHUB_TOKEN=
+GITLAB_TOKEN=
+BITBUCKET_APP_PASSWORD=
 SAVE_FILTER=false
 FILTERS=
 AUTH_USERNAME=
.github/pull_request_template.md (2 changed lines, vendored)
@@ -4,7 +4,7 @@
 
 
 ## 🔗 Related PR / Issue
-Link: #
+Fixes: #
 
 
 ## ✅ Prerequisites (**X** in brackets)
.github/workflows/publish_release.yml (18 changed lines, vendored)
@@ -31,20 +31,24 @@ jobs:
 echo "Found draft version: ${{ steps.draft.outputs.tag_name }}"
 
 
-- name: Create branch and commit VERSION
+- name: Create branch and commit VERSION and package.json
 run: |
 branch="update-version-${{ steps.draft.outputs.tag_name }}"
 # Delete remote branch if exists
 git push origin --delete "$branch" || echo "No remote branch to delete"
 git fetch origin main
 git checkout -b "$branch" origin/main
-# Write VERSION file and timestamp to ensure a diff
+# Version without 'v' prefix (e.g. v1.2.3 -> 1.2.3)
 version="${{ steps.draft.outputs.tag_name }}"
-echo "$version" | sed 's/^v//' > VERSION
-git add VERSION
+version_plain=$(echo "$version" | sed 's/^v//')
+# Write VERSION file
+echo "$version_plain" > VERSION
+# Update package.json version
+jq --arg v "$version_plain" '.version = $v' package.json > package.json.tmp && mv package.json.tmp package.json
+git add VERSION package.json
 git config user.name "github-actions[bot]"
 git config user.email "github-actions[bot]@users.noreply.github.com"
-git commit -m "chore: add VERSION $version" --allow-empty
+git commit -m "chore: bump version to $version_plain (VERSION + package.json)" --allow-empty
 
 - name: Push changes
 run: |
@@ -57,8 +61,8 @@ jobs:
 pr_url=$(gh pr create \
 --base main \
 --head update-version-${{ steps.draft.outputs.tag_name }} \
---title "chore: add VERSION ${{ steps.draft.outputs.tag_name }}" \
+--title "chore: bump version to ${{ steps.draft.outputs.tag_name }} (VERSION + package.json)" \
---body "Adds VERSION file for release ${{ steps.draft.outputs.tag_name }}" \
+--body "Updates VERSION file and package.json version for release ${{ steps.draft.outputs.tag_name }}" \
 --label automated)
 
 pr_number=$(echo "$pr_url" | awk -F/ '{print $NF}')
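
The rewritten step derives `version_plain` once and reuses it for both artifacts; since `jq` has no in-place edit mode, the workflow writes to a temp file and renames it. A minimal standalone sketch of the same pattern (the `TAG` value is illustrative, not from the workflow):

```bash
#!/usr/bin/env bash
# Sketch of the version-bump pattern from the workflow step above.
# Assumes: jq installed, a package.json in the current directory,
# and TAG holding a v-prefixed release tag (illustrative value).
set -euo pipefail
TAG="v1.2.3"
version_plain="${TAG#v}"   # strip the leading "v", same effect as sed 's/^v//'
echo "$version_plain" > VERSION
# jq cannot edit in place, hence the tmp-file-and-rename dance
jq --arg v "$version_plain" '.version = $v' package.json > package.json.tmp
mv package.json.tmp package.json
```
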
@@ -100,7 +100,7 @@ apt install -y nodejs
 ```bash
 # Clone the repository
 git clone https://github.com/community-scripts/ProxmoxVE-Local.git /opt/PVESciptslocal
-cd PVESciptslocal
+cd /opt/PVESciptslocal
 
 # Install dependencies and build
 npm install
package-lock.json (1405 changed lines, generated)
File diff suppressed because it is too large
package.json (76 changed lines)
@@ -25,35 +25,35 @@
 "typecheck": "tsc --noEmit"
 },
 "dependencies": {
-"@prisma/adapter-better-sqlite3": "^7.0.1",
+"@prisma/adapter-better-sqlite3": "^7.3.0",
-"@prisma/client": "^7.0.1",
+"@prisma/client": "^7.3.0",
 "@radix-ui/react-dropdown-menu": "^2.1.16",
 "@radix-ui/react-slot": "^1.2.4",
-"@t3-oss/env-nextjs": "^0.13.8",
+"@t3-oss/env-nextjs": "^0.13.10",
 "@tailwindcss/typography": "^0.5.19",
-"@tanstack/react-query": "^5.90.11",
+"@tanstack/react-query": "^5.90.20",
-"@trpc/client": "^11.7.2",
+"@trpc/client": "^11.8.1",
-"@trpc/react-query": "^11.7.2",
+"@trpc/react-query": "^11.8.1",
-"@trpc/server": "^11.7.2",
+"@trpc/server": "^11.8.1",
 "@types/react-syntax-highlighter": "^15.5.13",
 "@types/ws": "^8.18.1",
-"@xterm/addon-fit": "^0.10.0",
+"@xterm/addon-fit": "^0.11.0",
-"@xterm/addon-web-links": "^0.11.0",
+"@xterm/addon-web-links": "^0.12.0",
-"@xterm/xterm": "^5.5.0",
+"@xterm/xterm": "^6.0.0",
 "axios": "^1.13.2",
 "bcryptjs": "^3.0.3",
-"better-sqlite3": "^12.5.0",
+"better-sqlite3": "^12.6.2",
 "class-variance-authority": "^0.7.1",
 "clsx": "^2.1.1",
 "cron-validator": "^1.4.0",
 "dotenv": "^17.2.3",
-"jsonwebtoken": "^9.0.2",
+"jsonwebtoken": "^9.0.3",
-"lucide-react": "^0.555.0",
+"lucide-react": "^0.562.0",
-"next": "^16.0.6",
+"next": ">=16.1.5",
 "node-cron": "^4.2.1",
-"node-pty": "^1.0.0",
+"node-pty": "^1.1.0",
-"react": "^19.2.0",
+"react": "^19.2.3",
-"react-dom": "^19.2.0",
+"react-dom": "^19.2.3",
 "react-markdown": "^10.1.0",
 "react-syntax-highlighter": "^16.1.0",
 "refractor": "^5.0.0",
@@ -62,37 +62,38 @@
 "strip-ansi": "^7.1.2",
 "superjson": "^2.2.6",
 "tailwind-merge": "^3.4.0",
-"ws": "^8.18.3",
+"ws": "^8.19.0",
-"zod": "^4.1.13"
+"zod": "^4.3.5"
 },
 "devDependencies": {
-"@tailwindcss/postcss": "^4.1.17",
+"next": ">=16.1.5",
+"@tailwindcss/postcss": "^4.1.18",
 "@testing-library/jest-dom": "^6.9.1",
-"@testing-library/react": "^16.3.0",
+"@testing-library/react": "^16.3.2",
 "@testing-library/user-event": "^14.6.1",
 "@types/bcryptjs": "^3.0.0",
 "@types/better-sqlite3": "^7.6.13",
 "@types/jsonwebtoken": "^9.0.10",
-"@types/node": "^24.10.1",
+"@types/node": "^24.10.9",
 "@types/node-cron": "^3.0.11",
-"@types/react": "^19.2.7",
+"@types/react": "^19.2.8",
 "@types/react-dom": "^19.2.3",
-"@vitejs/plugin-react": "^5.1.1",
+"@vitejs/plugin-react": "^5.1.2",
-"@vitest/coverage-v8": "^4.0.15",
+"@vitest/coverage-v8": "^4.0.17",
-"@vitest/ui": "^4.0.14",
+"@vitest/ui": "^4.0.17",
-"baseline-browser-mapping": "^2.8.32",
+"baseline-browser-mapping": "^2.9.15",
-"eslint": "^9.39.1",
+"eslint": "^9.39.2",
-"eslint-config-next": "^16.0.6",
+"eslint-config-next": "^16.1.3",
-"jsdom": "^27.2.0",
+"jsdom": "^27.4.0",
 "postcss": "^8.5.6",
-"prettier": "^3.7.3",
+"prettier": "^3.8.0",
 "prettier-plugin-tailwindcss": "^0.7.2",
-"prisma": "^7.0.1",
+"prisma": "^7.3.0",
-"tailwindcss": "^4.1.17",
+"tailwindcss": "^4.1.18",
 "tsx": "^4.21.0",
 "typescript": "^5.9.3",
-"typescript-eslint": "^8.48.1",
+"typescript-eslint": "^8.54.0",
-"vitest": "^4.0.14"
+"vitest": "^4.0.17"
 },
 "ct3aMetadata": {
 "initVersion": "7.39.3"
@@ -102,6 +103,7 @@
 "node": ">=24.0.0"
 },
 "overrides": {
-"prismjs": "^1.30.0"
+"prismjs": "^1.30.0",
+"hono": ">=4.11.7"
 }
 }
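
Beyond routine bumps, two changes stand out: `next` moves from a caret range to a `>=16.1.5` floor (and is additionally listed in `devDependencies`), and a new `overrides` entry forces every transitive `hono` dependency to at least 4.11.7, which looks like a version floor for an indirect dependency. A quick way to verify an override after reinstalling, using only standard npm commands:

```bash
# Re-resolve the tree with the new overrides, then confirm what was installed.
npm install
npm ls hono   # every resolved copy should now satisfy >=4.11.7
npm ls next   # confirms the >=16.1.5 floor took effect
```
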
@@ -1,4 +1,4 @@
-# Copyright (c) 2021-2025 community-scripts ORG
+# Copyright (c) 2021-2026 community-scripts ORG
 # Author: tteck (tteckster)
 # Co-Author: MickLesk
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
@@ -11,6 +11,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
 load_functions
 catch_errors
 
+# Get LXC IP address (must be called INSIDE container, after network is up)
+get_lxc_ip
+
 # This function enables IPv6 if it's not disabled and sets verbose mode
 verb_ip6() {
 set_std_mode # Set STD mode based on VERBOSE
@@ -125,22 +128,13 @@ update_os() {
 # This function modifies the message of the day (motd) and SSH settings
 motd_ssh() {
 echo "export TERM='xterm-256color'" >>/root/.bashrc
-IP=$(ip -4 addr show eth0 | awk '/inet / {print $2}' | cut -d/ -f1 | head -n 1)
-
-if [ -f "/etc/os-release" ]; then
-OS_NAME=$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"')
-OS_VERSION=$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')
-else
-OS_NAME="Alpine Linux"
-OS_VERSION="Unknown"
-fi
 
 PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
 echo "echo -e \"\"" >"$PROFILE_FILE"
 echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
 echo "echo \"\"" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}${OS_NAME} - Version: ${OS_VERSION}${CL}\"" >>"$PROFILE_FILE"
+echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"
 
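
The motd change replaces values captured once at install time (`${OS_NAME}`) with escaped command substitutions (`\$(grep ...)`) that are written literally into the profile script and re-evaluated at every login, so the banner stays accurate after OS upgrades. A minimal sketch of that pattern, assuming a throwaway file path:

```bash
# Sketch: install-time vs login-time evaluation when generating a profile script.
# OUT is a throwaway path for illustration.
OUT="/tmp/demo-profile.sh"
echo "echo \"Built on: $(date)\""  > "$OUT"   # $(date) expands NOW; the result is frozen in the file
echo "echo \"Shown at: \$(date)\"" >> "$OUT"  # \$(date) is written literally and runs at source time
sh "$OUT"  # first line never changes; second line re-evaluates on every run
```
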
scripts/core/alpine-tools.func (new file, 188 lines)
@@ -0,0 +1,188 @@
#!/bin/ash
# shellcheck shell=ash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

if ! command -v curl >/dev/null 2>&1; then
apk update && apk add curl >/dev/null 2>&1
fi
source "$(dirname "${BASH_SOURCE[0]}")/core.func"
source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
load_functions
catch_errors

# Get LXC IP address (must be called INSIDE container, after network is up)
get_lxc_ip

# This function enables IPv6 if it's not disabled and sets verbose mode
verb_ip6() {
set_std_mode # Set STD mode based on VERBOSE

if [ "${IPV6_METHOD:-}" = "disable" ]; then
msg_info "Disabling IPv6 (this may affect some services)"
$STD sysctl -w net.ipv6.conf.all.disable_ipv6=1
$STD sysctl -w net.ipv6.conf.default.disable_ipv6=1
$STD sysctl -w net.ipv6.conf.lo.disable_ipv6=1
mkdir -p /etc/sysctl.d
$STD tee /etc/sysctl.d/99-disable-ipv6.conf >/dev/null <<EOF
net.ipv6.conf.all.disable_ipv6 = 1
net.ipv6.conf.default.disable_ipv6 = 1
net.ipv6.conf.lo.disable_ipv6 = 1
EOF
$STD rc-update add sysctl default
msg_ok "Disabled IPv6"
fi
}

set -Eeuo pipefail
trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR
trap on_exit EXIT
trap on_interrupt INT
trap on_terminate TERM

error_handler() {
local exit_code="$1"
local line_number="$2"
local command="$3"

if [[ "$exit_code" -eq 0 ]]; then
return 0
fi

printf "\e[?25h"
echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
exit "$exit_code"
}

on_exit() {
local exit_code="$?"
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
exit "$exit_code"
}

on_interrupt() {
echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
exit 130
}

on_terminate() {
echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
exit 143
}

# This function sets up the Container OS by generating the locale, setting the timezone, and checking the network connection
setting_up_container() {
msg_info "Setting up Container OS"
while [ $i -gt 0 ]; do
if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" != "" ]; then
break
fi
echo 1>&2 -en "${CROSS}${RD} No Network! "
sleep $RETRY_EVERY
i=$((i - 1))
done

if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" = "" ]; then
echo 1>&2 -e "\n${CROSS}${RD} No Network After $RETRY_NUM Tries${CL}"
echo -e "${NETWORK}Check Network Settings"
exit 1
fi
msg_ok "Set up Container OS"
msg_ok "Network Connected: ${BL}$(ip addr show | grep 'inet ' | awk '{print $2}' | cut -d'/' -f1 | tail -n1)${CL}"
}

# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
network_check() {
set +e
trap - ERR
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
ipv4_status="${GN}✔${CL} IPv4"
else
ipv4_status="${RD}✖${CL} IPv4"
read -r -p "Internet NOT connected. Continue anyway? <y/N> " prompt
if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
echo -e "${INFO}${RD}Expect Issues Without Internet${CL}"
else
echo -e "${NETWORK}Check Network Settings"
exit 1
fi
fi
RESOLVEDIP=$(getent hosts github.com | awk '{ print $1 }')
if [[ -z "$RESOLVEDIP" ]]; then
msg_error "Internet: ${ipv4_status} DNS Failed"
else
msg_ok "Internet: ${ipv4_status} DNS: ${BL}${RESOLVEDIP}${CL}"
fi
set -e
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}

# This function updates the Container OS by running apt-get update and upgrade
update_os() {
msg_info "Updating Container OS"
$STD apk -U upgrade
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
msg_ok "Updated Container OS"
}

# This function modifies the message of the day (motd) and SSH settings
motd_ssh() {
echo "export TERM='xterm-256color'" >>/root/.bashrc

PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
echo "echo -e \"\"" >"$PROFILE_FILE"
echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
echo "echo \"\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"

# Configure SSH if enabled
if [[ "${SSH_ROOT}" == "yes" ]]; then
# Enable sshd service
$STD rc-update add sshd
# Allow root login via SSH
sed -i "s/#PermitRootLogin prohibit-password/PermitRootLogin yes/g" /etc/ssh/sshd_config
# Start the sshd service
$STD /etc/init.d/sshd start
fi
}

# Validate Timezone for some LXC's
validate_tz() {
[[ -f "/usr/share/zoneinfo/$1" ]]
}

# This function customizes the container and enables passwordless login for the root user
customize() {
if [[ "$PASSWORD" == "" ]]; then
msg_info "Customizing Container"
passwd -d root >/dev/null 2>&1

# Ensure agetty is available
apk add --no-cache --force-broken-world util-linux >/dev/null 2>&1

# Create persistent autologin boot script
mkdir -p /etc/local.d
cat <<'EOF' >/etc/local.d/autologin.start
#!/bin/sh
sed -i 's|^tty1::respawn:.*|tty1::respawn:/sbin/agetty --autologin root --noclear tty1 38400 linux|' /etc/inittab
kill -HUP 1
EOF
touch /root/.hushlogin

chmod +x /etc/local.d/autologin.start
rc-update add local >/dev/null 2>&1

# Apply autologin immediately for current session
/etc/local.d/autologin.start

msg_ok "Customized Container"
fi

echo "bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/${app}.sh)\"" >/usr/bin/update
chmod +x /usr/bin/update

}
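
The file installs an ERR trap that forwards `$?`, `$LINENO`, and `$BASH_COMMAND` into `error_handler`, so any command failing under `set -Eeuo pipefail` reports its line and the exact command text. A minimal self-contained sketch of that pattern, without the repo's color variables:

```bash
#!/usr/bin/env bash
# Minimal sketch of the ERR-trap pattern used above.
set -Eeuo pipefail

error_handler() {
  local exit_code="$1" line="$2" cmd="$3"
  [ "$exit_code" -eq 0 ] && return 0
  echo "[ERROR] line $line: exit $exit_code while executing: $cmd" >&2
  exit "$exit_code"
}
trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR

false  # any failing command now reports its line number and command text
```
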
@@ -1,4 +1,4 @@
-# Copyright (c) 2021-2025 community-scripts ORG
+# Copyright (c) 2021-2026 community-scripts ORG
 # Author: michelroegl-brunner
 # License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE
 
File diff suppressed because it is too large
scripts/core/cloud-init.func (new file, 505 lines)
@@ -0,0 +1,505 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: community-scripts ORG
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/branch/main/LICENSE
# Revision: 1

# ==============================================================================
# CLOUD-INIT.FUNC - VM CLOUD-INIT CONFIGURATION LIBRARY
# ==============================================================================
#
# Universal helper library for Cloud-Init configuration in Proxmox VMs.
# Provides functions for:
#
# - Native Proxmox Cloud-Init setup (user, password, network, SSH keys)
# - Interactive configuration dialogs (whiptail)
# - IP address retrieval via qemu-guest-agent
# - Cloud-Init status monitoring and waiting
#
# Usage:
# source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/cloud-init.func)
# setup_cloud_init "$VMID" "$STORAGE" "$HN" "yes"
#
# Compatible with: Debian, Ubuntu, and all Cloud-Init enabled distributions
# ==============================================================================

# ==============================================================================
# SECTION 1: CONFIGURATION DEFAULTS
# ==============================================================================
# These can be overridden before sourcing this library

CLOUDINIT_DEFAULT_USER="${CLOUDINIT_DEFAULT_USER:-root}"
CLOUDINIT_DNS_SERVERS="${CLOUDINIT_DNS_SERVERS:-1.1.1.1 8.8.8.8}"
CLOUDINIT_SEARCH_DOMAIN="${CLOUDINIT_SEARCH_DOMAIN:-local}"
CLOUDINIT_SSH_KEYS="${CLOUDINIT_SSH_KEYS:-/root/.ssh/authorized_keys}"

# ==============================================================================
# SECTION 2: HELPER FUNCTIONS
# ==============================================================================

# ------------------------------------------------------------------------------
# _ci_msg - Internal message helper with fallback
# ------------------------------------------------------------------------------
function _ci_msg_info() { msg_info "$1" 2>/dev/null || echo "[INFO] $1"; }
function _ci_msg_ok() { msg_ok "$1" 2>/dev/null || echo "[OK] $1"; }
function _ci_msg_warn() { msg_warn "$1" 2>/dev/null || echo "[WARN] $1"; }
function _ci_msg_error() { msg_error "$1" 2>/dev/null || echo "[ERROR] $1"; }

# ------------------------------------------------------------------------------
# validate_ip_cidr - Validate IP address in CIDR format
# Usage: validate_ip_cidr "192.168.1.100/24" && echo "Valid"
# Returns: 0 if valid, 1 if invalid
# ------------------------------------------------------------------------------
function validate_ip_cidr() {
local ip_cidr="$1"
# Match: 0-255.0-255.0-255.0-255/0-32
if [[ "$ip_cidr" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$ ]]; then
# Validate each octet is 0-255
local ip="${ip_cidr%/*}"
IFS='.' read -ra octets <<<"$ip"
for octet in "${octets[@]}"; do
((octet > 255)) && return 1
done
return 0
fi
return 1
}

# ------------------------------------------------------------------------------
# validate_ip - Validate plain IP address (no CIDR)
# Usage: validate_ip "192.168.1.1" && echo "Valid"
# ------------------------------------------------------------------------------
function validate_ip() {
local ip="$1"
if [[ "$ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
IFS='.' read -ra octets <<<"$ip"
for octet in "${octets[@]}"; do
((octet > 255)) && return 1
done
return 0
fi
return 1
}

# ==============================================================================
# SECTION 3: MAIN CLOUD-INIT FUNCTIONS
# ==============================================================================

# ------------------------------------------------------------------------------
# setup_cloud_init - Configures Proxmox Native Cloud-Init
# ------------------------------------------------------------------------------
# Parameters:
# $1 - VMID (required)
# $2 - Storage name (required)
# $3 - Hostname (optional, default: vm-<vmid>)
# $4 - Enable Cloud-Init (yes/no, default: no)
# $5 - User (optional, default: root)
# $6 - Network mode (dhcp/static, default: dhcp)
# $7 - Static IP (optional, format: 192.168.1.100/24)
# $8 - Gateway (optional)
# $9 - Nameservers (optional, default: 1.1.1.1 8.8.8.8)
#
# Returns: 0 on success, 1 on failure
# Exports: CLOUDINIT_USER, CLOUDINIT_PASSWORD, CLOUDINIT_CRED_FILE
# ==============================================================================
function setup_cloud_init() {
local vmid="$1"
local storage="$2"
local hostname="${3:-vm-${vmid}}"
local enable="${4:-no}"
local ciuser="${5:-$CLOUDINIT_DEFAULT_USER}"
local network_mode="${6:-dhcp}"
local static_ip="${7:-}"
local gateway="${8:-}"
local nameservers="${9:-$CLOUDINIT_DNS_SERVERS}"

# Skip if not enabled
if [ "$enable" != "yes" ]; then
return 0
fi

# Validate static IP if provided
if [ "$network_mode" = "static" ]; then
if [ -n "$static_ip" ] && ! validate_ip_cidr "$static_ip"; then
_ci_msg_error "Invalid static IP format: $static_ip (expected: x.x.x.x/xx)"
return 1
fi
if [ -n "$gateway" ] && ! validate_ip "$gateway"; then
_ci_msg_error "Invalid gateway IP format: $gateway"
return 1
fi
fi

_ci_msg_info "Configuring Cloud-Init"

# Create Cloud-Init drive (try ide2 first, then scsi1 as fallback)
if ! qm set "$vmid" --ide2 "${storage}:cloudinit" >/dev/null 2>&1; then
qm set "$vmid" --scsi1 "${storage}:cloudinit" >/dev/null 2>&1
fi

# Set user
qm set "$vmid" --ciuser "$ciuser" >/dev/null

# Generate and set secure random password
local cipassword=$(openssl rand -base64 16)
qm set "$vmid" --cipassword "$cipassword" >/dev/null

# Add SSH keys if available
if [ -f "$CLOUDINIT_SSH_KEYS" ]; then
qm set "$vmid" --sshkeys "$CLOUDINIT_SSH_KEYS" >/dev/null 2>&1 || true
fi

# Configure network
if [ "$network_mode" = "static" ] && [ -n "$static_ip" ] && [ -n "$gateway" ]; then
qm set "$vmid" --ipconfig0 "ip=${static_ip},gw=${gateway}" >/dev/null
else
qm set "$vmid" --ipconfig0 "ip=dhcp" >/dev/null
fi

# Set DNS servers
qm set "$vmid" --nameserver "$nameservers" >/dev/null

# Set search domain
qm set "$vmid" --searchdomain "$CLOUDINIT_SEARCH_DOMAIN" >/dev/null

# Enable package upgrades on first boot (if supported by Proxmox version)
qm set "$vmid" --ciupgrade 1 >/dev/null 2>&1 || true

# Save credentials to file (with restrictive permissions)
local cred_file="/tmp/${hostname}-${vmid}-cloud-init-credentials.txt"
umask 077
cat >"$cred_file" <<EOF
╔══════════════════════════════════════════════════════════════════╗
║ ⚠️ SECURITY WARNING: DELETE THIS FILE AFTER NOTING CREDENTIALS ║
╚══════════════════════════════════════════════════════════════════╝

Cloud-Init Credentials
────────────────────────────────────────
VM ID: ${vmid}
Hostname: ${hostname}
Created: $(date)

Username: ${ciuser}
Password: ${cipassword}

Network: ${network_mode}$([ "$network_mode" = "static" ] && echo " (IP: ${static_ip}, GW: ${gateway})" || echo " (DHCP)")
DNS: ${nameservers}

────────────────────────────────────────
SSH Access (if keys configured):
ssh ${ciuser}@<vm-ip>

Proxmox UI Configuration:
VM ${vmid} > Cloud-Init > Edit
- User, Password, SSH Keys
- Network (IP Config)
- DNS, Search Domain

────────────────────────────────────────
🗑️ To delete this file:
rm -f ${cred_file}
────────────────────────────────────────
EOF
chmod 600 "$cred_file"

_ci_msg_ok "Cloud-Init configured (User: ${ciuser})"

# Export for use in calling script (DO NOT display password here - will be shown in summary)
export CLOUDINIT_USER="$ciuser"
export CLOUDINIT_PASSWORD="$cipassword"
export CLOUDINIT_CRED_FILE="$cred_file"

return 0
}

# ==============================================================================
# SECTION 4: INTERACTIVE CONFIGURATION
# ==============================================================================

# ------------------------------------------------------------------------------
# configure_cloud_init_interactive - Whiptail dialog for Cloud-Init setup
# ------------------------------------------------------------------------------
# Prompts user for Cloud-Init configuration choices
# Returns configuration via exported variables:
# - CLOUDINIT_ENABLE (yes/no)
# - CLOUDINIT_USER
# - CLOUDINIT_NETWORK_MODE (dhcp/static)
# - CLOUDINIT_IP (if static)
# - CLOUDINIT_GW (if static)
# - CLOUDINIT_DNS
# ------------------------------------------------------------------------------
function configure_cloud_init_interactive() {
local default_user="${1:-root}"

# Check if whiptail is available
if ! command -v whiptail >/dev/null 2>&1; then
echo "Warning: whiptail not available, skipping interactive configuration"
export CLOUDINIT_ENABLE="no"
return 1
fi

# Ask if user wants to enable Cloud-Init
if ! (whiptail --backtitle "Proxmox VE Helper Scripts" --title "CLOUD-INIT" \
--yesno "Enable Cloud-Init for VM configuration?\n\nCloud-Init allows automatic configuration of:\n• User accounts and passwords\n• SSH keys\n• Network settings (DHCP/Static)\n• DNS configuration\n\nYou can also configure these settings later in Proxmox UI." 16 68); then
export CLOUDINIT_ENABLE="no"
return 0
fi

export CLOUDINIT_ENABLE="yes"

# Username
if CLOUDINIT_USER=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \
"Cloud-Init Username" 8 58 "$default_user" --title "USERNAME" 3>&1 1>&2 2>&3); then
export CLOUDINIT_USER="${CLOUDINIT_USER:-$default_user}"
else
export CLOUDINIT_USER="$default_user"
fi

# Network configuration
if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "NETWORK MODE" \
--yesno "Use DHCP for network configuration?\n\nSelect 'No' for static IP configuration." 10 58); then
export CLOUDINIT_NETWORK_MODE="dhcp"
else
export CLOUDINIT_NETWORK_MODE="static"

# Static IP with validation
while true; do
if CLOUDINIT_IP=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \
"Static IP Address (CIDR format)\nExample: 192.168.1.100/24" 9 58 "" --title "IP ADDRESS" 3>&1 1>&2 2>&3); then
if validate_ip_cidr "$CLOUDINIT_IP"; then
export CLOUDINIT_IP
break
else
whiptail --backtitle "Proxmox VE Helper Scripts" --title "INVALID IP" \
--msgbox "Invalid IP format: $CLOUDINIT_IP\n\nPlease use CIDR format: x.x.x.x/xx\nExample: 192.168.1.100/24" 10 50
fi
else
_ci_msg_warn "Static IP required, falling back to DHCP"
export CLOUDINIT_NETWORK_MODE="dhcp"
break
fi
done

# Gateway with validation
if [ "$CLOUDINIT_NETWORK_MODE" = "static" ]; then
while true; do
if CLOUDINIT_GW=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \
"Gateway IP Address\nExample: 192.168.1.1" 8 58 "" --title "GATEWAY" 3>&1 1>&2 2>&3); then
if validate_ip "$CLOUDINIT_GW"; then
export CLOUDINIT_GW
break
else
whiptail --backtitle "Proxmox VE Helper Scripts" --title "INVALID GATEWAY" \
--msgbox "Invalid gateway format: $CLOUDINIT_GW\n\nPlease use format: x.x.x.x\nExample: 192.168.1.1" 10 50
fi
else
_ci_msg_warn "Gateway required, falling back to DHCP"
export CLOUDINIT_NETWORK_MODE="dhcp"
break
fi
done
fi
fi

# DNS Servers
if CLOUDINIT_DNS=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \
"DNS Servers (space-separated)" 8 58 "1.1.1.1 8.8.8.8" --title "DNS SERVERS" 3>&1 1>&2 2>&3); then
export CLOUDINIT_DNS="${CLOUDINIT_DNS:-1.1.1.1 8.8.8.8}"
else
export CLOUDINIT_DNS="1.1.1.1 8.8.8.8"
fi

return 0
}

# ==============================================================================
# SECTION 5: UTILITY FUNCTIONS
# ==============================================================================

# ------------------------------------------------------------------------------
# display_cloud_init_info - Show Cloud-Init summary after setup
# ------------------------------------------------------------------------------
function display_cloud_init_info() {
local vmid="$1"
local hostname="${2:-}"

if [ -n "$CLOUDINIT_CRED_FILE" ] && [ -f "$CLOUDINIT_CRED_FILE" ]; then
if [ -n "${INFO:-}" ]; then
echo -e "\n${INFO}${BOLD:-}${GN:-} Cloud-Init Configuration:${CL:-}"
echo -e "${TAB:- }${DGN:-}User: ${BGN:-}${CLOUDINIT_USER:-root}${CL:-}"
echo -e "${TAB:- }${DGN:-}Password: ${BGN:-}${CLOUDINIT_PASSWORD}${CL:-}"
echo -e "${TAB:- }${DGN:-}Credentials: ${BL:-}${CLOUDINIT_CRED_FILE}${CL:-}"
echo -e "${TAB:- }${RD:-}⚠️ Delete credentials file after noting password!${CL:-}"
echo -e "${TAB:- }${YW:-}💡 Configure in Proxmox UI: VM ${vmid} > Cloud-Init${CL:-}"
else
echo ""
echo "[INFO] Cloud-Init Configuration:"
echo " User: ${CLOUDINIT_USER:-root}"
echo " Password: ${CLOUDINIT_PASSWORD}"
echo " Credentials: ${CLOUDINIT_CRED_FILE}"
echo " ⚠️ Delete credentials file after noting password!"
echo " Configure in Proxmox UI: VM ${vmid} > Cloud-Init"
fi
fi
}

# ------------------------------------------------------------------------------
# cleanup_cloud_init_credentials - Remove credentials file
# ------------------------------------------------------------------------------
# Usage: cleanup_cloud_init_credentials
# Call this after user has noted/saved the credentials
# ------------------------------------------------------------------------------
function cleanup_cloud_init_credentials() {
if [ -n "$CLOUDINIT_CRED_FILE" ] && [ -f "$CLOUDINIT_CRED_FILE" ]; then
rm -f "$CLOUDINIT_CRED_FILE"
_ci_msg_ok "Credentials file removed: $CLOUDINIT_CRED_FILE"
unset CLOUDINIT_CRED_FILE
return 0
fi
return 1
}

# ------------------------------------------------------------------------------
# has_cloud_init - Check if VM has Cloud-Init configured
# ------------------------------------------------------------------------------
function has_cloud_init() {
local vmid="$1"
qm config "$vmid" 2>/dev/null | grep -qE "(ide2|scsi1):.*cloudinit"
}

# ------------------------------------------------------------------------------
# regenerate_cloud_init - Regenerate Cloud-Init configuration
# ------------------------------------------------------------------------------
function regenerate_cloud_init() {
local vmid="$1"

if has_cloud_init "$vmid"; then
_ci_msg_info "Regenerating Cloud-Init configuration"
qm cloudinit update "$vmid" >/dev/null 2>&1 || true
_ci_msg_ok "Cloud-Init configuration regenerated"
return 0
else
_ci_msg_warn "VM $vmid does not have Cloud-Init configured"
return 1
fi
}

# ------------------------------------------------------------------------------
# get_vm_ip - Get VM IP address via qemu-guest-agent
# ------------------------------------------------------------------------------
function get_vm_ip() {
local vmid="$1"
local timeout="${2:-30}"

local elapsed=0
while [ $elapsed -lt $timeout ]; do
local vm_ip=$(qm guest cmd "$vmid" network-get-interfaces 2>/dev/null |
jq -r '.[] | select(.name != "lo") | ."ip-addresses"[]? | select(."ip-address-type" == "ipv4") | ."ip-address"' 2>/dev/null | head -1)

if [ -n "$vm_ip" ]; then
echo "$vm_ip"
return 0
fi

sleep 2
elapsed=$((elapsed + 2))
done

return 1
}

# ------------------------------------------------------------------------------
# wait_for_cloud_init - Wait for Cloud-Init to complete (requires SSH access)
# ------------------------------------------------------------------------------
function wait_for_cloud_init() {
local vmid="$1"
local timeout="${2:-300}"
local vm_ip="${3:-}"

# Get IP if not provided
if [ -z "$vm_ip" ]; then
vm_ip=$(get_vm_ip "$vmid" 60)
fi

if [ -z "$vm_ip" ]; then
_ci_msg_warn "Unable to determine VM IP address"
return 1
fi

_ci_msg_info "Waiting for Cloud-Init to complete on ${vm_ip}"

local elapsed=0
while [ $elapsed -lt $timeout ]; do
if timeout 10 ssh -o ConnectTimeout=5 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null \
"${CLOUDINIT_USER:-root}@${vm_ip}" "cloud-init status --wait" 2>/dev/null; then
_ci_msg_ok "Cloud-Init completed successfully"
return 0
fi
sleep 10
elapsed=$((elapsed + 10))
done

_ci_msg_warn "Cloud-Init did not complete within ${timeout}s"
return 1
}

# ==============================================================================
# SECTION 6: EXPORTS
# ==============================================================================
# Export all functions for use in other scripts

export -f setup_cloud_init 2>/dev/null || true
export -f configure_cloud_init_interactive 2>/dev/null || true
export -f display_cloud_init_info 2>/dev/null || true
export -f cleanup_cloud_init_credentials 2>/dev/null || true
export -f has_cloud_init 2>/dev/null || true
export -f regenerate_cloud_init 2>/dev/null || true
export -f get_vm_ip 2>/dev/null || true
export -f wait_for_cloud_init 2>/dev/null || true
export -f validate_ip_cidr 2>/dev/null || true
export -f validate_ip 2>/dev/null || true

# ==============================================================================
# SECTION 7: EXAMPLES & DOCUMENTATION
# ==============================================================================
: <<'EXAMPLES'

# Example 1: Simple DHCP setup (most common)
setup_cloud_init "$VMID" "$STORAGE" "$HN" "yes"

# Example 2: Static IP setup
setup_cloud_init "$VMID" "$STORAGE" "myserver" "yes" "root" "static" "192.168.1.100/24" "192.168.1.1"

# Example 3: Interactive configuration in advanced_settings()
configure_cloud_init_interactive "admin"
if [ "$CLOUDINIT_ENABLE" = "yes" ]; then
setup_cloud_init "$VMID" "$STORAGE" "$HN" "yes" "$CLOUDINIT_USER" \
"$CLOUDINIT_NETWORK_MODE" "$CLOUDINIT_IP" "$CLOUDINIT_GW" "$CLOUDINIT_DNS"
fi

# Example 4: Display info after VM creation
display_cloud_init_info "$VMID" "$HN"

# Example 5: Check if VM has Cloud-Init
if has_cloud_init "$VMID"; then
echo "Cloud-Init is configured"
fi

# Example 6: Wait for Cloud-Init to complete after VM start
if [ "$START_VM" = "yes" ]; then
qm start "$VMID"
sleep 30
wait_for_cloud_init "$VMID" 300
fi

# Example 7: Cleanup credentials file after user has noted password
display_cloud_init_info "$VMID" "$HN"
read -p "Have you saved the credentials? (y/N): " -r
[[ $REPLY =~ ^[Yy]$ ]] && cleanup_cloud_init_credentials

# Example 8: Validate IP before using
if validate_ip_cidr "192.168.1.100/24"; then
echo "Valid IP/CIDR"
fi

EXAMPLES
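
`get_vm_ip` polls the qemu-guest-agent through `qm guest cmd ... network-get-interfaces` and filters the JSON with `jq`, so it only succeeds once the guest agent is running inside the VM. A hedged usage sketch (the `VMID` value is illustrative; assumes cloud-init.func is already sourced on the Proxmox host):

```bash
# Sketch: retrieve a VM's IPv4 after boot, with a bounded wait.
VMID=100                                  # illustrative VM ID
if vm_ip=$(get_vm_ip "$VMID" 60); then    # poll up to 60 seconds
  echo "VM $VMID reachable at $vm_ip"
else
  echo "guest agent not responding; is qemu-guest-agent installed in the VM?" >&2
fi
```
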
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# Copyright (c) 2021-2025 community-scripts ORG
+# Copyright (c) 2021-2026 community-scripts ORG
 # License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE
 
 # ==============================================================================
@@ -123,9 +123,38 @@ icons() {
 CREATING="${TAB}🚀${TAB}${CL}"
 ADVANCED="${TAB}🧩${TAB}${CL}"
 FUSE="${TAB}🗂️${TAB}${CL}"
+GPU="${TAB}🎮${TAB}${CL}"
 HOURGLASS="${TAB}⏳${TAB}"
 }
 
+# ------------------------------------------------------------------------------
+# ensure_profile_loaded()
+#
+# - Sources /etc/profile.d/*.sh scripts if not already loaded
+# - Fixes PATH issues when running via pct enter/exec (non-login shells)
+# - Safe to call multiple times (uses guard variable)
+# - Should be called in update_script() or any script running inside LXC
+# ------------------------------------------------------------------------------
+ensure_profile_loaded() {
+# Skip if already loaded or running on Proxmox host
+[[ -n "${_PROFILE_LOADED:-}" ]] && return
+command -v pveversion &>/dev/null && return
+
+# Source all profile.d scripts to ensure PATH is complete
+if [[ -d /etc/profile.d ]]; then
+for script in /etc/profile.d/*.sh; do
+[[ -r "$script" ]] && source "$script"
+done
+fi
+
+# Also ensure /usr/local/bin is in PATH (common install location)
+if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then
+export PATH="/usr/local/bin:$PATH"
+fi
+
+export _PROFILE_LOADED=1
+}
+
 # ------------------------------------------------------------------------------
 # default_vars()
 #
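
`pct exec` starts a non-login shell, so `/etc/profile.d` never runs and PATH can miss install locations; `ensure_profile_loaded` repairs that idempotently via the `_PROFILE_LOADED` guard. A hedged sketch of a typical call site (the `myapp` binary name is a hypothetical placeholder):

```bash
# Sketch: calling ensure_profile_loaded from an LXC update routine.
update_script() {
  ensure_profile_loaded   # no-op on the Proxmox host or on repeat calls
  if ! command -v myapp >/dev/null 2>&1; then   # "myapp" is a placeholder binary
    echo "myapp not on PATH even after profile load" >&2
    return 1
  fi
  myapp --version
}
```
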
@@ -786,11 +815,9 @@ is_verbose_mode() {
 # ------------------------------------------------------------------------------
 # cleanup_lxc()
 #
-# - Comprehensive cleanup of package managers, caches, and logs
-# - Supports Alpine (apk), Debian/Ubuntu (apt), and language package managers
-# - Cleans: Python (pip/uv), Node.js (npm/yarn/pnpm), Go, Rust, Ruby, PHP
-# - Truncates log files and vacuums systemd journal
-# - Run at end of container creation to minimize disk usage
+# - Cleans package manager and language caches (safe for installs AND updates)
+# - Supports Alpine (apk), Debian/Ubuntu (apt), Python, Node.js, Go, Rust, Ruby, PHP
+# - Uses fallback error handling to prevent cleanup failures from breaking installs
 # ------------------------------------------------------------------------------
 cleanup_lxc() {
 msg_info "Cleaning up"
@@ -799,39 +826,53 @@ cleanup_lxc() {
 $STD apk cache clean || true
 rm -rf /var/cache/apk/*
 else
-$STD apt -y autoremove || true
-$STD apt -y autoclean || true
-$STD apt -y clean || true
+$STD apt -y autoremove 2>/dev/null || msg_warn "apt autoremove failed (non-critical)"
+$STD apt -y autoclean 2>/dev/null || msg_warn "apt autoclean failed (non-critical)"
+$STD apt -y clean 2>/dev/null || msg_warn "apt clean failed (non-critical)"
 fi
 
-# Clear temp artifacts (keep sockets/FIFOs; ignore errors)
 find /tmp /var/tmp -type f -name 'tmp*' -delete 2>/dev/null || true
 find /tmp /var/tmp -type f -name 'tempfile*' -delete 2>/dev/null || true
 
-# Truncate writable log files silently (permission errors ignored)
-if command -v truncate >/dev/null 2>&1; then
-find /var/log -type f -writable -print0 2>/dev/null |
-xargs -0 -n1 truncate -s 0 2>/dev/null || true
-fi
+# Python
+if command -v pip &>/dev/null; then
+rm -rf /root/.cache/pip 2>/dev/null || true
+fi
+if command -v uv &>/dev/null; then
+rm -rf /root/.cache/uv 2>/dev/null || true
+fi
 
-# Node.js npm
-if command -v npm &>/dev/null; then $STD npm cache clean --force || true; fi
-# Node.js yarn
-if command -v yarn &>/dev/null; then $STD yarn cache clean || true; fi
-# Node.js pnpm
-if command -v pnpm &>/dev/null; then $STD pnpm store prune || true; fi
-# Go
-if command -v go &>/dev/null; then $STD go clean -cache -modcache || true; fi
-# Rust cargo
-if command -v cargo &>/dev/null; then $STD cargo clean || true; fi
-# Ruby gem
-if command -v gem &>/dev/null; then $STD gem cleanup || true; fi
-# Composer (PHP)
-if command -v composer &>/dev/null; then $STD composer clear-cache || true; fi
-
-if command -v journalctl &>/dev/null; then
-$STD journalctl --vacuum-time=10m || true
-fi
+# Node.js
+if command -v npm &>/dev/null; then
+rm -rf /root/.npm/_cacache /root/.npm/_logs 2>/dev/null || true
+fi
+if command -v yarn &>/dev/null; then
+rm -rf /root/.cache/yarn /root/.yarn/cache 2>/dev/null || true
+fi
+if command -v pnpm &>/dev/null; then
+pnpm store prune &>/dev/null || true
+fi
+
+# Go (only build cache, not modules)
+if command -v go &>/dev/null; then
+$STD go clean -cache 2>/dev/null || true
+fi
+
+# Rust (only registry cache, not build artifacts)
+if command -v cargo &>/dev/null; then
+rm -rf /root/.cargo/registry/cache /root/.cargo/.package-cache 2>/dev/null || true
+fi
+
+# Ruby
+if command -v gem &>/dev/null; then
+rm -rf /root/.gem/cache 2>/dev/null || true
+fi
+
+# PHP
+if command -v composer &>/dev/null; then
+rm -rf /root/.composer/cache 2>/dev/null || true
+fi
+
 msg_ok "Cleaned"
 }
 
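
The rewrite drops blanket commands in favor of targeted cache removals: `cargo clean` deletes the build artifacts of whatever project is the current working directory, and `go clean -modcache` forces a full module re-download on the next update. The Rust case, annotated as a sketch:

```bash
# Old approach (risky during updates): cleans target/ of the current
# working directory's crate, forcing a rebuild of an installed app.
#   cargo clean
# New approach: drop only re-downloadable registry caches.
rm -rf /root/.cargo/registry/cache /root/.cargo/.package-cache 2>/dev/null || true
```
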
@@ -883,6 +924,93 @@ check_or_create_swap() {
   fi
 }
 
+# ------------------------------------------------------------------------------
+# Loads LOCAL_IP from persistent store or detects if missing.
+#
+# Description:
+# - Loads from /run/local-ip.env or performs runtime lookup
+# ------------------------------------------------------------------------------
+function get_lxc_ip() {
+  local IP_FILE="/run/local-ip.env"
+  if [[ -f "$IP_FILE" ]]; then
+    # shellcheck disable=SC1090
+    source "$IP_FILE"
+  fi
+
+  if [[ -z "${LOCAL_IP:-}" ]]; then
+    get_current_ip() {
+      local ip
+
+      # Try direct interface lookup for eth0 FIRST (most reliable for LXC) - IPv4
+      ip=$(ip -4 addr show eth0 2>/dev/null | awk '/inet / {print $2}' | cut -d/ -f1 | head -n1)
+      if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+        echo "$ip"
+        return 0
+      fi
+
+      # Fallback: Try hostname -I (returns IPv4 first if available)
+      if command -v hostname >/dev/null 2>&1; then
+        ip=$(hostname -I 2>/dev/null | awk '{print $1}')
+        if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+          echo "$ip"
+          return 0
+        fi
+      fi
+
+      # Try routing table with IPv4 targets
+      local ipv4_targets=("8.8.8.8" "1.1.1.1" "default")
+      for target in "${ipv4_targets[@]}"; do
+        if [[ "$target" == "default" ]]; then
+          ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+        else
+          ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+        fi
+        if [[ -n "$ip" ]]; then
+          echo "$ip"
+          return 0
+        fi
+      done
+
+      # IPv6 fallback: Try direct interface lookup for eth0
+      ip=$(ip -6 addr show eth0 scope global 2>/dev/null | awk '/inet6 / {print $2}' | cut -d/ -f1 | head -n1)
+      if [[ -n "$ip" && "$ip" =~ : ]]; then
+        echo "$ip"
+        return 0
+      fi
+
+      # IPv6 fallback: Try hostname -I for IPv6
+      if command -v hostname >/dev/null 2>&1; then
+        ip=$(hostname -I 2>/dev/null | tr ' ' '\n' | grep -E ':' | head -n1)
+        if [[ -n "$ip" && "$ip" =~ : ]]; then
+          echo "$ip"
+          return 0
+        fi
+      fi
+
+      # IPv6 fallback: Use routing table with IPv6 targets
+      local ipv6_targets=("2001:4860:4860::8888" "2606:4700:4700::1111")
+      for target in "${ipv6_targets[@]}"; do
+        ip=$(ip -6 route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+        if [[ -n "$ip" && "$ip" =~ : ]]; then
+          echo "$ip"
+          return 0
+        fi
+      done
+
+      return 1
+    }
+
+    LOCAL_IP="$(get_current_ip || true)"
+    if [[ -z "$LOCAL_IP" ]]; then
+      msg_error "Could not determine LOCAL_IP"
+      return 1
+    fi
+  fi
+
+  export LOCAL_IP
+}
 
 # ==============================================================================
 # SIGNAL TRAPS
 # ==============================================================================
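Judging from the `source "$IP_FILE"` call followed by the `LOCAL_IP` check, the persistent store is a plain KEY=VALUE file; the writer of that file is outside this hunk, so this is an assumed sketch of its format:

```bash
# /run/local-ip.env — assumed format; sourced verbatim by get_lxc_ip()
LOCAL_IP=192.168.1.50
```

Since /run is tmpfs, the file vanishes on reboot, and deleting it simply forces a fresh detection on the next call.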
322  scripts/core/error-handler.func  Normal file
@@ -0,0 +1,322 @@
+#!/usr/bin/env bash
+# ------------------------------------------------------------------------------
+# ERROR HANDLER - ERROR & SIGNAL MANAGEMENT
+# ------------------------------------------------------------------------------
+# Copyright (c) 2021-2026 community-scripts ORG
+# Author: MickLesk (CanbiZ)
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# ------------------------------------------------------------------------------
+#
+# Provides comprehensive error handling and signal management for all scripts.
+# Includes:
+# - Exit code explanations (shell, package managers, databases, custom codes)
+# - Error handler with detailed logging
+# - Signal handlers (EXIT, INT, TERM)
+# - Initialization function for trap setup
+#
+# Usage:
+#   source <(curl -fsSL .../error_handler.func)
+#   catch_errors
+#
+# ------------------------------------------------------------------------------
+
+# ==============================================================================
+# SECTION 1: EXIT CODE EXPLANATIONS
+# ==============================================================================
+
+# ------------------------------------------------------------------------------
+# explain_exit_code()
+#
+# - Maps numeric exit codes to human-readable error descriptions
+# - Supports:
+#   * Generic/Shell errors (1, 2, 126, 127, 128, 130, 137, 139, 143)
+#   * Package manager errors (APT, DPKG: 100, 101, 255)
+#   * Node.js/npm errors (243-249, 254)
+#   * Python/pip/uv errors (210-212)
+#   * PostgreSQL errors (231-234)
+#   * MySQL/MariaDB errors (241-244)
+#   * MongoDB errors (251-254)
+#   * Proxmox custom codes (200-231)
+# - Returns description string for given exit code
+# ------------------------------------------------------------------------------
+explain_exit_code() {
+  local code="$1"
+  case "$code" in
+  # --- Generic / Shell ---
+  1) echo "General error / Operation not permitted" ;;
+  2) echo "Misuse of shell builtins (e.g. syntax error)" ;;
+  126) echo "Command invoked cannot execute (permission problem?)" ;;
+  127) echo "Command not found" ;;
+  128) echo "Invalid argument to exit" ;;
+  130) echo "Terminated by Ctrl+C (SIGINT)" ;;
+  137) echo "Killed (SIGKILL / Out of memory?)" ;;
+  139) echo "Segmentation fault (core dumped)" ;;
+  143) echo "Terminated (SIGTERM)" ;;
+
+  # --- Package manager / APT / DPKG ---
+  100) echo "APT: Package manager error (broken packages / dependency problems)" ;;
+  101) echo "APT: Configuration error (bad sources.list, malformed config)" ;;
+  255) echo "DPKG: Fatal internal error" ;;
+
+  # --- Node.js / npm / pnpm / yarn ---
+  243) echo "Node.js: Out of memory (JavaScript heap out of memory)" ;;
+  245) echo "Node.js: Invalid command-line option" ;;
+  246) echo "Node.js: Internal JavaScript Parse Error" ;;
+  247) echo "Node.js: Fatal internal error" ;;
+  248) echo "Node.js: Invalid C++ addon / N-API failure" ;;
+  249) echo "Node.js: Inspector error" ;;
+  254) echo "npm/pnpm/yarn: Unknown fatal error" ;;
+
+  # --- Python / pip / uv ---
+  210) echo "Python: Virtualenv / uv environment missing or broken" ;;
+  211) echo "Python: Dependency resolution failed" ;;
+  212) echo "Python: Installation aborted (permissions or EXTERNALLY-MANAGED)" ;;
+
+  # --- PostgreSQL ---
+  231) echo "PostgreSQL: Connection failed (server not running / wrong socket)" ;;
+  232) echo "PostgreSQL: Authentication failed (bad user/password)" ;;
+  233) echo "PostgreSQL: Database does not exist" ;;
+  234) echo "PostgreSQL: Fatal error in query / syntax" ;;
+
+  # --- MySQL / MariaDB ---
+  241) echo "MySQL/MariaDB: Connection failed (server not running / wrong socket)" ;;
+  242) echo "MySQL/MariaDB: Authentication failed (bad user/password)" ;;
+  243) echo "MySQL/MariaDB: Database does not exist" ;;
+  244) echo "MySQL/MariaDB: Fatal error in query / syntax" ;;
+
+  # --- MongoDB ---
+  251) echo "MongoDB: Connection failed (server not running)" ;;
+  252) echo "MongoDB: Authentication failed (bad user/password)" ;;
+  253) echo "MongoDB: Database not found" ;;
+  254) echo "MongoDB: Fatal query error" ;;
+
+  # --- Proxmox Custom Codes ---
+  200) echo "Proxmox: Failed to create lock file" ;;
+  203) echo "Proxmox: Missing CTID variable" ;;
+  204) echo "Proxmox: Missing PCT_OSTYPE variable" ;;
+  205) echo "Proxmox: Invalid CTID (<100)" ;;
+  206) echo "Proxmox: CTID already in use" ;;
+  207) echo "Proxmox: Password contains unescaped special characters" ;;
+  208) echo "Proxmox: Invalid configuration (DNS/MAC/Network format)" ;;
+  209) echo "Proxmox: Container creation failed" ;;
+  210) echo "Proxmox: Cluster not quorate" ;;
+  211) echo "Proxmox: Timeout waiting for template lock" ;;
+  212) echo "Proxmox: Storage type 'iscsidirect' does not support containers (VMs only)" ;;
+  213) echo "Proxmox: Storage type does not support 'rootdir' content" ;;
+  214) echo "Proxmox: Not enough storage space" ;;
+  215) echo "Proxmox: Container created but not listed (ghost state)" ;;
+  216) echo "Proxmox: RootFS entry missing in config" ;;
+  217) echo "Proxmox: Storage not accessible" ;;
+  219) echo "Proxmox: CephFS does not support containers - use RBD" ;;
+  224) echo "Proxmox: PBS storage is for backups only" ;;
+  218) echo "Proxmox: Template file corrupted or incomplete" ;;
+  220) echo "Proxmox: Unable to resolve template path" ;;
+  221) echo "Proxmox: Template file not readable" ;;
+  222) echo "Proxmox: Template download failed" ;;
+  223) echo "Proxmox: Template not available after download" ;;
+  225) echo "Proxmox: No template available for OS/Version" ;;
+  231) echo "Proxmox: LXC stack upgrade failed" ;;
+
+  # --- Default ---
+  *) echo "Unknown error" ;;
+  esac
+}
+
+# ==============================================================================
+# SECTION 2: ERROR HANDLERS
+# ==============================================================================
+
+# ------------------------------------------------------------------------------
+# error_handler()
+#
+# - Main error handler triggered by ERR trap
+# - Arguments: exit_code, command, line_number
+# - Behavior:
+#   * Returns silently if exit_code is 0 (success)
+#   * Sources explain_exit_code() for detailed error description
+#   * Displays error message with:
+#     - Line number where error occurred
+#     - Exit code with explanation
+#     - Command that failed
+#   * Shows last 20 lines of SILENT_LOGFILE if available
+#   * Copies log to container /root for later inspection
+#   * Exits with original exit code
+# ------------------------------------------------------------------------------
+error_handler() {
+  local exit_code=${1:-$?}
+  local command=${2:-${BASH_COMMAND:-unknown}}
+  local line_number=${BASH_LINENO[0]:-unknown}
+
+  command="${command//\$STD/}"
+
+  if [[ "$exit_code" -eq 0 ]]; then
+    return 0
+  fi
+
+  local explanation
+  explanation="$(explain_exit_code "$exit_code")"
+
+  printf "\e[?25h"
+
+  # Use msg_error if available, fallback to echo
+  if declare -f msg_error >/dev/null 2>&1; then
+    msg_error "in line ${line_number}: exit code ${exit_code} (${explanation}): while executing command ${command}"
+  else
+    echo -e "\n${RD}[ERROR]${CL} in line ${RD}${line_number}${CL}: exit code ${RD}${exit_code}${CL} (${explanation}): while executing command ${YWB}${command}${CL}\n"
+  fi
+
+  if [[ -n "${DEBUG_LOGFILE:-}" ]]; then
+    {
+      echo "------ ERROR ------"
+      echo "Timestamp : $(date '+%Y-%m-%d %H:%M:%S')"
+      echo "Exit Code : $exit_code ($explanation)"
+      echo "Line : $line_number"
+      echo "Command : $command"
+      echo "-------------------"
+    } >>"$DEBUG_LOGFILE"
+  fi
+
+  # Get active log file (BUILD_LOG or INSTALL_LOG)
+  local active_log=""
+  if declare -f get_active_logfile >/dev/null 2>&1; then
+    active_log="$(get_active_logfile)"
+  elif [[ -n "${SILENT_LOGFILE:-}" ]]; then
+    active_log="$SILENT_LOGFILE"
+  fi
+
+  if [[ -n "$active_log" && -s "$active_log" ]]; then
+    echo "--- Last 20 lines of silent log ---"
+    tail -n 20 "$active_log"
+    echo "-----------------------------------"
+
+    # Detect context: Container (INSTALL_LOG set + /root exists) vs Host (BUILD_LOG)
+    if [[ -n "${INSTALL_LOG:-}" && -d /root ]]; then
+      # CONTAINER CONTEXT: Copy log and create flag file for host
+      local container_log="/root/.install-${SESSION_ID:-error}.log"
+      cp "$active_log" "$container_log" 2>/dev/null || true
+
+      # Create error flag file with exit code for host detection
+      echo "$exit_code" >"/root/.install-${SESSION_ID:-error}.failed" 2>/dev/null || true
+
+      if declare -f msg_custom >/dev/null 2>&1; then
+        msg_custom "📋" "${YW}" "Log saved to: ${container_log}"
+      else
+        echo -e "${YW}Log saved to:${CL} ${BL}${container_log}${CL}"
+      fi
+    else
+      # HOST CONTEXT: Show local log path and offer container cleanup
+      if declare -f msg_custom >/dev/null 2>&1; then
+        msg_custom "📋" "${YW}" "Full log: ${active_log}"
+      else
+        echo -e "${YW}Full log:${CL} ${BL}${active_log}${CL}"
+      fi
+
+      # Offer to remove container if it exists (build errors after container creation)
+      if [[ -n "${CTID:-}" ]] && command -v pct &>/dev/null && pct status "$CTID" &>/dev/null; then
+        echo ""
+        echo -en "${YW}Remove broken container ${CTID}? (Y/n) [auto-remove in 60s]: ${CL}"
+
+        if read -t 60 -r response; then
+          if [[ -z "$response" || "$response" =~ ^[Yy]$ ]]; then
+            echo -e "\n${YW}Removing container ${CTID}${CL}"
+            pct stop "$CTID" &>/dev/null || true
+            pct destroy "$CTID" &>/dev/null || true
+            echo -e "${GN}✔${CL} Container ${CTID} removed"
+          elif [[ "$response" =~ ^[Nn]$ ]]; then
+            echo -e "\n${YW}Container ${CTID} kept for debugging${CL}"
+          fi
+        else
+          # Timeout - auto-remove
+          echo -e "\n${YW}No response - auto-removing container${CL}"
+          pct stop "$CTID" &>/dev/null || true
+          pct destroy "$CTID" &>/dev/null || true
+          echo -e "${GN}✔${CL} Container ${CTID} removed"
+        fi
+      fi
+    fi
+  fi
+
+  exit "$exit_code"
+}
+
+# ==============================================================================
+# SECTION 3: SIGNAL HANDLERS
+# ==============================================================================
+
+# ------------------------------------------------------------------------------
+# on_exit()
+#
+# - EXIT trap handler
+# - Cleans up lock files if lockfile variable is set
+# - Exits with captured exit code
+# - Always runs on script termination (success or failure)
+# ------------------------------------------------------------------------------
+on_exit() {
+  local exit_code=$?
+  [[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
+  exit "$exit_code"
+}
+
+# ------------------------------------------------------------------------------
+# on_interrupt()
+#
+# - SIGINT (Ctrl+C) trap handler
+# - Displays "Interrupted by user" message
+# - Exits with code 130 (128 + SIGINT=2)
+# ------------------------------------------------------------------------------
+on_interrupt() {
+  if declare -f msg_error >/dev/null 2>&1; then
+    msg_error "Interrupted by user (SIGINT)"
+  else
+    echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
+  fi
+  exit 130
+}
+
+# ------------------------------------------------------------------------------
+# on_terminate()
+#
+# - SIGTERM trap handler
+# - Displays "Terminated by signal" message
+# - Exits with code 143 (128 + SIGTERM=15)
+# - Triggered by external process termination
+# ------------------------------------------------------------------------------
+on_terminate() {
+  if declare -f msg_error >/dev/null 2>&1; then
+    msg_error "Terminated by signal (SIGTERM)"
+  else
+    echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
+  fi
+  exit 143
+}
+
+# ==============================================================================
+# SECTION 4: INITIALIZATION
+# ==============================================================================
+
+# ------------------------------------------------------------------------------
+# catch_errors()
+#
+# - Initializes error handling and signal traps
+# - Enables strict error handling:
+#   * set -Ee: Exit on error, inherit ERR trap in functions
+#   * set -o pipefail: Pipeline fails if any command fails
+#   * set -u: (optional) Exit on undefined variable (if STRICT_UNSET=1)
+# - Sets up traps:
+#   * ERR  → error_handler
+#   * EXIT → on_exit
+#   * INT  → on_interrupt
+#   * TERM → on_terminate
+# - Call this function early in every script
+# ------------------------------------------------------------------------------
+catch_errors() {
+  set -Ee -o pipefail
+  if [ "${STRICT_UNSET:-0}" = "1" ]; then
+    set -u
+  fi
+
+  trap 'error_handler' ERR
+  trap on_exit EXIT
+  trap on_interrupt INT
+  trap on_terminate TERM
+}
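The header's usage note is truncated (`.../error_handler.func`), but the intended wiring is clear from the file itself: source the handler, then call catch_errors before any fallible command. A minimal local sketch, with a checkout-relative path standing in for the shortened URL:

```bash
#!/usr/bin/env bash
source scripts/core/error-handler.func  # assumed local path; original sources via curl
catch_errors                            # installs the ERR/EXIT/INT/TERM traps

explain_exit_code 127                   # prints: Command not found
no-such-command                         # ERR trap fires; error_handler reports and exits 127
```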
@@ -1,4 +1,4 @@
-# Copyright (c) 2021-2025 community-scripts ORG
+# Copyright (c) 2021-2026 community-scripts ORG
 # Author: tteck (tteckster)
 # Co-Author: MickLesk
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
@@ -37,6 +37,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
 load_functions
 catch_errors
+
+# Get LXC IP address (must be called INSIDE container, after network is up)
+get_lxc_ip
 
 # ==============================================================================
 # SECTION 2: NETWORK & CONNECTIVITY
 # ==============================================================================
@@ -76,6 +79,13 @@ EOF
 # ------------------------------------------------------------------------------
 setting_up_container() {
   msg_info "Setting up Container OS"
+
+  # Fix Debian 13 LXC template bug where / is owned by nobody
+  # Only attempt in privileged containers (unprivileged cannot chown /)
+  if [[ "$(stat -c '%U' /)" != "root" ]]; then
+    (chown root:root / 2>/dev/null) || true
+  fi
+
   for ((i = RETRY_NUM; i > 0; i--)); do
     if [ "$(hostname -I)" != "" ]; then
       break
@@ -222,21 +232,12 @@ motd_ssh() {
   # Set terminal to 256-color mode
   grep -qxF "export TERM='xterm-256color'" /root/.bashrc || echo "export TERM='xterm-256color'" >>/root/.bashrc
 
-  # Get OS information (Debian / Ubuntu)
-  if [ -f "/etc/os-release" ]; then
-    OS_NAME=$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"')
-    OS_VERSION=$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')
-  elif [ -f "/etc/debian_version" ]; then
-    OS_NAME="Debian"
-    OS_VERSION=$(cat /etc/debian_version)
-  fi
-
   PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
   echo "echo -e \"\"" >"$PROFILE_FILE"
   echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
   echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
   echo "echo \"\"" >>"$PROFILE_FILE"
-  echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}${OS_NAME} - Version: ${OS_VERSION}${CL}\"" >>"$PROFILE_FILE"
+  echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
   echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
   echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(hostname -I | awk '{print \$1}')${CL}\"" >>"$PROFILE_FILE"
File diff suppressed because it is too large — Load Diff

44  scripts/ct/debian.sh  Normal file
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+SCRIPT_DIR="$(dirname "$0")"
+source "$SCRIPT_DIR/../core/build.func"
+# Copyright (c) 2021-2026 tteck
+# Author: tteck (tteckster)
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# Source: https://www.debian.org/
+
+APP="Debian"
+var_tags="${var_tags:-os}"
+var_cpu="${var_cpu:-1}"
+var_ram="${var_ram:-512}"
+var_disk="${var_disk:-2}"
+var_os="${var_os:-debian}"
+var_version="${var_version:-13}"
+var_unprivileged="${var_unprivileged:-1}"
+
+header_info "$APP"
+variables
+color
+catch_errors
+
+function update_script() {
+  header_info
+  check_container_storage
+  check_container_resources
+  if [[ ! -d /var ]]; then
+    msg_error "No ${APP} Installation Found!"
+    exit
+  fi
+  msg_info "Updating $APP LXC"
+  $STD apt update
+  $STD apt -y upgrade
+  msg_ok "Updated $APP LXC"
+  msg_ok "Updated successfully!"
+  exit
+}
+
+start
+build_container
+description
+
+msg_ok "Completed successfully!\n"
+echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
18  scripts/install/debian-install.sh  Normal file
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021-2026 tteck
+# Author: tteck (tteckster)
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# Source: https://www.debian.org/
+
+source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
+color
+verb_ip6
+catch_errors
+setting_up_container
+network_check
+update_os
+
+motd_ssh
+customize
+cleanup_lxc
113  server.js
@@ -3,6 +3,7 @@ import { parse } from 'url';
 import next from 'next';
 import { WebSocketServer } from 'ws';
 import { spawn } from 'child_process';
+import { existsSync } from 'fs';
 import { join, resolve } from 'path';
 import stripAnsi from 'strip-ansi';
 import { spawn as ptySpawn } from 'node-pty';
@@ -56,6 +57,8 @@ const handle = app.getRequestHandler();
  * @property {string} user
  * @property {string} password
  * @property {number} [id]
+ * @property {string} [auth_type]
+ * @property {string} [ssh_key_path]
  */
 
 /**
@@ -82,6 +85,7 @@ const handle = app.getRequestHandler();
  * @property {number} [cloneCount]
  * @property {string[]} [hostnames]
  * @property {'lxc'|'vm'} [containerType]
+ * @property {Record<string, string|number|boolean>} [envVars]
  */
 
 class ScriptExecutionHandler {
@@ -294,26 +298,45 @@ class ScriptExecutionHandler {
     });
   }
 
+  /**
+   * Resolve full server from DB when client sends server with id but no ssh_key_path (e.g. for Shell/Update over SSH).
+   * @param {ServerInfo|null} server - Server from WebSocket message
+   * @returns {Promise<ServerInfo|null>} Same server or full server from DB
+   */
+  async resolveServerForSSH(server) {
+    if (!server?.id) return server;
+    if (server.auth_type === 'key' && (!server.ssh_key_path || !existsSync(server.ssh_key_path))) {
+      const full = await this.db.getServerById(server.id);
+      return /** @type {ServerInfo|null} */ (full ?? server);
+    }
+    return server;
+  }
+
   /**
    * @param {ExtendedWebSocket} ws
    * @param {WebSocketMessage} message
    */
   async handleMessage(ws, message) {
-    const { action, scriptPath, executionId, input, mode, server, isUpdate, isShell, isBackup, isClone, containerId, storage, backupStorage, cloneCount, hostnames, containerType } = message;
+    const { action, scriptPath, executionId, input, mode, server, isUpdate, isShell, isBackup, isClone, containerId, storage, backupStorage, cloneCount, hostnames, containerType, envVars } = message;
 
     switch (action) {
       case 'start':
         if (scriptPath && executionId) {
+          let serverToUse = server;
+          if (serverToUse?.id) {
+            serverToUse = await this.resolveServerForSSH(serverToUse) ?? serverToUse;
+          }
+          const resolved = serverToUse ?? server;
           if (isClone && containerId && storage && server && cloneCount && hostnames && containerType) {
-            await this.startSSHCloneExecution(ws, containerId, executionId, storage, server, containerType, cloneCount, hostnames);
+            await this.startSSHCloneExecution(ws, containerId, executionId, storage, /** @type {ServerInfo} */ (resolved), containerType, cloneCount, hostnames);
           } else if (isBackup && containerId && storage) {
-            await this.startBackupExecution(ws, containerId, executionId, storage, mode, server);
+            await this.startBackupExecution(ws, containerId, executionId, storage, mode, resolved);
           } else if (isUpdate && containerId) {
-            await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage);
+            await this.startUpdateExecution(ws, containerId, executionId, mode, resolved, backupStorage);
           } else if (isShell && containerId) {
-            await this.startShellExecution(ws, containerId, executionId, mode, server);
+            await this.startShellExecution(ws, containerId, executionId, mode, resolved, containerType);
           } else {
-            await this.startScriptExecution(ws, scriptPath, executionId, mode, server);
+            await this.startScriptExecution(ws, scriptPath, executionId, mode, resolved, envVars);
           }
         } else {
           this.sendMessage(ws, {
@@ -351,8 +374,9 @@ class ScriptExecutionHandler {
   * @param {string} executionId
   * @param {string} mode
   * @param {ServerInfo|null} server
+  * @param {Object} [envVars] - Optional environment variables to pass to the script
   */
-  async startScriptExecution(ws, scriptPath, executionId, mode = 'local', server = null) {
+  async startScriptExecution(ws, scriptPath, executionId, mode = 'local', server = null, envVars = {}) {
    /** @type {number|null} */
    let installationId = null;
 
@@ -381,7 +405,7 @@ class ScriptExecutionHandler {
 
    // Handle SSH execution
    if (mode === 'ssh' && server) {
-      await this.startSSHScriptExecution(ws, scriptPath, executionId, server, installationId);
+      await this.startSSHScriptExecution(ws, scriptPath, executionId, server, installationId, envVars);
      return;
    }
 
@@ -407,19 +431,32 @@ class ScriptExecutionHandler {
      return;
    }
 
+    // Format environment variables for local execution
+    // Convert envVars object to environment variables
+    const envWithVars = {
+      ...process.env,
+      TERM: 'xterm-256color', // Enable proper terminal support
+      FORCE_ANSI: 'true', // Allow ANSI codes for proper display
+      COLUMNS: '80', // Set terminal width
+      LINES: '24' // Set terminal height
+    };
+
+    // Add envVars to environment
+    if (envVars && typeof envVars === 'object') {
+      for (const [key, value] of Object.entries(envVars)) {
+        /** @type {Record<string, string>} */
+        const envRecord = envWithVars;
+        envRecord[key] = String(value);
+      }
+    }
+
    // Start script execution with pty for proper TTY support
    const childProcess = ptySpawn('bash', [resolvedPath], {
      cwd: scriptsDir,
      name: 'xterm-256color',
      cols: 80,
      rows: 24,
-      env: {
-        ...process.env,
-        TERM: 'xterm-256color', // Enable proper terminal support
-        FORCE_ANSI: 'true', // Allow ANSI codes for proper display
-        COLUMNS: '80', // Set terminal width
-        LINES: '24' // Set terminal height
-      }
+      env: envWithVars
    });
 
    // pty handles encoding automatically
@@ -522,8 +559,9 @@ class ScriptExecutionHandler {
   * @param {string} executionId
   * @param {ServerInfo} server
   * @param {number|null} installationId
+  * @param {Object} [envVars] - Optional environment variables to pass to the script
   */
-  async startSSHScriptExecution(ws, scriptPath, executionId, server, installationId = null) {
+  async startSSHScriptExecution(ws, scriptPath, executionId, server, installationId = null, envVars = {}) {
    const sshService = getSSHExecutionService();
 
    // Send start message
@@ -612,7 +650,8 @@ class ScriptExecutionHandler {
 
        // Clean up
        this.activeExecutions.delete(executionId);
-      }
+      },
+      envVars
    ));
 
    // Store the execution with installation ID
@@ -1136,10 +1175,11 @@ class ScriptExecutionHandler {
      const hostname = hostnames[i];
 
      try {
-        // Read config file to get hostname/name
+        // Read config file to get hostname/name (node-specific path)
+        const nodeName = server.name;
        const configPath = containerType === 'lxc'
-          ? `/etc/pve/lxc/${nextId}.conf`
-          : `/etc/pve/qemu-server/${nextId}.conf`;
+          ? `/etc/pve/nodes/${nodeName}/lxc/${nextId}.conf`
+          : `/etc/pve/nodes/${nodeName}/qemu-server/${nextId}.conf`;
 
        let configContent = '';
        await new Promise(/** @type {(resolve: (value?: void) => void) => void} */ ((resolve) => {
@@ -1457,21 +1497,21 @@ class ScriptExecutionHandler {
   * @param {string} executionId
   * @param {string} mode
   * @param {ServerInfo|null} server
+  * @param {'lxc'|'vm'} [containerType='lxc']
   */
-  async startShellExecution(ws, containerId, executionId, mode = 'local', server = null) {
+  async startShellExecution(ws, containerId, executionId, mode = 'local', server = null, containerType = 'lxc') {
    try {
-      // Send start message
+      const typeLabel = containerType === 'vm' ? 'VM' : 'container';
      this.sendMessage(ws, {
        type: 'start',
-        data: `Starting shell session for container ${containerId}...`,
+        data: `Starting shell session for ${typeLabel} ${containerId}...`,
        timestamp: Date.now()
      });
 
      if (mode === 'ssh' && server) {
-        await this.startSSHShellExecution(ws, containerId, executionId, server);
+        await this.startSSHShellExecution(ws, containerId, executionId, server, containerType);
      } else {
-        await this.startLocalShellExecution(ws, containerId, executionId);
+        await this.startLocalShellExecution(ws, containerId, executionId, containerType);
      }
 
    } catch (error) {
@@ -1488,12 +1528,12 @@ class ScriptExecutionHandler {
   * @param {ExtendedWebSocket} ws
   * @param {string} containerId
   * @param {string} executionId
+  * @param {'lxc'|'vm'} [containerType='lxc']
   */
-  async startLocalShellExecution(ws, containerId, executionId) {
+  async startLocalShellExecution(ws, containerId, executionId, containerType = 'lxc') {
    const { spawn } = await import('node-pty');
-    // Create a shell process that will run pct enter
-    const childProcess = spawn('bash', ['-c', `pct enter ${containerId}`], {
+    const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
+    const childProcess = spawn('bash', ['-c', shellCommand], {
      name: 'xterm-color',
      cols: 80,
      rows: 24,
@@ -1536,14 +1576,15 @@ class ScriptExecutionHandler {
   * @param {string} containerId
   * @param {string} executionId
   * @param {ServerInfo} server
+  * @param {'lxc'|'vm'} [containerType='lxc']
   */
-  async startSSHShellExecution(ws, containerId, executionId, server) {
+  async startSSHShellExecution(ws, containerId, executionId, server, containerType = 'lxc') {
    const sshService = getSSHExecutionService();
+    const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
    try {
      const execution = await sshService.executeCommand(
        server,
-        `pct enter ${containerId}`,
+        shellCommand,
        /** @param {string} data */
        (data) => {
          this.sendMessage(ws, {
@@ -1593,6 +1634,7 @@ class ScriptExecutionHandler {
 // TerminalHandler removed - not used by current application
 
 app.prepare().then(() => {
+  console.log('> Next.js app prepared successfully');
   const httpServer = createServer(async (req, res) => {
     try {
       // Be sure to pass `true` as the second argument to `url.parse`.
@@ -1698,4 +1740,9 @@ app.prepare().then(() => {
     autoSyncModule.setupGracefulShutdown();
   }
 });
+}).catch((err) => {
+  console.error('> Failed to start server:', err.message);
+  console.error('> If you see "Could not find a production build", run: npm run build');
+  console.error('> Full error:', err);
+  process.exit(1);
 });
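The envVars plumbing above ends up as ordinary process environment for the spawned pty, which matches how the ct scripts read their knobs: debian.sh declares every setting as `${var_x:-default}`. From the script's side the handshake looks like this — a sketch with variable names taken from debian.sh and the envVars payload shape following the WebSocketMessage typedef:

```bash
# If the client's 'start' message carries envVars {"var_cpu": 2, "var_ram": 2048},
# the server spawns bash with those keys exported, so the script's defaults yield:
var_cpu="${var_cpu:-1}"    # -> 2
var_ram="${var_ram:-512}"  # -> 2048
echo "cpu=${var_cpu} ram=${var_ram}"
```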
1001  src/app/_components/ConfigurationModal.tsx  Normal file
File diff suppressed because it is too large — Load Diff
@@ -8,7 +8,9 @@ import { ScriptDetailModal } from "./ScriptDetailModal";
 import { CategorySidebar } from "./CategorySidebar";
 import { FilterBar, type FilterState } from "./FilterBar";
 import { ViewToggle } from "./ViewToggle";
+import { ConfirmationModal } from "./ConfirmationModal";
 import { Button } from "./ui/button";
+import { RefreshCw } from "lucide-react";
 import type { ScriptCard as ScriptCardType } from "~/types/script";
 import type { Server } from "~/types/server";
 import { getDefaultFilters, mergeFiltersWithDefaults } from "./filterUtils";
@@ -32,8 +34,15 @@ export function DownloadedScriptsTab({
   const [filters, setFilters] = useState<FilterState>(getDefaultFilters());
   const [saveFiltersEnabled, setSaveFiltersEnabled] = useState(false);
   const [isLoadingFilters, setIsLoadingFilters] = useState(true);
+  const [updateAllConfirmOpen, setUpdateAllConfirmOpen] = useState(false);
+  const [updateResult, setUpdateResult] = useState<{
+    successCount: number;
+    failCount: number;
+    failed: { slug: string; error: string }[];
+  } | null>(null);
   const gridRef = useRef<HTMLDivElement>(null);
 
+  const utils = api.useUtils();
   const {
     data: scriptCardsData,
     isLoading: githubLoading,
@@ -50,6 +59,30 @@ export function DownloadedScriptsTab({
     { enabled: !!selectedSlug },
   );
 
+  const loadMultipleScriptsMutation = api.scripts.loadMultipleScripts.useMutation({
+    onSuccess: (data) => {
+      void utils.scripts.getAllDownloadedScripts.invalidate();
+      void utils.scripts.getScriptCardsWithCategories.invalidate();
+      setUpdateResult({
+        successCount: data.successful?.length ?? 0,
+        failCount: data.failed?.length ?? 0,
+        failed: (data.failed ?? []).map((f) => ({
+          slug: f.slug,
+          error: f.error ?? "Unknown error",
+        })),
+      });
+      setTimeout(() => setUpdateResult(null), 8000);
+    },
+    onError: (error) => {
+      setUpdateResult({
+        successCount: 0,
+        failCount: 1,
+        failed: [{ slug: "Request failed", error: error.message }],
+      });
+      setTimeout(() => setUpdateResult(null), 8000);
+    },
+  });
+
   // Load SAVE_FILTER setting, saved filters, and view mode on component mount
   useEffect(() => {
     const loadSettings = async () => {
@@ -416,6 +449,21 @@ export function DownloadedScriptsTab({
     setSelectedSlug(null);
   };
 
+  const handleUpdateAllClick = () => {
+    setUpdateResult(null);
+    setUpdateAllConfirmOpen(true);
+  };
+
+  const handleUpdateAllConfirm = () => {
+    setUpdateAllConfirmOpen(false);
+    const slugs = downloadedScripts
+      .map((s) => s.slug)
+      .filter((slug): slug is string => Boolean(slug));
+    if (slugs.length > 0) {
+      loadMultipleScriptsMutation.mutate({ slugs });
+    }
+  };
+
   if (githubLoading || localLoading) {
     return (
       <div className="flex items-center justify-center py-12">
@@ -508,6 +556,43 @@ export function DownloadedScriptsTab({
 
       {/* Main Content */}
       <div className="order-1 min-w-0 flex-1 lg:order-2" ref={gridRef}>
+        {/* Update all downloaded scripts */}
+        <div className="mb-4 flex flex-wrap items-center gap-3">
+          <Button
+            onClick={handleUpdateAllClick}
+            disabled={loadMultipleScriptsMutation.isPending}
+            variant="secondary"
+            size="default"
+            className="flex items-center gap-2"
+          >
+            {loadMultipleScriptsMutation.isPending ? (
+              <>
+                <RefreshCw className="h-4 w-4 animate-spin" />
+                <span>Updating...</span>
+              </>
+            ) : (
+              <>
+                <RefreshCw className="h-4 w-4" />
+                <span>Update all downloaded scripts</span>
+              </>
+            )}
+          </Button>
+          {updateResult && (
+            <span className="text-muted-foreground text-sm">
+              Updated {updateResult.successCount} successfully
+              {updateResult.failCount > 0
+                ? `, ${updateResult.failCount} failed`
+                : ""}
+              .
+              {updateResult.failCount > 0 && updateResult.failed.length > 0 && (
+                <span className="ml-1" title={updateResult.failed.map((f) => `${f.slug}: ${f.error}`).join("\n")}>
+                  (hover for details)
+                </span>
+              )}
+            </span>
+          )}
+        </div>
+
         {/* Enhanced Filter Bar */}
         <FilterBar
           filters={filters}
@@ -621,6 +706,17 @@ export function DownloadedScriptsTab({
           onClose={handleCloseModal}
           onInstallScript={onInstallScript}
         />
+
+        <ConfirmationModal
+          isOpen={updateAllConfirmOpen}
+          onClose={() => setUpdateAllConfirmOpen(false)}
+          onConfirm={handleUpdateAllConfirm}
+          title="Update all downloaded scripts"
+          message={`Update all ${downloadedScripts.length} downloaded scripts? This may take several minutes.`}
+          variant="simple"
+          confirmButtonText="Update all"
+          cancelButtonText="Cancel"
+        />
       </div>
     </div>
   </div>
@@ -2,26 +2,31 @@
 
 import { useState, useEffect } from 'react';
 import type { Server } from '../../types/server';
+import type { Script } from '../../types/script';
 import { Button } from './ui/button';
 import { ColorCodedDropdown } from './ColorCodedDropdown';
 import { SettingsModal } from './SettingsModal';
+import { ConfigurationModal, type EnvVars } from './ConfigurationModal';
 import { useRegisterModal } from './modal/ModalStackProvider';
 
 interface ExecutionModeModalProps {
   isOpen: boolean;
   onClose: () => void;
-  onExecute: (mode: 'local' | 'ssh', server?: Server) => void;
+  onExecute: (mode: 'local' | 'ssh', server?: Server, envVars?: EnvVars) => void;
   scriptName: string;
+  script?: Script | null;
 }
 
-export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: ExecutionModeModalProps) {
+export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName, script }: ExecutionModeModalProps) {
   useRegisterModal(isOpen, { id: 'execution-mode-modal', allowEscape: true, onClose });
   const [servers, setServers] = useState<Server[]>([]);
   const [loading, setLoading] = useState(false);
   const [error, setError] = useState<string | null>(null);
   const [selectedServer, setSelectedServer] = useState<Server | null>(null);
   const [settingsModalOpen, setSettingsModalOpen] = useState(false);
+  const [configModalOpen, setConfigModalOpen] = useState(false);
+  const [configMode, setConfigMode] = useState<'default' | 'advanced'>('default');
 
   useEffect(() => {
     if (isOpen) {
@@ -64,19 +69,25 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
     }
   };
 
-  const handleExecute = () => {
+  const handleConfigModeSelect = (mode: 'default' | 'advanced') => {
     if (!selectedServer) {
-      setError('Please select a server for SSH execution');
+      setError('Please select a server first');
       return;
     }
-    onExecute('ssh', selectedServer);
+    setConfigMode(mode);
+    setConfigModalOpen(true);
+  };
+
+  const handleConfigConfirm = (envVars: EnvVars) => {
+    if (!selectedServer) return;
+    setConfigModalOpen(false);
+    onExecute('ssh', selectedServer, envVars);
     onClose();
   };
 
   const handleServerSelect = (server: Server | null) => {
     setSelectedServer(server);
+    setError(null); // Clear error when server is selected
   };
@@ -164,6 +175,31 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
           </div>
         </div>
 
+        {/* Configuration Mode Selection */}
+        <div className="space-y-3">
+          <p className="text-sm text-muted-foreground text-center">
+            Choose configuration mode:
+          </p>
+          <div className="flex gap-3">
+            <Button
+              onClick={() => handleConfigModeSelect('default')}
+              variant="default"
+              size="default"
+              className="flex-1"
+            >
+              Default
+            </Button>
+            <Button
+              onClick={() => handleConfigModeSelect('advanced')}
+              variant="outline"
+              size="default"
+              className="flex-1"
+            >
+              Advanced (Beta)
+            </Button>
+          </div>
+        </div>
+
         {/* Action Buttons */}
         <div className="flex justify-end space-x-3">
           <Button
@@ -173,13 +209,6 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
           >
             Cancel
           </Button>
-          <Button
-            onClick={handleExecute}
-            variant="default"
-            size="default"
-          >
-            Install
-          </Button>
         </div>
       </div>
     ) : (
@@ -204,6 +233,33 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
           />
         </div>
 
+        {/* Configuration Mode Selection - only show when server is selected */}
+        {selectedServer && (
+          <div className="space-y-3 pt-4 border-t border-border">
+            <p className="text-sm text-muted-foreground text-center">
+              Choose configuration mode:
+            </p>
+            <div className="flex gap-3">
+              <Button
+                onClick={() => handleConfigModeSelect('default')}
+                variant="default"
+                size="default"
+                className="flex-1"
+              >
+                Default
+              </Button>
+              <Button
+                onClick={() => handleConfigModeSelect('advanced')}
+                variant="outline"
+                size="default"
+                className="flex-1"
+              >
+                Advanced
+              </Button>
+            </div>
+          </div>
+        )}
+
         {/* Action Buttons */}
         <div className="flex justify-end space-x-3">
           <Button
@@ -213,15 +269,6 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
           >
             Cancel
           </Button>
-          <Button
-            onClick={handleExecute}
-            disabled={!selectedServer}
-            variant="default"
-            size="default"
-            className={!selectedServer ? 'bg-muted-foreground cursor-not-allowed' : ''}
-          >
-            Run on Server
-          </Button>
         </div>
       </div>
     )}
@@ -234,6 +281,16 @@ export function ExecutionModeModal({ isOpen, onClose, onExecute, scriptName }: E
         isOpen={settingsModalOpen}
         onClose={handleSettingsModalClose}
       />
+
+      {/* Configuration Modal */}
+      <ConfigurationModal
+        isOpen={configModalOpen}
+        onClose={() => setConfigModalOpen(false)}
+        onConfirm={handleConfigConfirm}
+        script={script ?? null}
+        server={selectedServer}
+        mode={configMode}
+      />
     </>
   );
 }
@@ -16,7 +16,7 @@ export function Footer({ onOpenReleaseNotes }: FooterProps) {
     <div className="container mx-auto px-4">
       <div className="flex flex-col sm:flex-row items-center justify-between gap-2 text-sm text-muted-foreground">
         <div className="flex items-center gap-2">
-          <span>© 2024 PVE Scripts Local</span>
+          <span>© 2026 PVE Scripts Local</span>
           {versionData?.success && versionData.version && (
             <Button
               variant="ghost"
@@ -1617,7 +1617,7 @@ export function GeneralSettingsModal({
   <Input
     id="new-repo-url"
     type="url"
-    placeholder="https://github.com/owner/repo"
+    placeholder="https://github.com/owner/repo or https://git.example.com/owner/repo"
     value={newRepoUrl}
     onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
       setNewRepoUrl(e.target.value)
@@ -1626,8 +1626,9 @@ export function GeneralSettingsModal({
     className="w-full"
   />
   <p className="text-muted-foreground mt-1 text-xs">
-    Enter a GitHub repository URL (e.g.,
-    https://github.com/owner/repo)
+    Supported: GitHub, GitLab, Bitbucket, or custom Git
+    servers (e.g. https://github.com/owner/repo,
+    https://gitlab.com/owner/repo)
   </p>
 </div>
 <div className="border-border flex items-center justify-between gap-3 rounded-lg border p-3">
@@ -80,6 +80,7 @@ export function InstalledScriptsTab() {
     id: number;
     containerId: string;
     server?: any;
+    containerType?: 'lxc' | 'vm';
   } | null>(null);
   const [showBackupPrompt, setShowBackupPrompt] = useState(false);
   const [showStorageSelection, setShowStorageSelection] = useState(false);
@@ -1167,6 +1168,7 @@ export function InstalledScriptsTab() {
       id: script.id,
       containerId: script.container_id,
       server: server,
+      containerType: script.is_vm ? 'vm' : 'lxc',
     });
   };
|
|||||||
{/* Shell Terminal */}
|
{/* Shell Terminal */}
|
||||||
{openingShell && (
|
{openingShell && (
|
||||||
<div className="mb-8" data-terminal="shell">
|
<div className="mb-8" data-terminal="shell">
|
||||||
|
{openingShell.containerType === 'vm' && (
|
||||||
|
<p className="text-muted-foreground mb-2 text-sm">
|
||||||
|
VM shell uses the Proxmox serial console. The VM must have a
|
||||||
|
serial port configured (e.g. <code className="bg-muted rounded px-1">qm set {openingShell.containerId} -serial0 socket</code>).
|
||||||
|
Detach with <kbd className="bg-muted rounded px-1">Ctrl+O</kbd>.
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
<Terminal
|
<Terminal
|
||||||
scriptPath={`shell-${openingShell.containerId}`}
|
scriptPath={`shell-${openingShell.containerId}`}
|
||||||
onClose={handleCloseShellTerminal}
|
onClose={handleCloseShellTerminal}
|
||||||
@@ -1459,6 +1468,7 @@ export function InstalledScriptsTab() {
|
|||||||
server={openingShell.server}
|
server={openingShell.server}
|
||||||
isShell={true}
|
isShell={true}
|
||||||
containerId={openingShell.containerId}
|
containerId={openingShell.containerId}
|
||||||
|
containerType={openingShell.containerType}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
@@ -1538,7 +1548,7 @@ export function InstalledScriptsTab() {
|
|||||||
>
|
>
|
||||||
{showAutoDetectForm
|
{showAutoDetectForm
|
||||||
? "Cancel Auto-Detect"
|
? "Cancel Auto-Detect"
|
||||||
: '🔍 Auto-Detect LXC Containers (Must contain a tag with "community-script")'}
|
: '🔍 Auto-Detect Containers & VMs (tag: community-script)'}
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
<Button
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
@@ -1764,12 +1774,11 @@ export function InstalledScriptsTab() {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Auto-Detect LXC Containers Form */}
|
{/* Auto-Detect Containers & VMs Form */}
|
||||||
{showAutoDetectForm && (
|
{showAutoDetectForm && (
|
||||||
<div className="bg-card border-border mb-6 rounded-lg border p-4 shadow-sm sm:p-6">
|
<div className="bg-card border-border mb-6 rounded-lg border p-4 shadow-sm sm:p-6">
|
||||||
<h3 className="text-foreground mb-4 text-lg font-semibold sm:mb-6">
|
<h3 className="text-foreground mb-4 text-lg font-semibold sm:mb-6">
|
||||||
Auto-Detect LXC Containers (Must contain a tag with
|
Auto-Detect Containers & VMs (tag: community-script)
|
||||||
"community-script")
|
|
||||||
</h3>
|
</h3>
|
||||||
<div className="space-y-4 sm:space-y-6">
|
<div className="space-y-4 sm:space-y-6">
|
||||||
<div className="bg-muted/30 border-muted rounded-lg border p-4">
|
<div className="bg-muted/30 border-muted rounded-lg border p-4">
|
||||||
@@ -1795,12 +1804,12 @@ export function InstalledScriptsTab() {
|
|||||||
<p>This feature will:</p>
|
<p>This feature will:</p>
|
||||||
<ul className="mt-1 list-inside list-disc space-y-1">
|
<ul className="mt-1 list-inside list-disc space-y-1">
|
||||||
<li>Connect to the selected server via SSH</li>
|
<li>Connect to the selected server via SSH</li>
|
||||||
<li>Scan all LXC config files in /etc/pve/lxc/</li>
|
<li>Scan LXC configs in /etc/pve/lxc/ and VM configs in /etc/pve/qemu-server/</li>
|
||||||
<li>
|
<li>
|
||||||
Find containers with "community-script" in
|
Find containers and VMs with "community-script" in
|
||||||
their tags
|
their tags
|
||||||
</li>
|
</li>
|
||||||
<li>Extract the container ID and hostname</li>
|
<li>Extract the container/VM ID and hostname or name</li>
|
||||||
<li>Add them as installed script entries</li>
|
<li>Add them as installed script entries</li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
@@ -2302,6 +2311,11 @@ export function InstalledScriptsTab() {
|
|||||||
"stopped"
|
"stopped"
|
||||||
}
|
}
|
||||||
className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20"
|
className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20"
|
||||||
|
title={
|
||||||
|
script.is_vm
|
||||||
|
? "VM serial console (requires serial port; detach with Ctrl+O)"
|
||||||
|
: undefined
|
||||||
|
}
|
||||||
>
|
>
|
||||||
Shell
|
Shell
|
||||||
</DropdownMenuItem>
|
</DropdownMenuItem>
|
||||||
|
|||||||
@@ -270,22 +270,21 @@ export function PBSCredentialsModal({
   htmlFor="pbs-fingerprint"
   className="text-foreground mb-1 block text-sm font-medium"
 >
-  Fingerprint <span className="text-error">*</span>
+  Fingerprint
 </label>
 <input
   type="text"
   id="pbs-fingerprint"
   value={pbsFingerprint}
   onChange={(e) => setPbsFingerprint(e.target.value)}
-  required
   disabled={isLoading}
   className="bg-card text-foreground placeholder-muted-foreground focus:ring-ring focus:border-ring border-border w-full rounded-md border px-3 py-2 shadow-sm focus:ring-2 focus:outline-none"
   placeholder="e.g., 7b:e5:87:38:5e:16:05:d1:12:22:7f:73:d2:e2:d0:cf:8c:cb:28:e2:74:0c:78:91:1a:71:74:2e:79:20:5a:02"
 />
 <p className="text-muted-foreground mt-1 text-xs">
-  Server fingerprint for auto-acceptance. You can find this on
-  your PBS dashboard by clicking the "Show Fingerprint"
-  button.
+  Leave empty if PBS uses a trusted CA (e.g. Let's Encrypt).
+  For self-signed certificates, enter the server fingerprint from
+  the PBS dashboard ("Show Fingerprint").
 </p>
 </div>

@@ -28,6 +28,7 @@ interface ScriptDetailModalProps {
   scriptName: string,
   mode?: "local" | "ssh",
   server?: Server,
+  envVars?: Record<string, string | number | boolean>,
 ) => void;
 }

@@ -183,7 +184,7 @@ export function ScriptDetailModal({
   setExecutionModeOpen(true);
 };

-const handleExecuteScript = (mode: "local" | "ssh", server?: Server) => {
+const handleExecuteScript = (mode: "local" | "ssh", server?: Server, envVars?: Record<string, string | number | boolean>) => {
   if (!script || !onInstallScript) return;

   // Find the script path based on selected version type
@@ -197,8 +198,8 @@ export function ScriptDetailModal({
   const scriptPath = `scripts/${scriptMethod.script}`;
   const scriptName = script.name;

-  // Pass execution mode and server info to the parent
-  onInstallScript(scriptPath, scriptName, mode, server);
+  // Pass execution mode, server info, and envVars to the parent
+  onInstallScript(scriptPath, scriptName, mode, server, envVars);

   onClose(); // Close the modal when starting installation
 }
@@ -935,6 +936,7 @@ export function ScriptDetailModal({
 {script && (
   <ExecutionModeModal
     scriptName={script.name}
+    script={script}
     isOpen={executionModeOpen}
     onClose={() => setExecutionModeOpen(false)}
     onExecute={handleExecuteScript}
@@ -438,6 +438,11 @@ export function ServerForm({
   {errors.password && (
     <p className="text-destructive mt-1 text-sm">{errors.password}</p>
   )}
+  <p className="text-muted-foreground mt-1 text-xs">
+    SSH key is recommended when possible. Special characters (e.g.{" "}
+    <code className="rounded bg-muted px-0.5">{"{ } $ \" '"}</code>) are
+    supported.
+  </p>
 </div>
 )}

@@ -21,6 +21,7 @@ interface TerminalProps {
   cloneCount?: number;
   hostnames?: string[];
   containerType?: 'lxc' | 'vm';
+  envVars?: Record<string, string | number | boolean>;
 }

 interface TerminalMessage {
@@ -29,7 +30,7 @@ interface TerminalMessage {
   timestamp: number;
 }

-export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate = false, isShell = false, isBackup = false, isClone = false, containerId, storage, backupStorage, executionId: propExecutionId, cloneCount, hostnames, containerType }: TerminalProps) {
+export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate = false, isShell = false, isBackup = false, isClone = false, containerId, storage, backupStorage, executionId: propExecutionId, cloneCount, hostnames, containerType, envVars }: TerminalProps) {
   const [isConnected, setIsConnected] = useState(false);
   const [isRunning, setIsRunning] = useState(false);
   const [isClient, setIsClient] = useState(false);
@@ -360,7 +361,8 @@ export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate
     backupStorage,
     cloneCount,
     hostnames,
-    containerType
+    containerType,
+    envVars
   };
   ws.send(JSON.stringify(message));
 }
@@ -400,7 +402,7 @@ export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate
     wsRef.current.close();
   }
 };
-}, [scriptPath, mode, server, isUpdate, isShell, containerId, isMobile]);
+}, [scriptPath, mode, server, isUpdate, isShell, containerId, isMobile, envVars]);

 const startScript = () => {
   if (wsRef.current && wsRef.current.readyState === WebSocket.OPEN && !isRunning) {
@@ -417,6 +419,7 @@ export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate
     executionId: newExecutionId,
     mode,
     server,
+    envVars,
     isUpdate,
     isShell,
     isBackup,
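Note: after this change the start message sent over the WebSocket carries the optional envVars map alongside the existing fields. A minimal sketch of the payload shape, assuming field names exactly as listed in the hunks above (the 'start' discriminator and the sample values are hypothetical):

// Sketch: shape of the start message after this change (assumed wire format).
interface StartScriptMessage {
  type: 'start';                                        // assumed discriminator
  scriptPath: string;
  executionId: string;
  mode: 'local' | 'ssh';
  envVars?: Record<string, string | number | boolean>;  // new in this change
  isUpdate: boolean;
  isShell: boolean;
  isBackup: boolean;
}

const message: StartScriptMessage = {
  type: 'start',
  scriptPath: 'scripts/ct/example.sh',                  // hypothetical path
  executionId: `exec_${Date.now()}`,
  mode: 'local',
  envVars: { CT_ID: 105, DISABLE_IPV6: true },          // hypothetical variables
  isUpdate: false,
  isShell: false,
  isBackup: false,
};
// ws.send(JSON.stringify(message));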
@@ -416,11 +416,20 @@ export function VersionDisplay({ onOpenReleaseNotes }: VersionDisplayProps = {})
   setShowUpdateConfirmation(true);
 };

+// Helper to generate secure random string
+function getSecureRandomString(length: number): string {
+  const array = new Uint8Array(length);
+  window.crypto.getRandomValues(array);
+  // Convert to base36 string (alphanumeric)
+  return Array.from(array, b => b.toString(36)).join('').substr(0, length);
+}
+
 const handleConfirmUpdate = () => {
   // Close the confirmation modal
   setShowUpdateConfirmation(false);
   // Start the actual update process
-  const sessionId = `update_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+  const randomSuffix = getSecureRandomString(9);
+  const sessionId = `update_${Date.now()}_${randomSuffix}`;
   const startTime = Date.now();

   setIsUpdating(true);
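Note on getSecureRandomString above: toString(36) renders bytes 36..255 as two base-36 digits, so the concatenated output skews toward digits before substr trims it to length. That is harmless for a session-ID suffix; if uniformly distributed characters were ever needed, a sketch of an alternative using rejection sampling (not what the diff uses):

// Sketch: uniform alphanumeric random string via rejection sampling.
function getUniformRandomString(length: number): string {
  const alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'; // 36 symbols
  const limit = 252; // largest multiple of 36 below 256, keeps b % 36 unbiased
  const out: string[] = [];
  while (out.length < length) {
    const bytes = new Uint8Array(length);
    window.crypto.getRandomValues(bytes);
    for (const b of bytes) {
      if (b < limit && out.length < length) out.push(alphabet[b % 36]!);
    }
  }
  return out.join('');
}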
src/app/api/servers/[id]/discover-ssh-keys/route.ts (new file, 96 lines)
@@ -0,0 +1,96 @@
+import type { NextRequest } from 'next/server';
+import { NextResponse } from 'next/server';
+import { getDatabase } from '../../../../../server/database-prisma';
+import { getSSHExecutionService } from '../../../../../server/ssh-execution-service';
+import type { Server } from '~/types/server';
+
+const DISCOVER_TIMEOUT_MS = 10_000;
+
+/** Match lines that look like SSH public keys (same as build.func) */
+const SSH_PUBKEY_RE = /^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-(ssh-ed25519|ecdsa-sha2-nistp256))\s+/;
+
+/**
+ * Run a command on the Proxmox host and return buffered stdout.
+ * Resolves when the process exits or rejects on timeout/spawn error.
+ */
+function runRemoteCommand(
+  server: Server,
+  command: string,
+  timeoutMs: number
+): Promise<{ stdout: string; exitCode: number }> {
+  const ssh = getSSHExecutionService();
+  return new Promise((resolve, reject) => {
+    const chunks: string[] = [];
+    let settled = false;
+
+    const finish = (stdout: string, exitCode: number) => {
+      if (settled) return;
+      settled = true;
+      clearTimeout(timer);
+      resolve({ stdout, exitCode });
+    };
+
+    const timer = setTimeout(() => {
+      if (settled) return;
+      settled = true;
+      reject(new Error('SSH discover keys timeout'));
+    }, timeoutMs);
+
+    ssh
+      .executeCommand(
+        server,
+        command,
+        (data: string) => chunks.push(data),
+        () => {},
+        (code: number) => finish(chunks.join(''), code)
+      )
+      .catch((err) => {
+        if (!settled) {
+          settled = true;
+          clearTimeout(timer);
+          reject(err);
+        }
+      });
+  });
+}
+
+export async function GET(
+  _request: NextRequest,
+  { params }: { params: Promise<{ id: string }> }
+) {
+  try {
+    const { id: idParam } = await params;
+    const id = parseInt(idParam);
+    if (isNaN(id)) {
+      return NextResponse.json({ error: 'Invalid server ID' }, { status: 400 });
+    }
+
+    const db = getDatabase();
+    const server = await db.getServerById(id) as Server | null;
+
+    if (!server) {
+      return NextResponse.json({ error: 'Server not found' }, { status: 404 });
+    }
+
+    // Same paths as native build.func ssh_discover_default_files();
+    // the [ -f "$f" ] guard already skips unexpanded globs.
+    const remoteScript = `bash -c 'for f in /root/.ssh/authorized_keys /root/.ssh/authorized_keys2 /root/.ssh/*.pub /etc/ssh/authorized_keys /etc/ssh/authorized_keys.d/*; do [ -f "$f" ] && [ -r "$f" ] && grep -E "^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-)" "$f" 2>/dev/null; done | sort -u'`;
+
+    const { stdout } = await runRemoteCommand(server, remoteScript, DISCOVER_TIMEOUT_MS);
+
+    const keys = stdout
+      .split(/\r?\n/)
+      .map((line) => line.trim())
+      .filter((line) => line.length > 0 && SSH_PUBKEY_RE.test(line));
+
+    return NextResponse.json({ keys });
+  } catch (error) {
+    console.error('Error discovering SSH keys:', error);
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : String(error),
+      },
+      { status: 500 }
+    );
+  }
+}
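A minimal sketch of a client for this endpoint; the URL follows the route's file path and the JSON shapes mirror the handler above:

// Sketch: fetch discovered SSH public keys for a server by ID.
async function discoverSshKeys(serverId: number): Promise<string[]> {
  const res = await fetch(`/api/servers/${serverId}/discover-ssh-keys`);
  const body = (await res.json()) as { keys?: string[]; error?: string };
  if (!res.ok) {
    // 400: invalid ID, 404: unknown server, 500: SSH failure or timeout
    throw new Error(body.error ?? `Request failed: ${res.status}`);
  }
  return body.keys ?? [];
}
// Usage: const keys = await discoverSshKeys(3); // hypothetical server ID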
@@ -32,6 +32,7 @@ export default function Home() {
   name: string;
   mode?: "local" | "ssh";
   server?: Server;
+  envVars?: Record<string, string | number | boolean>;
 } | null>(null);
 const [activeTab, setActiveTab] = useState<
   "scripts" | "downloaded" | "installed" | "backups"
@@ -209,8 +210,9 @@ export default function Home() {
   scriptName: string,
   mode?: "local" | "ssh",
   server?: Server,
+  envVars?: Record<string, string | number | boolean>,
 ) => {
-  setRunningScript({ path: scriptPath, name: scriptName, mode, server });
+  setRunningScript({ path: scriptPath, name: scriptName, mode, server, envVars });
   // Scroll to terminal after a short delay to ensure it's rendered
   setTimeout(scrollToTerminal, 100);
 };
@@ -360,6 +362,7 @@ export default function Home() {
   onClose={handleCloseTerminal}
   mode={runningScript.mode}
   server={runningScript.server}
+  envVars={runningScript.envVars}
 />
 </div>
 )}
@@ -23,8 +23,11 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: z.string().default("scripts/"),
     // WebSocket Configuration
     WEBSOCKET_PORT: z.string().default("3001"),
-    // GitHub Configuration
+    // Git provider tokens (optional, for private repos)
     GITHUB_TOKEN: z.string().optional(),
+    GITLAB_TOKEN: z.string().optional(),
+    BITBUCKET_APP_PASSWORD: z.string().optional(),
+    BITBUCKET_TOKEN: z.string().optional(),
     // Authentication Configuration
     AUTH_USERNAME: z.string().optional(),
     AUTH_PASSWORD_HASH: z.string().optional(),
@@ -62,8 +65,10 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: process.env.ALLOWED_SCRIPT_PATHS,
     // WebSocket Configuration
     WEBSOCKET_PORT: process.env.WEBSOCKET_PORT,
-    // GitHub Configuration
     GITHUB_TOKEN: process.env.GITHUB_TOKEN,
+    GITLAB_TOKEN: process.env.GITLAB_TOKEN,
+    BITBUCKET_APP_PASSWORD: process.env.BITBUCKET_APP_PASSWORD,
+    BITBUCKET_TOKEN: process.env.BITBUCKET_TOKEN,
     // Authentication Configuration
     AUTH_USERNAME: process.env.AUTH_USERNAME,
     AUTH_PASSWORD_HASH: process.env.AUTH_PASSWORD_HASH,
@@ -418,44 +418,46 @@ async function isVM(scriptId: number, containerId: string, serverId: number | null) {
     return false; // Default to LXC if SSH fails
   }

-  // Check both config file paths
-  const vmConfigPath = `/etc/pve/qemu-server/${containerId}.conf`;
-  const lxcConfigPath = `/etc/pve/lxc/${containerId}.conf`;
-
-  // Check VM config file
-  let vmConfigExists = false;
-  await new Promise<void>((resolve) => {
-    void sshExecutionService.executeCommand(
-      server as Server,
-      `test -f "${vmConfigPath}" && echo "exists" || echo "not_exists"`,
-      (data: string) => {
-        if (data.includes('exists')) {
-          vmConfigExists = true;
-        }
-      },
-      () => resolve(),
-      () => resolve()
-    );
-  });
-
-  if (vmConfigExists) {
-    return true; // VM config file exists
-  }
-
-  // Check LXC config file (not needed for return value, but check for completeness)
-  await new Promise<void>((resolve) => {
-    void sshExecutionService.executeCommand(
-      server as Server,
-      `test -f "${lxcConfigPath}" && echo "exists" || echo "not_exists"`,
-      (_data: string) => {
-        // Data handler not needed - just checking if file exists
-      },
-      () => resolve(),
-      () => resolve()
-    );
-  });
-
-  return false; // Always LXC since VM config doesn't exist
+  // Node-specific paths (multi-node Proxmox: /etc/pve/nodes/NODENAME/...)
+  const nodeName = (server as Server).name;
+  const vmConfigPathNode = `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`;
+  const lxcConfigPathNode = `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`;
+  // Fallback for single-node or when server.name is not the Proxmox node name
+  const vmConfigPathFallback = `/etc/pve/qemu-server/${containerId}.conf`;
+  const lxcConfigPathFallback = `/etc/pve/lxc/${containerId}.conf`;
+
+  const checkPathExists = (path: string): Promise<boolean> =>
+    new Promise<boolean>((resolve) => {
+      let exists = false;
+      void sshExecutionService.executeCommand(
+        server as Server,
+        `test -f "${path}" && echo "exists" || echo "not_found"`,
+        (data: string) => {
+          if (data.includes('exists')) exists = true;
+        },
+        () => resolve(exists),
+        () => resolve(exists)
+      );
+    });
+
+  // Prefer node-specific paths first
+  const vmConfigExistsNode = await checkPathExists(vmConfigPathNode);
+  if (vmConfigExistsNode) {
+    return true; // VM config file exists on node
+  }
+
+  const lxcConfigExistsNode = await checkPathExists(lxcConfigPathNode);
+  if (lxcConfigExistsNode) {
+    return false; // LXC config file exists on node
+  }
+
+  // Fallback: single-node or server.name not matching Proxmox node name
+  const vmConfigExistsFallback = await checkPathExists(vmConfigPathFallback);
+  if (vmConfigExistsFallback) {
+    return true;
+  }
+
+  return false; // LXC (or neither path exists)
 } catch (error) {
   console.error('Error determining container type:', error);
   return false; // Default to LXC on error
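Worked example of the new lookup order, assuming node name pve1 and guest ID 105 (both hypothetical):

// Sketch: the probe order and the exact shell test the helper runs.
// On a standard Proxmox install /etc/pve/qemu-server and /etc/pve/lxc are
// symlinks to the local node's directories, so the fallback still resolves
// when server.name does not match the node name.
const nodeName = 'pve1';
const containerId = '105';
const candidates = [
  `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`, // VM, node-specific
  `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`,         // LXC, node-specific
  `/etc/pve/qemu-server/${containerId}.conf`,                   // VM, fallback
];
for (const p of candidates) {
  console.log(`test -f "${p}" && echo "exists" || echo "not_found"`);
}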
@@ -971,10 +973,11 @@ export const installedScriptsRouter = createTRPCRouter({
   };

   // Helper function to check config file for community-script tag and extract hostname/name
+  const nodeName = (server as Server).name;
   const checkConfigAndExtractInfo = async (id: string, isVM: boolean): Promise<any> => {
     const configPath = isVM
-      ? `/etc/pve/qemu-server/${id}.conf`
-      : `/etc/pve/lxc/${id}.conf`;
+      ? `/etc/pve/nodes/${nodeName}/qemu-server/${id}.conf`
+      : `/etc/pve/nodes/${nodeName}/lxc/${id}.conf`;

     const readCommand = `cat "${configPath}" 2>/dev/null`;

@@ -1060,7 +1063,7 @@ export const installedScriptsRouter = createTRPCRouter({
       reject(new Error(`pct list failed: ${error}`));
     },
     (_exitCode: number) => {
-      resolve();
+      setImmediate(() => resolve());
     }
   );
 });
@@ -1079,7 +1082,7 @@ export const installedScriptsRouter = createTRPCRouter({
       reject(new Error(`qm list failed: ${error}`));
     },
     (_exitCode: number) => {
-      resolve();
+      setImmediate(() => resolve());
     }
   );
 });
@@ -1318,10 +1321,10 @@ export const installedScriptsRouter = createTRPCRouter({

   // Check if ID exists in either pct list (containers) or qm list (VMs)
   if (!existingIds.has(containerId)) {
-    // Also verify config file doesn't exist as a double-check
-    // Check both container and VM config paths
-    const checkContainerCommand = `test -f "/etc/pve/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
-    const checkVMCommand = `test -f "/etc/pve/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;
+    // Also verify config file doesn't exist as a double-check (node-specific paths)
+    const nodeName = (server as Server).name;
+    const checkContainerCommand = `test -f "/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
+    const checkVMCommand = `test -f "/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;

     const configExists = await new Promise<boolean>((resolve) => {
       let combinedOutput = '';
@@ -2068,32 +2071,72 @@ export const installedScriptsRouter = createTRPCRouter({
     };
   }

-  // Get the script's interface_port from metadata (prioritize metadata over existing database values)
-  let detectedPort = 80; // Default fallback
+  // Resolve app slug from /usr/bin/update (community-scripts) when available; else from hostname/suffix.
+  let slugFromUpdate: string | null = null;
+  try {
+    const updateCommand = `pct exec ${scriptData.container_id} -- cat /usr/bin/update 2>/dev/null`;
+    let updateOutput = '';
+    await new Promise<void>((resolve) => {
+      void sshExecutionService.executeCommand(
+        server as Server,
+        updateCommand,
+        (data: string) => { updateOutput += data; },
+        () => {},
+        () => resolve()
+      );
+    });
+    const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
+    if (ctSlugMatch?.[1]) {
+      slugFromUpdate = ctSlugMatch[1].trim().toLowerCase();
+      console.log('🔍 Slug from /usr/bin/update:', slugFromUpdate);
+    }
+  } catch {
+    // Container may not be from community-scripts; use hostname fallback
+  }
+
+  // Get the script's interface_port from metadata. Primary: slug from /usr/bin/update; fallback: hostname/suffix.
+  let detectedPort = 80; // Default fallback

   try {
-    // Import localScriptsService to get script metadata
     const { localScriptsService } = await import('~/server/services/localScripts');
-
-    // Get all scripts and find the one matching our script name
     const allScripts = await localScriptsService.getAllScripts();
-
-    // Extract script slug from script_name (remove .sh extension)
-    const scriptSlug = scriptData.script_name.replace(/\.sh$/, '');
-    console.log('🔍 Looking for script with slug:', scriptSlug);
-    const scriptMetadata = allScripts.find(script => script.slug === scriptSlug);
+    const nameFromHostname = scriptData.script_name.replace(/\.sh$/, '').toLowerCase();
+
+    // Primary: slug from /usr/bin/update (community-scripts)
+    let scriptMetadata =
+      slugFromUpdate != null
+        ? allScripts.find((s) => s.slug === slugFromUpdate)
+        : undefined;
+    if (scriptMetadata) {
+      console.log('🔍 Using slug from /usr/bin/update for metadata:', scriptMetadata.slug);
+    }
+
+    // Fallback: exact hostname then hostname ends with slug (longest wins)
+    if (!scriptMetadata) {
+      scriptMetadata = allScripts.find((script) => script.slug === nameFromHostname);
+      if (!scriptMetadata) {
+        const suffixMatches = allScripts.filter((script) => nameFromHostname.endsWith(script.slug));
+        scriptMetadata =
+          suffixMatches.length > 0
+            ? suffixMatches.reduce((a, b) => (a.slug.length >= b.slug.length ? a : b))
+            : undefined;
+        if (scriptMetadata) {
+          console.log('🔍 Matched metadata by slug suffix in hostname:', scriptMetadata.slug);
+        }
+      }
+    }

     if (scriptMetadata?.interface_port) {
       detectedPort = scriptMetadata.interface_port;
       console.log('📋 Found interface_port in metadata:', detectedPort);
     } else {
       console.log('📋 No interface_port found in metadata, using default port 80');
-      detectedPort = 80; // Default to port 80 if no metadata port found
+      detectedPort = 80;
     }
   } catch (error) {
     console.log('⚠️ Error getting script metadata, using default port 80:', error);
-    detectedPort = 80; // Default to port 80 if metadata lookup fails
+    detectedPort = 80;
   }

   console.log('🎯 Final detected port:', detectedPort);
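For context, community-scripts containers ship a /usr/bin/update wrapper that fetches the app's ct/<slug>.sh, which is what the regex above extracts. A worked example with illustrative wrapper content:

// Sketch: what the slug extraction sees for a hypothetical container.
const updateOutput = `#!/usr/bin/env bash
bash <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/paperless-ngx.sh)`;
const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
console.log(ctSlugMatch?.[1]); // "paperless-ngx"

The hostname fallback then only runs when no such wrapper exists, e.g. for renamed or hand-built guests.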
@@ -2197,8 +2240,9 @@ export const installedScriptsRouter = createTRPCRouter({
     };
   }

-  // Read config file
-  const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
+  // Read config file (node-specific path)
+  const nodeName = (server as Server).name;
+  const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
   const readCommand = `cat "${configPath}" 2>/dev/null`;
   let rawConfig = '';

@@ -2328,8 +2372,9 @@ export const installedScriptsRouter = createTRPCRouter({
     };
   }

-  // Write config file using heredoc for safe escaping
-  const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
+  // Write config file using heredoc for safe escaping (node-specific path)
+  const nodeName = (server as Server).name;
+  const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
   const writeCommand = `cat > "${configPath}" << 'EOFCONFIG'
 ${rawConfig}
 EOFCONFIG`;
@@ -2737,9 +2782,10 @@ EOFCONFIG`;
   const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
   const sshExecutionService = getSSHExecutionService();

+  const nodeName = (server as Server).name;
   const configPath = input.containerType === 'lxc'
-    ? `/etc/pve/lxc/${input.containerId}.conf`
-    : `/etc/pve/qemu-server/${input.containerId}.conf`;
+    ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
+    : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;

   let configContent = '';
   await new Promise<void>((resolve) => {
@@ -3131,10 +3177,11 @@ EOFCONFIG`;
   const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
   const sshExecutionService = getSSHExecutionService();

-  // Read config file to get hostname/name
+  // Read config file to get hostname/name (node-specific path)
+  const nodeName = (server as Server).name;
   const configPath = input.containerType === 'lxc'
-    ? `/etc/pve/lxc/${input.containerId}.conf`
-    : `/etc/pve/qemu-server/${input.containerId}.conf`;
+    ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
+    : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;

   let configContent = '';
   await new Promise<void>((resolve) => {
@@ -7,7 +7,10 @@ import { localScriptsService } from "~/server/services/localScripts";
 import { scriptDownloaderService } from "~/server/services/scriptDownloader.js";
 import { AutoSyncService } from "~/server/services/autoSyncService";
 import { repositoryService } from "~/server/services/repositoryService";
+import { getStorageService } from "~/server/services/storageService";
+import { getDatabase } from "~/server/database-prisma";
 import type { ScriptCard } from "~/types/script";
+import type { Server } from "~/types/server";

 export const scriptsRouter = createTRPCRouter({
   // Get all available scripts
@@ -637,5 +640,194 @@ export const scriptsRouter = createTRPCRouter({
         status: null
       };
     }
+  }),
+
+  // Get rootfs storages for a server (for container creation)
+  getRootfsStorages: publicProcedure
+    .input(z.object({
+      serverId: z.number(),
+      forceRefresh: z.boolean().optional().default(false)
+    }))
+    .query(async ({ input }) => {
+      try {
+        const db = getDatabase();
+        const server = await db.getServerById(input.serverId);
+
+        if (!server) {
+          return {
+            success: false,
+            error: 'Server not found',
+            storages: []
+          };
+        }
+
+        // Get server hostname to filter storages by node assignment
+        const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
+        const sshExecutionService = getSSHExecutionService();
+        let serverHostname = '';
+        try {
+          await new Promise<void>((resolve, reject) => {
+            void sshExecutionService.executeCommand(
+              server as Server,
+              'hostname',
+              (data: string) => {
+                serverHostname += data;
+              },
+              (error: string) => {
+                reject(new Error(`Failed to get hostname: ${error}`));
+              },
+              (exitCode: number) => {
+                if (exitCode === 0) {
+                  resolve();
+                } else {
+                  reject(new Error(`hostname command failed with exit code ${exitCode}`));
+                }
+              }
+            );
+          });
+        } catch (error) {
+          console.error('Error getting server hostname:', error);
+          // Continue without filtering if hostname can't be retrieved
+        }
+
+        const normalizedHostname = serverHostname.trim().toLowerCase();
+
+        const storageService = getStorageService();
+        const allStorages = await storageService.getStorages(server as Server, input.forceRefresh);
+
+        // Filter storages by node hostname matching and content type (rootdir for containers)
+        const rootfsStorages = allStorages.filter(storage => {
+          // Check content type - must have rootdir for containers
+          const hasRootdir = storage.content.includes('rootdir');
+          if (!hasRootdir) {
+            return false;
+          }
+
+          // If storage has no nodes specified, it's available on all nodes
+          if (!storage.nodes || storage.nodes.length === 0) {
+            return true;
+          }
+
+          // If we couldn't get hostname, include all storages (fallback)
+          if (!normalizedHostname) {
+            return true;
+          }
+
+          // Check if server hostname is in the nodes array (case-insensitive, trimmed)
+          const normalizedNodes = storage.nodes.map(node => node.trim().toLowerCase());
+          return normalizedNodes.includes(normalizedHostname);
+        });
+
+        return {
+          success: true,
+          storages: rootfsStorages.map(s => ({
+            name: s.name,
+            type: s.type,
+            content: s.content
+          }))
+        };
+      } catch (error) {
+        console.error('Error fetching rootfs storages:', error);
+        // Return empty array on error (as per plan requirement)
+        return {
+          success: false,
+          error: error instanceof Error ? error.message : 'Failed to fetch storages',
+          storages: []
+        };
+      }
+    }),
+
+  // Get template storages for a server (for template storage selection)
+  getTemplateStorages: publicProcedure
+    .input(z.object({
+      serverId: z.number(),
+      forceRefresh: z.boolean().optional().default(false)
+    }))
+    .query(async ({ input }) => {
+      try {
+        const db = getDatabase();
+        const server = await db.getServerById(input.serverId);
+
+        if (!server) {
+          return {
+            success: false,
+            error: 'Server not found',
+            storages: []
+          };
+        }
+
+        // Get server hostname to filter storages by node assignment
+        const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
+        const sshExecutionService = getSSHExecutionService();
+        let serverHostname = '';
+        try {
+          await new Promise<void>((resolve, reject) => {
+            void sshExecutionService.executeCommand(
+              server as Server,
+              'hostname',
+              (data: string) => {
+                serverHostname += data;
+              },
+              (error: string) => {
+                reject(new Error(`Failed to get hostname: ${error}`));
+              },
+              (exitCode: number) => {
+                if (exitCode === 0) {
+                  resolve();
+                } else {
+                  reject(new Error(`hostname command failed with exit code ${exitCode}`));
+                }
+              }
+            );
+          });
+        } catch (error) {
+          console.error('Error getting server hostname:', error);
+          // Continue without filtering if hostname can't be retrieved
+        }
+
+        const normalizedHostname = serverHostname.trim().toLowerCase();
+
+        const storageService = getStorageService();
+        const allStorages = await storageService.getStorages(server as Server, input.forceRefresh);
+
+        // Filter storages by node hostname matching and content type (vztmpl for templates)
+        const templateStorages = allStorages.filter(storage => {
+          // Check content type - must have vztmpl for templates
+          const hasVztmpl = storage.content.includes('vztmpl');
+          if (!hasVztmpl) {
+            return false;
+          }
+
+          // If storage has no nodes specified, it's available on all nodes
+          if (!storage.nodes || storage.nodes.length === 0) {
+            return true;
+          }
+
+          // If we couldn't get hostname, include all storages (fallback)
+          if (!normalizedHostname) {
+            return true;
+          }
+
+          // Check if server hostname is in the nodes array (case-insensitive, trimmed)
+          const normalizedNodes = storage.nodes.map(node => node.trim().toLowerCase());
+          return normalizedNodes.includes(normalizedHostname);
+        });
+
+        return {
+          success: true,
+          storages: templateStorages.map(s => ({
+            name: s.name,
+            type: s.type,
+            content: s.content
+          }))
+        };
+      } catch (error) {
+        console.error('Error fetching template storages:', error);
+        return {
+          success: false,
+          error: error instanceof Error ? error.message : 'Failed to fetch storages',
+          storages: []
+        };
+      }
+    })
   })
 });
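getRootfsStorages and getTemplateStorages above differ only in the content type they filter on ('rootdir' vs 'vztmpl'). If the duplication becomes a burden, a sketch of a shared filter helper (the helper name and StorageInfo shape are assumptions based on the fields used above):

// Sketch: shared node/content filter for both storage queries.
interface StorageInfo {
  name: string;
  type: string;
  content: string;   // e.g. 'rootdir,images' or 'vztmpl,iso'
  nodes?: string[];  // empty or undefined means available on all nodes
}

function filterStoragesForNode(
  storages: StorageInfo[],
  contentType: 'rootdir' | 'vztmpl',
  serverHostname: string,
): StorageInfo[] {
  const hostname = serverHostname.trim().toLowerCase();
  return storages.filter((s) => {
    if (!s.content.includes(contentType)) return false;
    if (!s.nodes || s.nodes.length === 0) return true; // unrestricted storage
    if (!hostname) return true; // hostname lookup failed: do not filter
    return s.nodes.map((n) => n.trim().toLowerCase()).includes(hostname);
  });
}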
@@ -238,6 +238,27 @@ export const versionRouter = createTRPCRouter({
   // Clear/create the log file
   await writeFile(logPath, '', 'utf-8');

+  // Always fetch the latest update.sh from GitHub before running.
+  // This ensures we always use the newest update script, avoiding
+  // the "chicken-and-egg" problem where old scripts can't update properly.
+  const updateScriptUrl = 'https://raw.githubusercontent.com/community-scripts/ProxmoxVE-Local/main/update.sh';
+  try {
+    const response = await fetch(updateScriptUrl);
+    if (response.ok) {
+      const latestScript = await response.text();
+      await writeFile(updateScriptPath, latestScript, { mode: 0o755 });
+      // Log that we fetched the latest script
+      await writeFile(logPath, '[INFO] Fetched latest update.sh from GitHub\n', { flag: 'a' });
+    } else {
+      // If the fetch fails, log a warning but continue with the local script
+      await writeFile(logPath, `[WARNING] Could not fetch latest update.sh (HTTP ${response.status}), using local version\n`, { flag: 'a' });
+    }
+  } catch (fetchError) {
+    // If the fetch throws, log a warning but continue with the local script
+    const errorMsg = fetchError instanceof Error ? fetchError.message : 'Unknown error';
+    await writeFile(logPath, `[WARNING] Could not fetch latest update.sh: ${errorMsg}, using local version\n`, { flag: 'a' });
+  }
+
   // Spawn the update script as a detached process using nohup
   // This allows it to run independently and kill the parent Node.js process
   // Redirect output to log file
@@ -1,9 +1,22 @@
 import 'dotenv/config'
 import { PrismaClient } from '../../prisma/generated/prisma/client.ts'
 import { PrismaBetterSqlite3 } from '@prisma/adapter-better-sqlite3'
+import { existsSync, mkdirSync } from 'fs'
+import { dirname } from 'path'

 const globalForPrisma = globalThis;

+// Ensure database directory exists before initializing Prisma
+// DATABASE_URL format: file:/path/to/database.db
+const dbUrl = process.env.DATABASE_URL || 'file:./data/settings.db';
+const dbPath = dbUrl.replace(/^file:/, '');
+const dbDir = dirname(dbPath);
+
+if (!existsSync(dbDir)) {
+  console.log(`Creating database directory: ${dbDir}`);
+  mkdirSync(dbDir, { recursive: true });
+}
+
 const adapter = new PrismaBetterSqlite3({ url: process.env.DATABASE_URL });

 export const prisma = globalForPrisma.prisma ?? new PrismaClient({ adapter });
@@ -1,9 +1,22 @@
 import 'dotenv/config'
 import { PrismaClient } from '../../prisma/generated/prisma/client'
 import { PrismaBetterSqlite3 } from '@prisma/adapter-better-sqlite3'
+import { existsSync, mkdirSync } from 'fs'
+import { dirname } from 'path'

 const globalForPrisma = globalThis as { prisma?: PrismaClient };

+// Ensure database directory exists before initializing Prisma
+// DATABASE_URL format: file:/path/to/database.db
+const dbUrl = process.env.DATABASE_URL || 'file:./data/settings.db';
+const dbPath = dbUrl.replace(/^file:/, '');
+const dbDir = dirname(dbPath);
+
+if (!existsSync(dbDir)) {
+  console.log(`Creating database directory: ${dbDir}`);
+  mkdirSync(dbDir, { recursive: true });
+}
+
 const adapter = new PrismaBetterSqlite3({ url: process.env.DATABASE_URL! });

 export const prisma: PrismaClient = globalForPrisma.prisma ?? new PrismaClient({
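Worked example of the directory bootstrap added in both variants: with the default DATABASE_URL the file: prefix is stripped and dirname gives the folder to create.

// Sketch: what the bootstrap computes for a typical DATABASE_URL.
import { dirname } from 'path';

const dbUrl = 'file:./data/settings.db';    // example value
const dbPath = dbUrl.replace(/^file:/, ''); // './data/settings.db'
const dbDir = dirname(dbPath);              // './data', created recursively if absent
console.log({ dbPath, dbDir });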
src/server/lib/gitProvider/bitbucket.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
+import type { DirEntry, GitProvider } from './types';
+import { parseRepoUrl } from '../repositoryUrlValidation';
+
+export class BitbucketProvider implements GitProvider {
+  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
+    const { owner, repo } = parseRepoUrl(repoUrl);
+    const listUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${path}`;
+    const headers: Record<string, string> = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+    const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
+    if (token) {
+      const auth = Buffer.from(`:${token}`).toString('base64');
+      headers.Authorization = `Basic ${auth}`;
+    }
+
+    const response = await fetch(listUrl, { headers });
+    if (!response.ok) {
+      throw new Error(`Bitbucket API error: ${response.status} ${response.statusText}`);
+    }
+
+    const body = (await response.json()) as { values?: { path: string; type: string }[] };
+    const data = body.values ?? (Array.isArray(body) ? body : []);
+    if (!Array.isArray(data)) {
+      throw new Error('Bitbucket API returned unexpected response');
+    }
+    return data.map((item: { path: string; type: string }) => {
+      const name = item.path.split('/').pop() ?? item.path;
+      return {
+        name,
+        path: item.path,
+        type: item.type === 'commit_directory' ? ('dir' as const) : ('file' as const),
+      };
+    });
+  }
+
+  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
+    const { owner, repo } = parseRepoUrl(repoUrl);
+    const rawUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${filePath}`;
+    const headers: Record<string, string> = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+    const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
+    if (token) {
+      const auth = Buffer.from(`:${token}`).toString('base64');
+      headers.Authorization = `Basic ${auth}`;
+    }
+
+    const response = await fetch(rawUrl, { headers });
+    if (!response.ok) {
+      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
+    }
+    return response.text();
+  }
+}
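A usage sketch of the provider above (the repository URL is hypothetical; the DirEntry fields follow the mapping in listDirectory):

// Sketch: list the ct/ directory of a hypothetical Bitbucket repository.
import { BitbucketProvider } from './bitbucket';

async function demo(): Promise<void> {
  const provider = new BitbucketProvider();
  const entries = await provider.listDirectory(
    'https://bitbucket.org/exampleuser/example-scripts', // hypothetical repo
    'ct',
    'main',
  );
  for (const e of entries) {
    console.log(`${e.type === 'dir' ? 'd' : '-'} ${e.path}`);
  }
}
void demo();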
src/server/lib/gitProvider/custom.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
+import type { DirEntry, GitProvider } from "./types";
+import { parseRepoUrl } from "../repositoryUrlValidation";
+
+export class CustomProvider implements GitProvider {
+  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
+    const { origin, owner, repo } = parseRepoUrl(repoUrl);
+    const apiUrl = `${origin}/api/v1/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
+    const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
+    const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
+    if (token) headers.Authorization = `token ${token}`;
+
+    const response = await fetch(apiUrl, { headers });
+    if (!response.ok) {
+      throw new Error(`Custom Git server: list directory failed (${response.status}).`);
+    }
+    const data = (await response.json()) as { type: string; name: string; path: string }[];
+    if (!Array.isArray(data)) {
+      const single = data as unknown as { type?: string; name?: string; path?: string };
+      if (single?.name) {
+        return [{ name: single.name, path: single.path ?? path, type: single.type === "dir" ? "dir" : "file" }];
+      }
+      throw new Error("Custom Git server returned unexpected response");
+    }
+    return data.map((item) => ({
+      name: item.name,
+      path: item.path,
+      type: item.type === "dir" ? ("dir" as const) : ("file" as const),
+    }));
+  }
+
+  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
+    const { origin, owner, repo } = parseRepoUrl(repoUrl);
+    const rawUrl = `${origin}/${owner}/${repo}/raw/${encodeURIComponent(branch)}/${filePath}`;
+    const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
+    const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
+    if (token) headers.Authorization = `token ${token}`;
+
+    const response = await fetch(rawUrl, { headers });
+    if (!response.ok) {
+      throw new Error(`Failed to download ${filePath} from custom Git server (${response.status}).`);
+    }
+    return response.text();
+  }
+}
src/server/lib/gitProvider/github.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
+import type { DirEntry, GitProvider } from './types';
+import { parseRepoUrl } from '../repositoryUrlValidation';
+
+export class GitHubProvider implements GitProvider {
+  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
+    const { owner, repo } = parseRepoUrl(repoUrl);
+    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
+    const headers: Record<string, string> = {
+      Accept: 'application/vnd.github.v3+json',
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+    const token = process.env.GITHUB_TOKEN;
+    if (token) headers.Authorization = `token ${token}`;
+
+    const response = await fetch(apiUrl, { headers });
+    if (!response.ok) {
+      if (response.status === 403) {
+        const err = new Error(
+          `GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN. Status: ${response.status} ${response.statusText}`
+        );
+        (err as Error & { name: string }).name = 'RateLimitError';
+        throw err;
+      }
+      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
+    }
+
+    const data = (await response.json()) as { type: string; name: string; path: string }[];
+    if (!Array.isArray(data)) {
+      throw new Error('GitHub API returned unexpected response');
+    }
+    return data.map((item) => ({
+      name: item.name,
+      path: item.path,
+      type: item.type === 'dir' ? ('dir' as const) : ('file' as const),
+    }));
+  }
+
+  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
+    const { owner, repo } = parseRepoUrl(repoUrl);
+    const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${encodeURIComponent(branch)}/${filePath}`;
+    const headers: Record<string, string> = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+    const token = process.env.GITHUB_TOKEN;
+    if (token) headers.Authorization = `token ${token}`;
+
+    const response = await fetch(rawUrl, { headers });
+    if (!response.ok) {
+      if (response.status === 403) {
+        const err = new Error(
+          `GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN.`
+        );
+        (err as Error & { name: string }).name = 'RateLimitError';
+        throw err;
+      }
+      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
+    }
+    return response.text();
+  }
+}
58
src/server/lib/gitProvider/gitlab.ts
Normal file
58
src/server/lib/gitProvider/gitlab.ts
Normal file
@@ -0,0 +1,58 @@
import type { DirEntry, GitProvider } from './types';
import { parseRepoUrl } from '../repositoryUrlValidation';

export class GitLabProvider implements GitProvider {
  private getBaseUrl(repoUrl: string): string {
    const { origin } = parseRepoUrl(repoUrl);
    return origin;
  }

  private getProjectId(repoUrl: string): string {
    const { owner, repo } = parseRepoUrl(repoUrl);
    return encodeURIComponent(`${owner}/${repo}`);
  }

  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
    const baseUrl = this.getBaseUrl(repoUrl);
    const projectId = this.getProjectId(repoUrl);
    const apiUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/tree?path=${encodeURIComponent(path)}&ref=${encodeURIComponent(branch)}&per_page=100`;
    const headers: Record<string, string> = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITLAB_TOKEN;
    if (token) headers['PRIVATE-TOKEN'] = token;

    const response = await fetch(apiUrl, { headers });
    if (!response.ok) {
      throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
    }

    const data = (await response.json()) as { type: string; name: string; path: string }[];
    if (!Array.isArray(data)) {
      throw new Error('GitLab API returned unexpected response');
    }
    return data.map((item) => ({
      name: item.name,
      path: item.path,
      type: item.type === 'tree' ? ('dir' as const) : ('file' as const),
    }));
  }

  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
    const baseUrl = this.getBaseUrl(repoUrl);
    const projectId = this.getProjectId(repoUrl);
    const encodedPath = encodeURIComponent(filePath);
    const rawUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/files/${encodedPath}/raw?ref=${encodeURIComponent(branch)}`;
    const headers: Record<string, string> = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITLAB_TOKEN;
    if (token) headers['PRIVATE-TOKEN'] = token;

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }
    return response.text();
  }
}
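A note on getProjectId: the GitLab v4 API accepts a project identifier of the form owner/repo only when the slash is percent-encoded, which is why the provider URL-encodes it. Illustrative values:

const projectId = encodeURIComponent('owner/repo');
console.log(projectId); // "owner%2Frepo"
console.log(`https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=main`);
// https://gitlab.com/api/v4/projects/owner%2Frepo/repository/tree?ref=main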
src/server/lib/gitProvider/index.js (Normal file, 1 line)
@@ -0,0 +1 @@
export { listDirectory, downloadRawFile, getRepoProvider } from "./index.ts";
src/server/lib/gitProvider/index.ts (Normal file, 28 lines)
@@ -0,0 +1,28 @@
import type { DirEntry, GitProvider } from "./types";
import { getRepoProvider } from "../repositoryUrlValidation";
import { GitHubProvider } from "./github";
import { GitLabProvider } from "./gitlab";
import { BitbucketProvider } from "./bitbucket";
import { CustomProvider } from "./custom";

const providers: Record<string, GitProvider> = {
  github: new GitHubProvider(),
  gitlab: new GitLabProvider(),
  bitbucket: new BitbucketProvider(),
  custom: new CustomProvider(),
};

export type { DirEntry, GitProvider };
export { getRepoProvider };

export function getGitProvider(repoUrl: string): GitProvider {
  return providers[getRepoProvider(repoUrl)]!;
}

export async function listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
  return getGitProvider(repoUrl).listDirectory(repoUrl, path, branch);
}

export async function downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
  return getGitProvider(repoUrl).downloadRawFile(repoUrl, filePath, branch);
}
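Callers are expected to go through this facade rather than instantiating providers; a minimal usage sketch (repo URLs are illustrative):

import { listDirectory, downloadRawFile, getRepoProvider } from './index';

console.log(getRepoProvider('https://gitlab.com/owner/repo'));      // "gitlab"
console.log(getRepoProvider('https://git.example.com/owner/repo')); // "custom"

(async () => {
  // The facade dispatches on the URL's host, so the call site is provider-agnostic.
  const entries = await listDirectory('https://github.com/owner/repo', 'json', 'main');
  const first = entries.find((e) => e.type === 'file');
  if (first) console.log(await downloadRawFile('https://github.com/owner/repo', first.path, 'main'));
})();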
src/server/lib/gitProvider/types.ts (Normal file, 14 lines)
@@ -0,0 +1,14 @@
/**
 * Git provider interface for listing and downloading repository files.
 */

export type DirEntry = {
  name: string;
  path: string;
  type: 'file' | 'dir';
};

export interface GitProvider {
  listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]>;
  downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string>;
}
src/server/lib/repositoryUrlValidation.js (Normal file, 37 lines)
@@ -0,0 +1,37 @@
/**
 * Repository URL validation (JS mirror for server.js).
 */
const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export function isValidRepositoryUrl(url) {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

export function getRepoProvider(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

export function parseRepoUrl(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    return {
      origin: u.origin,
      owner: pathParts[0] ?? '',
      repo: pathParts[1] ?? '',
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
src/server/lib/repositoryUrlValidation.ts (Normal file, 57 lines)
@@ -0,0 +1,57 @@
/**
 * Repository URL validation and provider detection.
 * Supports GitHub, GitLab, Bitbucket, and custom Git servers.
 */

const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export type RepoProvider = 'github' | 'gitlab' | 'bitbucket' | 'custom';

/**
 * Check if a string is a valid repository URL (format only).
 */
export function isValidRepositoryUrl(url: string): boolean {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

/**
 * Detect the Git provider from a repository URL.
 */
export function getRepoProvider(url: string): RepoProvider {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

/**
 * Parse owner and repo from a repository URL (path segments).
 * Works for GitHub, GitLab, Bitbucket, and custom (host/owner/repo).
 */
export function parseRepoUrl(url: string): { origin: string; owner: string; repo: string } {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    const owner = pathParts[0] ?? '';
    const repo = pathParts[1] ?? '';
    return {
      origin: u.origin,
      owner,
      repo,
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
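Expected behavior of these validators, shown with illustrative URLs:

import { isValidRepositoryUrl, getRepoProvider, parseRepoUrl } from './repositoryUrlValidation';

console.log(isValidRepositoryUrl('https://github.com/owner/repo'));       // true
console.log(isValidRepositoryUrl('https://github.com/owner/repo/extra')); // false (exactly two path segments required)

console.log(getRepoProvider('https://bitbucket.org/team/project'));       // "bitbucket"

console.log(parseRepoUrl('https://git.example.com/team/project.git'));
// { origin: 'https://git.example.com', owner: 'team', repo: 'project' } — the ".git" suffix is stripped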
@@ -327,13 +327,16 @@ class BackupService {
     // PBS supports PBS_PASSWORD and PBS_REPOSITORY environment variables for non-interactive login
     const repository = `root@pam@${pbsIp}:${pbsDatastore}`;

-    // Escape password for shell safety (single quotes)
+    // Escape password and fingerprint for shell safety (single quotes)
     const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-    // Use PBS_PASSWORD environment variable for non-interactive authentication
-    // Auto-accept fingerprint by piping "y" to stdin
-    // PBS will use PBS_PASSWORD env var if available, avoiding interactive prompt
-    const fullCommand = `echo "y" | PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;
+    const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+    const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+    const envParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
+    if (escapedFingerprint) {
+      envParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
+    }
+    const envStr = envParts.join(' ');
+    const fullCommand = `${envStr} timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;

     console.log(`[BackupService] Logging into PBS: ${repository}`);

@@ -419,9 +422,12 @@ class BackupService {

     // Build full repository string: root@pam@<IP>:<DATASTORE>
     const repository = `root@pam@${pbsIp}:${pbsDatastore}`;
+    const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+    const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+    const snapshotEnvParts = escapedFingerprint ? [`PBS_FINGERPRINT='${escapedFingerprint}'`] : [];
+    const snapshotEnvStr = snapshotEnvParts.length ? snapshotEnvParts.join(' ') + ' ' : '';
     // Use correct command: snapshot list ct/<CT_ID> --repository <full_repo_string>
-    const command = `timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
+    const command = `${snapshotEnvStr}timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
     let output = '';

     console.log(`[BackupService] Discovering PBS backups for CT ${ctId} on repository ${repository}`);
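To see what the new login command looks like on the wire, here is the string the code above composes; the IP, datastore, and fingerprint are placeholders, and the fingerprint part is only appended when one is stored:

const repository = 'root@pam@192.0.2.10:backup-datastore';
const escapedPassword = "p'ss".replace(/'/g, "'\\''"); // embedded single quote survives the shell
const envParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
envParts.push(`PBS_FINGERPRINT='aa:bb:cc:dd'`); // only when a fingerprint is stored
console.log(`${envParts.join(' ')} timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`);
// PBS_PASSWORD='p'\''ss' PBS_REPOSITORY='root@pam@192.0.2.10:backup-datastore' PBS_FINGERPRINT='aa:bb:cc:dd' timeout 10 proxmox-backup-client login --repository root@pam@192.0.2.10:backup-datastore 2>&1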
@@ -1,7 +1,8 @@
 // JavaScript wrapper for githubJsonService (for use with node server.js)
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { repositoryService } from './repositoryService.js';
+import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';

 // Get environment variables
 const getEnv = () => ({
@@ -28,76 +29,9 @@ class GitHubJsonService {
     }
   }

-  getBaseUrl(repoUrl) {
-    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!urlMatch) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-
-    const [, owner, repo] = urlMatch;
-    return `https://api.github.com/repos/${owner}/${repo}`;
-  }
-
-  extractRepoPath(repoUrl) {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error('Invalid GitHub repository URL');
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  async fetchFromGitHub(repoUrl, endpoint) {
-    const baseUrl = this.getBaseUrl(repoUrl);
-    const env = getEnv();
-
-    const headers = {
-      'Accept': 'application/vnd.github.v3+json',
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
-
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
-    }
-
-    return response.json();
-  }
-
   async downloadJsonFile(repoUrl, filePath) {
     this.initializeConfig();
-    const repoPath = this.extractRepoPath(repoUrl);
-    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch}/${filePath}`;
-    const env = getEnv();
-
-    const headers = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits.`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-
-    const content = await response.text();
+    const content = await downloadRawFile(repoUrl, filePath, this.branch);
     const script = JSON.parse(content);
     script.repository_url = repoUrl;
     return script;
@@ -105,16 +39,13 @@ class GitHubJsonService {

   async getJsonFiles(repoUrl) {
     this.initializeConfig();

     try {
-      const files = await this.fetchFromGitHub(
-        repoUrl,
-        `/contents/${this.jsonFolder}?ref=${this.branch}`
-      );
-
-      return files.filter(file => file.name.endsWith('.json'));
+      const entries = await listDirectory(repoUrl, this.jsonFolder, this.branch);
+      return entries
+        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
+        .map((e) => ({ name: e.name, path: e.path }));
     } catch (error) {
-      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }
@@ -232,25 +163,42 @@ class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -258,7 +206,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -274,13 +223,15 @@ class GitHubJsonService {
         success: false,
         message: 'No enabled repositories found',
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }

     console.log(`Found ${enabledRepos.length} enabled repositories`);

     const allSyncedFiles = [];
+    const allDeletedFiles = [];
     const processedSlugs = new Set();
     let totalSynced = 0;

@@ -291,6 +242,7 @@ class GitHubJsonService {
       const result = await this.syncJsonFilesForRepo(repo.url);

       if (result.success) {
+        allDeletedFiles.push(...(result.deletedFiles ?? []));
         const newFiles = result.syncedFiles.filter(file => {
           const slug = file.replace('.json', '');
           if (processedSlugs.has(slug)) {
@@ -312,11 +264,16 @@ class GitHubJsonService {

     await this.updateExistingFilesWithRepositoryUrl();

+    const msg =
+      allDeletedFiles.length > 0
+        ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+        : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
     return {
       success: true,
-      message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+      message: msg,
       count: totalSynced,
-      syncedFiles: allSyncedFiles
+      syncedFiles: allSyncedFiles,
+      deletedFiles: allDeletedFiles
     };
   } catch (error) {
     console.error('Multi-repository JSON sync failed:', error);
@@ -324,7 +281,8 @@ class GitHubJsonService {
       success: false,
       message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
       count: 0,
-      syncedFiles: []
+      syncedFiles: [],
+      deletedFiles: []
     };
   }
 }
@@ -366,6 +324,32 @@ class GitHubJsonService {
     }
   }

+  async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content);
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
     const filesToSync = [];
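The deletion rule introduced above is deliberately conservative; a minimal sketch of the guard (the type and data are illustrative):

type LocalScript = { file: string; repository_url: string };

// A local file is deleted only when it was synced from this repo AND is gone from the remote list.
function shouldDelete(local: LocalScript, repoUrl: string, remoteFilenames: Set<string>): boolean {
  return local.repository_url === repoUrl && !remoteFilenames.has(local.file);
}

const remote = new Set(['nginx.json']);
console.log(shouldDelete({ file: 'old.json', repository_url: 'https://github.com/o/r' }, 'https://github.com/o/r', remote));   // true
console.log(shouldDelete({ file: 'old.json', repository_url: 'https://github.com/x/y' }, 'https://github.com/o/r', remote));   // false — owned by another repo
console.log(shouldDelete({ file: 'nginx.json', repository_url: 'https://github.com/o/r' }, 'https://github.com/o/r', remote)); // false — still present remotely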
@@ -1,8 +1,9 @@
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { env } from '../../env.js';
 import type { Script, ScriptCard, GitHubFile } from '../../types/script';
 import { repositoryService } from './repositoryService';
+import { listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

 export class GitHubJsonService {
   private branch: string | null = null;
@@ -22,96 +23,24 @@ export class GitHubJsonService {
     }
   }

-  private getBaseUrl(repoUrl: string): string {
-    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!urlMatch) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-
-    const [, owner, repo] = urlMatch;
-    return `https://api.github.com/repos/${owner}/${repo}`;
-  }
-
-  private extractRepoPath(repoUrl: string): string {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error('Invalid GitHub repository URL');
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  private async fetchFromGitHub<T>(repoUrl: string, endpoint: string): Promise<T> {
-    const baseUrl = this.getBaseUrl(repoUrl);
-
-    const headers: HeadersInit = {
-      'Accept': 'application/vnd.github.v3+json',
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
-
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
-    }
-
-    const data = await response.json();
-    return data as T;
-  }
-
   private async downloadJsonFile(repoUrl: string, filePath: string): Promise<Script> {
     this.initializeConfig();
-    const repoPath = this.extractRepoPath(repoUrl);
-    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch!}/${filePath}`;
-
-    const headers: HeadersInit = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-
-    const content = await response.text();
+    const content = await downloadRawFile(repoUrl, filePath, this.branch!);
     const script = JSON.parse(content) as Script;
-    // Add repository_url to script
     script.repository_url = repoUrl;
     return script;
   }

   async getJsonFiles(repoUrl: string): Promise<GitHubFile[]> {
     this.initializeConfig();

     try {
-      const files = await this.fetchFromGitHub<GitHubFile[]>(
-        repoUrl,
-        `/contents/${this.jsonFolder!}?ref=${this.branch!}`
-      );
-
-      // Filter for JSON files only
-      return files.filter(file => file.name.endsWith('.json'));
+      const entries = await listDirectory(repoUrl, this.jsonFolder!, this.branch!);
+      const files: GitHubFile[] = entries
+        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
+        .map((e) => ({ name: e.name, path: e.path } as GitHubFile));
+      return files;
     } catch (error) {
-      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }
@@ -229,12 +158,11 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from a specific repository
    */
-  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log(`Starting JSON sync from repository: ${repoUrl}`);

-      // Get file list from GitHub
-      console.log(`Fetching file list from GitHub (${repoUrl})...`);
+      console.log(`Fetching file list from repository (${repoUrl})...`);
       const githubFiles = await this.getJsonFiles(repoUrl);
       console.log(`Found ${githubFiles.length} JSON files in repository ${repoUrl}`);

@@ -242,28 +170,45 @@ export class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       // Compare and find files that need syncing
       // For multi-repo support, we need to check if file exists AND if it's from this repo
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       // Download and save only the files that need syncing
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -271,7 +216,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -279,7 +225,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from all enabled repositories (main repo has priority)
    */
-  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log('Starting multi-repository JSON sync...');

@@ -290,13 +236,15 @@ export class GitHubJsonService {
         success: false,
         message: 'No enabled repositories found',
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }

     console.log(`Found ${enabledRepos.length} enabled repositories`);

     const allSyncedFiles: string[] = [];
+    const allDeletedFiles: string[] = [];
     const processedSlugs = new Set<string>(); // Track slugs we've already processed
     let totalSynced = 0;

@@ -308,6 +256,7 @@ export class GitHubJsonService {
       const result = await this.syncJsonFilesForRepo(repo.url);

       if (result.success) {
+        allDeletedFiles.push(...(result.deletedFiles ?? []));
         // Only count files that weren't already processed from a higher priority repo
         const newFiles = result.syncedFiles.filter(file => {
           const slug = file.replace('.json', '');
@@ -331,11 +280,16 @@ export class GitHubJsonService {
     // Also update existing files that don't have repository_url set (backward compatibility)
     await this.updateExistingFilesWithRepositoryUrl();

+    const msg =
+      allDeletedFiles.length > 0
+        ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+        : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
     return {
       success: true,
-      message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+      message: msg,
       count: totalSynced,
-      syncedFiles: allSyncedFiles
+      syncedFiles: allSyncedFiles,
+      deletedFiles: allDeletedFiles
     };
   } catch (error) {
     console.error('Multi-repository JSON sync failed:', error);
@@ -343,7 +297,8 @@ export class GitHubJsonService {
       success: false,
       message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
       count: 0,
-      syncedFiles: []
+      syncedFiles: [],
+      deletedFiles: []
     };
   }
 }
@@ -388,6 +343,36 @@ export class GitHubJsonService {
     }
   }

+  /**
+   * Delete local JSON files that belong to this repo but are no longer in the remote list.
+   * Returns the list of deleted filenames.
+   */
+  private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles: string[] = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory!, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content) as Script;
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   /**
    * Find files that need syncing for a specific repository
    * This checks if file exists locally AND if it's from the same repository
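A sketch of the slug-dedup behind processedSlugs (data illustrative; it assumes the filter adds unseen slugs to the set, which matches the counting shown in the diff):

const processedSlugs = new Set<string>();
const perRepoSynced: string[][] = [
  ['nginx.json', 'redis.json'],   // higher-priority repo, processed first
  ['nginx.json', 'mariadb.json'], // lower-priority repo
];

let totalSynced = 0;
for (const syncedFiles of perRepoSynced) {
  const newFiles = syncedFiles.filter((file) => {
    const slug = file.replace('.json', '');
    if (processedSlugs.has(slug)) return false; // already counted from a higher-priority repo
    processedSlugs.add(slug);
    return true;
  });
  totalSynced += newFiles.length;
}
console.log(totalSynced); // 3 — the duplicate nginx slug is counted once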
@@ -1,5 +1,6 @@
 // JavaScript wrapper for repositoryService (for use with node server.js)
 import { prisma } from '../db.js';
+import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation.js';

 class RepositoryService {
   /**
@@ -89,9 +90,8 @@ class RepositoryService {
    * Create a new repository
    */
   async createRepository(data) {
-    // Validate GitHub URL
-    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+    if (!isValidRepositoryUrl(data.url)) {
+      throw new Error(REPO_URL_ERROR_MESSAGE);
     }

     // Check for duplicates
@@ -122,10 +122,9 @@ class RepositoryService {
    * Update repository
    */
   async updateRepository(id, data) {
-    // If updating URL, validate it
     if (data.url) {
-      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+      if (!isValidRepositoryUrl(data.url)) {
+        throw new Error(REPO_URL_ERROR_MESSAGE);
       }

       // Check for duplicates (excluding current repo)
@@ -1,5 +1,5 @@
-/* eslint-disable @typescript-eslint/prefer-regexp-exec */
 import { prisma } from '../db';
+import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation';

 export class RepositoryService {
   /**
@@ -93,9 +93,8 @@ export class RepositoryService {
     enabled?: boolean;
     priority?: number;
   }) {
-    // Validate GitHub URL
-    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+    if (!isValidRepositoryUrl(data.url)) {
+      throw new Error(REPO_URL_ERROR_MESSAGE);
     }

     // Check for duplicates
@@ -130,10 +129,9 @@ export class RepositoryService {
     url?: string;
     priority?: number;
   }) {
-    // If updating URL, validate it
     if (data.url) {
-      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+      if (!isValidRepositoryUrl(data.url)) {
+        throw new Error(REPO_URL_ERROR_MESSAGE);
       }

       // Check for duplicates (excluding current repo)
@@ -250,9 +250,16 @@ class RestoreService {
     const targetFolder = `/var/lib/vz/dump/vzdump-lxc-${ctId}-${snapshotNameForPath}`;
     const targetTar = `${targetFolder}.tar`;

-    // Use PBS_PASSWORD env var and add timeout for long downloads
+    // Use PBS_PASSWORD env var and add timeout for long downloads; PBS_FINGERPRINT when set for cert validation
     const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-    const restoreCommand = `PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;
+    const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+    const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+    const restoreEnvParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
+    if (escapedFingerprint) {
+      restoreEnvParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
+    }
+    const restoreEnvStr = restoreEnvParts.join(' ');
+    const restoreCommand = `${restoreEnvStr} timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;

     let output = '';
     let exitCode = 0;
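The restore path uses the same conditional env prefix as the login path; a small sketch of the composition (values illustrative):

function pbsEnv(escapedPassword: string, repository: string, escapedFingerprint: string): string {
  const parts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
  if (escapedFingerprint) parts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
  return parts.join(' ');
}

console.log(pbsEnv('secret', 'root@pam@192.0.2.10:store', ''));
// PBS_PASSWORD='secret' PBS_REPOSITORY='root@pam@192.0.2.10:store'
console.log(pbsEnv('secret', 'root@pam@192.0.2.10:store', 'aa:bb'));
// PBS_PASSWORD='secret' PBS_REPOSITORY='root@pam@192.0.2.10:store' PBS_FINGERPRINT='aa:bb'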
@@ -1,6 +1,8 @@
 // Real JavaScript implementation for script downloading
 import { join } from 'path';
 import { writeFile, mkdir, access, readFile, unlink } from 'fs/promises';
+import { downloadRawFile } from '../lib/gitProvider/index.js';
+
 export class ScriptDownloaderService {
   constructor() {
@@ -82,51 +83,18 @@ export class ScriptDownloaderService {
   }

   /**
-   * Extract repository path from GitHub URL
-   * @param {string} repoUrl - The GitHub repository URL
-   * @returns {string}
-   */
-  extractRepoPath(repoUrl) {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  /**
-   * Download a file from GitHub
-   * @param {string} repoUrl - The GitHub repository URL
+   * Download a file from the repository (GitHub, GitLab, Bitbucket, or custom)
+   * @param {string} repoUrl - The repository URL
    * @param {string} filePath - The file path within the repository
    * @param {string} [branch] - The branch to download from
    * @returns {Promise<string>}
    */
-  async downloadFileFromGitHub(repoUrl, filePath, branch = 'main') {
-    this.initializeConfig();
+  async downloadFileFromRepo(repoUrl, filePath, branch = 'main') {
     if (!repoUrl) {
       throw new Error('Repository URL is not set');
     }
-    const repoPath = this.extractRepoPath(repoUrl);
-    const url = `https://raw.githubusercontent.com/${repoPath}/${branch}/${filePath}`;
-
-    /** @type {Record<string, string>} */
-    const headers = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (process.env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${process.env.GITHUB_TOKEN}`;
-    }
-
-    console.log(`Downloading from GitHub: ${url}`);
-    const response = await fetch(url, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to download ${filePath} from ${repoUrl}: ${response.status} ${response.statusText}`);
-    }
-
-    return response.text();
+    console.log(`Downloading from repository: ${repoUrl} (${filePath})`);
+    return downloadRawFile(repoUrl, filePath, branch);
   }

   /**
@@ -184,9 +152,8 @@ export class ScriptDownloaderService {
     const fileName = scriptPath.split('/').pop();

     if (fileName) {
-      // Download from GitHub using the script's repository URL
       console.log(`Downloading script file: ${scriptPath} from ${repoUrl}`);
-      const content = await this.downloadFileFromGitHub(repoUrl, scriptPath, branch);
+      const content = await this.downloadFileFromRepo(repoUrl, scriptPath, branch);

       // Determine target directory based on script path
       let targetDir;
@@ -250,7 +217,7 @@ export class ScriptDownloaderService {
     const installScriptName = `${script.slug}-install.sh`;
     try {
       console.log(`Downloading install script: install/${installScriptName} from ${repoUrl}`);
-      const installContent = await this.downloadFileFromGitHub(repoUrl, `install/${installScriptName}`, branch);
+      const installContent = await this.downloadFileFromRepo(repoUrl, `install/${installScriptName}`, branch);
       const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName);
       await writeFile(localInstallPath, installContent, 'utf-8');
       files.push(`install/${installScriptName}`);
@@ -274,7 +241,7 @@ export class ScriptDownloaderService {
     const alpineInstallScriptName = `alpine-${script.slug}-install.sh`;
     try {
       console.log(`[${script.slug}] Downloading alpine install script: install/${alpineInstallScriptName} from ${repoUrl}`);
-      const alpineInstallContent = await this.downloadFileFromGitHub(repoUrl, `install/${alpineInstallScriptName}`, branch);
+      const alpineInstallContent = await this.downloadFileFromRepo(repoUrl, `install/${alpineInstallScriptName}`, branch);
       const localAlpineInstallPath = join(this.scriptsDirectory, 'install', alpineInstallScriptName);
       await writeFile(localAlpineInstallPath, alpineInstallContent, 'utf-8');
       files.push(`install/${alpineInstallScriptName}`);
@@ -681,7 +648,7 @@ export class ScriptDownloaderService {
     console.log(`[Comparison] Local file size: ${localContent.length} bytes`);

     // Download remote content from the script's repository
-    const remoteContent = await this.downloadFileFromGitHub(repoUrl, remotePath, branch);
+    const remoteContent = await this.downloadFileFromRepo(repoUrl, remotePath, branch);
     console.log(`[Comparison] Remote file size: ${remoteContent.length} bytes`);

     // Apply modification only for CT scripts, not for other script types
@@ -739,7 +706,7 @@ export class ScriptDownloaderService {
       // Find the corresponding script path in install_methods
       const method = script.install_methods?.find(m => m.script === filePath);
       if (method?.script) {
-        const downloadedContent = await this.downloadFileFromGitHub(repoUrl, method.script, branch);
+        const downloadedContent = await this.downloadFileFromRepo(repoUrl, method.script, branch);
         remoteContent = this.modifyScriptContent(downloadedContent);
       }
     } catch {
@@ -756,7 +723,7 @@ export class ScriptDownloaderService {
     }

     try {
-      remoteContent = await this.downloadFileFromGitHub(repoUrl, filePath, branch);
+      remoteContent = await this.downloadFileFromRepo(repoUrl, filePath, branch);
     } catch {
       // Error downloading remote install script
     }
@@ -1,6 +1,8 @@
|
|||||||
import { spawn } from 'child_process';
|
import { spawn } from 'child_process';
|
||||||
import { spawn as ptySpawn } from 'node-pty';
|
import { spawn as ptySpawn } from 'node-pty';
|
||||||
import { existsSync } from 'fs';
|
import { existsSync, writeFileSync, chmodSync, unlinkSync } from 'fs';
|
||||||
|
import { join } from 'path';
|
||||||
|
import { tmpdir } from 'os';
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -85,9 +87,10 @@ class SSHExecutionService {
|
|||||||
* @param {Function} onData - Callback for data output
|
* @param {Function} onData - Callback for data output
|
||||||
* @param {Function} onError - Callback for errors
|
* @param {Function} onError - Callback for errors
|
||||||
* @param {Function} onExit - Callback for process exit
|
* @param {Function} onExit - Callback for process exit
|
||||||
|
* @param {Object} [envVars] - Optional environment variables to pass to the script
|
||||||
* @returns {Promise<Object>} Process information
|
* @returns {Promise<Object>} Process information
|
||||||
*/
|
*/
|
||||||
async executeScript(server, scriptPath, onData, onError, onExit) {
|
async executeScript(server, scriptPath, onData, onError, onExit, envVars = {}) {
|
||||||
try {
|
try {
|
||||||
await this.transferScriptsFolder(server, onData, onError);
|
await this.transferScriptsFolder(server, onData, onError);
|
||||||
|
|
||||||
@@ -98,8 +101,43 @@ class SSHExecutionService {
       // Build SSH command based on authentication type
       const { command, args } = this.buildSSHCommand(server);

+      // Format environment variables as var_name=value pairs
+      const envVarsString = Object.entries(envVars)
+        .map(([key, value]) => {
+          // Escape special characters in values
+          const escapedValue = String(value).replace(/'/g, "'\\''");
+          return `${key}='${escapedValue}'`;
+        })
+        .join(' ');
+
+      // Build the command with environment variables
+      let scriptCommand = `cd /tmp/scripts && chmod +x ${relativeScriptPath} && export TERM=xterm-256color && export COLUMNS=120 && export LINES=30 && export COLORTERM=truecolor && export FORCE_COLOR=1 && export NO_COLOR=0 && export CLICOLOR=1 && export CLICOLOR_FORCE=1`;
+
+      if (envVarsString) {
+        scriptCommand += ` && ${envVarsString} bash ${relativeScriptPath}`;
+      } else {
+        scriptCommand += ` && bash ${relativeScriptPath}`;
+      }
+
+      // Log the full command that will be executed
+      console.log('='.repeat(80));
+      console.log(`[SSH Execution] Executing on host: ${server.ip} (${server.name || 'Unnamed'})`);
+      console.log(`[SSH Execution] Script path: ${scriptPath}`);
+      console.log(`[SSH Execution] Relative script path: ${relativeScriptPath}`);
+      if (Object.keys(envVars).length > 0) {
+        console.log(`[SSH Execution] Environment variables (${Object.keys(envVars).length} vars):`);
+        Object.entries(envVars).forEach(([key, value]) => {
+          console.log(`  ${key}=${String(value)}`);
+        });
+      } else {
+        console.log(`[SSH Execution] No environment variables provided`);
+      }
+      console.log(`[SSH Execution] Full command:`);
+      console.log(scriptCommand);
+      console.log('='.repeat(80));
+
       // Add the script execution command to the args
-      args.push(`cd /tmp/scripts && chmod +x ${relativeScriptPath} && export TERM=xterm-256color && export COLUMNS=120 && export LINES=30 && export COLORTERM=truecolor && export FORCE_COLOR=1 && export NO_COLOR=0 && export CLICOLOR=1 && export CLICOLOR_FORCE=1 && bash ${relativeScriptPath}`);
+      args.push(scriptCommand);

       // Use ptySpawn for proper terminal emulation and color support
       const sshCommand = ptySpawn(command, args, {
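For reference, the `'\''` replacement in the hunk above is the standard POSIX trick for wrapping arbitrary values in single quotes: close the quote, emit an escaped literal quote, reopen. A minimal shell sketch of the same round trip (the value and variable name here are hypothetical, not from the diff):

    # value containing characters that would otherwise break the shell: {, $, ", '
    raw='pa{ss$wo"rd'\''1'
    # escape embedded single quotes: ' -> '\''
    escaped=$(printf %s "$raw" | sed "s/'/'\\\\''/g")
    # the assignment survives one round of shell parsing intact
    bash -c "MY_VAR='$escaped' printenv MY_VAR"   # prints the original value unchanged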
@@ -158,26 +196,45 @@ class SSHExecutionService {
    */
   async transferScriptsFolder(server, onData, onError) {
     const { ip, user, password, auth_type = 'password', ssh_key_passphrase, ssh_key_path, ssh_port = 22 } = server;

+    const cleanupTempFile = (/** @type {string | null} */ tempPath) => {
+      if (tempPath) {
+        try {
+          unlinkSync(tempPath);
+        } catch (_) {
+          // ignore
+        }
+      }
+    };
+
     return new Promise((resolve, reject) => {
+      /** @type {string | null} */
+      let tempPath = null;
       try {
-        // Build rsync command based on authentication type
+        // Build rsync command based on authentication type.
+        // Use sshpass -f with a temp file so password/passphrase never go through the shell (safe for special chars like {, $, ").
         let rshCommand;
         if (auth_type === 'key') {
           if (!ssh_key_path || !existsSync(ssh_key_path)) {
             throw new Error('SSH key file not found');
           }

           if (ssh_key_passphrase) {
-            rshCommand = `sshpass -P passphrase -p ${ssh_key_passphrase} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+            tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+            writeFileSync(tempPath, ssh_key_passphrase);
+            chmodSync(tempPath, 0o600);
+            rshCommand = `sshpass -P passphrase -f ${tempPath} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           } else {
             rshCommand = `ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           }
         } else {
           // Password authentication
-          rshCommand = `sshpass -p ${password} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+          tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+          writeFileSync(tempPath, password ?? '');
+          chmodSync(tempPath, 0o600);
+          rshCommand = `sshpass -f ${tempPath} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
        }

         const rsyncCommand = spawn('rsync', [
           '-avz',
           '--delete',
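For reference, `sshpass -f` reads the secret from a file instead of the command line, so it never appears in ps output, shell history, or an interpolated command string. A minimal sketch of the same pattern (host, user, and the SSH_PASSWORD variable are hypothetical placeholders):

    secret_file=$(mktemp /tmp/sshpass-XXXXXX)
    chmod 600 "$secret_file"                   # readable only by the owner
    printf %s "$SSH_PASSWORD" >"$secret_file"  # no trailing newline
    sshpass -f "$secret_file" ssh -o StrictHostKeyChecking=no user@203.0.113.10 true
    rm -f "$secret_file"                       # clean up on every exit path, as the hunk above does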
@@ -190,31 +247,31 @@ class SSHExecutionService {
         stdio: ['pipe', 'pipe', 'pipe']
       });

       rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
-        // Ensure proper UTF-8 encoding for ANSI colors
         const output = data.toString('utf8');
         onData(output);
       });

       rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
-        // Ensure proper UTF-8 encoding for ANSI colors
         const output = data.toString('utf8');
         onError(output);
       });

       rsyncCommand.on('close', (code) => {
+        cleanupTempFile(tempPath);
         if (code === 0) {
           resolve();
         } else {
           reject(new Error(`rsync failed with code ${code}`));
         }
       });

       rsyncCommand.on('error', (error) => {
+        cleanupTempFile(tempPath);
         reject(error);
       });
     } catch (error) {
+      cleanupTempFile(tempPath);
       reject(error);
     }
   });
@@ -169,16 +169,17 @@ class SSHService {
       const timeout = 10000;
       let resolved = false;

+      // Pass password via env so it is not embedded in the script (safe for special chars like {, $, ").
       const expectScript = `#!/usr/bin/expect -f
 set timeout 10
 spawn ssh -p ${ssh_port} -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o PasswordAuthentication=yes -o PubkeyAuthentication=no ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
 expect {
     "password:" {
-        send "${password}\r"
+        send "$env(SSH_PASSWORD)\\r"
         exp_continue
     }
     "Password:" {
-        send "${password}\r"
+        send "$env(SSH_PASSWORD)\\r"
         exp_continue
     }
     "SSH_LOGIN_SUCCESS" {
@@ -193,7 +194,8 @@ expect {
 }`;

       const expectCommand = spawn('expect', ['-c', expectScript], {
-        stdio: ['pipe', 'pipe', 'pipe']
+        stdio: ['pipe', 'pipe', 'pipe'],
+        env: { ...process.env, SSH_PASSWORD: password ?? '' }
       });

       const timer = setTimeout(() => {
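For reference, Expect (Tcl) reads its environment through the `$env(NAME)` array, which is what the two hunks above rely on: the password travels in the child process environment, never in the script text. A minimal sketch from a shell (user, host, and the sample password are hypothetical):

    # the env prefix exposes the secret only to the expect process
    SSH_PASSWORD='s3cr{et$"pw' expect -c '
        spawn ssh demo@203.0.113.10 true
        expect "assword:" { send "$env(SSH_PASSWORD)\r" }
        expect eof
    '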
356 update.sh
@@ -4,7 +4,7 @@
 # Enhanced update script for ProxmoxVE-Local
 # Fetches latest release from GitHub and backs up data directory

 set -euo pipefail # Exit on error, undefined vars, pipe failures

 # Add error trap for debugging
 trap 'echo "Error occurred at line $LINENO, command: $BASH_COMMAND"' ERR
@@ -38,7 +38,7 @@ load_github_token() {
        log "Using GitHub token from environment variable"
        return 0
    fi

    # Try .env file
    if [ -f ".env" ]; then
        local env_token
@@ -49,21 +49,21 @@ load_github_token() {
            return 0
        fi
    fi

    # Try .github_token file
    if [ -f ".github_token" ]; then
        GITHUB_TOKEN=$(cat .github_token | tr -d '\n\r')
        log "Using GitHub token from .github_token file"
        return 0
    fi

    # Try ~/.github_token file
    if [ -f "$HOME/.github_token" ]; then
        GITHUB_TOKEN=$(cat "$HOME/.github_token" | tr -d '\n\r')
        log "Using GitHub token from ~/.github_token file"
        return 0
    fi

    log_warning "No GitHub token found. Using unauthenticated requests (lower rate limits)"
    log_warning "To use a token, add GITHUB_TOKEN=your_token to .env file or set GITHUB_TOKEN environment variable"
    return 1
@@ -72,7 +72,7 @@ load_github_token() {
 # Initialize log file
 init_log() {
     # Clear/create log file
-    > "$LOG_FILE"
+    >"$LOG_FILE"
     log "Starting ProxmoxVE-Local update process..."
     log "Log file: $LOG_FILE"
 }
@@ -97,40 +97,40 @@ log_warning() {
 # Check if required tools are available
 check_dependencies() {
     log "Checking dependencies..."

     local missing_deps=()

-    if ! command -v curl &> /dev/null; then
+    if ! command -v curl &>/dev/null; then
         missing_deps+=("curl")
     fi

-    if ! command -v jq &> /dev/null; then
+    if ! command -v jq &>/dev/null; then
         missing_deps+=("jq")
     fi

-    if ! command -v npm &> /dev/null; then
+    if ! command -v npm &>/dev/null; then
         missing_deps+=("npm")
     fi

-    if ! command -v node &> /dev/null; then
+    if ! command -v node &>/dev/null; then
         missing_deps+=("node")
     fi

     if [ ${#missing_deps[@]} -ne 0 ]; then
         log_error "Missing dependencies: ${missing_deps[*]}"
         log_error "Please install the missing dependencies and try again."
         exit 1
     fi

     log_success "All dependencies are available"
 }

 # Get latest release info from GitHub API
 get_latest_release() {
     log "Fetching latest release information from GitHub..."

     local curl_opts="-s --connect-timeout 15 --max-time 60 --retry 2 --retry-delay 3"

     # Add authentication header if token is available
     if [ -n "$GITHUB_TOKEN" ]; then
         curl_opts="$curl_opts -H \"Authorization: token $GITHUB_TOKEN\""
@@ -138,35 +138,35 @@ get_latest_release() {
     else
         log "Using unauthenticated GitHub API request (lower rate limits)"
     fi

     local release_info
     if ! release_info=$(eval "curl $curl_opts \"$GITHUB_API/releases/latest\""); then
         log_error "Failed to fetch release information from GitHub API (timeout or network error)"
         exit 1
     fi

     # Check if response is valid JSON
     if ! echo "$release_info" | jq empty 2>/dev/null; then
         log_error "Invalid JSON response from GitHub API"
         log "Response: $release_info"
         exit 1
     fi

     local tag_name
     local download_url
     local published_at

     tag_name=$(echo "$release_info" | jq -r '.tag_name')
     download_url=$(echo "$release_info" | jq -r '.tarball_url')
     published_at=$(echo "$release_info" | jq -r '.published_at')

     if [ "$tag_name" = "null" ] || [ "$download_url" = "null" ] || [ -z "$tag_name" ] || [ -z "$download_url" ]; then
         log_error "Failed to parse release information from API response"
         log "Tag name: $tag_name"
         log "Download URL: $download_url"
         exit 1
     fi

     log_success "Latest release: $tag_name (published: $published_at)"
     echo "$tag_name|$download_url"
 }
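For reference, the null checks in get_latest_release exist because `jq -r` prints the literal string "null" for a missing key rather than failing. A minimal sketch of the same guard (OWNER/REPO is a placeholder, not taken from the diff):

    release_json=$(curl -s "https://api.github.com/repos/OWNER/REPO/releases/latest")
    tag=$(echo "$release_json" | jq -r '.tag_name')
    # test both the "null" string and the empty string before trusting the value
    if [ "$tag" = "null" ] || [ -z "$tag" ]; then
        echo "no tag_name in response" >&2
        exit 1
    fi
    echo "latest tag: $tag"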
@@ -174,16 +174,16 @@ get_latest_release() {
 # Backup data directory, .env file, and scripts directories
 backup_data() {
     log "Creating backup directory at $BACKUP_DIR..."

     if ! mkdir -p "$BACKUP_DIR"; then
         log_error "Failed to create backup directory"
         exit 1
     fi

     # Backup data directory
     if [ -d "$DATA_DIR" ]; then
         log "Backing up data directory..."

         if ! cp -r "$DATA_DIR" "$BACKUP_DIR/data"; then
             log_error "Failed to backup data directory"
             exit 1
@@ -193,7 +193,7 @@ backup_data() {
     else
         log_warning "Data directory not found, skipping backup"
     fi

     # Backup .env file
     if [ -f ".env" ]; then
         log "Backing up .env file..."
@@ -206,7 +206,7 @@ backup_data() {
     else
         log_warning ".env file not found, skipping backup"
     fi

     # Backup scripts directories
     local scripts_dirs=("scripts/ct" "scripts/install" "scripts/tools" "scripts/vm")
     for scripts_dir in "${scripts_dirs[@]}"; do
@@ -230,60 +230,60 @@ download_release() {
     local release_info="$1"
     local tag_name="${release_info%|*}"
     local download_url="${release_info#*|}"

     log "Downloading release $tag_name..."

     local temp_dir="/tmp/pve-update-$$"
     local archive_file="$temp_dir/release.tar.gz"

     # Create temporary directory
     if ! mkdir -p "$temp_dir"; then
         log_error "Failed to create temporary directory"
         exit 1
     fi

     # Download release with timeout and progress
     if ! curl -L --connect-timeout 30 --max-time 300 --retry 3 --retry-delay 5 -o "$archive_file" "$download_url" 2>/dev/null; then
         log_error "Failed to download release from GitHub"
         rm -rf "$temp_dir"
         exit 1
     fi

     # Verify download
     if [ ! -f "$archive_file" ] || [ ! -s "$archive_file" ]; then
         log_error "Downloaded file is empty or missing"
         rm -rf "$temp_dir"
         exit 1
     fi

     log_success "Downloaded release"

     # Extract release
     if ! tar -xzf "$archive_file" -C "$temp_dir" 2>/dev/null; then
         log_error "Failed to extract release"
         rm -rf "$temp_dir"
         exit 1
     fi

     # Find the extracted directory (GitHub tarballs have a root directory)
     local extracted_dir
     extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" 2>/dev/null | head -1)

     # Try alternative patterns if not found
     if [ -z "$extracted_dir" ]; then
         extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d -name "${REPO_NAME}-*" 2>/dev/null | head -1)
     fi

     if [ -z "$extracted_dir" ]; then
         extracted_dir=$(find "$temp_dir" -maxdepth 1 -type d ! -name "$temp_dir" 2>/dev/null | head -1)
     fi

     if [ -z "$extracted_dir" ]; then
         log_error "Could not find extracted directory"
         rm -rf "$temp_dir"
         exit 1
     fi

     log_success "Release extracted successfully"
     echo "$extracted_dir"
 }
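For reference, the `${release_info%|*}` / `${release_info#*|}` pair at the top of download_release splits the packed "tag|url" string with plain parameter expansion, no subshell needed. A minimal sketch (the value is hypothetical):

    release_info="v1.2.3|https://api.github.com/repos/OWNER/REPO/tarball/v1.2.3"
    tag_name="${release_info%|*}"     # strip from the last '|' to the end  -> v1.2.3
    download_url="${release_info#*|}" # strip up to the first '|'          -> https://...
    echo "$tag_name"
    echo "$download_url"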
@@ -291,11 +291,11 @@ download_release() {
 # Clear the original directory before updating
 clear_original_directory() {
     log "Clearing original directory..."

     # Remove old lock files and node_modules before update
     rm -f package-lock.json 2>/dev/null
     rm -rf node_modules 2>/dev/null

     # List of files/directories to preserve (already backed up)
     local preserve_patterns=(
         "data"
@@ -308,48 +308,48 @@ clear_original_directory() {
         ".git"
         "scripts"
     )

     # Remove all files except preserved ones
     while IFS= read -r file; do
         local should_preserve=false
         local filename=$(basename "$file")

         for pattern in "${preserve_patterns[@]}"; do
             if [[ "$filename" == $pattern ]]; then
                 should_preserve=true
                 break
             fi
         done

         if [ "$should_preserve" = false ]; then
             rm -f "$file"
         fi
     done < <(find . -maxdepth 1 -type f ! -name ".*")

     # Remove all directories except preserved ones
     while IFS= read -r dir; do
         local should_preserve=false
         local dirname=$(basename "$dir")

         for pattern in "${preserve_patterns[@]}"; do
             if [[ "$dirname" == $pattern ]]; then
                 should_preserve=true
                 break
             fi
         done

         if [ "$should_preserve" = false ]; then
             rm -rf "$dir"
         fi
     done < <(find . -maxdepth 1 -type d ! -name "." ! -name "..")

     log_success "Original directory cleared"
 }

 # Restore backup files before building
 restore_backup_files() {
     log "Restoring .env, data directory, and scripts directories from backup..."

     if [ -d "$BACKUP_DIR" ]; then
         # Restore .env file
         if [ -f "$BACKUP_DIR/.env" ]; then
@@ -365,7 +365,7 @@ restore_backup_files() {
         else
             log_warning "No .env file backup found"
         fi

         # Restore data directory
         if [ -d "$BACKUP_DIR/data" ]; then
             if [ -d "data" ]; then
@@ -380,24 +380,24 @@ restore_backup_files() {
         else
             log_warning "No data directory backup found"
         fi

         # Restore scripts directories
         local scripts_dirs=("ct" "install" "tools" "vm")
         for backup_name in "${scripts_dirs[@]}"; do
             if [ -d "$BACKUP_DIR/$backup_name" ]; then
                 local target_dir="scripts/$backup_name"
                 log "Restoring $target_dir directory from backup..."

                 # Ensure scripts directory exists
                 if [ ! -d "scripts" ]; then
                     mkdir -p "scripts"
                 fi

                 # Remove existing directory if it exists
                 if [ -d "$target_dir" ]; then
                     rm -rf "$target_dir"
                 fi

                 if cp -r "$BACKUP_DIR/$backup_name" "$target_dir"; then
                     log_success "$target_dir directory restored from backup"
                 else
@@ -417,7 +417,13 @@ restore_backup_files() {
 # Verify database was restored correctly
 verify_database_restored() {
     log "Verifying database was restored correctly..."

+    # Ensure data directory exists (will be auto-created by app if needed)
+    if [ ! -d "data" ]; then
+        log "Creating data directory..."
+        mkdir -p data
+    fi
+
     # Check for both possible database filenames
     local db_file=""
     if [ -f "data/database.sqlite" ]; then
@@ -425,23 +431,25 @@ verify_database_restored() {
     elif [ -f "data/settings.db" ]; then
         db_file="data/settings.db"
     else
-        log_error "Database file not found after restore! (checked database.sqlite and settings.db)"
-        return 1
+        # Database doesn't exist yet - this is OK for new installations
+        # The app will create it automatically via Prisma migrations
+        log_warning "No existing database file found - will be created automatically on first start"
+        return 0
     fi

     local db_size=$(stat -f%z "$db_file" 2>/dev/null || stat -c%s "$db_file" 2>/dev/null)
     if [ "$db_size" -eq 0 ]; then
         log_warning "Database file is empty - will be recreated by Prisma migrations"
         return 0 # Don't fail the update, let Prisma recreate the database
     fi

     log_success "Database verified (file: $db_file, size: $db_size bytes)"
 }

 # Ensure DATABASE_URL is set in .env file for Prisma
 ensure_database_url() {
     log "Ensuring DATABASE_URL is set in .env file..."

     # Check if .env file exists
     if [ ! -f ".env" ]; then
         log_warning ".env file not found, creating from .env.example..."
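For reference, the `stat -f%z ... || stat -c%s ...` chain in verify_database_restored is the usual portability shim: BSD/macOS stat takes -f%z for file size while GNU stat takes -c%s. A minimal sketch (the file path is arbitrary):

    file_size() {
        # try BSD/macOS stat first, fall back to GNU coreutils stat
        stat -f%z "$1" 2>/dev/null || stat -c%s "$1" 2>/dev/null
    }
    size=$(file_size /etc/hostname)
    echo "size: ${size:-unknown} bytes"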
@@ -452,19 +460,19 @@ ensure_database_url() {
             return 1
         fi
     fi

     # Check if DATABASE_URL is already set
     if grep -q "^DATABASE_URL=" .env; then
         log "DATABASE_URL already exists in .env file"
         return 0
     fi

     # Add DATABASE_URL to .env file
     log "Adding DATABASE_URL to .env file..."
-    echo "" >> .env
-    echo "# Database" >> .env
-    echo "DATABASE_URL=\"file:./data/settings.db\"" >> .env
+    echo "" >>.env
+    echo "# Database" >>.env
+    echo "DATABASE_URL=\"file:./data/settings.db\"" >>.env

     log_success "DATABASE_URL added to .env file"
 }

@@ -481,11 +489,9 @@ check_service() {
     fi
 }


 # Stop the application before updating
 stop_application() {
-
-
     # Change to the application directory if we're not already there
     local app_dir
     if [ -f "package.json" ] && [ -f "server.js" ]; then
@@ -503,9 +509,9 @@ stop_application() {
             return 1
         fi
     fi

     log "Working from application directory: $(pwd)"

     # Check if systemd service is running and disable it temporarily
     if check_service && systemctl is-active --quiet pvescriptslocal.service; then
         log "Disabling systemd service temporarily to prevent auto-restart..."
@@ -518,7 +524,7 @@ stop_application() {
     else
         log "No running systemd service found"
     fi

     # Kill any remaining npm/node processes
     log "Killing any remaining npm/node processes..."
     local pids
@@ -537,9 +543,9 @@ stop_application() {
 # Update application files
 update_files() {
     local source_dir="$1"

     log "Updating application files..."

     # List of files/directories to exclude from update
     local exclude_patterns=(
         "data"
@@ -555,48 +561,48 @@ update_files() {
         "scripts/tools"
         "scripts/vm"
     )

     # Find the actual source directory (strip the top-level directory)
     local actual_source_dir
     actual_source_dir=$(find "$source_dir" -maxdepth 1 -type d -name "community-scripts-ProxmoxVE-Local-*" | head -1)

     if [ -z "$actual_source_dir" ]; then
         log_error "Could not find the actual source directory in $source_dir"
         return 1
     fi

     # Verify critical files exist in source
     if [ ! -f "$actual_source_dir/package.json" ]; then
         log_error "package.json not found in source directory!"
         return 1
     fi

     # Use process substitution instead of pipe to avoid subshell issues
     local files_copied=0
     local files_excluded=0

     # Create a temporary file list to avoid process substitution issues
     local file_list="/tmp/file_list_$$.txt"
-    find "$actual_source_dir" -type f > "$file_list"
+    find "$actual_source_dir" -type f >"$file_list"

     while IFS= read -r file; do
         local rel_path="${file#$actual_source_dir/}"
         local should_exclude=false

         for pattern in "${exclude_patterns[@]}"; do
             if [[ "$rel_path" == $pattern ]] || [[ "$rel_path" == $pattern/* ]]; then
                 should_exclude=true
                 break
             fi
         done

         if [ "$should_exclude" = false ]; then
             local target_dir
             target_dir=$(dirname "$rel_path")
             if [ "$target_dir" != "." ]; then
                 mkdir -p "$target_dir"
             fi

             if ! cp "$file" "$rel_path"; then
                 log_error "Failed to copy $rel_path"
                 rm -f "$file_list"
@@ -606,48 +612,47 @@ update_files() {
         else
             files_excluded=$((files_excluded + 1))
         fi
-    done < "$file_list"
+    done <"$file_list"

     # Clean up temporary file
     rm -f "$file_list"

     # Verify critical files were copied
     if [ ! -f "package.json" ]; then
         log_error "package.json was not copied to target directory!"
         return 1
     fi

     if [ ! -f "package-lock.json" ]; then
         log_warning "package-lock.json was not copied!"
     fi

     log_success "Application files updated successfully ($files_copied files)"
 }

-
 # Install dependencies and build
 install_and_build() {
     log "Installing dependencies..."

     # Verify package.json exists
     if [ ! -f "package.json" ]; then
         log_error "package.json not found! Cannot install dependencies."
         return 1
     fi

     if [ ! -f "package-lock.json" ]; then
         log_warning "No package-lock.json found, npm will generate one"
     fi

     # Create temporary file for npm output
     local npm_log="/tmp/npm_install_$$.log"

     # Ensure NODE_ENV is not set to production during install (we need devDependencies for build)
     local old_node_env="${NODE_ENV:-}"
     export NODE_ENV=development

     # Run npm install to get ALL dependencies including devDependencies
-    if ! npm install --include=dev > "$npm_log" 2>&1; then
+    if ! npm install --include=dev >"$npm_log" 2>&1; then
         log_error "Failed to install dependencies"
         log_error "npm install output (last 30 lines):"
         tail -30 "$npm_log" | while read -r line; do
@@ -656,20 +661,20 @@ install_and_build() {
         rm -f "$npm_log"
         return 1
     fi

     # Restore NODE_ENV
     if [ -n "$old_node_env" ]; then
         export NODE_ENV="$old_node_env"
     else
         unset NODE_ENV
     fi

     log_success "Dependencies installed successfully"
     rm -f "$npm_log"

     # Generate Prisma client
     log "Generating Prisma client..."
-    if ! npx prisma generate > "$npm_log" 2>&1; then
+    if ! npx prisma generate >"$npm_log" 2>&1; then
         log_error "Failed to generate Prisma client"
         log_error "Prisma generate output:"
         cat "$npm_log" | while read -r line; do
@@ -679,7 +684,7 @@ install_and_build() {
         return 1
     fi
     log_success "Prisma client generated successfully"

     # Check if Prisma migrations exist and are compatible
     if [ -d "prisma/migrations" ]; then
         log "Existing migration history detected"
@@ -688,10 +693,10 @@ install_and_build() {
     else
         log_warning "No existing migration history found - this may be a fresh install"
     fi

     # Run Prisma migrations
     log "Running Prisma migrations..."
-    if ! npx prisma migrate deploy > "$npm_log" 2>&1; then
+    if ! npx prisma migrate deploy >"$npm_log" 2>&1; then
         log_warning "Prisma migrations failed or no migrations to run"
         log "Prisma migrate output:"
         cat "$npm_log" | while read -r line; do
@@ -701,15 +706,18 @@ install_and_build() {
         log_success "Prisma migrations completed successfully"
     fi
     rm -f "$npm_log"

     log "Building application..."
     # Set NODE_ENV to production for build
     export NODE_ENV=production
+    # Unset TURBOPACK to prevent "Multiple bundler flags" error with --webpack
+    unset TURBOPACK 2>/dev/null || true
+    export TURBOPACK=''

     # Create temporary file for npm build output
     local build_log="/tmp/npm_build_$$.log"

-    if ! npm run build > "$build_log" 2>&1; then
+    if ! TURBOPACK='' npm run build >"$build_log" 2>&1; then
         log_error "Failed to build application"
         log_error "npm run build output:"
         cat "$build_log" | while read -r line; do
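For reference, prefixing a command with VAR=value overrides that variable for that single invocation only, which is why the build line above sets TURBOPACK='' inline in addition to the exported value. A minimal sketch:

    export TURBOPACK=1
    TURBOPACK='' env | grep '^TURBOPACK='  # the child sees TURBOPACK= (empty)
    echo "$TURBOPACK"                      # still 1 in the current shell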
@@ -718,18 +726,18 @@ install_and_build() {
         rm -f "$build_log"
         return 1
     fi

     # Log success and clean up
     log_success "Application built successfully"
     rm -f "$build_log"

     log_success "Dependencies installed and application built successfully"
 }

 # Start the application after updating
 start_application() {
     log "Starting application..."

     # Use the global variable to determine how to start
     if [ "$SERVICE_WAS_RUNNING" = true ] && check_service; then
         log "Service was running before update, re-enabling and starting systemd service..."
@@ -761,11 +769,11 @@ start_application() {
 # Start application with npm
 start_with_npm() {
     log "Starting application with npm start..."

     # Start in background
-    nohup npm start > server.log 2>&1 &
+    nohup npm start >server.log 2>&1 &
     local npm_pid=$!

     # Wait a moment and check if it started
     sleep 3
     if kill -0 $npm_pid 2>/dev/null; then
@@ -776,13 +784,30 @@ start_with_npm() {
     fi
 }

+# Re-enable the systemd service on failure to prevent users from being locked out
+re_enable_service_on_failure() {
+    if check_service; then
+        log "Re-enabling systemd service after failure..."
+        if systemctl enable pvescriptslocal.service 2>/dev/null; then
+            log_success "Service re-enabled"
+            if systemctl start pvescriptslocal.service 2>/dev/null; then
+                log_success "Service started"
+            else
+                log_warning "Failed to start service - manual intervention may be required"
+            fi
+        else
+            log_warning "Failed to re-enable service - manual intervention may be required"
+        fi
+    fi
+}
+
 # Rollback function
 rollback() {
     log_warning "Rolling back to previous version..."

     if [ -d "$BACKUP_DIR" ]; then
         log "Restoring from backup directory: $BACKUP_DIR"

         # Restore data directory
         if [ -d "$BACKUP_DIR/data" ]; then
             log "Restoring data directory..."
@@ -797,7 +822,7 @@ rollback() {
         else
             log_warning "No data directory backup found"
         fi

         # Restore .env file
         if [ -f "$BACKUP_DIR/.env" ]; then
             log "Restoring .env file..."
@@ -812,24 +837,24 @@ rollback() {
         else
             log_warning "No .env file backup found"
         fi

         # Restore scripts directories
         local scripts_dirs=("ct" "install" "tools" "vm")
         for backup_name in "${scripts_dirs[@]}"; do
             if [ -d "$BACKUP_DIR/$backup_name" ]; then
                 local target_dir="scripts/$backup_name"
                 log "Restoring $target_dir directory from backup..."

                 # Ensure scripts directory exists
                 if [ ! -d "scripts" ]; then
                     mkdir -p "scripts"
                 fi

                 # Remove existing directory if it exists
                 if [ -d "$target_dir" ]; then
                     rm -rf "$target_dir"
                 fi

                 if mv "$BACKUP_DIR/$backup_name" "$target_dir"; then
                     log_success "$target_dir directory restored from backup"
                 else
@@ -839,14 +864,17 @@ rollback() {
                 log_warning "No $backup_name directory backup found"
             fi
         done

         # Clean up backup directory
         log "Cleaning up backup directory..."
         rm -rf "$BACKUP_DIR"
     else
         log_error "No backup directory found for rollback"
     fi

+    # Re-enable the service so users aren't locked out
+    re_enable_service_on_failure
+
     log_error "Update failed. Please check the logs and try again."
     exit 1
 }
@@ -865,14 +893,14 @@ check_node_version() {

     log "Detected Node.js version: $current"

-    if (( major_version < 24 )); then
+    if ((major_version == 24)); then
+        log_success "Node.js 24 already installed"
+    elif ((major_version < 24)); then
         log_warning "Node.js < 24 detected → upgrading to Node.js 24 LTS..."
         upgrade_node_to_24
-    elif (( major_version > 24 )); then
+    else
         log_warning "Node.js > 24 detected → script tested only up to Node 24"
         log "Continuing anyway…"
-    else
-        log_success "Node.js 24 already installed"
     fi
 }

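For reference, `(( ... ))` compares integers, so the major version has to be stripped out of node's "vX.Y.Z" string first. The extraction is not shown in this hunk; a minimal sketch of the usual approach:

    current=$(node --version)           # e.g. v24.1.0
    major_version=${current#v}          # drop the leading 'v'          -> 24.1.0
    major_version=${major_version%%.*}  # keep text before the first '.' -> 24
    if ((major_version < 24)); then
        echo "upgrade needed"
    else
        echo "Node.js $major_version is fine"
    fi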
@@ -880,22 +908,39 @@ check_node_version() {
 upgrade_node_to_24() {
     log "Preparing Node.js 24 upgrade…"

-    # Remove old nodesource repo if it exists
+    # Remove old nodesource repo files if they exist
     if [ -f /etc/apt/sources.list.d/nodesource.list ]; then
+        log "Removing old nodesource.list file..."
         rm -f /etc/apt/sources.list.d/nodesource.list
     fi
+    if [ -f /etc/apt/sources.list.d/nodesource.sources ]; then
+        log "Removing old nodesource.sources file..."
+        rm -f /etc/apt/sources.list.d/nodesource.sources
+    fi
+
+    # Update apt cache first
+    log "Updating apt cache..."
+    apt-get update >>"$LOG_FILE" 2>&1 || true

     # Install NodeSource repo for Node.js 24
-    curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh
-    if ! bash /tmp/node24_setup.sh > /tmp/node24_setup.log 2>&1; then
+    log "Downloading Node.js 24 setup script..."
+    if ! curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh; then
+        log_error "Failed to download Node.js 24 setup script"
+        re_enable_service_on_failure
+        exit 1
+    fi
+
+    if ! bash /tmp/node24_setup.sh >/tmp/node24_setup.log 2>&1; then
         log_error "Failed to configure Node.js 24 repository"
         tail -20 /tmp/node24_setup.log | while read -r line; do log_error "$line"; done
+        re_enable_service_on_failure
         exit 1
     fi

     log "Installing Node.js 24…"
-    if ! apt-get install -y nodejs >> "$LOG_FILE" 2>&1; then
+    if ! apt-get install -y nodejs >>"$LOG_FILE" 2>&1; then
         log_error "Failed to install Node.js 24"
+        re_enable_service_on_failure
         exit 1
     fi

@@ -912,21 +957,21 @@ main() {
         init_log
         log "Running as detached process"
         sleep 3

     else
         init_log
     fi

     # Check if we're running from the application directory and not already relocated
     if [ -z "${PVE_UPDATE_RELOCATED:-}" ] && [ -f "package.json" ] && [ -f "server.js" ]; then
         log "Detected running from application directory"
         bash "$0" --relocated
         exit $?
     fi

     # Ensure we're in the application directory
     local app_dir

     # First check if we're already in the right directory
     if [ -f "package.json" ] && [ -f "server.js" ]; then
         app_dir="$(pwd)"
@@ -943,79 +988,76 @@ main() {
             exit 1
         fi
     fi

     # Check dependencies
     check_dependencies

     # Load GitHub token for higher rate limits
     load_github_token

     # Check if service was running before update
     if check_service && systemctl is-active --quiet pvescriptslocal.service; then
         SERVICE_WAS_RUNNING=true
     else
         SERVICE_WAS_RUNNING=false
     fi

     # Get latest release info
     local release_info
     release_info=$(get_latest_release)

     # Backup data directory
     backup_data

     # Stop the application before updating
     stop_application

     # Check Node.js version
     check_node_version

-    #Update Node.js to 24
-    upgrade_node_to_24
-
     # Download and extract release
     local source_dir
     source_dir=$(download_release "$release_info")

     # Clear the original directory before updating
     clear_original_directory

     # Update files
     if ! update_files "$source_dir"; then
         log_error "File update failed, rolling back..."
         rollback
     fi

     # Restore .env and data directory before building
     restore_backup_files

     # Verify database was restored correctly
     if ! verify_database_restored; then
         log_error "Database verification failed, rolling back..."
         rollback
     fi

     # Ensure DATABASE_URL is set for Prisma
     ensure_database_url

     # Install dependencies and build
     if ! install_and_build; then
         log_error "Install and build failed, rolling back..."
         rollback
     fi

     # Start the application
     if ! start_application; then
         log_error "Failed to start application after update"
         rollback
     fi

     # Cleanup only after successful start
     rm -rf "$source_dir"
     rm -rf "/tmp/pve-update-$$"
     rm -rf "$BACKUP_DIR"
     log "Backup directory cleaned up"

     log_success "Update completed successfully!"
 }

@@ -1023,4 +1065,4 @@ main() {
 if ! main "$@"; then
     log_error "Update script failed with exit code $?"
     exit 1
 fi