Compare commits

1 commit: main...update-ver (f6a8395c22)
@@ -18,12 +18,7 @@ ALLOWED_SCRIPT_PATHS="scripts/"
 WEBSOCKET_PORT="3001"

 # User settings
-# Optional tokens for private repos: GITHUB_TOKEN (GitHub), GITLAB_TOKEN (GitLab),
-# BITBUCKET_APP_PASSWORD or BITBUCKET_TOKEN (Bitbucket). REPO_URL and added repos
-# can be GitHub, GitLab, Bitbucket, or custom Git servers.
 GITHUB_TOKEN=
-GITLAB_TOKEN=
-BITBUCKET_APP_PASSWORD=
 SAVE_FILTER=false
 FILTERS=
 AUTH_USERNAME=
.github/workflows/publish_release.yml (18 changes, vendored)
@@ -31,24 +31,20 @@ jobs:
 echo "Found draft version: ${{ steps.draft.outputs.tag_name }}"


-- name: Create branch and commit VERSION and package.json
+- name: Create branch and commit VERSION
 run: |
 branch="update-version-${{ steps.draft.outputs.tag_name }}"
 # Delete remote branch if exists
 git push origin --delete "$branch" || echo "No remote branch to delete"
 git fetch origin main
 git checkout -b "$branch" origin/main
-# Version without 'v' prefix (e.g. v1.2.3 -> 1.2.3)
+# Write VERSION file and timestamp to ensure a diff
 version="${{ steps.draft.outputs.tag_name }}"
-version_plain=$(echo "$version" | sed 's/^v//')
-# Write VERSION file
-echo "$version_plain" > VERSION
-# Update package.json version
-jq --arg v "$version_plain" '.version = $v' package.json > package.json.tmp && mv package.json.tmp package.json
-git add VERSION package.json
+echo "$version" | sed 's/^v//' > VERSION
+git add VERSION
 git config user.name "github-actions[bot]"
 git config user.email "github-actions[bot]@users.noreply.github.com"
-git commit -m "chore: bump version to $version_plain (VERSION + package.json)" --allow-empty
+git commit -m "chore: add VERSION $version" --allow-empty

 - name: Push changes
 run: |
@@ -61,8 +57,8 @@ jobs:
 pr_url=$(gh pr create \
 --base main \
 --head update-version-${{ steps.draft.outputs.tag_name }} \
---title "chore: bump version to ${{ steps.draft.outputs.tag_name }} (VERSION + package.json)" \
---body "Updates VERSION file and package.json version for release ${{ steps.draft.outputs.tag_name }}" \
+--title "chore: add VERSION ${{ steps.draft.outputs.tag_name }}" \
+--body "Adds VERSION file for release ${{ steps.draft.outputs.tag_name }}" \
 --label automated)

 pr_number=$(echo "$pr_url" | awk -F/ '{print $NF}')
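For reference, the version-bump step removed by this commit followed the pattern below; this is reassembled from the deleted workflow lines, with a placeholder tag value:

```sh
# Reassembled from the removed workflow lines; the tag is a placeholder.
version="v1.2.3"                                   # e.g. steps.draft.outputs.tag_name
version_plain=$(echo "$version" | sed 's/^v//')    # v1.2.3 -> 1.2.3
echo "$version_plain" > VERSION
# jq cannot edit in place, so write to a temp file and move it back.
jq --arg v "$version_plain" '.version = $v' package.json > package.json.tmp && mv package.json.tmp package.json
git add VERSION package.json
```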
package-lock.json (732 changes, generated): file diff suppressed because it is too large.
package.json (32 changes)
@@ -1,6 +1,6 @@
 {
 "name": "pve-scripts-local",
-"version": "0.5.6",
+"version": "0.1.0",
 "private": true,
 "type": "module",
 "scripts": {
@@ -25,13 +25,13 @@
 "typecheck": "tsc --noEmit"
 },
 "dependencies": {
-"@prisma/adapter-better-sqlite3": "^7.3.0",
-"@prisma/client": "^7.3.0",
+"@prisma/adapter-better-sqlite3": "^7.2.0",
+"@prisma/client": "^7.2.0",
 "@radix-ui/react-dropdown-menu": "^2.1.16",
 "@radix-ui/react-slot": "^1.2.4",
 "@t3-oss/env-nextjs": "^0.13.10",
 "@tailwindcss/typography": "^0.5.19",
-"@tanstack/react-query": "^5.90.20",
+"@tanstack/react-query": "^5.90.16",
 "@trpc/client": "^11.8.1",
 "@trpc/react-query": "^11.8.1",
 "@trpc/server": "^11.8.1",
@@ -42,14 +42,14 @@
 "@xterm/xterm": "^6.0.0",
 "axios": "^1.13.2",
 "bcryptjs": "^3.0.3",
-"better-sqlite3": "^12.6.2",
+"better-sqlite3": "^12.6.0",
 "class-variance-authority": "^0.7.1",
 "clsx": "^2.1.1",
 "cron-validator": "^1.4.0",
 "dotenv": "^17.2.3",
 "jsonwebtoken": "^9.0.3",
 "lucide-react": "^0.562.0",
-"next": ">=16.1.5",
+"next": "^16.1.1",
 "node-cron": "^4.2.1",
 "node-pty": "^1.1.0",
 "react": "^19.2.3",
@@ -66,33 +66,32 @@
 "zod": "^4.3.5"
 },
 "devDependencies": {
-"next": ">=16.1.5",
 "@tailwindcss/postcss": "^4.1.18",
 "@testing-library/jest-dom": "^6.9.1",
-"@testing-library/react": "^16.3.2",
+"@testing-library/react": "^16.3.1",
 "@testing-library/user-event": "^14.6.1",
 "@types/bcryptjs": "^3.0.0",
 "@types/better-sqlite3": "^7.6.13",
 "@types/jsonwebtoken": "^9.0.10",
-"@types/node": "^24.10.9",
+"@types/node": "^24.10.4",
 "@types/node-cron": "^3.0.11",
 "@types/react": "^19.2.8",
 "@types/react-dom": "^19.2.3",
 "@vitejs/plugin-react": "^5.1.2",
 "@vitest/coverage-v8": "^4.0.17",
 "@vitest/ui": "^4.0.17",
-"baseline-browser-mapping": "^2.9.15",
+"baseline-browser-mapping": "^2.9.14",
 "eslint": "^9.39.2",
-"eslint-config-next": "^16.1.3",
+"eslint-config-next": "^16.1.1",
 "jsdom": "^27.4.0",
 "postcss": "^8.5.6",
-"prettier": "^3.8.0",
+"prettier": "^3.7.4",
 "prettier-plugin-tailwindcss": "^0.7.2",
-"prisma": "^7.3.0",
+"prisma": "^7.2.0",
 "tailwindcss": "^4.1.18",
 "tsx": "^4.21.0",
 "typescript": "^5.9.3",
-"typescript-eslint": "^8.54.0",
+"typescript-eslint": "^8.53.0",
 "vitest": "^4.0.17"
 },
 "ct3aMetadata": {
@@ -103,7 +102,6 @@
 "node": ">=24.0.0"
 },
 "overrides": {
-"prismjs": "^1.30.0",
-"hono": ">=4.11.7"
+"prismjs": "^1.30.0"
 }
 }
@@ -11,9 +11,6 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
 load_functions
 catch_errors

-# Get LXC IP address (must be called INSIDE container, after network is up)
-get_lxc_ip
-
 # This function enables IPv6 if it's not disabled and sets verbose mode
 verb_ip6() {
 set_std_mode # Set STD mode based on VERBOSE
@@ -128,13 +125,22 @@ update_os() {
 # This function modifies the message of the day (motd) and SSH settings
 motd_ssh() {
 echo "export TERM='xterm-256color'" >>/root/.bashrc
+IP=$(ip -4 addr show eth0 | awk '/inet / {print $2}' | cut -d/ -f1 | head -n 1)

+if [ -f "/etc/os-release" ]; then
+OS_NAME=$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"')
+OS_VERSION=$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')
+else
+OS_NAME="Alpine Linux"
+OS_VERSION="Unknown"
+fi
+
 PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
 echo "echo -e \"\"" >"$PROFILE_FILE"
 echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
 echo "echo \"\"" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
+echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}${OS_NAME} - Version: ${OS_VERSION}${CL}\"" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
 echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"

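The motd change above swaps login-time command substitution for values captured when the profile script is generated. A minimal sketch of the difference, using illustrative file paths:

```sh
OS_NAME="Alpine Linux"   # assume detected earlier, as in the new motd_ssh
# Old approach: the escaped \$(...) defers the grep until each login.
echo "echo \"OS: \$(grep ^NAME /etc/os-release | cut -d= -f2)\"" >/tmp/profile-old.sh
# New approach: ${OS_NAME} expands now, baking the value into the generated script.
echo "echo \"OS: ${OS_NAME}\"" >/tmp/profile-new.sh
```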
@@ -1,188 +1,507 @@
 #!/bin/ash
 # shellcheck shell=ash
-# Copyright (c) 2021-2026 community-scripts ORG
-# Author: MickLesk
-# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-
-if ! command -v curl >/dev/null 2>&1; then
-apk update && apk add curl >/dev/null 2>&1
-fi
-source "$(dirname "${BASH_SOURCE[0]}")/core.func"
-source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
-load_functions
-catch_errors
-
-# Get LXC IP address (must be called INSIDE container, after network is up)
-get_lxc_ip
-
-# This function enables IPv6 if it's not disabled and sets verbose mode
-verb_ip6() {
-set_std_mode # Set STD mode based on VERBOSE
-
-if [ "${IPV6_METHOD:-}" = "disable" ]; then
-msg_info "Disabling IPv6 (this may affect some services)"
-$STD sysctl -w net.ipv6.conf.all.disable_ipv6=1
-$STD sysctl -w net.ipv6.conf.default.disable_ipv6=1
-$STD sysctl -w net.ipv6.conf.lo.disable_ipv6=1
-mkdir -p /etc/sysctl.d
-$STD tee /etc/sysctl.d/99-disable-ipv6.conf >/dev/null <<EOF
-net.ipv6.conf.all.disable_ipv6 = 1
-net.ipv6.conf.default.disable_ipv6 = 1
-net.ipv6.conf.lo.disable_ipv6 = 1
-EOF
-$STD rc-update add sysctl default
-msg_ok "Disabled IPv6"
+# Expects existing msg_* functions and optional $STD from the framework.
+
+# ------------------------------
+# helpers
+# ------------------------------
+lower() { printf '%s' "$1" | tr '[:upper:]' '[:lower:]'; }
+has() { command -v "$1" >/dev/null 2>&1; }
+
+need_tool() {
+# usage: need_tool curl jq unzip ...
+# setup missing tools via apk
+local missing=0 t
+for t in "$@"; do
+if ! has "$t"; then missing=1; fi
+done
+if [ "$missing" -eq 1 ]; then
+msg_info "Installing tools: $*"
+apk add --no-cache "$@" >/dev/null 2>&1 || {
+msg_error "apk add failed for: $*"
+return 1
+}
+msg_ok "Tools ready: $*"
 fi
 }

-set -Eeuo pipefail
-trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR
-trap on_exit EXIT
-trap on_interrupt INT
-trap on_terminate TERM
+net_resolves() {
+# better handling for missing getent on Alpine
+# usage: net_resolves api.github.com
+local host="$1"
+ping -c1 -W1 "$host" >/dev/null 2>&1 || nslookup "$host" >/dev/null 2>&1
+}

-error_handler() {
-local exit_code="$1"
-local line_number="$2"
-local command="$3"
-
-if [[ "$exit_code" -eq 0 ]]; then
+ensure_usr_local_bin_persist() {
+local PROFILE_FILE="/etc/profile.d/10-localbin.sh"
+if [ ! -f "$PROFILE_FILE" ]; then
+echo 'case ":$PATH:" in *:/usr/local/bin:*) ;; *) export PATH="/usr/local/bin:$PATH";; esac' >"$PROFILE_FILE"
+chmod +x "$PROFILE_FILE"
+fi
+}
+
+download_with_progress() {
+# $1 url, $2 dest
+local url="$1" out="$2" cl
+need_tool curl pv || return 1
+cl=$(curl -fsSLI "$url" 2>/dev/null | awk 'tolower($0) ~ /^content-length:/ {print $2}' | tr -d '\r')
+if [ -n "$cl" ]; then
+curl -fsSL "$url" | pv -s "$cl" >"$out" || {
+msg_error "Download failed: $url"
+return 1
+}
+else
+curl -fL# -o "$out" "$url" || {
+msg_error "Download failed: $url"
+return 1
+}
+fi
+}
+
+# ------------------------------
+# GitHub: check Release
+# ------------------------------
+check_for_gh_release() {
+# app, repo, [pinned]
+local app="$1" source="$2" pinned="${3:-}"
+local app_lc
+app_lc="$(lower "$app" | tr -d ' ')"
+local current_file="$HOME/.${app_lc}"
+local current="" release tag
+
+msg_info "Check for update: $app"
+
+net_resolves api.github.com || {
+msg_error "DNS/network error: api.github.com"
+return 1
+}
+need_tool curl jq || return 1
+
+tag=$(curl -fsSL "https://api.github.com/repos/${source}/releases/latest" | jq -r '.tag_name // empty')
+[ -z "$tag" ] && {
+msg_error "Unable to fetch latest tag for $app"
+return 1
+}
+release="${tag#v}"
+
+[ -f "$current_file" ] && current="$(cat "$current_file")"
+
+if [ -n "$pinned" ]; then
+if [ "$pinned" = "$release" ]; then
+msg_ok "$app pinned to v$pinned (no update)"
+return 1
+fi
+if [ "$current" = "$pinned" ]; then
+msg_ok "$app pinned v$pinned installed (upstream v$release)"
+return 1
+fi
+msg_info "$app pinned v$pinned (upstream v$release) → update/downgrade"
+CHECK_UPDATE_RELEASE="$pinned"
 return 0
 fi

-printf "\e[?25h"
-echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
-exit "$exit_code"
-}
-
-on_exit() {
-local exit_code="$?"
-[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
-exit "$exit_code"
-}
-
-on_interrupt() {
-echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
-exit 130
-}
-
-on_terminate() {
-echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
-exit 143
-}
-
-# This function sets up the Container OS by generating the locale, setting the timezone, and checking the network connection
-setting_up_container() {
-msg_info "Setting up Container OS"
-while [ $i -gt 0 ]; do
-if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" != "" ]; then
-break
-fi
-echo 1>&2 -en "${CROSS}${RD} No Network! "
-sleep $RETRY_EVERY
-i=$((i - 1))
-done
-
-if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" = "" ]; then
-echo 1>&2 -e "\n${CROSS}${RD} No Network After $RETRY_NUM Tries${CL}"
-echo -e "${NETWORK}Check Network Settings"
-exit 1
+if [ "$release" != "$current" ] || [ ! -f "$current_file" ]; then
+CHECK_UPDATE_RELEASE="$release"
+msg_info "New release available: v$release (current: v${current:-none})"
+return 0
 fi
-msg_ok "Set up Container OS"
-msg_ok "Network Connected: ${BL}$(ip addr show | grep 'inet ' | awk '{print $2}' | cut -d'/' -f1 | tail -n1)${CL}"
+msg_ok "$app is up to date (v$release)"
+return 1
 }

-# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
-network_check() {
-set +e
-trap - ERR
-if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
-ipv4_status="${GN}✔${CL} IPv4"
+# ------------------------------
+# GitHub: get Release & deploy (Alpine)
+# modes: tarball | prebuild | singlefile
+# ------------------------------
+fetch_and_deploy_gh() {
+# $1 app, $2 repo, [$3 mode], [$4 version], [$5 target], [$6 asset_pattern]
+local app="$1" repo="$2" mode="${3:-tarball}" version="${4:-latest}" target="${5:-/opt/$1}" pattern="${6:-}"
+local app_lc
+app_lc="$(lower "$app" | tr -d ' ')"
+local vfile="$HOME/.${app_lc}"
+local json url filename tmpd unpack
+
+net_resolves api.github.com || {
+msg_error "DNS/network error"
+return 1
+}
+need_tool curl jq tar || return 1
+[ "$mode" = "prebuild" ] || [ "$mode" = "singlefile" ] && need_tool unzip >/dev/null 2>&1 || true
+
+tmpd="$(mktemp -d)" || return 1
+mkdir -p "$target"
+
+# Release JSON
+if [ "$version" = "latest" ]; then
+json="$(curl -fsSL "https://api.github.com/repos/$repo/releases/latest")" || {
+msg_error "GitHub API failed"
+rm -rf "$tmpd"
+return 1
+}
 else
-ipv4_status="${RD}✖${CL} IPv4"
-read -r -p "Internet NOT connected. Continue anyway? <y/N> " prompt
-if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
-echo -e "${INFO}${RD}Expect Issues Without Internet${CL}"
+json="$(curl -fsSL "https://api.github.com/repos/$repo/releases/tags/$version")" || {
+msg_error "GitHub API failed"
+rm -rf "$tmpd"
+return 1
+}
+fi
+
+# correct Version
+version="$(printf '%s' "$json" | jq -r '.tag_name // empty')"
+version="${version#v}"
+
+[ -z "$version" ] && {
+msg_error "No tag in release json"
+rm -rf "$tmpd"
+return 1
+}
+
+case "$mode" in
+tarball | source)
+url="$(printf '%s' "$json" | jq -r '.tarball_url // empty')"
+[ -z "$url" ] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
+filename="${app_lc}-${version}.tar.gz"
+download_with_progress "$url" "$tmpd/$filename" || {
+rm -rf "$tmpd"
+return 1
+}
+tar -xzf "$tmpd/$filename" -C "$tmpd" || {
+msg_error "tar extract failed"
+rm -rf "$tmpd"
+return 1
+}
+unpack="$(find "$tmpd" -mindepth 1 -maxdepth 1 -type d | head -n1)"
+# copy content of unpack to target
+(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
+msg_error "copy failed"
+rm -rf "$tmpd"
+return 1
+}
+;;
+prebuild)
+[ -n "$pattern" ] || {
+msg_error "prebuild requires asset pattern"
+rm -rf "$tmpd"
+return 1
+}
+url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
+BEGIN{IGNORECASE=1}
+$0 ~ p {print; exit}
+')"
+[ -z "$url" ] && {
+msg_error "asset not found for pattern: $pattern"
+rm -rf "$tmpd"
+return 1
+}
+filename="${url##*/}"
+download_with_progress "$url" "$tmpd/$filename" || {
+rm -rf "$tmpd"
+return 1
+}
+# unpack archive (Zip or tarball)
+case "$filename" in
+*.zip)
+need_tool unzip || {
+rm -rf "$tmpd"
+return 1
+}
+mkdir -p "$tmpd/unp"
+unzip -q "$tmpd/$filename" -d "$tmpd/unp"
+;;
+*.tar.gz | *.tgz | *.tar.xz | *.tar.zst | *.tar.bz2)
+mkdir -p "$tmpd/unp"
+tar -xf "$tmpd/$filename" -C "$tmpd/unp"
+;;
+*)
+msg_error "unsupported archive: $filename"
+rm -rf "$tmpd"
+return 1
+;;
+esac
+# strip the top-level folder
+if [ "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d | wc -l)" -eq 1 ] && [ -z "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type f | head -n1)" ]; then
+unpack="$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d)"
+(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
+msg_error "copy failed"
+rm -rf "$tmpd"
+return 1
+}
 else
-echo -e "${NETWORK}Check Network Settings"
-exit 1
+(cd "$tmpd/unp" && tar -cf - .) | (cd "$target" && tar -xf -) || {
+msg_error "copy failed"
+rm -rf "$tmpd"
+return 1
+}
+fi
+;;
+singlefile)
+[ -n "$pattern" ] || {
+msg_error "singlefile requires asset pattern"
+rm -rf "$tmpd"
+return 1
+}
+url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
+BEGIN{IGNORECASE=1}
+$0 ~ p {print; exit}
+')"
+[ -z "$url" ] && {
+msg_error "asset not found for pattern: $pattern"
+rm -rf "$tmpd"
+return 1
+}
+filename="${url##*/}"
+download_with_progress "$url" "$target/$app" || {
+rm -rf "$tmpd"
+return 1
+}
+chmod +x "$target/$app"
+;;
+*)
+msg_error "Unknown mode: $mode"
+rm -rf "$tmpd"
+return 1
+;;
+esac
+
+echo "$version" >"$vfile"
+ensure_usr_local_bin_persist
+rm -rf "$tmpd"
+msg_ok "Deployed $app ($version) → $target"
+}
+
+# ------------------------------
+# yq (mikefarah) – Alpine
+# ------------------------------
+setup_yq() {
+# prefer apk, unless FORCE_GH=1
+if [ "${FORCE_GH:-0}" != "1" ] && apk info -e yq >/dev/null 2>&1; then
+msg_info "Updating yq via apk"
+apk add --no-cache --upgrade yq >/dev/null 2>&1 || true
+msg_ok "yq ready ($(yq --version 2>/dev/null))"
+return 0
+fi
+
+need_tool curl || return 1
+local arch bin url tmp
+case "$(uname -m)" in
+x86_64) arch="amd64" ;;
+aarch64) arch="arm64" ;;
+*)
+msg_error "Unsupported arch for yq: $(uname -m)"
+return 1
+;;
+esac
+url="https://github.com/mikefarah/yq/releases/latest/download/yq_linux_${arch}"
+tmp="$(mktemp)"
+download_with_progress "$url" "$tmp" || return 1
+install -m 0755 "$tmp" /usr/local/bin/yq
+rm -f "$tmp"
+msg_ok "Setup yq ($(yq --version 2>/dev/null))"
+}
+
+# ------------------------------
+# Adminer – Alpine
+# ------------------------------
+setup_adminer() {
+need_tool curl || return 1
+msg_info "Setup Adminer (Alpine)"
+mkdir -p /var/www/localhost/htdocs/adminer
+curl -fsSL https://github.com/vrana/adminer/releases/latest/download/adminer.php \
+-o /var/www/localhost/htdocs/adminer/index.php || {
+msg_error "Adminer download failed"
+return 1
+}
+msg_ok "Adminer at /adminer (served by your webserver)"
+}
+
+# ------------------------------
+# uv – Alpine (musl tarball)
+# optional: PYTHON_VERSION="3.12"
+# ------------------------------
+setup_uv() {
+need_tool curl tar || return 1
+local UV_BIN="/usr/local/bin/uv"
+local arch tarball url tmpd ver installed
+
+case "$(uname -m)" in
+x86_64) arch="x86_64-unknown-linux-musl" ;;
+aarch64) arch="aarch64-unknown-linux-musl" ;;
+*)
+msg_error "Unsupported arch for uv: $(uname -m)"
+return 1
+;;
+esac
+
+ver="$(curl -fsSL https://api.github.com/repos/astral-sh/uv/releases/latest | jq -r '.tag_name' 2>/dev/null)"
+ver="${ver#v}"
+[ -z "$ver" ] && {
+msg_error "uv: cannot determine latest version"
+return 1
+}
+
+if has "$UV_BIN"; then
+installed="$($UV_BIN -V 2>/dev/null | awk '{print $2}')"
+[ "$installed" = "$ver" ] && {
+msg_ok "uv $ver already installed"
+return 0
+}
+msg_info "Updating uv $installed → $ver"
+else
+msg_info "Setup uv $ver"
+fi
+
+tmpd="$(mktemp -d)" || return 1
+tarball="uv-${arch}.tar.gz"
+url="https://github.com/astral-sh/uv/releases/download/v${ver}/${tarball}"
+
+download_with_progress "$url" "$tmpd/uv.tar.gz" || {
+rm -rf "$tmpd"
+return 1
+}
+tar -xzf "$tmpd/uv.tar.gz" -C "$tmpd" || {
+msg_error "uv: extract failed"
+rm -rf "$tmpd"
+return 1
+}
+
+# tar contains ./uv
+if [ -x "$tmpd/uv" ]; then
+install -m 0755 "$tmpd/uv" "$UV_BIN"
+else
+# fallback: in subfolder
+install -m 0755 "$tmpd"/*/uv "$UV_BIN" 2>/dev/null || {
+msg_error "uv binary not found in tar"
+rm -rf "$tmpd"
+return 1
+}
+fi
+rm -rf "$tmpd"
+ensure_usr_local_bin_persist
+msg_ok "Setup uv $ver"
+
+if [ -n "${PYTHON_VERSION:-}" ]; then
+local match
+match="$(uv python list --only-downloads 2>/dev/null | awk -v maj="$PYTHON_VERSION" '
+$0 ~ "^cpython-"maj"\\." { print $0 }' | awk -F- '{print $2}' | sort -V | tail -n1)"
+[ -z "$match" ] && {
+msg_error "No matching Python for $PYTHON_VERSION"
+return 1
+}
+if ! uv python list | grep -q "cpython-${match}-linux"; then
+msg_info "Installing Python $match via uv"
+uv python install "$match" || {
+msg_error "uv python install failed"
+return 1
+}
+msg_ok "Python $match installed (uv)"
 fi
 fi
-RESOLVEDIP=$(getent hosts github.com | awk '{ print $1 }')
-if [[ -z "$RESOLVEDIP" ]]; then
-msg_error "Internet: ${ipv4_status} DNS Failed"
+}
+
+# ------------------------------
+# Java – Alpine (OpenJDK)
+# JAVA_VERSION: 17|21 (Default 21)
+# ------------------------------
+setup_java() {
+local JAVA_VERSION="${JAVA_VERSION:-21}" pkg
+case "$JAVA_VERSION" in
+17) pkg="openjdk17-jdk" ;;
+21 | *) pkg="openjdk21-jdk" ;;
+esac
+msg_info "Setup Java (OpenJDK $JAVA_VERSION)"
+apk add --no-cache "$pkg" >/dev/null 2>&1 || {
+msg_error "apk add $pkg failed"
+return 1
+}
+# set JAVA_HOME
+local prof="/etc/profile.d/20-java.sh"
+if [ ! -f "$prof" ]; then
+echo 'export JAVA_HOME=$(dirname $(dirname $(readlink -f $(command -v java))))' >"$prof"
+echo 'case ":$PATH:" in *:$JAVA_HOME/bin:*) ;; *) export PATH="$JAVA_HOME/bin:$PATH";; esac' >>"$prof"
+chmod +x "$prof"
+fi
+msg_ok "Java ready: $(java -version 2>&1 | head -n1)"
+}
+
+# ------------------------------
+# Go – Alpine (apk prefers, else tarball)
+# ------------------------------
+setup_go() {
+if [ -z "${GO_VERSION:-}" ]; then
+msg_info "Setup Go (apk)"
+apk add --no-cache go >/dev/null 2>&1 || {
+msg_error "apk add go failed"
+return 1
+}
+msg_ok "Go ready: $(go version 2>/dev/null)"
+return 0
+fi
+
+need_tool curl tar || return 1
+local ARCH TARBALL URL TMP
+case "$(uname -m)" in
+x86_64) ARCH="amd64" ;;
+aarch64) ARCH="arm64" ;;
+*)
+msg_error "Unsupported arch for Go: $(uname -m)"
+return 1
+;;
+esac
+TARBALL="go${GO_VERSION}.linux-${ARCH}.tar.gz"
+URL="https://go.dev/dl/${TARBALL}"
+msg_info "Setup Go $GO_VERSION (tarball)"
+TMP="$(mktemp)"
+download_with_progress "$URL" "$TMP" || return 1
+rm -rf /usr/local/go
+tar -C /usr/local -xzf "$TMP" || {
+msg_error "extract go failed"
+rm -f "$TMP"
+return 1
+}
+rm -f "$TMP"
+ln -sf /usr/local/go/bin/go /usr/local/bin/go
+ln -sf /usr/local/go/bin/gofmt /usr/local/bin/gofmt
+ensure_usr_local_bin_persist
+msg_ok "Go ready: $(go version 2>/dev/null)"
+}
+
+# ------------------------------
+# Composer – Alpine
+# uses php83-cli + openssl + phar
+# ------------------------------
+setup_composer() {
+local COMPOSER_BIN="/usr/local/bin/composer"
+if ! has php; then
+# prefers php83
+msg_info "Installing PHP CLI for Composer"
+apk add --no-cache php83-cli php83-openssl php83-phar php83-iconv >/dev/null 2>&1 || {
+# Fallback to generic php if 83 not available
+apk add --no-cache php-cli php-openssl php-phar php-iconv >/dev/null 2>&1 || {
+msg_error "Failed to install php-cli for composer"
+return 1
+}
+}
+msg_ok "PHP CLI ready: $(php -v | head -n1)"
+fi
+
+if [ -x "$COMPOSER_BIN" ]; then
+msg_info "Updating Composer"
 else
-msg_ok "Internet: ${ipv4_status} DNS: ${BL}${RESOLVEDIP}${CL}"
-fi
-set -e
-trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
-}
-
-# This function updates the Container OS by running apt-get update and upgrade
-update_os() {
-msg_info "Updating Container OS"
-$STD apk -U upgrade
-source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
-msg_ok "Updated Container OS"
-}
-
-# This function modifies the message of the day (motd) and SSH settings
-motd_ssh() {
-echo "export TERM='xterm-256color'" >>/root/.bashrc
-
-PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
-echo "echo -e \"\"" >"$PROFILE_FILE"
-echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}"\" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
-echo "echo \"\"" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
-echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"
-
-# Configure SSH if enabled
-if [[ "${SSH_ROOT}" == "yes" ]]; then
-# Enable sshd service
-$STD rc-update add sshd
-# Allow root login via SSH
-sed -i "s/#PermitRootLogin prohibit-password/PermitRootLogin yes/g" /etc/ssh/sshd_config
-# Start the sshd service
-$STD /etc/init.d/sshd start
-fi
-}
-
-# Validate Timezone for some LXC's
-validate_tz() {
-[[ -f "/usr/share/zoneinfo/$1" ]]
-}
-
-# This function customizes the container and enables passwordless login for the root user
-customize() {
-if [[ "$PASSWORD" == "" ]]; then
-msg_info "Customizing Container"
-passwd -d root >/dev/null 2>&1
-
-# Ensure agetty is available
-apk add --no-cache --force-broken-world util-linux >/dev/null 2>&1
-
-# Create persistent autologin boot script
-mkdir -p /etc/local.d
-cat <<'EOF' >/etc/local.d/autologin.start
-#!/bin/sh
-sed -i 's|^tty1::respawn:.*|tty1::respawn:/sbin/agetty --autologin root --noclear tty1 38400 linux|' /etc/inittab
-kill -HUP 1
-EOF
-touch /root/.hushlogin
-
-chmod +x /etc/local.d/autologin.start
-rc-update add local >/dev/null 2>&1
-
-# Apply autologin immediately for current session
-/etc/local.d/autologin.start
-
-msg_ok "Customized Container"
+msg_info "Setup Composer"
 fi
-
-echo "bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/${app}.sh)\"" >/usr/bin/update
-chmod +x /usr/bin/update
+need_tool curl || return 1
+curl -fsSL https://getcomposer.org/installer -o /tmp/composer-setup.php || {
+msg_error "composer installer download failed"
+return 1
+}
+php /tmp/composer-setup.php --install-dir=/usr/local/bin --filename=composer >/dev/null 2>&1 || {
+msg_error "composer install failed"
+return 1
+}
+rm -f /tmp/composer-setup.php
+ensure_usr_local_bin_persist
+msg_ok "Composer ready: $(composer --version 2>/dev/null)"
 }
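Taken together, an update script could drive these new helpers roughly as below; the app and repo names are invented, and the v-prefixed tag is an assumption mirroring the leading v that check_for_gh_release strips:

```sh
# Hypothetical consumer of the helpers added above; "myapp"/"owner/myapp" are placeholders.
if check_for_gh_release "myapp" "owner/myapp"; then
  # check_for_gh_release exported CHECK_UPDATE_RELEASE; tarball mode unpacks
  # the release source into /opt/myapp (assumes the upstream tag carries a leading v).
  fetch_and_deploy_gh "myapp" "owner/myapp" "tarball" "v${CHECK_UPDATE_RELEASE}" "/opt/myapp"
fi

# singlefile mode downloads one release asset matching a pattern and marks it executable.
fetch_and_deploy_gh "mytool" "owner/mytool" "singlefile" "latest" "/usr/local/bin" "linux_amd64"

# setup_uv honors an optional PYTHON_VERSION major.minor pin.
PYTHON_VERSION="3.12" setup_uv
```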
File diff suppressed because it is too large.
@@ -127,34 +127,6 @@ icons() {
 HOURGLASS="${TAB}⏳${TAB}"
 }

-# ------------------------------------------------------------------------------
-# ensure_profile_loaded()
-#
-# - Sources /etc/profile.d/*.sh scripts if not already loaded
-# - Fixes PATH issues when running via pct enter/exec (non-login shells)
-# - Safe to call multiple times (uses guard variable)
-# - Should be called in update_script() or any script running inside LXC
-# ------------------------------------------------------------------------------
-ensure_profile_loaded() {
-# Skip if already loaded or running on Proxmox host
-[[ -n "${_PROFILE_LOADED:-}" ]] && return
-command -v pveversion &>/dev/null && return
-
-# Source all profile.d scripts to ensure PATH is complete
-if [[ -d /etc/profile.d ]]; then
-for script in /etc/profile.d/*.sh; do
-[[ -r "$script" ]] && source "$script"
-done
-fi
-
-# Also ensure /usr/local/bin is in PATH (common install location)
-if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then
-export PATH="/usr/local/bin:$PATH"
-fi
-
-export _PROFILE_LOADED=1
-}
-
 # ------------------------------------------------------------------------------
 # default_vars()
 #
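The deleted helper leaned on a guard-variable idiom so repeated calls stayed cheap; the pattern, reduced to a generic sketch with invented names:

```sh
# Generic re-entry guard, mirroring the removed _PROFILE_LOADED check.
load_once() {
  [[ -n "${_LOADED_ONCE:-}" ]] && return   # second call is a cheap no-op
  # ... one-time environment setup would go here ...
  export _LOADED_ONCE=1
}
```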
@@ -815,9 +787,11 @@ is_verbose_mode() {
 # ------------------------------------------------------------------------------
 # cleanup_lxc()
 #
-# - Cleans package manager and language caches (safe for installs AND updates)
-# - Supports Alpine (apk), Debian/Ubuntu (apt), Python, Node.js, Go, Rust, Ruby, PHP
-# - Uses fallback error handling to prevent cleanup failures from breaking installs
+# - Comprehensive cleanup of package managers, caches, and logs
+# - Supports Alpine (apk), Debian/Ubuntu (apt), and language package managers
+# - Cleans: Python (pip/uv), Node.js (npm/yarn/pnpm), Go, Rust, Ruby, PHP
+# - Truncates log files and vacuums systemd journal
+# - Run at end of container creation to minimize disk usage
 # ------------------------------------------------------------------------------
 cleanup_lxc() {
 msg_info "Cleaning up"
@@ -826,52 +800,32 @@ cleanup_lxc() {
 $STD apk cache clean || true
 rm -rf /var/cache/apk/*
 else
-$STD apt -y autoremove 2>/dev/null || msg_warn "apt autoremove failed (non-critical)"
-$STD apt -y autoclean 2>/dev/null || msg_warn "apt autoclean failed (non-critical)"
-$STD apt -y clean 2>/dev/null || msg_warn "apt clean failed (non-critical)"
+$STD apt -y autoremove || true
+$STD apt -y autoclean || true
+$STD apt -y clean || true
 fi

+# Clear temp artifacts (keep sockets/FIFOs; ignore errors)
 find /tmp /var/tmp -type f -name 'tmp*' -delete 2>/dev/null || true
 find /tmp /var/tmp -type f -name 'tempfile*' -delete 2>/dev/null || true

-# Python
-if command -v pip &>/dev/null; then
-rm -rf /root/.cache/pip 2>/dev/null || true
-fi
-if command -v uv &>/dev/null; then
-rm -rf /root/.cache/uv 2>/dev/null || true
-fi
-
-# Node.js
+# Node.js npm - directly remove cache directory
+# npm cache clean/verify can fail with ENOTEMPTY errors, so we skip them
 if command -v npm &>/dev/null; then
 rm -rf /root/.npm/_cacache /root/.npm/_logs 2>/dev/null || true
 fi
-if command -v yarn &>/dev/null; then
-rm -rf /root/.cache/yarn /root/.yarn/cache 2>/dev/null || true
-fi
-if command -v pnpm &>/dev/null; then
-pnpm store prune &>/dev/null || true
-fi
-
-# Go (only build cache, not modules)
-if command -v go &>/dev/null; then
-$STD go clean -cache 2>/dev/null || true
-fi
-
-# Rust (only registry cache, not build artifacts)
-if command -v cargo &>/dev/null; then
-rm -rf /root/.cargo/registry/cache /root/.cargo/.package-cache 2>/dev/null || true
-fi
-
-# Ruby
-if command -v gem &>/dev/null; then
-rm -rf /root/.gem/cache 2>/dev/null || true
-fi
-
-# PHP
-if command -v composer &>/dev/null; then
-rm -rf /root/.composer/cache 2>/dev/null || true
-fi
-
+# Node.js yarn
+if command -v yarn &>/dev/null; then yarn cache clean &>/dev/null || true; fi
+# Node.js pnpm
+if command -v pnpm &>/dev/null; then pnpm store prune &>/dev/null || true; fi
+# Go
+if command -v go &>/dev/null; then $STD go clean -cache -modcache || true; fi
+# Rust cargo
+if command -v cargo &>/dev/null; then $STD cargo clean || true; fi
+# Ruby gem
+if command -v gem &>/dev/null; then $STD gem cleanup || true; fi
+# Composer (PHP)
+if command -v composer &>/dev/null; then COMPOSER_ALLOW_SUPERUSER=1 $STD composer clear-cache || true; fi
+
 msg_ok "Cleaned"
 }
@@ -924,95 +878,8 @@ check_or_create_swap() {
 fi
 }

-# ------------------------------------------------------------------------------
-# Loads LOCAL_IP from persistent store or detects if missing.
-#
-# Description:
-# - Loads from /run/local-ip.env or performs runtime lookup
-# ------------------------------------------------------------------------------
-
-function get_lxc_ip() {
-local IP_FILE="/run/local-ip.env"
-if [[ -f "$IP_FILE" ]]; then
-# shellcheck disable=SC1090
-source "$IP_FILE"
-fi
-
-if [[ -z "${LOCAL_IP:-}" ]]; then
-get_current_ip() {
-local ip
-
-# Try direct interface lookup for eth0 FIRST (most reliable for LXC) - IPv4
-ip=$(ip -4 addr show eth0 2>/dev/null | awk '/inet / {print $2}' | cut -d/ -f1 | head -n1)
-if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-echo "$ip"
-return 0
-fi
-
-# Fallback: Try hostname -I (returns IPv4 first if available)
-if command -v hostname >/dev/null 2>&1; then
-ip=$(hostname -I 2>/dev/null | awk '{print $1}')
-if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-echo "$ip"
-return 0
-fi
-fi
-
-# Try routing table with IPv4 targets
-local ipv4_targets=("8.8.8.8" "1.1.1.1" "default")
-for target in "${ipv4_targets[@]}"; do
-if [[ "$target" == "default" ]]; then
-ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
-else
-ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
-fi
-if [[ -n "$ip" ]]; then
-echo "$ip"
-return 0
-fi
-done
-
-# IPv6 fallback: Try direct interface lookup for eth0
-ip=$(ip -6 addr show eth0 scope global 2>/dev/null | awk '/inet6 / {print $2}' | cut -d/ -f1 | head -n1)
-if [[ -n "$ip" && "$ip" =~ : ]]; then
-echo "$ip"
-return 0
-fi
-
-# IPv6 fallback: Try hostname -I for IPv6
-if command -v hostname >/dev/null 2>&1; then
-ip=$(hostname -I 2>/dev/null | tr ' ' '\n' | grep -E ':' | head -n1)
-if [[ -n "$ip" && "$ip" =~ : ]]; then
-echo "$ip"
-return 0
-fi
-fi
-
-# IPv6 fallback: Use routing table with IPv6 targets
-local ipv6_targets=("2001:4860:4860::8888" "2606:4700:4700::1111")
-for target in "${ipv6_targets[@]}"; do
-ip=$(ip -6 route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
-if [[ -n "$ip" && "$ip" =~ : ]]; then
-echo "$ip"
-return 0
-fi
-done
-
-return 1
-}
-
-LOCAL_IP="$(get_current_ip || true)"
-if [[ -z "$LOCAL_IP" ]]; then
-msg_error "Could not determine LOCAL_IP"
-return 1
-fi
-fi
-
-export LOCAL_IP
-}
-
 # ==============================================================================
 # SIGNAL TRAPS
 # ==============================================================================

 trap 'stop_spinner' EXIT INT TERM
@@ -37,9 +37,6 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
 load_functions
 catch_errors

-# Get LXC IP address (must be called INSIDE container, after network is up)
-get_lxc_ip
-
 # ==============================================================================
 # SECTION 2: NETWORK & CONNECTIVITY
 # ==============================================================================
@@ -79,13 +76,6 @@ EOF
 # ------------------------------------------------------------------------------
 setting_up_container() {
 msg_info "Setting up Container OS"
-
-# Fix Debian 13 LXC template bug where / is owned by nobody
-# Only attempt in privileged containers (unprivileged cannot chown /)
-if [[ "$(stat -c '%U' /)" != "root" ]]; then
-(chown root:root / 2>/dev/null) || true
-fi
-
 for ((i = RETRY_NUM; i > 0; i--)); do
 if [ "$(hostname -I)" != "" ]; then
 break
@@ -184,10 +184,7 @@ install_packages_with_retry() {
 local retry=0

 while [[ $retry -le $max_retries ]]; do
-if DEBIAN_FRONTEND=noninteractive $STD apt install -y \
--o Dpkg::Options::="--force-confdef" \
--o Dpkg::Options::="--force-confold" \
-"${packages[@]}" 2>/dev/null; then
+if $STD apt install -y "${packages[@]}" 2>/dev/null; then
 return 0
 fi

@@ -214,10 +211,7 @@ upgrade_packages_with_retry() {
 local retry=0

 while [[ $retry -le $max_retries ]]; do
-if DEBIAN_FRONTEND=noninteractive $STD apt install --only-upgrade -y \
--o Dpkg::Options::="--force-confdef" \
--o Dpkg::Options::="--force-confold" \
-"${packages[@]}" 2>/dev/null; then
+if $STD apt install --only-upgrade -y "${packages[@]}" 2>/dev/null; then
 return 0
 fi

@@ -574,8 +568,7 @@ EOF
 msg_error "Failed to download PHP keyring"
 return 1
 }
-# Don't use /dev/null redirection for dpkg as it may use background processes
-dpkg -i /tmp/debsuryorg-archive-keyring.deb >>"$(get_active_logfile)" 2>&1 || {
+dpkg -i /tmp/debsuryorg-archive-keyring.deb >/dev/null 2>&1 || {
 msg_error "Failed to install PHP keyring"
 rm -f /tmp/debsuryorg-archive-keyring.deb
 return 1
@@ -1845,9 +1838,8 @@ function fetch_and_deploy_gh_release() {
 }

 chmod 644 "$tmpdir/$filename"
-# SYSTEMD_OFFLINE=1 prevents systemd-tmpfiles failures in unprivileged LXC (Debian 13+/systemd 257+)
-SYSTEMD_OFFLINE=1 $STD apt install -y "$tmpdir/$filename" || {
-SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
+$STD apt install -y "$tmpdir/$filename" || {
+$STD dpkg -i "$tmpdir/$filename" || {
 msg_error "Both apt and dpkg installation failed"
 rm -rf "$tmpdir"
 return 1
@@ -1902,7 +1894,7 @@ function fetch_and_deploy_gh_release() {
 rm -rf "$tmpdir" "$unpack_tmp"
 return 1
 }
-elif [[ "$filename" == *.tar.* || "$filename" == *.tgz || "$filename" == *.txz ]]; then
+elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then
 tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
 msg_error "Failed to extract TAR archive"
 rm -rf "$tmpdir" "$unpack_tmp"
@@ -2006,6 +1998,50 @@ function fetch_and_deploy_gh_release() {
 rm -rf "$tmpdir"
 }

+# ------------------------------------------------------------------------------
+# Loads LOCAL_IP from persistent store or detects if missing.
+#
+# Description:
+# - Loads from /run/local-ip.env or performs runtime lookup
+# ------------------------------------------------------------------------------
+
+function import_local_ip() {
+local IP_FILE="/run/local-ip.env"
+if [[ -f "$IP_FILE" ]]; then
+# shellcheck disable=SC1090
+source "$IP_FILE"
+fi
+
+if [[ -z "${LOCAL_IP:-}" ]]; then
+get_current_ip() {
+local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
+local ip
+
+for target in "${targets[@]}"; do
+if [[ "$target" == "default" ]]; then
+ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+else
+ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+fi
+if [[ -n "$ip" ]]; then
+echo "$ip"
+return 0
+fi
+done
+
+return 1
+}
+
+LOCAL_IP="$(get_current_ip || true)"
+if [[ -z "$LOCAL_IP" ]]; then
+msg_error "Could not determine LOCAL_IP"
+return 1
+fi
+fi
+
+export LOCAL_IP
+}
+
 # ------------------------------------------------------------------------------
 # Installs Adminer (Debian/Ubuntu via APT, Alpine via direct download).
 #
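A hypothetical caller of the relocated helper, assuming the surrounding script provides msg_error as this file does:

```sh
# Sketch: consume the function added above; it exports LOCAL_IP on success.
import_local_ip || exit 1
echo "Container address: ${LOCAL_IP}"
```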
@@ -2633,7 +2669,6 @@ function setup_hwaccel() {
 # GPU Selection - Let user choose which GPU(s) to configure
 # ═══════════════════════════════════════════════════════════════════════════
 local -a SELECTED_INDICES=()
-local install_nvidia_drivers="yes"

 if [[ $gpu_count -eq 1 ]]; then
 # Single GPU - auto-select
@@ -2642,7 +2677,7 @@ function setup_hwaccel() {
 else
 # Multiple GPUs - show selection menu
 echo ""
-msg_custom "⚠" "${YW}" "Multiple GPUs detected:"
+msg_info "Multiple GPUs detected:"
 echo ""
 for i in "${!GPU_LIST[@]}"; do
 local type_display="${GPU_TYPES[$i]}"
@@ -2695,30 +2730,6 @@ function setup_hwaccel() {
 fi
 fi

-# Ask whether to install NVIDIA drivers in the container
-local nvidia_selected="no"
-for idx in "${SELECTED_INDICES[@]}"; do
-if [[ "${GPU_TYPES[$idx]}" == "NVIDIA" ]]; then
-nvidia_selected="yes"
-break
-fi
-done
-
-if [[ "$nvidia_selected" == "yes" ]]; then
-if [[ -n "${INSTALL_NVIDIA_DRIVERS:-}" ]]; then
-install_nvidia_drivers="${INSTALL_NVIDIA_DRIVERS}"
-else
-echo ""
-msg_custom "🎮" "${GN}" "NVIDIA GPU passthrough detected"
-local nvidia_reply=""
-read -r -t 60 -p "${TAB3}⚙️ Install NVIDIA driver libraries in the container? [Y/n] (auto-yes in 60s): " nvidia_reply || nvidia_reply=""
-case "${nvidia_reply,,}" in
-n | no) install_nvidia_drivers="no" ;;
-*) install_nvidia_drivers="yes" ;;
-esac
-fi
-fi
-
 # ═══════════════════════════════════════════════════════════════════════════
 # OS Detection
 # ═══════════════════════════════════════════════════════════════════════════
@@ -2779,11 +2790,7 @@ function setup_hwaccel() {
 # NVIDIA GPUs
 # ─────────────────────────────────────────────────────────────────────────
 NVIDIA)
-if [[ "$install_nvidia_drivers" == "yes" ]]; then
-_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
-else
-msg_warn "Skipping NVIDIA driver installation (user opted to install manually)"
-fi
+_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
 ;;
 esac
 done
@@ -2913,15 +2920,8 @@ _setup_intel_legacy() {
 vainfo \
 intel-gpu-tools 2>/dev/null || msg_warn "Some Intel legacy packages failed"

-# beignet provides OpenCL for older Intel GPUs (Sandy Bridge to Broadwell)
-# Note: beignet-opencl-icd was removed in Debian 12+ and Ubuntu 22.04+
-# Check if package is available before attempting installation
-if apt-cache show beignet-opencl-icd &>/dev/null; then
-$STD apt -y install beignet-opencl-icd 2>/dev/null || msg_warn "beignet-opencl-icd installation failed (optional)"
-else
-msg_warn "beignet-opencl-icd not available - OpenCL support for legacy Intel GPU limited"
-msg_warn "Note: Hardware video encoding/decoding (VA-API) still works without OpenCL"
-fi
+# beignet provides OpenCL for older Intel GPUs (if available)
+$STD apt -y install beignet-opencl-icd 2>/dev/null || true

 msg_ok "Intel Legacy GPU configured"
 }
@@ -2989,24 +2989,16 @@ _setup_nvidia_gpu() {
 
 msg_info "Installing NVIDIA GPU drivers"
 
-# Prevent interactive dialogs (e.g., "Mismatching nvidia kernel module" whiptail)
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-
 # Detect host driver version (passed through via /proc)
-# Format varies by driver type:
-# Proprietary: "NVRM version: NVIDIA UNIX x86_64 Kernel Module 550.54.14 Thu..."
-# Open: "NVRM version: NVIDIA UNIX Open Kernel Module for x86_64 590.48.01 Release..."
-# Use regex to extract version number (###.##.## pattern)
 local nvidia_host_version=""
 if [[ -f /proc/driver/nvidia/version ]]; then
-nvidia_host_version=$(grep -oP '\d{3,}\.\d+\.\d+' /proc/driver/nvidia/version 2>/dev/null | head -1)
+nvidia_host_version=$(grep "NVRM version:" /proc/driver/nvidia/version 2>/dev/null | awk '{print $8}')
 fi
 
 if [[ -z "$nvidia_host_version" ]]; then
 msg_warn "NVIDIA host driver version not found in /proc/driver/nvidia/version"
 msg_warn "Ensure NVIDIA drivers are installed on host and GPU passthrough is enabled"
-$STD apt-get -y install va-driver-all vainfo 2>/dev/null || true
+$STD apt -y install va-driver-all vainfo 2>/dev/null || true
 return 0
 fi
 
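The hunk above swaps a regex extraction for a fixed `awk` field. A minimal sketch of the difference, using the two NVRM formats quoted in the removed comments (the sample lines are illustrative, trimmed from those comments):

```bash
proprietary='NVRM version: NVIDIA UNIX x86_64 Kernel Module  550.54.14  Thu...'
open='NVRM version: NVIDIA UNIX Open Kernel Module for x86_64  590.48.01  Release...'
for line in "$proprietary" "$open"; do
  echo "$line" | awk '{print $8}'                       # fixed field: shifts with the wording, wrong for the open module line
  echo "$line" | grep -oP '\d{3,}\.\d+\.\d+' | head -1  # version pattern: matches both layouts
done
```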
@@ -3019,116 +3011,54 @@ _setup_nvidia_gpu() {
 sed -i -E 's/Components: (.*)$/Components: \1 contrib non-free non-free-firmware/g' /etc/apt/sources.list.d/debian.sources 2>/dev/null || true
 fi
 fi
-$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"
 
-# For Debian 13 Trixie/Sid: Use Debian's own nvidia packages first (better compatibility)
-# NVIDIA's CUDA repo targets Debian 12 and may not have amd64 packages for Trixie
-if [[ "$os_codename" == "trixie" || "$os_codename" == "sid" ]]; then
-msg_info "Debian ${os_codename}: Using Debian's NVIDIA packages"
+# Determine CUDA repository
+local cuda_repo="debian12"
+case "$os_codename" in
+bullseye) cuda_repo="debian11" ;;
+bookworm) cuda_repo="debian12" ;;
+trixie | sid) cuda_repo="debian12" ;; # Forward compatible
+esac
 
-# Extract major version for flexible matching (580.126.09 -> 580)
-local nvidia_major_version="${nvidia_host_version%%.*}"
-# Check what versions are actually available
-local available_version=""
-available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)
-if [[ -n "$available_version" ]]; then
-msg_info "Found available NVIDIA version: ${available_version}"
-local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
-msg_ok "Installed NVIDIA libraries (${available_version})"
-else
-msg_warn "Failed to install NVIDIA ${available_version} - trying unversioned"
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null || true
-fi
+# Add NVIDIA CUDA repository
+if [[ ! -f /usr/share/keyrings/cuda-archive-keyring.gpg ]]; then
+msg_info "Adding NVIDIA CUDA repository (${cuda_repo})"
+local cuda_keyring
+cuda_keyring="$(mktemp)"
+if curl -fsSL -o "$cuda_keyring" "https://developer.download.nvidia.com/compute/cuda/repos/${cuda_repo}/x86_64/cuda-keyring_1.1-1_all.deb" 2>/dev/null; then
+$STD dpkg -i "$cuda_keyring" 2>/dev/null || true
 else
-# No matching major version - try latest available or unversioned
-msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x found in repos"
-available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | head -1 || true)
-if [[ -n "$available_version" ]]; then
-msg_info "Trying latest available: ${available_version} (may cause version mismatch)"
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
-libcuda1="${available_version}" libnvcuvid1="${available_version}" \
-libnvidia-encode1="${available_version}" libnvidia-ml1="${available_version}" 2>/dev/null ||
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
-libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
-msg_warn "NVIDIA library installation failed - GPU compute may not work"
-else
-msg_warn "No NVIDIA packages available in Debian repos - GPU support disabled"
-fi
+msg_warn "Failed to download NVIDIA CUDA keyring"
 fi
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends nvidia-smi 2>/dev/null || true
+rm -f "$cuda_keyring"
+fi
 
-else
-# Debian 11/12: Use NVIDIA CUDA repository for version matching
-local cuda_repo="debian12"
-case "$os_codename" in
-bullseye) cuda_repo="debian11" ;;
-bookworm) cuda_repo="debian12" ;;
-esac
-
-# Add NVIDIA CUDA repository
-if [[ ! -f /usr/share/keyrings/cuda-archive-keyring.gpg ]]; then
-msg_info "Adding NVIDIA CUDA repository (${cuda_repo})"
-local cuda_keyring
-cuda_keyring="$(mktemp)"
-if curl -fsSL -o "$cuda_keyring" "https://developer.download.nvidia.com/compute/cuda/repos/${cuda_repo}/x86_64/cuda-keyring_1.1-1_all.deb" 2>/dev/null; then
-$STD dpkg -i "$cuda_keyring" 2>/dev/null || true
-else
-msg_warn "Failed to download NVIDIA CUDA keyring"
-fi
-rm -f "$cuda_keyring"
-fi
-
-# Pin NVIDIA repo for version matching
-cat <<'NVIDIA_PIN' >/etc/apt/preferences.d/nvidia-cuda-pin
+# Pin NVIDIA repo for version matching
+cat <<'NVIDIA_PIN' >/etc/apt/preferences.d/nvidia-cuda-pin
 Package: *
 Pin: origin developer.download.nvidia.com
 Pin-Priority: 1001
 NVIDIA_PIN
 
-$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"
+$STD apt -y update
 
-# Extract major version for flexible matching (580.126.09 -> 580)
-local nvidia_major_version="${nvidia_host_version%%.*}"
+# Install version-matched NVIDIA libraries
+local nvidia_pkgs="libcuda1=${nvidia_host_version}* libnvcuvid1=${nvidia_host_version}* libnvidia-encode1=${nvidia_host_version}* libnvidia-ml1=${nvidia_host_version}*"
 
-# Check what versions are actually available in CUDA repo
-local available_version=""
-available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)
-
-if [[ -n "$available_version" ]]; then
-msg_info "Installing NVIDIA libraries (version ${available_version})"
-local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
-msg_ok "Installed version-matched NVIDIA libraries"
-else
-msg_warn "Version-pinned install failed - trying unpinned"
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
-msg_warn "NVIDIA library installation failed"
-fi
+msg_info "Installing NVIDIA libraries (version ${nvidia_host_version})"
+if $STD apt -y install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
+msg_ok "Installed version-matched NVIDIA libraries"
+else
+msg_warn "Version-pinned install failed - trying unpinned"
+if $STD apt -y install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null; then
+msg_warn "Installed NVIDIA libraries (unpinned) - version mismatch may occur"
 else
-msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x in CUDA repo (host: ${nvidia_host_version})"
-# Try latest available version
-available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | head -1 || true)
-if [[ -n "$available_version" ]]; then
-msg_info "Trying latest available: ${available_version} (version mismatch warning)"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
-libcuda1="${available_version}" libnvcuvid1="${available_version}" \
-libnvidia-encode1="${available_version}" libnvidia-ml1="${available_version}" 2>/dev/null; then
-msg_ok "Installed NVIDIA libraries (${available_version}) - version differs from host"
-else
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
-msg_warn "NVIDIA library installation failed"
-fi
-else
-msg_warn "No NVIDIA packages available in CUDA repo - GPU support disabled"
-fi
+msg_warn "NVIDIA library installation failed"
 fi
 
-$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends nvidia-smi 2>/dev/null || true
 fi
 
+$STD apt -y install --no-install-recommends nvidia-smi 2>/dev/null || true
 
 elif [[ "$os_id" == "ubuntu" ]]; then
 # Ubuntu versioning
 local ubuntu_cuda_repo=""
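Both branches of the hunk above rely on the same apt mechanic: a pin priority above 1000 makes apt prefer the pinned origin even when that means downgrading a package. A standalone sketch (the package queried is just an example):

```bash
cat <<'PIN' >/etc/apt/preferences.d/nvidia-cuda-pin
Package: *
Pin: origin developer.download.nvidia.com
Pin-Priority: 1001
PIN
apt update
apt-cache policy libcuda1   # the pinned NVIDIA origin should now win candidate selection
```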
@@ -3151,45 +3081,20 @@ NVIDIA_PIN
 rm -f "$cuda_keyring"
 fi
 
-$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"
+$STD apt -y update
 
-# Extract major version for flexible matching
-local nvidia_major_version="${nvidia_host_version%%.*}"
-# Check what versions are available
-local available_version=""
-available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)
-
-if [[ -n "$available_version" ]]; then
-msg_info "Installing NVIDIA libraries (version ${available_version})"
-local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
-msg_ok "Installed version-matched NVIDIA libraries"
-else
-# Fallback to Ubuntu repo packages with versioned nvidia-utils
-msg_warn "CUDA repo install failed - trying Ubuntu native packages (nvidia-utils-${nvidia_major_version})"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
-libnvidia-decode-${nvidia_major_version} libnvidia-encode-${nvidia_major_version} nvidia-utils-${nvidia_major_version} 2>/dev/null; then
-msg_ok "Installed Ubuntu NVIDIA packages (${nvidia_major_version})"
-else
-msg_warn "NVIDIA driver installation failed - please install manually: apt install nvidia-utils-${nvidia_major_version}"
-fi
-fi
+# Try version-matched install
+local nvidia_pkgs="libcuda1=${nvidia_host_version}* libnvcuvid1=${nvidia_host_version}* libnvidia-encode1=${nvidia_host_version}* libnvidia-ml1=${nvidia_host_version}*"
+if $STD apt -y install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
+msg_ok "Installed version-matched NVIDIA libraries"
 else
-msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x in CUDA repo"
-# Fallback to Ubuntu repo packages with versioned nvidia-utils
-msg_info "Trying Ubuntu native packages (nvidia-utils-${nvidia_major_version})"
-if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
-libnvidia-decode-${nvidia_major_version} libnvidia-encode-${nvidia_major_version} nvidia-utils-${nvidia_major_version} 2>/dev/null; then
-msg_ok "Installed Ubuntu NVIDIA packages (${nvidia_major_version})"
-else
-msg_warn "NVIDIA driver installation failed - please install manually: apt install nvidia-utils-${nvidia_major_version}"
-fi
+# Fallback to Ubuntu repo packages
+$STD apt -y install --no-install-recommends libnvidia-decode libnvidia-encode nvidia-utils 2>/dev/null || msg_warn "NVIDIA installation failed"
 fi
 fi
 
 # VA-API for hybrid setups (Intel + NVIDIA)
-$STD apt-get -y install va-driver-all vainfo 2>/dev/null || true
+$STD apt -y install va-driver-all vainfo 2>/dev/null || true
 
 msg_ok "NVIDIA GPU configured"
 }
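On either OS path, a quick post-install sanity check is possible once the container is up; library and host kernel-module versions have to match for nvidia-smi to talk to the driver. A hedged check, not part of the diff:

```bash
nvidia-smi --query-gpu=name,driver_version --format=csv,noheader \
  || echo "library/driver mismatch, or GPU passthrough not configured"
```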
@@ -3591,11 +3496,10 @@ IP_FILE="/run/local-ip.env"
 mkdir -p "$(dirname "$IP_FILE")"
 
 get_current_ip() {
+local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
 local ip
 
-# Try IPv4 targets first
-local ipv4_targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
-for target in "${ipv4_targets[@]}"; do
+for target in "${targets[@]}"; do
 if [[ "$target" == "default" ]]; then
 ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
 else
@@ -3607,23 +3511,6 @@ get_current_ip() {
 fi
 done
 
-# IPv6 fallback: Try direct interface lookup for eth0
-ip=$(ip -6 addr show eth0 scope global 2>/dev/null | awk '/inet6 / {print $2}' | cut -d/ -f1 | head -n1)
-if [[ -n "$ip" && "$ip" =~ : ]]; then
-echo "$ip"
-return 0
-fi
-
-# IPv6 fallback: Use routing table with IPv6 targets (Google DNS, Cloudflare DNS)
-local ipv6_targets=("2001:4860:4860::8888" "2606:4700:4700::1111")
-for target in "${ipv6_targets[@]}"; do
-ip=$(ip -6 route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
-if [[ -n "$ip" && "$ip" =~ : ]]; then
-echo "$ip"
-return 0
-fi
-done
-
 return 1
 }
 
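Both the kept IPv4 loop and the removed IPv6 fallback lean on the same kernel query: `ip route get` prints the route the kernel would use, including the source address after the `src` keyword. Isolated for illustration (the probe addresses are just the public DNS targets the script itself uses):

```bash
ip route get 1.1.1.1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}'
# IPv6 variant used by the removed fallback:
ip -6 route get 2001:4860:4860::8888 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}'
```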
@@ -3662,145 +3549,58 @@ EOF
 }
 
 # ------------------------------------------------------------------------------
-# Installs or updates MariaDB.
+# Installs or updates MariaDB from official repo.
 #
 # Description:
-# - Uses Debian/Ubuntu distribution packages by default (most reliable)
-# - Only uses official MariaDB repository when a specific version is requested
 # - Detects current MariaDB version and replaces it if necessary
 # - Preserves existing database data
+# - Dynamically determines latest GA version if "latest" is given
 #
 # Variables:
-# MARIADB_VERSION - MariaDB version to install (optional)
-# - Not set or "latest": Uses distribution packages (recommended)
-# - Specific version (e.g. "11.4", "12.2"): Uses MariaDB official repo
+# MARIADB_VERSION - MariaDB version to install (e.g. 10.11, latest) (default: latest)
 # ------------------------------------------------------------------------------
 
 setup_mariadb() {
 local MARIADB_VERSION="${MARIADB_VERSION:-latest}"
-local USE_DISTRO_PACKAGES=false
 
-# Ensure non-interactive mode for all apt operations
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-export NEEDRESTART_SUSPEND=1
+# Resolve "latest" to actual version
+if [[ "$MARIADB_VERSION" == "latest" ]]; then
+if ! curl -fsI --max-time 10 http://mirror.mariadb.org/repo/ >/dev/null 2>&1; then
+msg_warn "MariaDB mirror not reachable - trying mariadb_repo_setup fallback"
+# Try using official mariadb_repo_setup script as fallback
+if curl -fsSL --max-time 15 https://r.mariadb.com/downloads/mariadb_repo_setup 2>/dev/null | bash -s -- --skip-verify >/dev/null 2>&1; then
+msg_ok "MariaDB repository configured via mariadb_repo_setup"
+# Extract version from configured repo
+MARIADB_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+\.[0-9]+' /etc/apt/sources.list.d/mariadb.list 2>/dev/null | head -n1 || echo "12.2")
+else
+msg_warn "mariadb_repo_setup failed - using hardcoded fallback version"
+MARIADB_VERSION="12.2"
+fi
+else
+MARIADB_VERSION=$(curl -fsSL --max-time 15 http://mirror.mariadb.org/repo/ 2>/dev/null |
+grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
+grep -vE 'rc/|rolling/' |
+sed 's|/||' |
+sort -Vr |
+head -n1 || echo "")
 
-# Determine installation method:
-# - "latest" or empty: Use distribution packages (avoids mirror issues)
-# - Specific version: Use MariaDB official repository
-if [[ "$MARIADB_VERSION" == "latest" || -z "$MARIADB_VERSION" ]]; then
-USE_DISTRO_PACKAGES=true
-msg_info "Setup MariaDB (distribution packages)"
-else
-msg_info "Setup MariaDB $MARIADB_VERSION (official repository)"
+if [[ -z "$MARIADB_VERSION" ]]; then
+msg_warn "Could not parse latest GA MariaDB version from mirror - trying mariadb_repo_setup"
+if curl -fsSL --max-time 15 https://r.mariadb.com/downloads/mariadb_repo_setup 2>/dev/null | bash -s -- --skip-verify >/dev/null 2>&1; then
+msg_ok "MariaDB repository configured via mariadb_repo_setup"
+MARIADB_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+\.[0-9]+' /etc/apt/sources.list.d/mariadb.list 2>/dev/null | head -n1 || echo "12.2")
+else
+msg_warn "mariadb_repo_setup failed - using hardcoded fallback version"
+MARIADB_VERSION="12.2"
+fi
+fi
+fi
 fi
 
 # Get currently installed version
 local CURRENT_VERSION=""
 CURRENT_VERSION=$(is_tool_installed "mariadb" 2>/dev/null) || true
 
-# Pre-configure debconf to prevent any interactive prompts during install/upgrade
-debconf-set-selections <<EOF
-mariadb-server mariadb-server/feedback boolean false
-mariadb-server mariadb-server/root_password password
-mariadb-server mariadb-server/root_password_again password
-EOF
-
-# If specific version requested, also configure version-specific debconf
-if [[ "$USE_DISTRO_PACKAGES" == "false" ]]; then
-local MARIADB_MAJOR_MINOR
-MARIADB_MAJOR_MINOR=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
-if [[ -n "$MARIADB_MAJOR_MINOR" ]]; then
-debconf-set-selections <<EOF
-mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/feedback boolean false
-mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/root_password password
-mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/root_password_again password
-EOF
-fi
-fi
-
-# ============================================================================
-# DISTRIBUTION PACKAGES PATH (default, most reliable)
-# ============================================================================
-if [[ "$USE_DISTRO_PACKAGES" == "true" ]]; then
-# Check if MariaDB was previously installed from official repo
-local HAD_MARIADB_REPO=false
-if [[ -f /etc/apt/sources.list.d/mariadb.sources ]] || [[ -f /etc/apt/sources.list.d/mariadb.list ]]; then
-HAD_MARIADB_REPO=true
-msg_info "Removing MariaDB official repository (switching to distribution packages)"
-fi
-
-# Clean up any existing MariaDB repository files to avoid conflicts
-cleanup_old_repo_files "mariadb"
-
-# If we had a repo, we need to refresh APT cache
-if [[ "$HAD_MARIADB_REPO" == "true" ]]; then
-$STD apt update || msg_warn "APT update had issues, continuing..."
-fi
-
-# Ensure APT is working
-ensure_apt_working || return 1
-
-# Check if installed version is from official repo and higher than distro version
-# In this case, we keep the existing installation to avoid data issues
-if [[ -n "$CURRENT_VERSION" ]]; then
-# Get available distro version
-local DISTRO_VERSION=""
-DISTRO_VERSION=$(apt-cache policy mariadb-server 2>/dev/null | grep -E "Candidate:" | awk '{print $2}' | grep -oP '^\d+:\K\d+\.\d+\.\d+' || echo "")
-
-if [[ -n "$DISTRO_VERSION" ]]; then
-# Compare versions - if current is higher, keep it
-local CURRENT_MAJOR DISTRO_MAJOR
-CURRENT_MAJOR=$(echo "$CURRENT_VERSION" | awk -F. '{print $1}')
-DISTRO_MAJOR=$(echo "$DISTRO_VERSION" | awk -F. '{print $1}')
-
-if [[ "$CURRENT_MAJOR" -gt "$DISTRO_MAJOR" ]]; then
-msg_warn "MariaDB $CURRENT_VERSION is already installed (higher than distro $DISTRO_VERSION)"
-msg_warn "Keeping existing installation to preserve data integrity"
-msg_warn "To use distribution packages, manually remove MariaDB first"
-_setup_mariadb_runtime_dir
-cache_installed_version "mariadb" "$CURRENT_VERSION"
-msg_ok "Setup MariaDB $CURRENT_VERSION (existing installation kept)"
-return 0
-fi
-fi
-fi
-
-# Install or upgrade MariaDB from distribution packages
-if ! install_packages_with_retry "mariadb-server" "mariadb-client"; then
-msg_error "Failed to install MariaDB packages from distribution"
-return 1
-fi
-
-# Get installed version for caching
-local INSTALLED_VERSION=""
-INSTALLED_VERSION=$(mariadb --version 2>/dev/null | grep -oP '\d+\.\d+\.\d+' | head -n1 || echo "distro")
-
-# Configure runtime directory and finish
-_setup_mariadb_runtime_dir
-cache_installed_version "mariadb" "$INSTALLED_VERSION"
-msg_ok "Setup MariaDB $INSTALLED_VERSION (distribution packages)"
-return 0
-fi
-
-# ============================================================================
-# OFFICIAL REPOSITORY PATH (only when specific version requested)
-# ============================================================================
-
-# First, check if there's an old/broken repository that needs cleanup
-if [[ -f /etc/apt/sources.list.d/mariadb.sources ]] || [[ -f /etc/apt/sources.list.d/mariadb.list ]]; then
-local OLD_REPO_VERSION=""
-OLD_REPO_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+(\.[0-9]+)?' /etc/apt/sources.list.d/mariadb.sources 2>/dev/null || \
-grep -oP 'repo/\K[0-9]+\.[0-9]+(\.[0-9]+)?' /etc/apt/sources.list.d/mariadb.list 2>/dev/null || echo "")
-
-# Check if old repo points to a different version
-if [[ -n "$OLD_REPO_VERSION" ]] && [[ "${OLD_REPO_VERSION%.*}" != "${MARIADB_VERSION%.*}" ]]; then
-msg_info "Cleaning up old MariaDB repository (was: $OLD_REPO_VERSION, requested: $MARIADB_VERSION)"
-cleanup_old_repo_files "mariadb"
-$STD apt update || msg_warn "APT update had issues, continuing..."
-fi
-fi
-
 # Scenario 1: Already installed at target version - just update packages
 if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$MARIADB_VERSION" ]]; then
 msg_info "Update MariaDB $MARIADB_VERSION"
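Per the doc block on either side of this hunk, the function is driven entirely by the `MARIADB_VERSION` environment variable; a hedged usage sketch:

```bash
setup_mariadb                         # resolves and installs the default ("latest")
MARIADB_VERSION="11.4" setup_mariadb  # pins a specific series instead
```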
@@ -3839,7 +3639,9 @@ EOF
 remove_old_tool_version "mariadb"
 fi
 
-# Scenario 3: Fresh install or version change with specific version
+# Scenario 3: Fresh install or version change
+msg_info "Setup MariaDB $MARIADB_VERSION"
 
 # Prepare repository (cleanup + validation)
 prepare_repository_setup "mariadb" || {
 msg_error "Failed to prepare MariaDB repository"
@@ -3865,39 +3667,31 @@ EOF
 return 1
 }
 
+# Set debconf selections for all potential versions
+local MARIADB_MAJOR_MINOR
+MARIADB_MAJOR_MINOR=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
+if [[ -n "$MARIADB_MAJOR_MINOR" ]]; then
+echo "mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/feedback boolean false" | debconf-set-selections
+fi
 
 # Install packages with retry logic
+export DEBIAN_FRONTEND=noninteractive
 if ! install_packages_with_retry "mariadb-server" "mariadb-client"; then
-# Fallback: try distribution packages
-msg_warn "Failed to install MariaDB $MARIADB_VERSION from official repo, falling back to distribution packages..."
+# Fallback: try without specific version
+msg_warn "Failed to install MariaDB packages from upstream repo, trying distro fallback..."
 cleanup_old_repo_files "mariadb"
 $STD apt update || {
 msg_warn "APT update also failed, continuing with cache"
 }
-if install_packages_with_retry "mariadb-server" "mariadb-client"; then
-local FALLBACK_VERSION=""
-FALLBACK_VERSION=$(mariadb --version 2>/dev/null | grep -oP '\d+\.\d+\.\d+' | head -n1 || echo "distro")
-msg_warn "Installed MariaDB $FALLBACK_VERSION from distribution instead of requested $MARIADB_VERSION"
-_setup_mariadb_runtime_dir
-cache_installed_version "mariadb" "$FALLBACK_VERSION"
-msg_ok "Setup MariaDB $FALLBACK_VERSION (fallback to distribution packages)"
-return 0
-else
-msg_error "Failed to install MariaDB packages (both official repo and distribution)"
+install_packages_with_retry "mariadb-server" "mariadb-client" || {
+msg_error "Failed to install MariaDB packages (both upstream and distro)"
 return 1
-fi
+}
 fi
 
-_setup_mariadb_runtime_dir
-cache_installed_version "mariadb" "$MARIADB_VERSION"
-msg_ok "Setup MariaDB $MARIADB_VERSION"
-}
-
-# ------------------------------------------------------------------------------
-# Helper function: Configure MariaDB runtime directory persistence
-# ------------------------------------------------------------------------------
-_setup_mariadb_runtime_dir() {
 # Configure tmpfiles.d to ensure /run/mysqld directory is created on boot
 # This fixes the issue where MariaDB fails to start after container reboot
+msg_info "Configuring MariaDB runtime directory persistence"
 
 # Create tmpfiles.d configuration with error handling
 if ! printf '# Ensure /run/mysqld directory exists with correct permissions for MariaDB\nd /run/mysqld 0755 mysql mysql -\n' >/etc/tmpfiles.d/mariadb.conf; then
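The `debconf-set-selections` line added above, like the larger preseed block removed earlier in this file, follows one pattern: answer a package's debconf questions before apt ever asks them. Shown standalone (version string is illustrative; the verification command needs the `debconf-utils` package):

```bash
echo "mariadb-server-11.4 mariadb-server/feedback boolean false" | debconf-set-selections
debconf-get-selections | grep mariadb   # confirm the preseeded answer took effect
```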
@@ -3917,6 +3711,11 @@ _setup_mariadb_runtime_dir() {
 msg_warn "mysql user not found - directory created with correct permissions but ownership not set"
 fi
 fi
 
+msg_ok "Configured MariaDB runtime directory persistence"
+
+cache_installed_version "mariadb" "$MARIADB_VERSION"
+msg_ok "Setup MariaDB $MARIADB_VERSION"
 }
 
 # ------------------------------------------------------------------------------
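The tmpfiles.d entry this block writes can be exercised immediately instead of waiting for a reboot; a small sketch:

```bash
printf 'd /run/mysqld 0755 mysql mysql -\n' >/etc/tmpfiles.d/mariadb.conf
systemd-tmpfiles --create /etc/tmpfiles.d/mariadb.conf  # apply now, not only at boot
ls -ld /run/mysqld                                      # verify owner/mode
```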
@@ -4016,11 +3815,6 @@ function setup_mongodb() {
 DISTRO_ID=$(get_os_info id)
 DISTRO_CODENAME=$(get_os_info codename)
 
-# Ensure non-interactive mode for all apt operations
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-export NEEDRESTART_SUSPEND=1
-
 # Check AVX support
 if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
 local major="${MONGO_VERSION%%.*}"
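The same three-line preamble is removed from several setup functions in this diff (MongoDB, MySQL, PostgreSQL, ClickHouse). For reference, the pattern in isolation; the needrestart variables only matter on systems where that package is installed:

```bash
export DEBIAN_FRONTEND=noninteractive  # suppress debconf dialogs during installs
export NEEDRESTART_MODE=a              # auto-restart services instead of prompting
export NEEDRESTART_SUSPEND=1           # or suspend needrestart checks entirely
apt-get -y dist-upgrade                # example: any apt operation now runs unattended
```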
@@ -4139,11 +3933,6 @@ function setup_mysql() {
 DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
 DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
 
-# Ensure non-interactive mode for all apt operations
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-export NEEDRESTART_SUSPEND=1
-
 # Get currently installed version
 local CURRENT_VERSION=""
 CURRENT_VERSION=$(is_tool_installed "mysql" 2>/dev/null) || true
@@ -4238,6 +4027,7 @@ EOF
 ensure_apt_working || return 1
 
 # Try multiple package names with retry logic
+export DEBIAN_FRONTEND=noninteractive
 local mysql_install_success=false
 
 if apt-cache search "^mysql-server$" 2>/dev/null | grep -q . &&
@@ -4525,20 +4315,11 @@ EOF
 return 1
 }
 
-# Use different repository based on OS
-if [[ "$DISTRO_ID" == "ubuntu" ]]; then
-# Ubuntu: Use ondrej/php PPA
-msg_info "Adding ondrej/php PPA for Ubuntu"
-$STD apt install -y software-properties-common
-# Don't use $STD for add-apt-repository as it uses background processes
-add-apt-repository -y ppa:ondrej/php >>"$(get_active_logfile)" 2>&1
-else
-# Debian: Use Sury repository
-manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
-msg_error "Failed to setup PHP repository"
-return 1
-}
-fi
+manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
+msg_error "Failed to setup PHP repository"
+return 1
+}
 ensure_apt_working || return 1
 $STD apt update
 
@@ -4561,14 +4342,6 @@ EOF
 
 if [[ "$PHP_FPM" == "YES" ]]; then
 MODULE_LIST+=" php${PHP_VERSION}-fpm"
-# Create systemd override for PHP-FPM to fix runtime directory issues in LXC containers
-mkdir -p /etc/systemd/system/php${PHP_VERSION}-fpm.service.d/
-cat <<EOF >/etc/systemd/system/php${PHP_VERSION}-fpm.service.d/override.conf
-[Service]
-RuntimeDirectory=php
-RuntimeDirectoryMode=0755
-EOF
-$STD systemctl daemon-reload
 fi
 
 # install apache2 with PHP support if requested
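The override removed here is a standard systemd drop-in. Recreated standalone it looks like this, and `systemctl cat` shows whether a drop-in is currently applied (the PHP version in the unit name is illustrative):

```bash
mkdir -p /etc/systemd/system/php8.2-fpm.service.d/
cat <<'EOF' >/etc/systemd/system/php8.2-fpm.service.d/override.conf
[Service]
RuntimeDirectory=php
RuntimeDirectoryMode=0755
EOF
systemctl daemon-reload
systemctl cat php8.2-fpm   # prints the unit plus any active drop-ins
```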
@@ -4693,11 +4466,6 @@ function setup_postgresql() {
 DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
 DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
 
-# Ensure non-interactive mode for all apt operations
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-export NEEDRESTART_SUSPEND=1
-
 # Get currently installed version
 local CURRENT_PG_VERSION=""
 if command -v psql >/dev/null; then
@@ -5136,146 +4904,6 @@ function setup_ruby() {
 msg_ok "Setup Ruby $RUBY_VERSION"
 }
 
-# ------------------------------------------------------------------------------
-# Installs or updates MeiliSearch search engine.
-#
-# Description:
-# - Fresh install: Downloads binary, creates config/service, starts
-# - Update: Checks for new release, updates binary if available
-# - Waits for service to be ready before returning
-# - Exports API keys for use by caller
-#
-# Variables:
-# MEILISEARCH_BIND - Bind address (default: 127.0.0.1:7700)
-# MEILISEARCH_ENV - Environment: production/development (default: production)
-# MEILISEARCH_DB_PATH - Database path (default: /var/lib/meilisearch/data)
-#
-# Exports:
-# MEILISEARCH_MASTER_KEY - The master key for admin access
-# MEILISEARCH_API_KEY - The default search API key
-# MEILISEARCH_API_KEY_UID - The UID of the default API key
-#
-# Example (install script):
-# setup_meilisearch
-#
-# Example (CT update_script):
-# setup_meilisearch
-# ------------------------------------------------------------------------------
-
-function setup_meilisearch() {
-local MEILISEARCH_BIND="${MEILISEARCH_BIND:-127.0.0.1:7700}"
-local MEILISEARCH_ENV="${MEILISEARCH_ENV:-production}"
-local MEILISEARCH_DB_PATH="${MEILISEARCH_DB_PATH:-/var/lib/meilisearch/data}"
-local MEILISEARCH_DUMP_DIR="${MEILISEARCH_DUMP_DIR:-/var/lib/meilisearch/dumps}"
-local MEILISEARCH_SNAPSHOT_DIR="${MEILISEARCH_SNAPSHOT_DIR:-/var/lib/meilisearch/snapshots}"
-
-# Get bind address for health checks
-local MEILISEARCH_HOST="${MEILISEARCH_BIND%%:*}"
-local MEILISEARCH_PORT="${MEILISEARCH_BIND##*:}"
-[[ "$MEILISEARCH_HOST" == "0.0.0.0" ]] && MEILISEARCH_HOST="127.0.0.1"
-
-# Update mode: MeiliSearch already installed
-if [[ -f /usr/bin/meilisearch ]]; then
-if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
-msg_info "Updating MeiliSearch"
-systemctl stop meilisearch
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
-systemctl start meilisearch
-msg_ok "Updated MeiliSearch"
-fi
-return 0
-fi
-
-# Fresh install
-msg_info "Setup MeiliSearch"
-
-# Install binary
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary" || {
-msg_error "Failed to install MeiliSearch binary"
-return 1
-}
-
-# Download default config
-curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml || {
-msg_error "Failed to download MeiliSearch config"
-return 1
-}
-
-# Generate master key
-MEILISEARCH_MASTER_KEY=$(openssl rand -base64 12)
-export MEILISEARCH_MASTER_KEY
-
-# Configure
-sed -i \
--e "s|^env =.*|env = \"${MEILISEARCH_ENV}\"|" \
--e "s|^# master_key =.*|master_key = \"${MEILISEARCH_MASTER_KEY}\"|" \
--e "s|^db_path =.*|db_path = \"${MEILISEARCH_DB_PATH}\"|" \
--e "s|^dump_dir =.*|dump_dir = \"${MEILISEARCH_DUMP_DIR}\"|" \
--e "s|^snapshot_dir =.*|snapshot_dir = \"${MEILISEARCH_SNAPSHOT_DIR}\"|" \
--e 's|^# no_analytics = true|no_analytics = true|' \
--e "s|^http_addr =.*|http_addr = \"${MEILISEARCH_BIND}\"|" \
-/etc/meilisearch.toml
-
-# Create data directories
-mkdir -p "${MEILISEARCH_DB_PATH}" "${MEILISEARCH_DUMP_DIR}" "${MEILISEARCH_SNAPSHOT_DIR}"
-
-# Create systemd service
-cat <<EOF >/etc/systemd/system/meilisearch.service
-[Unit]
-Description=Meilisearch
-After=network.target
-
-[Service]
-ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-
-# Enable and start service
-systemctl daemon-reload
-systemctl enable -q --now meilisearch
-
-# Wait for MeiliSearch to be ready (up to 30 seconds)
-for i in {1..30}; do
-if curl -s -o /dev/null -w "%{http_code}" "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/health" 2>/dev/null | grep -q "200"; then
-break
-fi
-sleep 1
-done
-
-# Verify service is running
-if ! systemctl is-active --quiet meilisearch; then
-msg_error "MeiliSearch service failed to start"
-return 1
-fi
-
-# Get API keys with retry logic
-MEILISEARCH_API_KEY=""
-for i in {1..10}; do
-MEILISEARCH_API_KEY=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
--H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
-grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//') || true
-[[ -n "$MEILISEARCH_API_KEY" ]] && break
-sleep 2
-done
-
-MEILISEARCH_API_KEY_UID=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
--H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
-grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//') || true
-
-export MEILISEARCH_API_KEY
-export MEILISEARCH_API_KEY_UID
-
-# Cache version
-local MEILISEARCH_VERSION
-MEILISEARCH_VERSION=$(/usr/bin/meilisearch --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) || true
-cache_installed_version "meilisearch" "${MEILISEARCH_VERSION:-unknown}"
-
-msg_ok "Setup MeiliSearch ${MEILISEARCH_VERSION:-}"
-}
-
 # ------------------------------------------------------------------------------
 # Installs or upgrades ClickHouse database server.
 #
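For reference, a hedged usage sketch of the removed helper, based only on its own doc block (the bind address is illustrative):

```bash
MEILISEARCH_BIND="0.0.0.0:7700" setup_meilisearch
echo "master key: ${MEILISEARCH_MASTER_KEY}"   # exported by the function
curl -s -H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" http://127.0.0.1:7700/keys
```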
@@ -5295,11 +4923,6 @@ function setup_clickhouse() {
 DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
 DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
 
-# Ensure non-interactive mode for all apt operations
-export DEBIAN_FRONTEND=noninteractive
-export NEEDRESTART_MODE=a
-export NEEDRESTART_SUSPEND=1
-
 # Resolve "latest" version
 if [[ "$CLICKHOUSE_VERSION" == "latest" ]]; then
 CLICKHOUSE_VERSION=$(curl -fsSL --max-time 15 https://packages.clickhouse.com/tgz/stable/ 2>/dev/null |
@@ -5362,6 +4985,7 @@ function setup_clickhouse() {
 "main"
 
 # Install packages with retry logic
+export DEBIAN_FRONTEND=noninteractive
 $STD apt update || {
 msg_error "APT update failed for ClickHouse repository"
 return 1
@@ -6004,4 +5628,4 @@ EOF
 fi
 
 msg_ok "Docker setup completed"
 }
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 SCRIPT_DIR="$(dirname "$0")"
 source "$SCRIPT_DIR/../core/build.func"
-# Copyright (c) 2021-2026 tteck
+# Copyright (c) 2021-2025 tteck
 # Author: tteck (tteckster)
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://www.debian.org/
@@ -40,5 +40,5 @@ start
 build_container
 description
 
-msg_ok "Completed successfully!\n"
+msg_ok "Completed Successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-# Copyright (c) 2021-2026 tteck
+# Copyright (c) 2021-2025 tteck
 # Author: tteck (tteckster)
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://www.debian.org/
server.js
@@ -3,7 +3,6 @@ import { parse } from 'url';
 import next from 'next';
 import { WebSocketServer } from 'ws';
 import { spawn } from 'child_process';
-import { existsSync } from 'fs';
 import { join, resolve } from 'path';
 import stripAnsi from 'strip-ansi';
 import { spawn as ptySpawn } from 'node-pty';
@@ -57,8 +56,6 @@ const handle = app.getRequestHandler();
 * @property {string} user
 * @property {string} password
 * @property {number} [id]
-* @property {string} [auth_type]
-* @property {string} [ssh_key_path]
 */
 
 /**
@@ -298,20 +295,6 @@ class ScriptExecutionHandler {
 });
 }
 
-/**
-* Resolve full server from DB when client sends server with id but no ssh_key_path (e.g. for Shell/Update over SSH).
-* @param {ServerInfo|null} server - Server from WebSocket message
-* @returns {Promise<ServerInfo|null>} Same server or full server from DB
-*/
-async resolveServerForSSH(server) {
-if (!server?.id) return server;
-if (server.auth_type === 'key' && (!server.ssh_key_path || !existsSync(server.ssh_key_path))) {
-const full = await this.db.getServerById(server.id);
-return /** @type {ServerInfo|null} */ (full ?? server);
-}
-return server;
-}
-
 /**
 * @param {ExtendedWebSocket} ws
 * @param {WebSocketMessage} message
@@ -322,21 +305,16 @@ class ScriptExecutionHandler {
 switch (action) {
 case 'start':
 if (scriptPath && executionId) {
-let serverToUse = server;
-if (serverToUse?.id) {
-serverToUse = await this.resolveServerForSSH(serverToUse) ?? serverToUse;
-}
-const resolved = serverToUse ?? server;
 if (isClone && containerId && storage && server && cloneCount && hostnames && containerType) {
-await this.startSSHCloneExecution(ws, containerId, executionId, storage, /** @type {ServerInfo} */ (resolved), containerType, cloneCount, hostnames);
+await this.startSSHCloneExecution(ws, containerId, executionId, storage, server, containerType, cloneCount, hostnames);
 } else if (isBackup && containerId && storage) {
-await this.startBackupExecution(ws, containerId, executionId, storage, mode, resolved);
+await this.startBackupExecution(ws, containerId, executionId, storage, mode, server);
 } else if (isUpdate && containerId) {
-await this.startUpdateExecution(ws, containerId, executionId, mode, resolved, backupStorage);
+await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage);
 } else if (isShell && containerId) {
-await this.startShellExecution(ws, containerId, executionId, mode, resolved, containerType);
+await this.startShellExecution(ws, containerId, executionId, mode, server);
 } else {
-await this.startScriptExecution(ws, scriptPath, executionId, mode, resolved, envVars);
+await this.startScriptExecution(ws, scriptPath, executionId, mode, server, envVars);
 }
 } else {
 this.sendMessage(ws, {
@@ -1175,11 +1153,10 @@ class ScriptExecutionHandler {
 const hostname = hostnames[i];
 
 try {
-// Read config file to get hostname/name (node-specific path)
-const nodeName = server.name;
+// Read config file to get hostname/name
 const configPath = containerType === 'lxc'
-? `/etc/pve/nodes/${nodeName}/lxc/${nextId}.conf`
-: `/etc/pve/nodes/${nodeName}/qemu-server/${nextId}.conf`;
+? `/etc/pve/lxc/${nextId}.conf`
+: `/etc/pve/qemu-server/${nextId}.conf`;
 
 let configContent = '';
 await new Promise(/** @type {(resolve: (value?: void) => void) => void} */ ((resolve) => {
@@ -1497,21 +1474,21 @@ class ScriptExecutionHandler {
 * @param {string} executionId
 * @param {string} mode
 * @param {ServerInfo|null} server
-* @param {'lxc'|'vm'} [containerType='lxc']
 */
-async startShellExecution(ws, containerId, executionId, mode = 'local', server = null, containerType = 'lxc') {
+async startShellExecution(ws, containerId, executionId, mode = 'local', server = null) {
 try {
-const typeLabel = containerType === 'vm' ? 'VM' : 'container';
+// Send start message
 this.sendMessage(ws, {
 type: 'start',
-data: `Starting shell session for ${typeLabel} ${containerId}...`,
+data: `Starting shell session for container ${containerId}...`,
 timestamp: Date.now()
 });
 
 if (mode === 'ssh' && server) {
-await this.startSSHShellExecution(ws, containerId, executionId, server, containerType);
+await this.startSSHShellExecution(ws, containerId, executionId, server);
 } else {
-await this.startLocalShellExecution(ws, containerId, executionId, containerType);
+await this.startLocalShellExecution(ws, containerId, executionId);
 }
 
 } catch (error) {
@@ -1528,12 +1505,12 @@ class ScriptExecutionHandler {
 * @param {ExtendedWebSocket} ws
 * @param {string} containerId
 * @param {string} executionId
-* @param {'lxc'|'vm'} [containerType='lxc']
 */
-async startLocalShellExecution(ws, containerId, executionId, containerType = 'lxc') {
+async startLocalShellExecution(ws, containerId, executionId) {
 const { spawn } = await import('node-pty');
-const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
-const childProcess = spawn('bash', ['-c', shellCommand], {
+// Create a shell process that will run pct enter
+const childProcess = spawn('bash', ['-c', `pct enter ${containerId}`], {
 name: 'xterm-color',
 cols: 80,
 rows: 24,
@@ -1576,15 +1553,14 @@ class ScriptExecutionHandler {
 * @param {string} containerId
 * @param {string} executionId
 * @param {ServerInfo} server
-* @param {'lxc'|'vm'} [containerType='lxc']
 */
-async startSSHShellExecution(ws, containerId, executionId, server, containerType = 'lxc') {
+async startSSHShellExecution(ws, containerId, executionId, server) {
 const sshService = getSSHExecutionService();
-const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
 try {
 const execution = await sshService.executeCommand(
 server,
-shellCommand,
+`pct enter ${containerId}`,
 /** @param {string} data */
 (data) => {
 this.sendMessage(ws, {
@@ -58,11 +58,6 @@ export function ConfigurationModal({
 // Advanced mode state
 const [advancedVars, setAdvancedVars] = useState<EnvVars>({});
 
-// Discovered SSH keys on the Proxmox host (advanced mode only)
-const [discoveredSshKeys, setDiscoveredSshKeys] = useState<string[]>([]);
-const [discoveredSshKeysLoading, setDiscoveredSshKeysLoading] = useState(false);
-const [discoveredSshKeysError, setDiscoveredSshKeysError] = useState<string | null>(null);
-
 // Validation errors
 const [errors, setErrors] = useState<Record<string, string>>({});
 
@@ -109,7 +104,6 @@ export function ConfigurationModal({
 var_mknod: 0,
 var_mount_fs: '',
 var_protection: 'no',
-var_tun: 'no',
 
 // System
 var_timezone: '',
@@ -125,38 +119,6 @@ export function ConfigurationModal({
 }
 }, [actualScript, server, mode, resources, slug]);
 
-// Discover SSH keys on the Proxmox host when advanced mode is open
-useEffect(() => {
-if (!server?.id || !isOpen || mode !== 'advanced') {
-setDiscoveredSshKeys([]);
-setDiscoveredSshKeysError(null);
-return;
-}
-let cancelled = false;
-setDiscoveredSshKeysLoading(true);
-setDiscoveredSshKeysError(null);
-fetch(`/api/servers/${server.id}/discover-ssh-keys`)
-.then((res) => {
-if (!res.ok) throw new Error(res.status === 404 ? 'Server not found' : res.statusText);
-return res.json();
-})
-.then((data: { keys?: string[] }) => {
-if (!cancelled && Array.isArray(data.keys)) setDiscoveredSshKeys(data.keys);
-})
-.catch((err) => {
-if (!cancelled) {
-setDiscoveredSshKeys([]);
-setDiscoveredSshKeysError(err instanceof Error ? err.message : 'Could not detect keys');
-}
-})
-.finally(() => {
-if (!cancelled) setDiscoveredSshKeysLoading(false);
-});
-return () => {
-cancelled = true;
-};
-}, [server?.id, isOpen, mode]);
-
 // Validation functions
 const validateIPv4 = (ip: string): boolean => {
 if (!ip) return true; // Empty is allowed (auto)
@@ -199,17 +161,6 @@ export function ConfigurationModal({
|
|||||||
return !isNaN(num) && num > 0;
|
return !isNaN(num) && num > 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
const validateHostname = (hostname: string): boolean => {
|
|
||||||
if (!hostname || hostname.length > 253) return false;
|
|
||||||
const label = /^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/;
|
|
||||||
const labels = hostname.split('.');
|
|
||||||
return labels.length >= 1 && labels.every(l => l.length >= 1 && l.length <= 63 && label.test(l));
|
|
||||||
};
|
|
||||||
|
|
||||||
const validateAptCacherAddress = (value: string): boolean => {
|
|
||||||
return validateIPv4(value) || validateHostname(value);
|
|
||||||
};
|
|
||||||
|
|
||||||
const validateForm = (): boolean => {
|
const validateForm = (): boolean => {
|
||||||
const newErrors: Record<string, string> = {};
|
const newErrors: Record<string, string> = {};
|
||||||
|
|
||||||
@@ -227,8 +178,8 @@ export function ConfigurationModal({
|
|||||||
if (advancedVars.var_ns && !validateIPv4(advancedVars.var_ns as string)) {
|
if (advancedVars.var_ns && !validateIPv4(advancedVars.var_ns as string)) {
|
||||||
newErrors.var_ns = 'Invalid IPv4 address';
|
newErrors.var_ns = 'Invalid IPv4 address';
|
||||||
}
|
}
|
||||||
if (advancedVars.var_apt_cacher_ip && !validateAptCacherAddress(advancedVars.var_apt_cacher_ip as string)) {
|
if (advancedVars.var_apt_cacher_ip && !validateIPv4(advancedVars.var_apt_cacher_ip as string)) {
|
||||||
newErrors.var_apt_cacher_ip = 'Invalid IPv4 address or hostname';
|
newErrors.var_apt_cacher_ip = 'Invalid IPv4 address';
|
||||||
}
|
}
|
||||||
// Validate IPv4 CIDR if network mode is static
|
// Validate IPv4 CIDR if network mode is static
|
||||||
const netValue = advancedVars.var_net;
|
const netValue = advancedVars.var_net;
|
||||||
@@ -324,16 +275,6 @@ export function ConfigurationModal({
|
|||||||
if ((hasPassword || hasSSHKey) && envVars.var_ssh !== 'no') {
|
if ((hasPassword || hasSSHKey) && envVars.var_ssh !== 'no') {
|
||||||
envVars.var_ssh = 'yes';
|
envVars.var_ssh = 'yes';
|
||||||
}
|
}
|
||||||
|
|
||||||
// Normalize var_tags: accept both comma and semicolon, output comma-separated
|
|
||||||
const rawTags = envVars.var_tags;
|
|
||||||
if (typeof rawTags === 'string' && rawTags.trim() !== '') {
|
|
||||||
envVars.var_tags = rawTags
|
|
||||||
.split(/[,;]/)
|
|
||||||
.map((s) => s.trim())
|
|
||||||
.filter(Boolean)
|
|
||||||
.join(',');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove empty string values (but keep 0, false, etc.)
|
// Remove empty string values (but keep 0, false, etc.)
|
||||||
@@ -703,13 +644,13 @@ export function ConfigurationModal({
|
|||||||
</div>
|
</div>
|
||||||
<div className="col-span-2">
|
<div className="col-span-2">
|
||||||
<label className="block text-sm font-medium text-foreground mb-2">
|
<label className="block text-sm font-medium text-foreground mb-2">
|
||||||
Tags (comma or semicolon separated)
|
Tags (comma-separated)
|
||||||
</label>
|
</label>
|
||||||
<Input
|
<Input
|
||||||
type="text"
|
type="text"
|
||||||
value={typeof advancedVars.var_tags === 'boolean' ? '' : String(advancedVars.var_tags ?? '')}
|
value={typeof advancedVars.var_tags === 'boolean' ? '' : String(advancedVars.var_tags ?? '')}
|
||||||
onChange={(e) => updateAdvancedVar('var_tags', e.target.value)}
|
onChange={(e) => updateAdvancedVar('var_tags', e.target.value)}
|
||||||
placeholder="e.g. tag1; tag2"
|
placeholder="community-script"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -736,40 +677,11 @@ export function ConfigurationModal({
|
|||||||
<label className="block text-sm font-medium text-foreground mb-2">
|
<label className="block text-sm font-medium text-foreground mb-2">
|
||||||
SSH Authorized Key
|
SSH Authorized Key
|
||||||
</label>
|
</label>
|
||||||
{discoveredSshKeysLoading && (
|
|
||||||
<p className="text-sm text-muted-foreground mb-2">Detecting SSH keys...</p>
|
|
||||||
)}
|
|
||||||
{discoveredSshKeysError && !discoveredSshKeysLoading && (
|
|
||||||
<p className="text-sm text-muted-foreground mb-2">Could not detect keys on host</p>
|
|
||||||
)}
|
|
||||||
{discoveredSshKeys.length > 0 && !discoveredSshKeysLoading && (
|
|
||||||
<div className="mb-2">
|
|
||||||
<label htmlFor="discover-ssh-key" className="sr-only">Use detected key</label>
|
|
||||||
<select
|
|
||||||
id="discover-ssh-key"
|
|
||||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm text-foreground focus:ring-2 focus:ring-ring focus:outline-none mb-2"
|
|
||||||
value=""
|
|
||||||
onChange={(e) => {
|
|
||||||
const idx = e.target.value;
|
|
||||||
if (idx === '') return;
|
|
||||||
const key = discoveredSshKeys[Number(idx)];
|
|
||||||
if (key) updateAdvancedVar('var_ssh_authorized_key', key);
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<option value="">— Select or paste below —</option>
|
|
||||||
{discoveredSshKeys.map((key, i) => (
|
|
||||||
<option key={i} value={i}>
|
|
||||||
{key.length > 44 ? `${key.slice(0, 44)}...` : key}
|
|
||||||
</option>
|
|
||||||
))}
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
<Input
|
<Input
|
||||||
type="text"
|
type="text"
|
||||||
value={typeof advancedVars.var_ssh_authorized_key === 'boolean' ? '' : String(advancedVars.var_ssh_authorized_key ?? '')}
|
value={typeof advancedVars.var_ssh_authorized_key === 'boolean' ? '' : String(advancedVars.var_ssh_authorized_key ?? '')}
|
||||||
onChange={(e) => updateAdvancedVar('var_ssh_authorized_key', e.target.value)}
|
onChange={(e) => updateAdvancedVar('var_ssh_authorized_key', e.target.value)}
|
||||||
placeholder="Or paste a public key: ssh-rsa AAAA..."
|
placeholder="ssh-rsa AAAA..."
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -818,20 +730,6 @@ export function ConfigurationModal({
|
|||||||
<option value={1}>Enabled</option>
|
<option value={1}>Enabled</option>
|
||||||
</select>
|
</select>
|
||||||
</div>
|
</div>
|
||||||
<div>
|
|
||||||
<label className="block text-sm font-medium text-foreground mb-2">
|
|
||||||
TUN/TAP (VPN)
|
|
||||||
</label>
|
|
||||||
<select
|
|
||||||
value={typeof advancedVars.var_tun === 'boolean' ? (advancedVars.var_tun ? 'yes' : 'no') : String(advancedVars.var_tun ?? 'no')}
|
|
||||||
onChange={(e) => updateAdvancedVar('var_tun', e.target.value)}
|
|
||||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm text-foreground focus:ring-2 focus:ring-ring focus:outline-none"
|
|
||||||
>
|
|
||||||
<option value="no">No</option>
|
|
||||||
<option value="yes">Yes</option>
|
|
||||||
</select>
|
|
||||||
<p className="text-xs text-muted-foreground mt-1">For Tailscale, WireGuard, OpenVPN</p>
|
|
||||||
</div>
|
|
||||||
<div>
|
<div>
|
||||||
<label className="block text-sm font-medium text-foreground mb-2">
|
<label className="block text-sm font-medium text-foreground mb-2">
|
||||||
Mknod
|
Mknod
|
||||||
@@ -915,13 +813,13 @@ export function ConfigurationModal({
|
|||||||
</div>
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<label className="block text-sm font-medium text-foreground mb-2">
|
<label className="block text-sm font-medium text-foreground mb-2">
|
||||||
APT Cacher host or IP
|
APT Cacher IP
|
||||||
</label>
|
</label>
|
||||||
<Input
|
<Input
|
||||||
type="text"
|
type="text"
|
||||||
value={typeof advancedVars.var_apt_cacher_ip === 'boolean' ? '' : String(advancedVars.var_apt_cacher_ip ?? '')}
|
value={typeof advancedVars.var_apt_cacher_ip === 'boolean' ? '' : String(advancedVars.var_apt_cacher_ip ?? '')}
|
||||||
onChange={(e) => updateAdvancedVar('var_apt_cacher_ip', e.target.value)}
|
onChange={(e) => updateAdvancedVar('var_apt_cacher_ip', e.target.value)}
|
||||||
placeholder="192.168.1.10 or apt-cacher.internal"
|
placeholder="192.168.1.10"
|
||||||
className={errors.var_apt_cacher_ip ? 'border-destructive' : ''}
|
className={errors.var_apt_cacher_ip ? 'border-destructive' : ''}
|
||||||
/>
|
/>
|
||||||
{errors.var_apt_cacher_ip && (
|
{errors.var_apt_cacher_ip && (
|
||||||
|
|||||||
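
The dropped tag normalization is a plain split/trim/join; a standalone sketch of the same logic (the function name is illustrative):

    // Accept comma- or semicolon-separated tags and emit a clean
    // comma-separated list, e.g. 'tag1; tag2,' -> 'tag1,tag2'.
    function normalizeTags(rawTags: string): string {
      return rawTags
        .split(/[,;]/)
        .map((s) => s.trim())
        .filter(Boolean)
        .join(',');
    }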
@@ -8,9 +8,7 @@ import { ScriptDetailModal } from "./ScriptDetailModal";
 import { CategorySidebar } from "./CategorySidebar";
 import { FilterBar, type FilterState } from "./FilterBar";
 import { ViewToggle } from "./ViewToggle";
-import { ConfirmationModal } from "./ConfirmationModal";
 import { Button } from "./ui/button";
-import { RefreshCw } from "lucide-react";
 import type { ScriptCard as ScriptCardType } from "~/types/script";
 import type { Server } from "~/types/server";
 import { getDefaultFilters, mergeFiltersWithDefaults } from "./filterUtils";
@@ -34,15 +32,8 @@ export function DownloadedScriptsTab({
   const [filters, setFilters] = useState<FilterState>(getDefaultFilters());
   const [saveFiltersEnabled, setSaveFiltersEnabled] = useState(false);
   const [isLoadingFilters, setIsLoadingFilters] = useState(true);
-  const [updateAllConfirmOpen, setUpdateAllConfirmOpen] = useState(false);
-  const [updateResult, setUpdateResult] = useState<{
-    successCount: number;
-    failCount: number;
-    failed: { slug: string; error: string }[];
-  } | null>(null);
   const gridRef = useRef<HTMLDivElement>(null);

-  const utils = api.useUtils();
   const {
     data: scriptCardsData,
     isLoading: githubLoading,
@@ -59,30 +50,6 @@ export function DownloadedScriptsTab({
     { enabled: !!selectedSlug },
   );

-  const loadMultipleScriptsMutation = api.scripts.loadMultipleScripts.useMutation({
-    onSuccess: (data) => {
-      void utils.scripts.getAllDownloadedScripts.invalidate();
-      void utils.scripts.getScriptCardsWithCategories.invalidate();
-      setUpdateResult({
-        successCount: data.successful?.length ?? 0,
-        failCount: data.failed?.length ?? 0,
-        failed: (data.failed ?? []).map((f) => ({
-          slug: f.slug,
-          error: f.error ?? "Unknown error",
-        })),
-      });
-      setTimeout(() => setUpdateResult(null), 8000);
-    },
-    onError: (error) => {
-      setUpdateResult({
-        successCount: 0,
-        failCount: 1,
-        failed: [{ slug: "Request failed", error: error.message }],
-      });
-      setTimeout(() => setUpdateResult(null), 8000);
-    },
-  });
-
   // Load SAVE_FILTER setting, saved filters, and view mode on component mount
   useEffect(() => {
     const loadSettings = async () => {
@@ -449,21 +416,6 @@ export function DownloadedScriptsTab({
     setSelectedSlug(null);
   };

-  const handleUpdateAllClick = () => {
-    setUpdateResult(null);
-    setUpdateAllConfirmOpen(true);
-  };
-
-  const handleUpdateAllConfirm = () => {
-    setUpdateAllConfirmOpen(false);
-    const slugs = downloadedScripts
-      .map((s) => s.slug)
-      .filter((slug): slug is string => Boolean(slug));
-    if (slugs.length > 0) {
-      loadMultipleScriptsMutation.mutate({ slugs });
-    }
-  };
-
   if (githubLoading || localLoading) {
     return (
       <div className="flex items-center justify-center py-12">
@@ -556,43 +508,6 @@ export function DownloadedScriptsTab({

       {/* Main Content */}
       <div className="order-1 min-w-0 flex-1 lg:order-2" ref={gridRef}>
-        {/* Update all downloaded scripts */}
-        <div className="mb-4 flex flex-wrap items-center gap-3">
-          <Button
-            onClick={handleUpdateAllClick}
-            disabled={loadMultipleScriptsMutation.isPending}
-            variant="secondary"
-            size="default"
-            className="flex items-center gap-2"
-          >
-            {loadMultipleScriptsMutation.isPending ? (
-              <>
-                <RefreshCw className="h-4 w-4 animate-spin" />
-                <span>Updating...</span>
-              </>
-            ) : (
-              <>
-                <RefreshCw className="h-4 w-4" />
-                <span>Update all downloaded scripts</span>
-              </>
-            )}
-          </Button>
-          {updateResult && (
-            <span className="text-muted-foreground text-sm">
-              Updated {updateResult.successCount} successfully
-              {updateResult.failCount > 0
-                ? `, ${updateResult.failCount} failed`
-                : ""}
-              .
-              {updateResult.failCount > 0 && updateResult.failed.length > 0 && (
-                <span className="ml-1" title={updateResult.failed.map((f) => `${f.slug}: ${f.error}`).join("\n")}>
-                  (hover for details)
-                </span>
-              )}
-            </span>
-          )}
-        </div>
-
         {/* Enhanced Filter Bar */}
         <FilterBar
           filters={filters}
@@ -706,17 +621,6 @@ export function DownloadedScriptsTab({
         onClose={handleCloseModal}
         onInstallScript={onInstallScript}
       />
-
-      <ConfirmationModal
-        isOpen={updateAllConfirmOpen}
-        onClose={() => setUpdateAllConfirmOpen(false)}
-        onConfirm={handleUpdateAllConfirm}
-        title="Update all downloaded scripts"
-        message={`Update all ${downloadedScripts.length} downloaded scripts? This may take several minutes.`}
-        variant="simple"
-        confirmButtonText="Update all"
-        cancelButtonText="Cancel"
-      />
     </div>
   </div>
 </div>
@@ -1617,7 +1617,7 @@ export function GeneralSettingsModal({
                   <Input
                     id="new-repo-url"
                     type="url"
-                    placeholder="https://github.com/owner/repo or https://git.example.com/owner/repo"
+                    placeholder="https://github.com/owner/repo"
                     value={newRepoUrl}
                     onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
                       setNewRepoUrl(e.target.value)
@@ -1626,9 +1626,8 @@ export function GeneralSettingsModal({
                     className="w-full"
                   />
                   <p className="text-muted-foreground mt-1 text-xs">
-                    Supported: GitHub, GitLab, Bitbucket, or custom Git
-                    servers (e.g. https://github.com/owner/repo,
-                    https://gitlab.com/owner/repo)
+                    Enter a GitHub repository URL (e.g.,
+                    https://github.com/owner/repo)
                   </p>
                 </div>
                 <div className="border-border flex items-center justify-between gap-3 rounded-lg border p-3">
@@ -80,7 +80,6 @@ export function InstalledScriptsTab() {
     id: number;
     containerId: string;
     server?: any;
-    containerType?: 'lxc' | 'vm';
   } | null>(null);
   const [showBackupPrompt, setShowBackupPrompt] = useState(false);
   const [showStorageSelection, setShowStorageSelection] = useState(false);
@@ -1168,7 +1167,6 @@ export function InstalledScriptsTab() {
       id: script.id,
       containerId: script.container_id,
       server: server,
-      containerType: script.is_vm ? 'vm' : 'lxc',
     });
   };

@@ -1454,13 +1452,6 @@ export function InstalledScriptsTab() {
       {/* Shell Terminal */}
       {openingShell && (
         <div className="mb-8" data-terminal="shell">
-          {openingShell.containerType === 'vm' && (
-            <p className="text-muted-foreground mb-2 text-sm">
-              VM shell uses the Proxmox serial console. The VM must have a
-              serial port configured (e.g. <code className="bg-muted rounded px-1">qm set {openingShell.containerId} -serial0 socket</code>).
-              Detach with <kbd className="bg-muted rounded px-1">Ctrl+O</kbd>.
-            </p>
-          )}
           <Terminal
             scriptPath={`shell-${openingShell.containerId}`}
             onClose={handleCloseShellTerminal}
@@ -1468,7 +1459,6 @@ export function InstalledScriptsTab() {
             server={openingShell.server}
             isShell={true}
             containerId={openingShell.containerId}
-            containerType={openingShell.containerType}
           />
         </div>
       )}
@@ -1548,7 +1538,7 @@ export function InstalledScriptsTab() {
         >
           {showAutoDetectForm
             ? "Cancel Auto-Detect"
-            : '🔍 Auto-Detect Containers & VMs (tag: community-script)'}
+            : '🔍 Auto-Detect LXC Containers (Must contain a tag with "community-script")'}
         </Button>
         <Button
           onClick={() => {
@@ -1774,11 +1764,12 @@ export function InstalledScriptsTab() {
         </div>
       )}

-      {/* Auto-Detect Containers & VMs Form */}
+      {/* Auto-Detect LXC Containers Form */}
       {showAutoDetectForm && (
         <div className="bg-card border-border mb-6 rounded-lg border p-4 shadow-sm sm:p-6">
           <h3 className="text-foreground mb-4 text-lg font-semibold sm:mb-6">
-            Auto-Detect Containers & VMs (tag: community-script)
+            Auto-Detect LXC Containers (Must contain a tag with
+            "community-script")
           </h3>
           <div className="space-y-4 sm:space-y-6">
             <div className="bg-muted/30 border-muted rounded-lg border p-4">
@@ -1804,12 +1795,12 @@ export function InstalledScriptsTab() {
               <p>This feature will:</p>
               <ul className="mt-1 list-inside list-disc space-y-1">
                 <li>Connect to the selected server via SSH</li>
-                <li>Scan LXC configs in /etc/pve/lxc/ and VM configs in /etc/pve/qemu-server/</li>
+                <li>Scan all LXC config files in /etc/pve/lxc/</li>
                 <li>
-                  Find containers and VMs with "community-script" in
+                  Find containers with "community-script" in
                   their tags
                 </li>
-                <li>Extract the container/VM ID and hostname or name</li>
+                <li>Extract the container ID and hostname</li>
                 <li>Add them as installed script entries</li>
               </ul>
             </div>
@@ -2311,11 +2302,6 @@ export function InstalledScriptsTab() {
             "stopped"
           }
           className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20"
-          title={
-            script.is_vm
-              ? "VM serial console (requires serial port; detach with Ctrl+O)"
-              : undefined
-          }
         >
           Shell
         </DropdownMenuItem>
@@ -270,21 +270,22 @@ export function PBSCredentialsModal({
           htmlFor="pbs-fingerprint"
           className="text-foreground mb-1 block text-sm font-medium"
         >
-          Fingerprint
+          Fingerprint <span className="text-error">*</span>
         </label>
         <input
           type="text"
           id="pbs-fingerprint"
           value={pbsFingerprint}
           onChange={(e) => setPbsFingerprint(e.target.value)}
+          required
           disabled={isLoading}
           className="bg-card text-foreground placeholder-muted-foreground focus:ring-ring focus:border-ring border-border w-full rounded-md border px-3 py-2 shadow-sm focus:ring-2 focus:outline-none"
           placeholder="e.g., 7b:e5:87:38:5e:16:05:d1:12:22:7f:73:d2:e2:d0:cf:8c:cb:28:e2:74:0c:78:91:1a:71:74:2e:79:20:5a:02"
         />
         <p className="text-muted-foreground mt-1 text-xs">
-          Leave empty if PBS uses a trusted CA (e.g. Let's Encrypt).
-          For self-signed certificates, enter the server fingerprint from
-          the PBS dashboard ("Show Fingerprint").
+          Server fingerprint for auto-acceptance. You can find this on
+          your PBS dashboard by clicking the "Show Fingerprint"
+          button.
         </p>
       </div>

@@ -438,11 +438,6 @@ export function ServerForm({
           {errors.password && (
             <p className="text-destructive mt-1 text-sm">{errors.password}</p>
           )}
-          <p className="text-muted-foreground mt-1 text-xs">
-            SSH key is recommended when possible. Special characters (e.g.{" "}
-            <code className="rounded bg-muted px-0.5">{"{ } $ \" '"}</code>) are
-            supported.
-          </p>
         </div>
       )}

@@ -1,96 +0,0 @@
-import type { NextRequest } from 'next/server';
-import { NextResponse } from 'next/server';
-import { getDatabase } from '../../../../../server/database-prisma';
-import { getSSHExecutionService } from '../../../../../server/ssh-execution-service';
-import type { Server } from '~/types/server';
-
-const DISCOVER_TIMEOUT_MS = 10_000;
-
-/** Match lines that look like SSH public keys (same as build.func) */
-const SSH_PUBKEY_RE = /^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-(ssh-ed25519|ecdsa-sha2-nistp256))\s+/;
-
-/**
- * Run a command on the Proxmox host and return buffered stdout.
- * Resolves when the process exits or rejects on timeout/spawn error.
- */
-function runRemoteCommand(
-  server: Server,
-  command: string,
-  timeoutMs: number
-): Promise<{ stdout: string; exitCode: number }> {
-  const ssh = getSSHExecutionService();
-  return new Promise((resolve, reject) => {
-    const chunks: string[] = [];
-    let settled = false;
-
-    const finish = (stdout: string, exitCode: number) => {
-      if (settled) return;
-      settled = true;
-      clearTimeout(timer);
-      resolve({ stdout, exitCode });
-    };
-
-    const timer = setTimeout(() => {
-      if (settled) return;
-      settled = true;
-      reject(new Error('SSH discover keys timeout'));
-    }, timeoutMs);
-
-    ssh
-      .executeCommand(
-        server,
-        command,
-        (data: string) => chunks.push(data),
-        () => {},
-        (code: number) => finish(chunks.join(''), code)
-      )
-      .catch((err) => {
-        if (!settled) {
-          settled = true;
-          clearTimeout(timer);
-          reject(err);
-        }
-      });
-  });
-}
-
-export async function GET(
-  _request: NextRequest,
-  { params }: { params: Promise<{ id: string }> }
-) {
-  try {
-    const { id: idParam } = await params;
-    const id = parseInt(idParam);
-    if (isNaN(id)) {
-      return NextResponse.json({ error: 'Invalid server ID' }, { status: 400 });
-    }
-
-    const db = getDatabase();
-    const server = await db.getServerById(id) as Server | null;
-
-    if (!server) {
-      return NextResponse.json({ error: 'Server not found' }, { status: 404 });
-    }
-
-    // Same paths as native build.func ssh_discover_default_files()
-    const remoteScript = `bash -c 'for f in /root/.ssh/authorized_keys /root/.ssh/authorized_keys2 /root/.ssh/*.pub /etc/ssh/authorized_keys /etc/ssh/authorized_keys.d/* 2>/dev/null; do [ -f "$f" ] && [ -r "$f" ] && grep -E "^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-)" "$f" 2>/dev/null; done | sort -u'`;
-
-    const { stdout } = await runRemoteCommand(server, remoteScript, DISCOVER_TIMEOUT_MS);
-
-    const keys = stdout
-      .split(/\r?\n/)
-      .map((line) => line.trim())
-      .filter((line) => line.length > 0 && SSH_PUBKEY_RE.test(line));
-
-    return NextResponse.json({ keys });
-  } catch (error) {
-    console.error('Error discovering SSH keys:', error);
-    return NextResponse.json(
-      {
-        success: false,
-        error: error instanceof Error ? error.message : String(error),
-      },
-      { status: 500 }
-    );
-  }
-}
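
The deleted route amounts to running one remote command and filtering its stdout through the public-key pattern; a minimal sketch of that filter (same regex as the deleted SSH_PUBKEY_RE):

    // Keep only lines that start like an OpenSSH public key
    // (ssh-rsa, ssh-ed25519, ecdsa-sha2-nistp256, sk-*).
    const SSH_PUBKEY_RE = /^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-(ssh-ed25519|ecdsa-sha2-nistp256))\s+/;

    function extractPublicKeys(stdout: string): string[] {
      return stdout
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.length > 0 && SSH_PUBKEY_RE.test(line));
    }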
@@ -23,11 +23,8 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: z.string().default("scripts/"),
     // WebSocket Configuration
     WEBSOCKET_PORT: z.string().default("3001"),
-    // Git provider tokens (optional, for private repos)
+    // GitHub Configuration
     GITHUB_TOKEN: z.string().optional(),
-    GITLAB_TOKEN: z.string().optional(),
-    BITBUCKET_APP_PASSWORD: z.string().optional(),
-    BITBUCKET_TOKEN: z.string().optional(),
     // Authentication Configuration
     AUTH_USERNAME: z.string().optional(),
     AUTH_PASSWORD_HASH: z.string().optional(),
@@ -65,10 +62,8 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: process.env.ALLOWED_SCRIPT_PATHS,
     // WebSocket Configuration
     WEBSOCKET_PORT: process.env.WEBSOCKET_PORT,
+    // GitHub Configuration
     GITHUB_TOKEN: process.env.GITHUB_TOKEN,
-    GITLAB_TOKEN: process.env.GITLAB_TOKEN,
-    BITBUCKET_APP_PASSWORD: process.env.BITBUCKET_APP_PASSWORD,
-    BITBUCKET_TOKEN: process.env.BITBUCKET_TOKEN,
     // Authentication Configuration
     AUTH_USERNAME: process.env.AUTH_USERNAME,
     AUTH_PASSWORD_HASH: process.env.AUTH_PASSWORD_HASH,
@@ -418,46 +418,44 @@ async function isVM(scriptId: number, containerId: string, serverId: number | nu
       return false; // Default to LXC if SSH fails
     }

-    // Node-specific paths (multi-node Proxmox: /etc/pve/nodes/NODENAME/...)
-    const nodeName = (server as Server).name;
-    const vmConfigPathNode = `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`;
-    const lxcConfigPathNode = `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`;
-    // Fallback for single-node or when server.name is not the Proxmox node name
-    const vmConfigPathFallback = `/etc/pve/qemu-server/${containerId}.conf`;
-    const lxcConfigPathFallback = `/etc/pve/lxc/${containerId}.conf`;
-
-    const checkPathExists = (path: string): Promise<boolean> =>
-      new Promise<boolean>((resolve) => {
-        let exists = false;
-        void sshExecutionService.executeCommand(
-          server as Server,
-          `test -f "${path}" && echo "exists" || echo "not_exists"`,
-          (data: string) => {
-            if (data.includes('exists')) exists = true;
-          },
-          () => resolve(exists),
-          () => resolve(exists)
-        );
-      });
-
-    // Prefer node-specific paths first
-    const vmConfigExistsNode = await checkPathExists(vmConfigPathNode);
-    if (vmConfigExistsNode) {
-      return true; // VM config file exists on node
+    // Check both config file paths
+    const vmConfigPath = `/etc/pve/qemu-server/${containerId}.conf`;
+    const lxcConfigPath = `/etc/pve/lxc/${containerId}.conf`;
+
+    // Check VM config file
+    let vmConfigExists = false;
+    await new Promise<void>((resolve) => {
+      void sshExecutionService.executeCommand(
+        server as Server,
+        `test -f "${vmConfigPath}" && echo "exists" || echo "not_exists"`,
+        (data: string) => {
+          if (data.includes('exists')) {
+            vmConfigExists = true;
+          }
+        },
+        () => resolve(),
+        () => resolve()
+      );
+    });
+
+    if (vmConfigExists) {
+      return true; // VM config file exists
     }

-    const lxcConfigExistsNode = await checkPathExists(lxcConfigPathNode);
-    if (lxcConfigExistsNode) {
-      return false; // LXC config file exists on node
-    }
-
-    // Fallback: single-node or server.name not matching Proxmox node name
-    const vmConfigExistsFallback = await checkPathExists(vmConfigPathFallback);
-    if (vmConfigExistsFallback) {
-      return true;
-    }
-
-    return false; // LXC (or neither path exists)
+    // Check LXC config file (not needed for return value, but check for completeness)
+    await new Promise<void>((resolve) => {
+      void sshExecutionService.executeCommand(
+        server as Server,
+        `test -f "${lxcConfigPath}" && echo "exists" || echo "not_exists"`,
+        (_data: string) => {
+          // Data handler not needed - just checking if file exists
+        },
+        () => resolve(),
+        () => resolve()
+      );
+    });
+
+    return false; // Always LXC since VM config doesn't exist
   } catch (error) {
     console.error('Error determining container type:', error);
     return false; // Default to LXC on error
@@ -973,11 +971,10 @@ export const installedScriptsRouter = createTRPCRouter({
       };

       // Helper function to check config file for community-script tag and extract hostname/name
-      const nodeName = (server as Server).name;
       const checkConfigAndExtractInfo = async (id: string, isVM: boolean): Promise<any> => {
         const configPath = isVM
-          ? `/etc/pve/nodes/${nodeName}/qemu-server/${id}.conf`
-          : `/etc/pve/nodes/${nodeName}/lxc/${id}.conf`;
+          ? `/etc/pve/qemu-server/${id}.conf`
+          : `/etc/pve/lxc/${id}.conf`;

         const readCommand = `cat "${configPath}" 2>/dev/null`;

@@ -1063,7 +1060,7 @@ export const installedScriptsRouter = createTRPCRouter({
             reject(new Error(`pct list failed: ${error}`));
           },
           (_exitCode: number) => {
-            setImmediate(() => resolve());
+            resolve();
           }
         );
       });
@@ -1082,7 +1079,7 @@ export const installedScriptsRouter = createTRPCRouter({
             reject(new Error(`qm list failed: ${error}`));
           },
           (_exitCode: number) => {
-            setImmediate(() => resolve());
+            resolve();
           }
         );
       });
@@ -1321,10 +1318,10 @@ export const installedScriptsRouter = createTRPCRouter({

       // Check if ID exists in either pct list (containers) or qm list (VMs)
       if (!existingIds.has(containerId)) {
-        // Also verify config file doesn't exist as a double-check (node-specific paths)
-        const nodeName = (server as Server).name;
-        const checkContainerCommand = `test -f "/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
-        const checkVMCommand = `test -f "/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;
+        // Also verify config file doesn't exist as a double-check
+        // Check both container and VM config paths
+        const checkContainerCommand = `test -f "/etc/pve/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
+        const checkVMCommand = `test -f "/etc/pve/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;

         const configExists = await new Promise<boolean>((resolve) => {
           let combinedOutput = '';
@@ -2071,72 +2068,32 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Resolve app slug from /usr/bin/update (community-scripts) when available; else from hostname/suffix.
-      let slugFromUpdate: string | null = null;
-      try {
-        const updateCommand = `pct exec ${scriptData.container_id} -- cat /usr/bin/update 2>/dev/null`;
-        let updateOutput = '';
-        await new Promise<void>((resolve) => {
-          void sshExecutionService.executeCommand(
-            server as Server,
-            updateCommand,
-            (data: string) => { updateOutput += data; },
-            () => {},
-            () => resolve()
-          );
-        });
-        const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
-        if (ctSlugMatch?.[1]) {
-          slugFromUpdate = ctSlugMatch[1].trim().toLowerCase();
-          console.log('🔍 Slug from /usr/bin/update:', slugFromUpdate);
-        }
-      } catch {
-        // Container may not be from community-scripts; use hostname fallback
-      }
-
-      // Get the script's interface_port from metadata. Primary: slug from /usr/bin/update; fallback: hostname/suffix.
+      // Get the script's interface_port from metadata (prioritize metadata over existing database values)
       let detectedPort = 80; // Default fallback

       try {
+        // Import localScriptsService to get script metadata
         const { localScriptsService } = await import('~/server/services/localScripts');

+        // Get all scripts and find the one matching our script name
        const allScripts = await localScriptsService.getAllScripts();

-        const nameFromHostname = scriptData.script_name.replace(/\.sh$/, '').toLowerCase();
-
-        // Primary: slug from /usr/bin/update (community-scripts)
-        let scriptMetadata =
-          slugFromUpdate != null
-            ? allScripts.find((s) => s.slug === slugFromUpdate)
-            : undefined;
-        if (scriptMetadata) {
-          console.log('🔍 Using slug from /usr/bin/update for metadata:', scriptMetadata.slug);
-        }
-
-        // Fallback: exact hostname then hostname ends with slug (longest wins)
-        if (!scriptMetadata) {
-          scriptMetadata = allScripts.find((script) => script.slug === nameFromHostname);
-          if (!scriptMetadata) {
-            const suffixMatches = allScripts.filter((script) => nameFromHostname.endsWith(script.slug));
-            scriptMetadata =
-              suffixMatches.length > 0
-                ? suffixMatches.reduce((a, b) => (a.slug.length >= b.slug.length ? a : b))
-                : undefined;
-            if (scriptMetadata) {
-              console.log('🔍 Matched metadata by slug suffix in hostname:', scriptMetadata.slug);
-            }
-          }
-        }
+        // Extract script slug from script_name (remove .sh extension)
+        const scriptSlug = scriptData.script_name.replace(/\.sh$/, '');
+        console.log('🔍 Looking for script with slug:', scriptSlug);
+        const scriptMetadata = allScripts.find(script => script.slug === scriptSlug);

         if (scriptMetadata?.interface_port) {
           detectedPort = scriptMetadata.interface_port;
           console.log('📋 Found interface_port in metadata:', detectedPort);
         } else {
           console.log('📋 No interface_port found in metadata, using default port 80');
-          detectedPort = 80;
+          detectedPort = 80; // Default to port 80 if no metadata port found
         }
       } catch (error) {
         console.log('⚠️ Error getting script metadata, using default port 80:', error);
-        detectedPort = 80;
+        detectedPort = 80; // Default to port 80 if metadata lookup fails
       }

       console.log('🎯 Final detected port:', detectedPort);
@@ -2240,9 +2197,8 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Read config file (node-specific path)
-      const nodeName = (server as Server).name;
-      const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
+      // Read config file
+      const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
       const readCommand = `cat "${configPath}" 2>/dev/null`;
       let rawConfig = '';

@@ -2372,9 +2328,8 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Write config file using heredoc for safe escaping (node-specific path)
-      const nodeName = (server as Server).name;
-      const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
+      // Write config file using heredoc for safe escaping
+      const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
       const writeCommand = `cat > "${configPath}" << 'EOFCONFIG'
 ${rawConfig}
 EOFCONFIG`;
@@ -2782,10 +2737,9 @@ EOFCONFIG`;
       const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
       const sshExecutionService = getSSHExecutionService();

-      const nodeName = (server as Server).name;
       const configPath = input.containerType === 'lxc'
-        ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
-        : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;
+        ? `/etc/pve/lxc/${input.containerId}.conf`
+        : `/etc/pve/qemu-server/${input.containerId}.conf`;

       let configContent = '';
       await new Promise<void>((resolve) => {
@@ -3177,11 +3131,10 @@ EOFCONFIG`;
       const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
       const sshExecutionService = getSSHExecutionService();

-      // Read config file to get hostname/name (node-specific path)
-      const nodeName = (server as Server).name;
+      // Read config file to get hostname/name
       const configPath = input.containerType === 'lxc'
-        ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
-        : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;
+        ? `/etc/pve/lxc/${input.containerId}.conf`
+        : `/etc/pve/qemu-server/${input.containerId}.conf`;

       let configContent = '';
       await new Promise<void>((resolve) => {
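
Every path change in this file swaps the same two layouts; a sketch of the convention (assuming, as the removed code does, that server.name matches the Proxmox node name):

    // Cluster-wide node-specific path vs. the per-node view; on the node
    // itself, /etc/pve/lxc/<id>.conf refers to the same config file.
    function lxcConfigPath(containerId: string, nodeName?: string): string {
      return nodeName
        ? `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`
        : `/etc/pve/lxc/${containerId}.conf`;
    }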
@@ -1,55 +0,0 @@
-import type { DirEntry, GitProvider } from './types';
-import { parseRepoUrl } from '../repositoryUrlValidation';
-
-export class BitbucketProvider implements GitProvider {
-  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
-    const { owner, repo } = parseRepoUrl(repoUrl);
-    const listUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${path}`;
-    const headers: Record<string, string> = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
-    if (token) {
-      const auth = Buffer.from(`:${token}`).toString('base64');
-      headers.Authorization = `Basic ${auth}`;
-    }
-
-    const response = await fetch(listUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`Bitbucket API error: ${response.status} ${response.statusText}`);
-    }
-
-    const body = (await response.json()) as { values?: { path: string; type: string }[] };
-    const data = body.values ?? (Array.isArray(body) ? body : []);
-    if (!Array.isArray(data)) {
-      throw new Error('Bitbucket API returned unexpected response');
-    }
-    return data.map((item: { path: string; type: string }) => {
-      const name = item.path.split('/').pop() ?? item.path;
-      return {
-        name,
-        path: item.path,
-        type: item.type === 'commit_directory' ? ('dir' as const) : ('file' as const),
-      };
-    });
-  }
-
-  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
-    const { owner, repo } = parseRepoUrl(repoUrl);
-    const rawUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${filePath}`;
-    const headers: Record<string, string> = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
-    if (token) {
-      const auth = Buffer.from(`:${token}`).toString('base64');
-      headers.Authorization = `Basic ${auth}`;
-    }
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-    return response.text();
-  }
-}
@@ -1,44 +0,0 @@
-import type { DirEntry, GitProvider } from "./types";
-import { parseRepoUrl } from "../repositoryUrlValidation";
-
-export class CustomProvider implements GitProvider {
-  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
-    const { origin, owner, repo } = parseRepoUrl(repoUrl);
-    const apiUrl = `${origin}/api/v1/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
-    const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
-    const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
-    if (token) headers.Authorization = `token ${token}`;
-
-    const response = await fetch(apiUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`Custom Git server: list directory failed (${response.status}).`);
-    }
-    const data = (await response.json()) as { type: string; name: string; path: string }[];
-    if (!Array.isArray(data)) {
-      const single = data as unknown as { type?: string; name?: string; path?: string };
-      if (single?.name) {
-        return [{ name: single.name, path: single.path ?? path, type: single.type === "dir" ? "dir" : "file" }];
-      }
-      throw new Error("Custom Git server returned unexpected response");
-    }
-    return data.map((item) => ({
-      name: item.name,
-      path: item.path,
-      type: item.type === "dir" ? ("dir" as const) : ("file" as const),
-    }));
-  }
-
-  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
-    const { origin, owner, repo } = parseRepoUrl(repoUrl);
-    const rawUrl = `${origin}/${owner}/${repo}/raw/${encodeURIComponent(branch)}/${filePath}`;
-    const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
-    const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
-    if (token) headers.Authorization = `token ${token}`;
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to download ${filePath} from custom Git server (${response.status}).`);
-    }
-    return response.text();
-  }
-}
@@ -1,60 +0,0 @@
-import type { DirEntry, GitProvider } from './types';
-import { parseRepoUrl } from '../repositoryUrlValidation';
-
-export class GitHubProvider implements GitProvider {
-  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
-    const { owner, repo } = parseRepoUrl(repoUrl);
-    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
-    const headers: Record<string, string> = {
-      Accept: 'application/vnd.github.v3+json',
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.GITHUB_TOKEN;
-    if (token) headers.Authorization = `token ${token}`;
-
-    const response = await fetch(apiUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const err = new Error(
-          `GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN. Status: ${response.status} ${response.statusText}`
-        );
-        (err as Error & { name: string }).name = 'RateLimitError';
-        throw err;
-      }
-      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
-    }
-
-    const data = (await response.json()) as { type: string; name: string; path: string }[];
-    if (!Array.isArray(data)) {
-      throw new Error('GitHub API returned unexpected response');
-    }
-    return data.map((item) => ({
-      name: item.name,
-      path: item.path,
-      type: item.type === 'dir' ? ('dir' as const) : ('file' as const),
-    }));
-  }
-
-  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
-    const { owner, repo } = parseRepoUrl(repoUrl);
-    const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${encodeURIComponent(branch)}/${filePath}`;
-    const headers: Record<string, string> = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.GITHUB_TOKEN;
-    if (token) headers.Authorization = `token ${token}`;
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const err = new Error(
-          `GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN.`
-        );
-        (err as Error & { name: string }).name = 'RateLimitError';
-        throw err;
-      }
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-    return response.text();
-  }
-}
@@ -1,58 +0,0 @@
-import type { DirEntry, GitProvider } from './types';
-import { parseRepoUrl } from '../repositoryUrlValidation';
-
-export class GitLabProvider implements GitProvider {
-  private getBaseUrl(repoUrl: string): string {
-    const { origin } = parseRepoUrl(repoUrl);
-    return origin;
-  }
-
-  private getProjectId(repoUrl: string): string {
-    const { owner, repo } = parseRepoUrl(repoUrl);
-    return encodeURIComponent(`${owner}/${repo}`);
-  }
-
-  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
-    const baseUrl = this.getBaseUrl(repoUrl);
-    const projectId = this.getProjectId(repoUrl);
-    const apiUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/tree?path=${encodeURIComponent(path)}&ref=${encodeURIComponent(branch)}&per_page=100`;
-    const headers: Record<string, string> = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.GITLAB_TOKEN;
-    if (token) headers['PRIVATE-TOKEN'] = token;
-
-    const response = await fetch(apiUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
-    }
-
-    const data = (await response.json()) as { type: string; name: string; path: string }[];
-    if (!Array.isArray(data)) {
-      throw new Error('GitLab API returned unexpected response');
-    }
-    return data.map((item) => ({
-      name: item.name,
-      path: item.path,
-      type: item.type === 'tree' ? ('dir' as const) : ('file' as const),
-    }));
-  }
-
-  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
-    const baseUrl = this.getBaseUrl(repoUrl);
-    const projectId = this.getProjectId(repoUrl);
-    const encodedPath = encodeURIComponent(filePath);
-    const rawUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/files/${encodedPath}/raw?ref=${encodeURIComponent(branch)}`;
-    const headers: Record<string, string> = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-    const token = process.env.GITLAB_TOKEN;
-    if (token) headers['PRIVATE-TOKEN'] = token;
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-    return response.text();
-  }
-}
@@ -1 +0,0 @@
-export { listDirectory, downloadRawFile, getRepoProvider } from "./index.ts";
@@ -1,28 +0,0 @@
-import type { DirEntry, GitProvider } from "./types";
-import { getRepoProvider } from "../repositoryUrlValidation";
-import { GitHubProvider } from "./github";
-import { GitLabProvider } from "./gitlab";
-import { BitbucketProvider } from "./bitbucket";
-import { CustomProvider } from "./custom";
-
-const providers: Record<string, GitProvider> = {
-  github: new GitHubProvider(),
-  gitlab: new GitLabProvider(),
-  bitbucket: new BitbucketProvider(),
-  custom: new CustomProvider(),
-};
-
-export type { DirEntry, GitProvider };
-export { getRepoProvider };
-
-export function getGitProvider(repoUrl: string): GitProvider {
-  return providers[getRepoProvider(repoUrl)]!;
-}
-
-export async function listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
-  return getGitProvider(repoUrl).listDirectory(repoUrl, path, branch);
-}
-
-export async function downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
-  return getGitProvider(repoUrl).downloadRawFile(repoUrl, filePath, branch);
-}
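For context, call sites consumed this deleted facade rather than a concrete provider class; a hypothetical usage sketch (repository URL, folder, and branch are assumed values, not from the source):

    import { listDirectory, downloadRawFile } from './gitProvider';

    // Hypothetical caller: fetch the first JSON file from a repo's "json" folder.
    async function fetchFirstJson(repoUrl: string): Promise<string> {
      const entries = await listDirectory(repoUrl, 'json', 'main');
      const first = entries.find((e) => e.type === 'file' && e.name.endsWith('.json'));
      if (!first) throw new Error('no JSON files found');
      return downloadRawFile(repoUrl, first.path, 'main');
    }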
@@ -1,14 +0,0 @@
-/**
- * Git provider interface for listing and downloading repository files.
- */
-
-export type DirEntry = {
-  name: string;
-  path: string;
-  type: 'file' | 'dir';
-};
-
-export interface GitProvider {
-  listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]>;
-  downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string>;
-}
@@ -1,37 +0,0 @@
-/**
- * Repository URL validation (JS mirror for server.js).
- */
-const VALID_REPO_URL =
-  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;
-
-export const REPO_URL_ERROR_MESSAGE =
-  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';
-
-export function isValidRepositoryUrl(url) {
-  if (typeof url !== 'string' || !url.trim()) return false;
-  return VALID_REPO_URL.test(url.trim());
-}
-
-export function getRepoProvider(url) {
-  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
-  const normalized = url.trim().toLowerCase();
-  if (normalized.includes('github.com')) return 'github';
-  if (normalized.includes('gitlab.com')) return 'gitlab';
-  if (normalized.includes('bitbucket.org')) return 'bitbucket';
-  return 'custom';
-}
-
-export function parseRepoUrl(url) {
-  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
-  try {
-    const u = new URL(url.trim());
-    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
-    return {
-      origin: u.origin,
-      owner: pathParts[0] ?? '',
-      repo: pathParts[1] ?? '',
-    };
-  } catch {
-    throw new Error(REPO_URL_ERROR_MESSAGE);
-  }
-}
@@ -1,57 +0,0 @@
-/**
- * Repository URL validation and provider detection.
- * Supports GitHub, GitLab, Bitbucket, and custom Git servers.
- */
-
-const VALID_REPO_URL =
-  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;
-
-export const REPO_URL_ERROR_MESSAGE =
-  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';
-
-export type RepoProvider = 'github' | 'gitlab' | 'bitbucket' | 'custom';
-
-/**
- * Check if a string is a valid repository URL (format only).
- */
-export function isValidRepositoryUrl(url: string): boolean {
-  if (typeof url !== 'string' || !url.trim()) return false;
-  return VALID_REPO_URL.test(url.trim());
-}
-
-/**
- * Detect the Git provider from a repository URL.
- */
-export function getRepoProvider(url: string): RepoProvider {
-  if (!isValidRepositoryUrl(url)) {
-    throw new Error(REPO_URL_ERROR_MESSAGE);
-  }
-  const normalized = url.trim().toLowerCase();
-  if (normalized.includes('github.com')) return 'github';
-  if (normalized.includes('gitlab.com')) return 'gitlab';
-  if (normalized.includes('bitbucket.org')) return 'bitbucket';
-  return 'custom';
-}
-
-/**
- * Parse owner and repo from a repository URL (path segments).
- * Works for GitHub, GitLab, Bitbucket, and custom (host/owner/repo).
- */
-export function parseRepoUrl(url: string): { origin: string; owner: string; repo: string } {
-  if (!isValidRepositoryUrl(url)) {
-    throw new Error(REPO_URL_ERROR_MESSAGE);
-  }
-  try {
-    const u = new URL(url.trim());
-    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
-    const owner = pathParts[0] ?? '';
-    const repo = pathParts[1] ?? '';
-    return {
-      origin: u.origin,
-      owner,
-      repo,
-    };
-  } catch {
-    throw new Error(REPO_URL_ERROR_MESSAGE);
-  }
-}
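Both copies of the validator behave identically; a worked example of `parseRepoUrl` on a hypothetical URL, derived directly from the deleted code above:

    // parseRepoUrl('https://git.example.com/acme/tools.git')  (hypothetical URL)
    //   pathname '/acme/tools.git' -> strip leading '/' and trailing '.git' -> 'acme/tools'
    //   => { origin: 'https://git.example.com', owner: 'acme', repo: 'tools' }
    //
    // Note: VALID_REPO_URL requires exactly two path segments after the host, so
    // nested paths (e.g. GitLab subgroups like https://gitlab.com/group/subgroup/repo)
    // are rejected by isValidRepositoryUrl.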
@@ -327,16 +327,13 @@ class BackupService {
       // PBS supports PBS_PASSWORD and PBS_REPOSITORY environment variables for non-interactive login
       const repository = `root@pam@${pbsIp}:${pbsDatastore}`;

-      // Escape password and fingerprint for shell safety (single quotes)
+      // Escape password for shell safety (single quotes)
       const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
-      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
-      const envParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
-      if (escapedFingerprint) {
-        envParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
-      }
-      const envStr = envParts.join(' ');
-      const fullCommand = `${envStr} timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;
+      // Use PBS_PASSWORD environment variable for non-interactive authentication
+      // Auto-accept fingerprint by piping "y" to stdin
+      // PBS will use PBS_PASSWORD env var if available, avoiding interactive prompt
+      const fullCommand = `echo "y" | PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;

       console.log(`[BackupService] Logging into PBS: ${repository}`);

@@ -422,12 +419,9 @@ class BackupService {

       // Build full repository string: root@pam@<IP>:<DATASTORE>
       const repository = `root@pam@${pbsIp}:${pbsDatastore}`;
-      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
-      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
-      const snapshotEnvParts = escapedFingerprint ? [`PBS_FINGERPRINT='${escapedFingerprint}'`] : [];
-      const snapshotEnvStr = snapshotEnvParts.length ? snapshotEnvParts.join(' ') + ' ' : '';
       // Use correct command: snapshot list ct/<CT_ID> --repository <full_repo_string>
-      const command = `${snapshotEnvStr}timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
+      const command = `timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
       let output = '';

       console.log(`[BackupService] Discovering PBS backups for CT ${ctId} on repository ${repository}`);
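Both sides of these hunks lean on the same POSIX quoting trick: the secret is wrapped in single quotes, and each embedded `'` is rewritten as `'\''` (close the quote, emit an escaped literal quote, reopen). A minimal sketch of the escaping that `escapedPassword` performs:

    // Close-quote, escaped literal quote, reopen: ' -> '\''  (POSIX shells)
    function shellSingleQuote(value: string): string {
      return `'${value.replace(/'/g, "'\\''")}'`;
    }

    // A hypothetical secret pa'ss becomes 'pa'\''ss', so a string like
    // `PBS_PASSWORD=${shellSingleQuote(secret)} proxmox-backup-client ...`
    // survives the shell with the secret intact.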
@@ -1,8 +1,7 @@
 // JavaScript wrapper for githubJsonService (for use with node server.js)
-import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
 import { join } from 'path';
 import { repositoryService } from './repositoryService.js';
-import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';

 // Get environment variables
 const getEnv = () => ({

@@ -29,9 +28,76 @@ class GitHubJsonService {
     }
   }

+  getBaseUrl(repoUrl) {
+    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
+    if (!urlMatch) {
+      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
+    }
+
+    const [, owner, repo] = urlMatch;
+    return `https://api.github.com/repos/${owner}/${repo}`;
+  }
+
+  extractRepoPath(repoUrl) {
+    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
+    if (!match) {
+      throw new Error('Invalid GitHub repository URL');
+    }
+    return `${match[1]}/${match[2]}`;
+  }
+
+  async fetchFromGitHub(repoUrl, endpoint) {
+    const baseUrl = this.getBaseUrl(repoUrl);
+    const env = getEnv();
+
+    const headers = {
+      'Accept': 'application/vnd.github.v3+json',
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+
+    if (env.GITHUB_TOKEN) {
+      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
+    }
+
+    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
+
+    if (!response.ok) {
+      if (response.status === 403) {
+        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
+        error.name = 'RateLimitError';
+        throw error;
+      }
+      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
+    }
+
+    return response.json();
+  }
+
   async downloadJsonFile(repoUrl, filePath) {
     this.initializeConfig();
-    const content = await downloadRawFile(repoUrl, filePath, this.branch);
+    const repoPath = this.extractRepoPath(repoUrl);
+    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch}/${filePath}`;
+    const env = getEnv();
+
+    const headers = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+
+    if (env.GITHUB_TOKEN) {
+      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
+    }
+
+    const response = await fetch(rawUrl, { headers });
+    if (!response.ok) {
+      if (response.status === 403) {
+        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits.`);
+        error.name = 'RateLimitError';
+        throw error;
+      }
+      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
+    }
+
+    const content = await response.text();
     const script = JSON.parse(content);
     script.repository_url = repoUrl;
     return script;

@@ -39,13 +105,16 @@ class GitHubJsonService {

   async getJsonFiles(repoUrl) {
     this.initializeConfig();

     try {
-      const entries = await listDirectory(repoUrl, this.jsonFolder, this.branch);
-      return entries
-        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
-        .map((e) => ({ name: e.name, path: e.path }));
+      const files = await this.fetchFromGitHub(
+        repoUrl,
+        `/contents/${this.jsonFolder}?ref=${this.branch}`
+      );
+
+      return files.filter(file => file.name.endsWith('.json'));
     } catch (error) {
-      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }

@@ -163,42 +232,25 @@ class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

-      // Delete local JSON files that belong to this repo but are no longer in the remote
-      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
-      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
-      if (deletedFiles.length > 0) {
-        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
-      }
-
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
-        const msg =
-          deletedFiles.length > 0
-            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
-            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: msg,
+          message: `All JSON files are up to date for repository: ${repoUrl}`,
           count: 0,
-          syncedFiles: [],
-          deletedFiles
+          syncedFiles: []
         };
       }

       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

-      const msg =
-        deletedFiles.length > 0
-          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
-          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: msg,
+        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
         count: syncedFiles.length,
-        syncedFiles,
-        deletedFiles
+        syncedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);

@@ -206,8 +258,7 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: [],
-        deletedFiles: []
+        syncedFiles: []
       };
     }
   }

@@ -223,15 +274,13 @@ class GitHubJsonService {
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: [],
-          deletedFiles: []
+          syncedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles = [];
-      const allDeletedFiles = [];
       const processedSlugs = new Set();
       let totalSynced = 0;

@@ -242,7 +291,6 @@ class GitHubJsonService {
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
-          allDeletedFiles.push(...(result.deletedFiles ?? []));
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');
             if (processedSlugs.has(slug)) {

@@ -264,16 +312,11 @@ class GitHubJsonService {

       await this.updateExistingFilesWithRepositoryUrl();

-      const msg =
-        allDeletedFiles.length > 0
-          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
-          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: msg,
+        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
         count: totalSynced,
-        syncedFiles: allSyncedFiles,
-        deletedFiles: allDeletedFiles
+        syncedFiles: allSyncedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);

@@ -281,8 +324,7 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: [],
-        deletedFiles: []
+        syncedFiles: []
       };
     }
   }

@@ -324,32 +366,6 @@ class GitHubJsonService {
     }
   }

-  async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
-    this.initializeConfig();
-    const localFiles = await this.getLocalJsonFiles();
-    const deletedFiles = [];
-
-    for (const file of localFiles) {
-      try {
-        const filePath = join(this.localJsonDirectory, file);
-        const content = await readFile(filePath, 'utf-8');
-        const script = JSON.parse(content);
-
-        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
-          await unlink(filePath);
-          const slug = file.replace(/\.json$/, '');
-          this.scriptCache.delete(slug);
-          deletedFiles.push(file);
-          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
-        }
-      } catch {
-        // If we can't read or parse the file, skip (do not delete)
-      }
-    }
-
-    return deletedFiles;
-  }
-
   async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
     const filesToSync = [];

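Background for the `RateLimitError` branches this change reintroduces: unauthenticated calls to api.github.com share a small per-IP quota (on the order of 60 requests per hour, versus thousands with a token), and GitHub reports exhaustion as HTTP 403, which is why the Authorization header is attached only when `GITHUB_TOKEN` is set. The same conditional-auth pattern in isolation, with the token source assumed:

    // Attach auth only when configured; GitHub accepts `token <PAT>` (and `Bearer <PAT>`).
    function githubHeaders(token?: string): Record<string, string> {
      const headers: Record<string, string> = {
        Accept: 'application/vnd.github.v3+json',
        'User-Agent': 'PVEScripts-Local/1.0',
      };
      if (token) headers.Authorization = `token ${token}`;
      return headers;
    }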
@@ -1,9 +1,8 @@
-import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
 import { join } from 'path';
 import { env } from '../../env.js';
 import type { Script, ScriptCard, GitHubFile } from '../../types/script';
 import { repositoryService } from './repositoryService';
-import { listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

 export class GitHubJsonService {
   private branch: string | null = null;

@@ -23,24 +22,96 @@ export class GitHubJsonService {
     }
   }

+  private getBaseUrl(repoUrl: string): string {
+    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
+    if (!urlMatch) {
+      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
+    }
+
+    const [, owner, repo] = urlMatch;
+    return `https://api.github.com/repos/${owner}/${repo}`;
+  }
+
+  private extractRepoPath(repoUrl: string): string {
+    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
+    if (!match) {
+      throw new Error('Invalid GitHub repository URL');
+    }
+    return `${match[1]}/${match[2]}`;
+  }
+
+  private async fetchFromGitHub<T>(repoUrl: string, endpoint: string): Promise<T> {
+    const baseUrl = this.getBaseUrl(repoUrl);
+
+    const headers: HeadersInit = {
+      'Accept': 'application/vnd.github.v3+json',
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+
+    // Add GitHub token authentication if available
+    if (env.GITHUB_TOKEN) {
+      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
+    }
+
+    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
+
+    if (!response.ok) {
+      if (response.status === 403) {
+        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
+        error.name = 'RateLimitError';
+        throw error;
+      }
+      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
+    }

+    const data = await response.json();
+    return data as T;
+  }
+
   private async downloadJsonFile(repoUrl: string, filePath: string): Promise<Script> {
     this.initializeConfig();
-    const content = await downloadRawFile(repoUrl, filePath, this.branch!);
+    const repoPath = this.extractRepoPath(repoUrl);
+    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch!}/${filePath}`;
+
+    const headers: HeadersInit = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+
+    // Add GitHub token authentication if available
+    if (env.GITHUB_TOKEN) {
+      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
+    }
+
+    const response = await fetch(rawUrl, { headers });
+    if (!response.ok) {
+      if (response.status === 403) {
+        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
+        error.name = 'RateLimitError';
+        throw error;
+      }
+      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
+    }
+
+    const content = await response.text();
     const script = JSON.parse(content) as Script;
+    // Add repository_url to script
     script.repository_url = repoUrl;
     return script;
   }

   async getJsonFiles(repoUrl: string): Promise<GitHubFile[]> {
     this.initializeConfig();

     try {
-      const entries = await listDirectory(repoUrl, this.jsonFolder!, this.branch!);
-      const files: GitHubFile[] = entries
-        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
-        .map((e) => ({ name: e.name, path: e.path } as GitHubFile));
-      return files;
+      const files = await this.fetchFromGitHub<GitHubFile[]>(
+        repoUrl,
+        `/contents/${this.jsonFolder!}?ref=${this.branch!}`
+      );
+
+      // Filter for JSON files only
+      return files.filter(file => file.name.endsWith('.json'));
     } catch (error) {
-      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }

@@ -158,11 +229,12 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from a specific repository
    */
-  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
+  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
     try {
       console.log(`Starting JSON sync from repository: ${repoUrl}`);

-      console.log(`Fetching file list from repository (${repoUrl})...`);
+      // Get file list from GitHub
+      console.log(`Fetching file list from GitHub (${repoUrl})...`);
       const githubFiles = await this.getJsonFiles(repoUrl);
       console.log(`Found ${githubFiles.length} JSON files in repository ${repoUrl}`);

@@ -170,45 +242,28 @@ export class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

-      // Delete local JSON files that belong to this repo but are no longer in the remote
-      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
-      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
-      if (deletedFiles.length > 0) {
-        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
-      }
-
       // Compare and find files that need syncing
       // For multi-repo support, we need to check if file exists AND if it's from this repo
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
-        const msg =
-          deletedFiles.length > 0
-            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
-            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: msg,
+          message: `All JSON files are up to date for repository: ${repoUrl}`,
           count: 0,
-          syncedFiles: [],
-          deletedFiles
+          syncedFiles: []
         };
       }

       // Download and save only the files that need syncing
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

-      const msg =
-        deletedFiles.length > 0
-          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
-          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: msg,
+        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
         count: syncedFiles.length,
-        syncedFiles,
-        deletedFiles
+        syncedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);

@@ -216,8 +271,7 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: [],
-        deletedFiles: []
+        syncedFiles: []
       };
     }
   }

@@ -225,7 +279,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from all enabled repositories (main repo has priority)
    */
-  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
+  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
     try {
       console.log('Starting multi-repository JSON sync...');

@@ -236,15 +290,13 @@ export class GitHubJsonService {
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: [],
-          deletedFiles: []
+          syncedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles: string[] = [];
-      const allDeletedFiles: string[] = [];
       const processedSlugs = new Set<string>(); // Track slugs we've already processed
       let totalSynced = 0;

@@ -256,7 +308,6 @@ export class GitHubJsonService {
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
-          allDeletedFiles.push(...(result.deletedFiles ?? []));
           // Only count files that weren't already processed from a higher priority repo
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');

@@ -280,16 +331,11 @@ export class GitHubJsonService {
       // Also update existing files that don't have repository_url set (backward compatibility)
       await this.updateExistingFilesWithRepositoryUrl();

-      const msg =
-        allDeletedFiles.length > 0
-          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
-          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: msg,
+        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
         count: totalSynced,
-        syncedFiles: allSyncedFiles,
-        deletedFiles: allDeletedFiles
+        syncedFiles: allSyncedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);

@@ -297,8 +343,7 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: [],
-        deletedFiles: []
+        syncedFiles: []
       };
     }
   }

@@ -343,36 +388,6 @@ export class GitHubJsonService {
     }
   }

-  /**
-   * Delete local JSON files that belong to this repo but are no longer in the remote list.
-   * Returns the list of deleted filenames.
-   */
-  private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
-    this.initializeConfig();
-    const localFiles = await this.getLocalJsonFiles();
-    const deletedFiles: string[] = [];
-
-    for (const file of localFiles) {
-      try {
-        const filePath = join(this.localJsonDirectory!, file);
-        const content = await readFile(filePath, 'utf-8');
-        const script = JSON.parse(content) as Script;
-
-        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
-          await unlink(filePath);
-          const slug = file.replace(/\.json$/, '');
-          this.scriptCache.delete(slug);
-          deletedFiles.push(file);
-          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
-        }
-      } catch {
-        // If we can't read or parse the file, skip (do not delete)
-      }
-    }
-
-    return deletedFiles;
-  }
-
   /**
    * Find files that need syncing for a specific repository
    * This checks if file exists locally AND if it's from the same repository
@@ -1,6 +1,5 @@
 // JavaScript wrapper for repositoryService (for use with node server.js)
 import { prisma } from '../db.js';
-import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation.js';

 class RepositoryService {
   /**

@@ -90,8 +89,9 @@ class RepositoryService {
    * Create a new repository
    */
   async createRepository(data) {
-    if (!isValidRepositoryUrl(data.url)) {
-      throw new Error(REPO_URL_ERROR_MESSAGE);
+    // Validate GitHub URL
+    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
+      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
     }

     // Check for duplicates

@@ -122,9 +122,10 @@ class RepositoryService {
    * Update repository
    */
   async updateRepository(id, data) {
+    // If updating URL, validate it
     if (data.url) {
-      if (!isValidRepositoryUrl(data.url)) {
-        throw new Error(REPO_URL_ERROR_MESSAGE);
+      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
+        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
       }

       // Check for duplicates (excluding current repo)
@@ -1,5 +1,5 @@
+/* eslint-disable @typescript-eslint/prefer-regexp-exec */
 import { prisma } from '../db';
-import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation';

 export class RepositoryService {
   /**

@@ -93,8 +93,9 @@ export class RepositoryService {
     enabled?: boolean;
     priority?: number;
   }) {
-    if (!isValidRepositoryUrl(data.url)) {
-      throw new Error(REPO_URL_ERROR_MESSAGE);
+    // Validate GitHub URL
+    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
+      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
     }

     // Check for duplicates

@@ -129,9 +130,10 @@ export class RepositoryService {
     url?: string;
     priority?: number;
   }) {
+    // If updating URL, validate it
     if (data.url) {
-      if (!isValidRepositoryUrl(data.url)) {
-        throw new Error(REPO_URL_ERROR_MESSAGE);
+      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
+        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
       }

       // Check for duplicates (excluding current repo)
@@ -250,16 +250,9 @@ class RestoreService {
       const targetFolder = `/var/lib/vz/dump/vzdump-lxc-${ctId}-${snapshotNameForPath}`;
       const targetTar = `${targetFolder}.tar`;

-      // Use PBS_PASSWORD env var and add timeout for long downloads; PBS_FINGERPRINT when set for cert validation
+      // Use PBS_PASSWORD env var and add timeout for long downloads
       const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
-      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
-      const restoreEnvParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
-      if (escapedFingerprint) {
-        restoreEnvParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
-      }
-      const restoreEnvStr = restoreEnvParts.join(' ');
-      const restoreCommand = `${restoreEnvStr} timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;
+      const restoreCommand = `PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;

       let output = '';
       let exitCode = 0;
@@ -1,7 +1,6 @@
 // Real JavaScript implementation for script downloading
 import { join } from 'path';
 import { writeFile, mkdir, access, readFile, unlink } from 'fs/promises';
-import { downloadRawFile } from '../lib/gitProvider/index.js';

 export class ScriptDownloaderService {
   constructor() {

@@ -83,18 +82,51 @@ export class ScriptDownloaderService {
   }

   /**
-   * Download a file from the repository (GitHub, GitLab, Bitbucket, or custom)
-   * @param {string} repoUrl - The repository URL
+   * Extract repository path from GitHub URL
+   * @param {string} repoUrl - The GitHub repository URL
+   * @returns {string}
+   */
+  extractRepoPath(repoUrl) {
+    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
+    if (!match) {
+      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
+    }
+    return `${match[1]}/${match[2]}`;
+  }
+
+  /**
+   * Download a file from GitHub
+   * @param {string} repoUrl - The GitHub repository URL
    * @param {string} filePath - The file path within the repository
    * @param {string} [branch] - The branch to download from
    * @returns {Promise<string>}
    */
-  async downloadFileFromRepo(repoUrl, filePath, branch = 'main') {
+  async downloadFileFromGitHub(repoUrl, filePath, branch = 'main') {
+    this.initializeConfig();
     if (!repoUrl) {
       throw new Error('Repository URL is not set');
     }
-    console.log(`Downloading from repository: ${repoUrl} (${filePath})`);
-    return downloadRawFile(repoUrl, filePath, branch);
+    const repoPath = this.extractRepoPath(repoUrl);
+    const url = `https://raw.githubusercontent.com/${repoPath}/${branch}/${filePath}`;
+
+    /** @type {Record<string, string>} */
+    const headers = {
+      'User-Agent': 'PVEScripts-Local/1.0',
+    };
+
+    // Add GitHub token authentication if available
+    if (process.env.GITHUB_TOKEN) {
+      headers.Authorization = `token ${process.env.GITHUB_TOKEN}`;
+    }
+
+    console.log(`Downloading from GitHub: ${url}`);
+    const response = await fetch(url, { headers });
+    if (!response.ok) {
+      throw new Error(`Failed to download ${filePath} from ${repoUrl}: ${response.status} ${response.statusText}`);
+    }
+
+    return response.text();
   }

   /**

@@ -152,8 +184,9 @@ export class ScriptDownloaderService {
       const fileName = scriptPath.split('/').pop();

       if (fileName) {
+        // Download from GitHub using the script's repository URL
         console.log(`Downloading script file: ${scriptPath} from ${repoUrl}`);
-        const content = await this.downloadFileFromRepo(repoUrl, scriptPath, branch);
+        const content = await this.downloadFileFromGitHub(repoUrl, scriptPath, branch);

         // Determine target directory based on script path
         let targetDir;

@@ -217,7 +250,7 @@ export class ScriptDownloaderService {
       const installScriptName = `${script.slug}-install.sh`;
       try {
         console.log(`Downloading install script: install/${installScriptName} from ${repoUrl}`);
-        const installContent = await this.downloadFileFromRepo(repoUrl, `install/${installScriptName}`, branch);
+        const installContent = await this.downloadFileFromGitHub(repoUrl, `install/${installScriptName}`, branch);
         const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName);
         await writeFile(localInstallPath, installContent, 'utf-8');
         files.push(`install/${installScriptName}`);

@@ -241,7 +274,7 @@ export class ScriptDownloaderService {
       const alpineInstallScriptName = `alpine-${script.slug}-install.sh`;
       try {
         console.log(`[${script.slug}] Downloading alpine install script: install/${alpineInstallScriptName} from ${repoUrl}`);
-        const alpineInstallContent = await this.downloadFileFromRepo(repoUrl, `install/${alpineInstallScriptName}`, branch);
+        const alpineInstallContent = await this.downloadFileFromGitHub(repoUrl, `install/${alpineInstallScriptName}`, branch);
         const localAlpineInstallPath = join(this.scriptsDirectory, 'install', alpineInstallScriptName);
         await writeFile(localAlpineInstallPath, alpineInstallContent, 'utf-8');
         files.push(`install/${alpineInstallScriptName}`);

@@ -648,7 +681,7 @@ export class ScriptDownloaderService {
       console.log(`[Comparison] Local file size: ${localContent.length} bytes`);

       // Download remote content from the script's repository
-      const remoteContent = await this.downloadFileFromRepo(repoUrl, remotePath, branch);
+      const remoteContent = await this.downloadFileFromGitHub(repoUrl, remotePath, branch);
       console.log(`[Comparison] Remote file size: ${remoteContent.length} bytes`);

       // Apply modification only for CT scripts, not for other script types

@@ -706,7 +739,7 @@ export class ScriptDownloaderService {
         // Find the corresponding script path in install_methods
         const method = script.install_methods?.find(m => m.script === filePath);
         if (method?.script) {
-          const downloadedContent = await this.downloadFileFromRepo(repoUrl, method.script, branch);
+          const downloadedContent = await this.downloadFileFromGitHub(repoUrl, method.script, branch);
           remoteContent = this.modifyScriptContent(downloadedContent);
         }
       } catch {

@@ -723,7 +756,7 @@ export class ScriptDownloaderService {
       }

       try {
-        remoteContent = await this.downloadFileFromRepo(repoUrl, filePath, branch);
+        remoteContent = await this.downloadFileFromGitHub(repoUrl, filePath, branch);
       } catch {
         // Error downloading remote install script
       }
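The `downloadFileFromGitHub` path above builds raw-content URLs of the shape `https://raw.githubusercontent.com/<owner>/<repo>/<branch>/<path>`; a worked example with hypothetical values:

    // Hypothetical values showing the raw URL shape used by downloadFileFromGitHub:
    const repoPath = 'acme/proxmox-scripts';   // from extractRepoPath(repoUrl)
    const branch = 'main';
    const filePath = 'install/nginx-install.sh';
    const url = `https://raw.githubusercontent.com/${repoPath}/${branch}/${filePath}`;
    // -> https://raw.githubusercontent.com/acme/proxmox-scripts/main/install/nginx-install.sh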
@@ -1,8 +1,6 @@
 import { spawn } from 'child_process';
 import { spawn as ptySpawn } from 'node-pty';
-import { existsSync, writeFileSync, chmodSync, unlinkSync } from 'fs';
-import { join } from 'path';
-import { tmpdir } from 'os';
+import { existsSync } from 'fs';


 /**

@@ -196,45 +194,26 @@ class SSHExecutionService {
    */
   async transferScriptsFolder(server, onData, onError) {
     const { ip, user, password, auth_type = 'password', ssh_key_passphrase, ssh_key_path, ssh_port = 22 } = server;

-    const cleanupTempFile = (/** @type {string | null} */ tempPath) => {
-      if (tempPath) {
-        try {
-          unlinkSync(tempPath);
-        } catch (_) {
-          // ignore
-        }
-      }
-    };
-
     return new Promise((resolve, reject) => {
-      /** @type {string | null} */
-      let tempPath = null;
       try {
-        // Build rsync command based on authentication type.
-        // Use sshpass -f with a temp file so password/passphrase never go through the shell (safe for special chars like {, $, ").
+        // Build rsync command based on authentication type
         let rshCommand;
         if (auth_type === 'key') {
           if (!ssh_key_path || !existsSync(ssh_key_path)) {
             throw new Error('SSH key file not found');
           }

           if (ssh_key_passphrase) {
-            tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
-            writeFileSync(tempPath, ssh_key_passphrase);
-            chmodSync(tempPath, 0o600);
-            rshCommand = `sshpass -P passphrase -f ${tempPath} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+            rshCommand = `sshpass -P passphrase -p ${ssh_key_passphrase} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           } else {
             rshCommand = `ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           }
         } else {
           // Password authentication
-          tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
-          writeFileSync(tempPath, password ?? '');
-          chmodSync(tempPath, 0o600);
-          rshCommand = `sshpass -f ${tempPath} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+          rshCommand = `sshpass -p ${password} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
         }

         const rsyncCommand = spawn('rsync', [
           '-avz',
           '--delete',

@@ -247,31 +226,31 @@ class SSHExecutionService {
           stdio: ['pipe', 'pipe', 'pipe']
         });

         rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
+          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onData(output);
         });

         rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
+          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onError(output);
         });

         rsyncCommand.on('close', (code) => {
-          cleanupTempFile(tempPath);
           if (code === 0) {
             resolve();
           } else {
             reject(new Error(`rsync failed with code ${code}`));
           }
         });

         rsyncCommand.on('error', (error) => {
-          cleanupTempFile(tempPath);
           reject(error);
         });
       } catch (error) {
-        cleanupTempFile(tempPath);
         reject(error);
       }
     });
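The removed (left-hand) variant of these rsync hunks existed for a reason worth recording: `sshpass -p <secret>` puts the secret on the command line, where it is visible to other local users via `ps` and subject to shell interpolation, while `sshpass -f <file>` reads it from a private temp file. A standalone sketch of the removed technique, assuming only Node's standard fs/os/path APIs:

    import { writeFileSync, unlinkSync } from 'fs';
    import { join } from 'path';
    import { tmpdir } from 'os';

    // Run a callback with the secret in a 0600 temp file for `sshpass -f`,
    // then remove the file regardless of the outcome.
    function withPasswordFile<T>(secret: string, fn: (path: string) => T): T {
      const tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
      writeFileSync(tempPath, secret, { mode: 0o600 }); // owner-only from creation
      try {
        return fn(tempPath);
      } finally {
        try { unlinkSync(tempPath); } catch { /* already removed */ }
      }
    }

    // e.g. withPasswordFile(password, (p) => `sshpass -f ${p} ssh -p 22 ...`)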
@@ -169,17 +169,16 @@ class SSHService {
       const timeout = 10000;
       let resolved = false;

-      // Pass password via env so it is not embedded in the script (safe for special chars like {, $, ").
       const expectScript = `#!/usr/bin/expect -f
 set timeout 10
 spawn ssh -p ${ssh_port} -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o PasswordAuthentication=yes -o PubkeyAuthentication=no ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
 expect {
     "password:" {
-        send "$env(SSH_PASSWORD)\\r"
+        send "${password}\r"
         exp_continue
     }
     "Password:" {
-        send "$env(SSH_PASSWORD)\\r"
+        send "${password}\r"
         exp_continue
     }
     "SSH_LOGIN_SUCCESS" {

@@ -194,8 +193,7 @@
 }`;

       const expectCommand = spawn('expect', ['-c', expectScript], {
-        stdio: ['pipe', 'pipe', 'pipe'],
-        env: { ...process.env, SSH_PASSWORD: password ?? '' }
+        stdio: ['pipe', 'pipe', 'pipe']
       });

       const timer = setTimeout(() => {
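Similarly, the removed expect variant read the password from the child's environment (Tcl resolves `$env(SSH_PASSWORD)` at runtime) instead of interpolating it into the script source, so characters such as `{`, `$`, or `"` in a password could not corrupt the generated script. A minimal sketch of the spawn side, mirroring the removed lines:

    import { spawn } from 'child_process';

    // The expect script references $env(SSH_PASSWORD); the secret never appears in its source.
    function runExpect(expectScript: string, password: string) {
      return spawn('expect', ['-c', expectScript], {
        stdio: ['pipe', 'pipe', 'pipe'],
        env: { ...process.env, SSH_PASSWORD: password },
      });
    }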