Compare commits
39 Commits
fixes_upda...fix/365
| Author | SHA1 | Date |
|---|---|---|
| | aea14cda7e | |
| | 4893ccda6e | |
| | a56c625b4f | |
| | 54b2187f98 | |
| | 2f4e8606ed | |
| | ff5478dd72 | |
| | 944a527972 | |
| | c4479c1932 | |
| | 9998e48621 | |
| | 34eade3971 | |
| | 82be47b959 | |
| | 9b77fc7ddb | |
| | db12ac4219 | |
| | f66d1db861 | |
| | 886c3e37ff | |
| | 38deb09aa9 | |
| | 6d326dce1f | |
| | 6c8e177d3e | |
| | 879a548345 | |
| | 64cd81d5ba | |
| | 61e75949c8 | |
| | a5d24bfad7 | |
| | 04595c0093 | |
| | 06fdb4889d | |
| | 38d4f9f918 | |
| | 63dc7c6983 | |
| | d57c6059fc | |
| | eb152f9fae | |
| | 1a8e98fec0 | |
| | 83a1c7ea31 | |
| | 79c63a7d3d | |
| | 753721eee0 | |
| | 09607296af | |
| | c88040084a | |
| | 2573eb7314 | |
| | 414c356446 | |
| | c38ded7a39 | |
| | 0cfed84cd0 | |
| | 9611bc9bcf | |
@@ -18,7 +18,12 @@ ALLOWED_SCRIPT_PATHS="scripts/"
WEBSOCKET_PORT="3001"

# User settings
# Optional tokens for private repos: GITHUB_TOKEN (GitHub), GITLAB_TOKEN (GitLab),
# BITBUCKET_APP_PASSWORD or BITBUCKET_TOKEN (Bitbucket). REPO_URL and added repos
# can be GitHub, GitLab, Bitbucket, or custom Git servers.
GITHUB_TOKEN=
GITLAB_TOKEN=
BITBUCKET_APP_PASSWORD=
SAVE_FILTER=false
FILTERS=
AUTH_USERNAME=
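For context, a minimal sketch of how a consumer script might turn these optional tokens into an authenticated clone URL. The `auth_url` name, the auth-user prefixes, and the Bitbucket `USERNAME` placeholder are illustrative assumptions; the app's actual logic is not part of this diff.

```bash
# Hypothetical helper (not from this repo): pick an auth prefix per host.
auth_url() {
  local url="$1" auth=""
  case "$url" in
    *github.com*)    [ -n "${GITHUB_TOKEN:-}" ] && auth="x-access-token:${GITHUB_TOKEN}@" ;;
    *gitlab.com*)    [ -n "${GITLAB_TOKEN:-}" ] && auth="oauth2:${GITLAB_TOKEN}@" ;;
    # App passwords pair with your Bitbucket username; USERNAME is a placeholder.
    *bitbucket.org*) [ -n "${BITBUCKET_APP_PASSWORD:-}" ] && auth="USERNAME:${BITBUCKET_APP_PASSWORD}@" ;;
  esac
  printf 'https://%s%s\n' "$auth" "${url#https://}"
}

auth_url "https://github.com/example/private-repo.git"
```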
2 .github/pull_request_template.md (vendored)
@@ -4,7 +4,7 @@

## 🔗 Related PR / Issue
Link: #
Fixes: #

## ✅ Prerequisites (**X** in brackets)

847 package-lock.json (generated)
File diff suppressed because it is too large
48 package.json
@@ -25,33 +25,33 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@prisma/adapter-better-sqlite3": "^7.1.0",
"@prisma/client": "^7.1.0",
"@prisma/adapter-better-sqlite3": "^7.2.0",
"@prisma/client": "^7.2.0",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-slot": "^1.2.4",
"@t3-oss/env-nextjs": "^0.13.10",
"@tailwindcss/typography": "^0.5.19",
"@tanstack/react-query": "^5.90.12",
"@trpc/client": "^11.8.0",
"@tanstack/react-query": "^5.90.18",
"@trpc/client": "^11.8.1",
"@trpc/react-query": "^11.8.1",
"@trpc/server": "^11.8.0",
"@trpc/server": "^11.8.1",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/ws": "^8.18.1",
"@xterm/addon-fit": "^0.10.0",
"@xterm/addon-fit": "^0.11.0",
"@xterm/addon-web-links": "^0.12.0",
"@xterm/xterm": "^6.0.0",
"axios": "^1.13.2",
"bcryptjs": "^3.0.3",
"better-sqlite3": "^12.5.0",
"better-sqlite3": "^12.6.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cron-validator": "^1.4.0",
"dotenv": "^17.2.3",
"jsonwebtoken": "^9.0.3",
"lucide-react": "^0.562.0",
"next": "^16.0.10",
"next": "^16.1.3",
"node-cron": "^4.2.1",
"node-pty": "^1.0.0",
"node-pty": "^1.1.0",
"react": "^19.2.3",
"react-dom": "^19.2.3",
"react-markdown": "^10.1.0",
@@ -62,8 +62,8 @@
"strip-ansi": "^7.1.2",
"superjson": "^2.2.6",
"tailwind-merge": "^3.4.0",
"ws": "^8.18.3",
"zod": "^4.1.13"
"ws": "^8.19.0",
"zod": "^4.3.5"
},
"devDependencies": {
"@tailwindcss/postcss": "^4.1.18",
@@ -73,26 +73,26 @@
"@types/bcryptjs": "^3.0.0",
"@types/better-sqlite3": "^7.6.13",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "^24.10.4",
"@types/node": "^24.10.9",
"@types/node-cron": "^3.0.11",
"@types/react": "^19.2.7",
"@types/react": "^19.2.8",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.2",
"@vitest/coverage-v8": "^4.0.16",
"@vitest/ui": "^4.0.14",
"baseline-browser-mapping": "^2.9.3",
"eslint": "^9.39.1",
"eslint-config-next": "^16.1.0",
"jsdom": "^27.3.0",
"@vitest/coverage-v8": "^4.0.17",
"@vitest/ui": "^4.0.17",
"baseline-browser-mapping": "^2.9.15",
"eslint": "^9.39.2",
"eslint-config-next": "^16.1.3",
"jsdom": "^27.4.0",
"postcss": "^8.5.6",
"prettier": "^3.7.4",
"prettier": "^3.8.0",
"prettier-plugin-tailwindcss": "^0.7.2",
"prisma": "^7.1.0",
"prisma": "^7.2.0",
"tailwindcss": "^4.1.18",
"tsx": "^4.21.0",
"typescript": "^5.9.3",
"typescript-eslint": "^8.48.1",
"vitest": "^4.0.14"
"typescript-eslint": "^8.53.0",
"vitest": "^4.0.17"
},
"ct3aMetadata": {
"initVersion": "7.39.3"
@@ -104,4 +104,4 @@
"overrides": {
"prismjs": "^1.30.0"
}
}
}
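These are routine in-range dependency bumps. One way to reproduce and sanity-check them locally with stock npm commands; the typecheck script is confirmed by the scripts block above, while running tests assumes a test script is wired up (not shown in this diff):

```bash
npm outdated          # list deps whose latest version exceeds the installed one
npm update --save     # bump in-range versions and regenerate package-lock.json
npm run typecheck     # "tsc --noEmit", per the scripts block above
```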
@@ -11,6 +11,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
load_functions
catch_errors

# Get LXC IP address (must be called INSIDE container, after network is up)
get_lxc_ip

# This function enables IPv6 if it's not disabled and sets verbose mode
verb_ip6() {
set_std_mode # Set STD mode based on VERBOSE
@@ -125,22 +128,13 @@ update_os() {
# This function modifies the message of the day (motd) and SSH settings
motd_ssh() {
echo "export TERM='xterm-256color'" >>/root/.bashrc
IP=$(ip -4 addr show eth0 | awk '/inet / {print $2}' | cut -d/ -f1 | head -n 1)

if [ -f "/etc/os-release" ]; then
OS_NAME=$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"')
OS_VERSION=$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')
else
OS_NAME="Alpine Linux"
OS_VERSION="Unknown"
fi

PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
echo "echo -e \"\"" >"$PROFILE_FILE"
echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
echo "echo \"\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}${OS_NAME} - Version: ${OS_VERSION}${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"
@@ -1,507 +1,188 @@
#!/bin/ash
# shellcheck shell=ash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

# Expects existing msg_* functions and optional $STD from the framework.
if ! command -v curl >/dev/null 2>&1; then
apk update && apk add curl >/dev/null 2>&1
fi
source "$(dirname "${BASH_SOURCE[0]}")/core.func"
source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
load_functions
catch_errors

# ------------------------------
# helpers
# ------------------------------
lower() { printf '%s' "$1" | tr '[:upper:]' '[:lower:]'; }
has() { command -v "$1" >/dev/null 2>&1; }
# Get LXC IP address (must be called INSIDE container, after network is up)
get_lxc_ip

need_tool() {
# usage: need_tool curl jq unzip ...
# installs missing tools via apk
local missing=0 t
for t in "$@"; do
if ! has "$t"; then missing=1; fi
# This function enables IPv6 if it's not disabled and sets verbose mode
verb_ip6() {
set_std_mode # Set STD mode based on VERBOSE

if [ "${IPV6_METHOD:-}" = "disable" ]; then
msg_info "Disabling IPv6 (this may affect some services)"
$STD sysctl -w net.ipv6.conf.all.disable_ipv6=1
$STD sysctl -w net.ipv6.conf.default.disable_ipv6=1
$STD sysctl -w net.ipv6.conf.lo.disable_ipv6=1
mkdir -p /etc/sysctl.d
$STD tee /etc/sysctl.d/99-disable-ipv6.conf >/dev/null <<EOF
net.ipv6.conf.all.disable_ipv6 = 1
net.ipv6.conf.default.disable_ipv6 = 1
net.ipv6.conf.lo.disable_ipv6 = 1
EOF
$STD rc-update add sysctl default
msg_ok "Disabled IPv6"
fi
}

set -Eeuo pipefail
trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR
trap on_exit EXIT
trap on_interrupt INT
trap on_terminate TERM

error_handler() {
local exit_code="$1"
local line_number="$2"
local command="$3"

if [[ "$exit_code" -eq 0 ]]; then
return 0
fi

printf "\e[?25h"
echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
exit "$exit_code"
}

on_exit() {
local exit_code="$?"
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
exit "$exit_code"
}

on_interrupt() {
echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
exit 130
}

on_terminate() {
echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
exit 143
}

# This function sets up the Container OS by generating the locale, setting the timezone, and checking the network connection
setting_up_container() {
msg_info "Setting up Container OS"
while [ $i -gt 0 ]; do
if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" != "" ]; then
break
fi
echo 1>&2 -en "${CROSS}${RD} No Network! "
sleep $RETRY_EVERY
i=$((i - 1))
done
if [ "$missing" -eq 1 ]; then
msg_info "Installing tools: $*"
apk add --no-cache "$@" >/dev/null 2>&1 || {
msg_error "apk add failed for: $*"
return 1
}
msg_ok "Tools ready: $*"

if [ "$(ip addr show | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1)" = "" ]; then
echo 1>&2 -e "\n${CROSS}${RD} No Network After $RETRY_NUM Tries${CL}"
echo -e "${NETWORK}Check Network Settings"
exit 1
fi
msg_ok "Set up Container OS"
msg_ok "Network Connected: ${BL}$(ip addr show | grep 'inet ' | awk '{print $2}' | cut -d'/' -f1 | tail -n1)${CL}"
}

net_resolves() {
# better handling for missing getent on Alpine
# usage: net_resolves api.github.com
local host="$1"
ping -c1 -W1 "$host" >/dev/null 2>&1 || nslookup "$host" >/dev/null 2>&1
}

ensure_usr_local_bin_persist() {
local PROFILE_FILE="/etc/profile.d/10-localbin.sh"
if [ ! -f "$PROFILE_FILE" ]; then
echo 'case ":$PATH:" in *:/usr/local/bin:*) ;; *) export PATH="/usr/local/bin:$PATH";; esac' >"$PROFILE_FILE"
chmod +x "$PROFILE_FILE"
fi
}

download_with_progress() {
# $1 url, $2 dest
local url="$1" out="$2" cl
need_tool curl pv || return 1
cl=$(curl -fsSLI "$url" 2>/dev/null | awk 'tolower($0) ~ /^content-length:/ {print $2}' | tr -d '\r')
if [ -n "$cl" ]; then
curl -fsSL "$url" | pv -s "$cl" >"$out" || {
msg_error "Download failed: $url"
return 1
}
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
network_check() {
set +e
trap - ERR
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
ipv4_status="${GN}✔${CL} IPv4"
else
curl -fL# -o "$out" "$url" || {
msg_error "Download failed: $url"
return 1
}
fi
}

# ------------------------------
# GitHub: check Release
# ------------------------------
check_for_gh_release() {
# app, repo, [pinned]
local app="$1" source="$2" pinned="${3:-}"
local app_lc
app_lc="$(lower "$app" | tr -d ' ')"
local current_file="$HOME/.${app_lc}"
local current="" release tag

msg_info "Check for update: $app"

net_resolves api.github.com || {
msg_error "DNS/network error: api.github.com"
return 1
}
need_tool curl jq || return 1

tag=$(curl -fsSL "https://api.github.com/repos/${source}/releases/latest" | jq -r '.tag_name // empty')
[ -z "$tag" ] && {
msg_error "Unable to fetch latest tag for $app"
return 1
}
release="${tag#v}"

[ -f "$current_file" ] && current="$(cat "$current_file")"

if [ -n "$pinned" ]; then
if [ "$pinned" = "$release" ]; then
msg_ok "$app pinned to v$pinned (no update)"
return 1
fi
if [ "$current" = "$pinned" ]; then
msg_ok "$app pinned v$pinned installed (upstream v$release)"
return 1
fi
msg_info "$app pinned v$pinned (upstream v$release) → update/downgrade"
CHECK_UPDATE_RELEASE="$pinned"
return 0
fi

if [ "$release" != "$current" ] || [ ! -f "$current_file" ]; then
CHECK_UPDATE_RELEASE="$release"
msg_info "New release available: v$release (current: v${current:-none})"
return 0
fi

msg_ok "$app is up to date (v$release)"
return 1
}

# ------------------------------
# GitHub: get Release & deploy (Alpine)
# modes: tarball | prebuild | singlefile
# ------------------------------
fetch_and_deploy_gh() {
# $1 app, $2 repo, [$3 mode], [$4 version], [$5 target], [$6 asset_pattern]
local app="$1" repo="$2" mode="${3:-tarball}" version="${4:-latest}" target="${5:-/opt/$1}" pattern="${6:-}"
local app_lc
app_lc="$(lower "$app" | tr -d ' ')"
local vfile="$HOME/.${app_lc}"
local json url filename tmpd unpack

net_resolves api.github.com || {
msg_error "DNS/network error"
return 1
}
need_tool curl jq tar || return 1
[ "$mode" = "prebuild" ] || [ "$mode" = "singlefile" ] && need_tool unzip >/dev/null 2>&1 || true

tmpd="$(mktemp -d)" || return 1
mkdir -p "$target"

# Release JSON
if [ "$version" = "latest" ]; then
json="$(curl -fsSL "https://api.github.com/repos/$repo/releases/latest")" || {
msg_error "GitHub API failed"
rm -rf "$tmpd"
return 1
}
else
json="$(curl -fsSL "https://api.github.com/repos/$repo/releases/tags/$version")" || {
msg_error "GitHub API failed"
rm -rf "$tmpd"
return 1
}
fi

# correct version
version="$(printf '%s' "$json" | jq -r '.tag_name // empty')"
version="${version#v}"

[ -z "$version" ] && {
msg_error "No tag in release json"
rm -rf "$tmpd"
return 1
}

case "$mode" in
tarball | source)
url="$(printf '%s' "$json" | jq -r '.tarball_url // empty')"
[ -z "$url" ] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
filename="${app_lc}-${version}.tar.gz"
download_with_progress "$url" "$tmpd/$filename" || {
rm -rf "$tmpd"
return 1
}
tar -xzf "$tmpd/$filename" -C "$tmpd" || {
msg_error "tar extract failed"
rm -rf "$tmpd"
return 1
}
unpack="$(find "$tmpd" -mindepth 1 -maxdepth 1 -type d | head -n1)"
# copy content of unpack to target
(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
msg_error "copy failed"
rm -rf "$tmpd"
return 1
}
;;
prebuild)
[ -n "$pattern" ] || {
msg_error "prebuild requires asset pattern"
rm -rf "$tmpd"
return 1
}
url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
BEGIN{IGNORECASE=1}
$0 ~ p {print; exit}
')"
[ -z "$url" ] && {
msg_error "asset not found for pattern: $pattern"
rm -rf "$tmpd"
return 1
}
filename="${url##*/}"
download_with_progress "$url" "$tmpd/$filename" || {
rm -rf "$tmpd"
return 1
}
# unpack archive (zip or tarball)
case "$filename" in
*.zip)
need_tool unzip || {
rm -rf "$tmpd"
return 1
}
mkdir -p "$tmpd/unp"
unzip -q "$tmpd/$filename" -d "$tmpd/unp"
;;
*.tar.gz | *.tgz | *.tar.xz | *.tar.zst | *.tar.bz2)
mkdir -p "$tmpd/unp"
tar -xf "$tmpd/$filename" -C "$tmpd/unp"
;;
*)
msg_error "unsupported archive: $filename"
rm -rf "$tmpd"
return 1
;;
esac
# strip top-level folder
if [ "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d | wc -l)" -eq 1 ] && [ -z "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type f | head -n1)" ]; then
unpack="$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d)"
(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
msg_error "copy failed"
rm -rf "$tmpd"
return 1
}
ipv4_status="${RD}✖${CL} IPv4"
read -r -p "Internet NOT connected. Continue anyway? <y/N> " prompt
if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
echo -e "${INFO}${RD}Expect Issues Without Internet${CL}"
else
(cd "$tmpd/unp" && tar -cf - .) | (cd "$target" && tar -xf -) || {
msg_error "copy failed"
rm -rf "$tmpd"
return 1
}
fi
;;
singlefile)
[ -n "$pattern" ] || {
msg_error "singlefile requires asset pattern"
rm -rf "$tmpd"
return 1
}
url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
BEGIN{IGNORECASE=1}
$0 ~ p {print; exit}
')"
[ -z "$url" ] && {
msg_error "asset not found for pattern: $pattern"
rm -rf "$tmpd"
return 1
}
filename="${url##*/}"
download_with_progress "$url" "$target/$app" || {
rm -rf "$tmpd"
return 1
}
chmod +x "$target/$app"
;;
*)
msg_error "Unknown mode: $mode"
rm -rf "$tmpd"
return 1
;;
esac

echo "$version" >"$vfile"
ensure_usr_local_bin_persist
rm -rf "$tmpd"
msg_ok "Deployed $app ($version) → $target"
}

# ------------------------------
# yq (mikefarah) – Alpine
# ------------------------------
setup_yq() {
# prefer apk, unless FORCE_GH=1
if [ "${FORCE_GH:-0}" != "1" ] && apk info -e yq >/dev/null 2>&1; then
msg_info "Updating yq via apk"
apk add --no-cache --upgrade yq >/dev/null 2>&1 || true
msg_ok "yq ready ($(yq --version 2>/dev/null))"
return 0
fi

need_tool curl || return 1
local arch bin url tmp
case "$(uname -m)" in
x86_64) arch="amd64" ;;
aarch64) arch="arm64" ;;
*)
msg_error "Unsupported arch for yq: $(uname -m)"
return 1
;;
esac
url="https://github.com/mikefarah/yq/releases/latest/download/yq_linux_${arch}"
tmp="$(mktemp)"
download_with_progress "$url" "$tmp" || return 1
install -m 0755 "$tmp" /usr/local/bin/yq
rm -f "$tmp"
msg_ok "Setup yq ($(yq --version 2>/dev/null))"
}

# ------------------------------
# Adminer – Alpine
# ------------------------------
setup_adminer() {
need_tool curl || return 1
msg_info "Setup Adminer (Alpine)"
mkdir -p /var/www/localhost/htdocs/adminer
curl -fsSL https://github.com/vrana/adminer/releases/latest/download/adminer.php \
-o /var/www/localhost/htdocs/adminer/index.php || {
msg_error "Adminer download failed"
return 1
}
msg_ok "Adminer at /adminer (served by your webserver)"
}

# ------------------------------
# uv – Alpine (musl tarball)
# optional: PYTHON_VERSION="3.12"
# ------------------------------
setup_uv() {
need_tool curl tar || return 1
local UV_BIN="/usr/local/bin/uv"
local arch tarball url tmpd ver installed

case "$(uname -m)" in
x86_64) arch="x86_64-unknown-linux-musl" ;;
aarch64) arch="aarch64-unknown-linux-musl" ;;
*)
msg_error "Unsupported arch for uv: $(uname -m)"
return 1
;;
esac

ver="$(curl -fsSL https://api.github.com/repos/astral-sh/uv/releases/latest | jq -r '.tag_name' 2>/dev/null)"
ver="${ver#v}"
[ -z "$ver" ] && {
msg_error "uv: cannot determine latest version"
return 1
}

if has "$UV_BIN"; then
installed="$($UV_BIN -V 2>/dev/null | awk '{print $2}')"
[ "$installed" = "$ver" ] && {
msg_ok "uv $ver already installed"
return 0
}
msg_info "Updating uv $installed → $ver"
else
msg_info "Setup uv $ver"
fi

tmpd="$(mktemp -d)" || return 1
tarball="uv-${arch}.tar.gz"
url="https://github.com/astral-sh/uv/releases/download/v${ver}/${tarball}"

download_with_progress "$url" "$tmpd/uv.tar.gz" || {
rm -rf "$tmpd"
return 1
}
tar -xzf "$tmpd/uv.tar.gz" -C "$tmpd" || {
msg_error "uv: extract failed"
rm -rf "$tmpd"
return 1
}

# tar contains ./uv
if [ -x "$tmpd/uv" ]; then
install -m 0755 "$tmpd/uv" "$UV_BIN"
else
# fallback: in subfolder
install -m 0755 "$tmpd"/*/uv "$UV_BIN" 2>/dev/null || {
msg_error "uv binary not found in tar"
rm -rf "$tmpd"
return 1
}
fi
rm -rf "$tmpd"
ensure_usr_local_bin_persist
msg_ok "Setup uv $ver"

if [ -n "${PYTHON_VERSION:-}" ]; then
local match
match="$(uv python list --only-downloads 2>/dev/null | awk -v maj="$PYTHON_VERSION" '
$0 ~ "^cpython-"maj"\\." { print $0 }' | awk -F- '{print $2}' | sort -V | tail -n1)"
[ -z "$match" ] && {
msg_error "No matching Python for $PYTHON_VERSION"
return 1
}
if ! uv python list | grep -q "cpython-${match}-linux"; then
msg_info "Installing Python $match via uv"
uv python install "$match" || {
msg_error "uv python install failed"
return 1
}
msg_ok "Python $match installed (uv)"
echo -e "${NETWORK}Check Network Settings"
exit 1
fi
fi
}

# ------------------------------
# Java – Alpine (OpenJDK)
# JAVA_VERSION: 17|21 (default 21)
# ------------------------------
setup_java() {
local JAVA_VERSION="${JAVA_VERSION:-21}" pkg
case "$JAVA_VERSION" in
17) pkg="openjdk17-jdk" ;;
21 | *) pkg="openjdk21-jdk" ;;
esac
msg_info "Setup Java (OpenJDK $JAVA_VERSION)"
apk add --no-cache "$pkg" >/dev/null 2>&1 || {
msg_error "apk add $pkg failed"
return 1
}
# set JAVA_HOME
local prof="/etc/profile.d/20-java.sh"
if [ ! -f "$prof" ]; then
echo 'export JAVA_HOME=$(dirname $(dirname $(readlink -f $(command -v java))))' >"$prof"
echo 'case ":$PATH:" in *:$JAVA_HOME/bin:*) ;; *) export PATH="$JAVA_HOME/bin:$PATH";; esac' >>"$prof"
chmod +x "$prof"
fi
msg_ok "Java ready: $(java -version 2>&1 | head -n1)"
}

# ------------------------------
# Go – Alpine (apk preferred, else tarball)
# ------------------------------
setup_go() {
if [ -z "${GO_VERSION:-}" ]; then
msg_info "Setup Go (apk)"
apk add --no-cache go >/dev/null 2>&1 || {
msg_error "apk add go failed"
return 1
}
msg_ok "Go ready: $(go version 2>/dev/null)"
return 0
fi

need_tool curl tar || return 1
local ARCH TARBALL URL TMP
case "$(uname -m)" in
x86_64) ARCH="amd64" ;;
aarch64) ARCH="arm64" ;;
*)
msg_error "Unsupported arch for Go: $(uname -m)"
return 1
;;
esac
TARBALL="go${GO_VERSION}.linux-${ARCH}.tar.gz"
URL="https://go.dev/dl/${TARBALL}"
msg_info "Setup Go $GO_VERSION (tarball)"
TMP="$(mktemp)"
download_with_progress "$URL" "$TMP" || return 1
rm -rf /usr/local/go
tar -C /usr/local -xzf "$TMP" || {
msg_error "extract go failed"
rm -f "$TMP"
return 1
}
rm -f "$TMP"
ln -sf /usr/local/go/bin/go /usr/local/bin/go
ln -sf /usr/local/go/bin/gofmt /usr/local/bin/gofmt
ensure_usr_local_bin_persist
msg_ok "Go ready: $(go version 2>/dev/null)"
}

# ------------------------------
# Composer – Alpine
# uses php83-cli + openssl + phar
# ------------------------------
setup_composer() {
local COMPOSER_BIN="/usr/local/bin/composer"
if ! has php; then
# prefers php83
msg_info "Installing PHP CLI for Composer"
apk add --no-cache php83-cli php83-openssl php83-phar php83-iconv >/dev/null 2>&1 || {
# fallback to generic php if 83 not available
apk add --no-cache php-cli php-openssl php-phar php-iconv >/dev/null 2>&1 || {
msg_error "Failed to install php-cli for composer"
return 1
}
}
msg_ok "PHP CLI ready: $(php -v | head -n1)"
fi

if [ -x "$COMPOSER_BIN" ]; then
msg_info "Updating Composer"
RESOLVEDIP=$(getent hosts github.com | awk '{ print $1 }')
if [[ -z "$RESOLVEDIP" ]]; then
msg_error "Internet: ${ipv4_status} DNS Failed"
else
msg_info "Setup Composer"
msg_ok "Internet: ${ipv4_status} DNS: ${BL}${RESOLVEDIP}${CL}"
fi
set -e
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}

# This function updates the Container OS by running apk upgrade
update_os() {
msg_info "Updating Container OS"
$STD apk -U upgrade
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
msg_ok "Updated Container OS"
}

# This function modifies the message of the day (motd) and SSH settings
motd_ssh() {
echo "export TERM='xterm-256color'" >>/root/.bashrc

PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
echo "echo -e \"\"" >"$PROFILE_FILE"
echo -e "echo -e \"${BOLD}${APPLICATION} LXC Container${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${GATEWAY}${YW} Provided by: ${GN}community-scripts ORG ${YW}| GitHub: ${GN}https://github.com/community-scripts/ProxmoxVE${CL}\"" >>"$PROFILE_FILE"
echo "echo \"\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${OS}${YW} OS: ${GN}\$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '\"') - Version: \$(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '\"')${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${HOSTNAME}${YW} Hostname: ${GN}\$(hostname)${CL}\"" >>"$PROFILE_FILE"
echo -e "echo -e \"${TAB}${INFO}${YW} IP Address: ${GN}\$(ip -4 addr show eth0 | awk '/inet / {print \$2}' | cut -d/ -f1 | head -n 1)${CL}\"" >>"$PROFILE_FILE"

# Configure SSH if enabled
if [[ "${SSH_ROOT}" == "yes" ]]; then
# Enable sshd service
$STD rc-update add sshd
# Allow root login via SSH
sed -i "s/#PermitRootLogin prohibit-password/PermitRootLogin yes/g" /etc/ssh/sshd_config
# Start the sshd service
$STD /etc/init.d/sshd start
fi
}

# Validate timezone for some LXCs
validate_tz() {
[[ -f "/usr/share/zoneinfo/$1" ]]
}

# This function customizes the container and enables passwordless login for the root user
customize() {
if [[ "$PASSWORD" == "" ]]; then
msg_info "Customizing Container"
passwd -d root >/dev/null 2>&1

# Ensure agetty is available
apk add --no-cache --force-broken-world util-linux >/dev/null 2>&1

# Create persistent autologin boot script
mkdir -p /etc/local.d
cat <<'EOF' >/etc/local.d/autologin.start
#!/bin/sh
sed -i 's|^tty1::respawn:.*|tty1::respawn:/sbin/agetty --autologin root --noclear tty1 38400 linux|' /etc/inittab
kill -HUP 1
EOF
touch /root/.hushlogin

chmod +x /etc/local.d/autologin.start
rc-update add local >/dev/null 2>&1

# Apply autologin immediately for current session
/etc/local.d/autologin.start

msg_ok "Customized Container"
fi

need_tool curl || return 1
curl -fsSL https://getcomposer.org/installer -o /tmp/composer-setup.php || {
msg_error "composer installer download failed"
return 1
}
php /tmp/composer-setup.php --install-dir=/usr/local/bin --filename=composer >/dev/null 2>&1 || {
msg_error "composer install failed"
return 1
}
rm -f /tmp/composer-setup.php
ensure_usr_local_bin_persist
msg_ok "Composer ready: $(composer --version 2>/dev/null)"
echo "bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/${app}.sh)\"" >/usr/bin/update
chmod +x /usr/bin/update

}
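Taken together, the new Alpine helpers compose into a short update flow. A minimal sketch, assuming core.func and error-handler.func are already sourced; `myapp` and `example-org/myapp` are placeholders:

```bash
# check_for_gh_release returns 0 (and sets CHECK_UPDATE_RELEASE) only when an update is due.
if check_for_gh_release "myapp" "example-org/myapp"; then
  # Positional args per the signature above; these values match the defaults:
  # mode=tarball, version=latest, target=/opt/myapp.
  fetch_and_deploy_gh "myapp" "example-org/myapp" "tarball" "latest" "/opt/myapp"
fi
```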
File diff suppressed because it is too large
@@ -127,6 +127,34 @@ icons() {
HOURGLASS="${TAB}⏳${TAB}"
}

# ------------------------------------------------------------------------------
# ensure_profile_loaded()
#
# - Sources /etc/profile.d/*.sh scripts if not already loaded
# - Fixes PATH issues when running via pct enter/exec (non-login shells)
# - Safe to call multiple times (uses guard variable)
# - Should be called in update_script() or any script running inside LXC
# ------------------------------------------------------------------------------
ensure_profile_loaded() {
# Skip if already loaded or running on Proxmox host
[[ -n "${_PROFILE_LOADED:-}" ]] && return
command -v pveversion &>/dev/null && return

# Source all profile.d scripts to ensure PATH is complete
if [[ -d /etc/profile.d ]]; then
for script in /etc/profile.d/*.sh; do
[[ -r "$script" ]] && source "$script"
done
fi

# Also ensure /usr/local/bin is in PATH (common install location)
if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then
export PATH="/usr/local/bin:$PATH"
fi

export _PROFILE_LOADED=1
}

# ------------------------------------------------------------------------------
# default_vars()
#
@@ -787,11 +815,9 @@ is_verbose_mode() {
# ------------------------------------------------------------------------------
# cleanup_lxc()
#
# - Comprehensive cleanup of package managers, caches, and logs
# - Supports Alpine (apk), Debian/Ubuntu (apt), and language package managers
# - Cleans: Python (pip/uv), Node.js (npm/yarn/pnpm), Go, Rust, Ruby, PHP
# - Truncates log files and vacuums systemd journal
# - Run at end of container creation to minimize disk usage
# - Cleans package manager and language caches (safe for installs AND updates)
# - Supports Alpine (apk), Debian/Ubuntu (apt), Python, Node.js, Go, Rust, Ruby, PHP
# - Uses fallback error handling to prevent cleanup failures from breaking installs
# ------------------------------------------------------------------------------
cleanup_lxc() {
msg_info "Cleaning up"
@@ -800,32 +826,52 @@ cleanup_lxc() {
$STD apk cache clean || true
rm -rf /var/cache/apk/*
else
$STD apt -y autoremove || true
$STD apt -y autoclean || true
$STD apt -y clean || true
$STD apt -y autoremove 2>/dev/null || msg_warn "apt autoremove failed (non-critical)"
$STD apt -y autoclean 2>/dev/null || msg_warn "apt autoclean failed (non-critical)"
$STD apt -y clean 2>/dev/null || msg_warn "apt clean failed (non-critical)"
fi

# Clear temp artifacts (keep sockets/FIFOs; ignore errors)
find /tmp /var/tmp -type f -name 'tmp*' -delete 2>/dev/null || true
find /tmp /var/tmp -type f -name 'tempfile*' -delete 2>/dev/null || true

# Node.js npm - directly remove cache directory
# npm cache clean/verify can fail with ENOTEMPTY errors, so we skip them
# Python
if command -v pip &>/dev/null; then
rm -rf /root/.cache/pip 2>/dev/null || true
fi
if command -v uv &>/dev/null; then
rm -rf /root/.cache/uv 2>/dev/null || true
fi

# Node.js
if command -v npm &>/dev/null; then
rm -rf /root/.npm/_cacache /root/.npm/_logs 2>/dev/null || true
fi
# Node.js yarn
if command -v yarn &>/dev/null; then yarn cache clean &>/dev/null || true; fi
# Node.js pnpm
if command -v pnpm &>/dev/null; then pnpm store prune &>/dev/null || true; fi
# Go
if command -v go &>/dev/null; then $STD go clean -cache -modcache || true; fi
# Rust cargo
if command -v cargo &>/dev/null; then $STD cargo clean || true; fi
# Ruby gem
if command -v gem &>/dev/null; then $STD gem cleanup || true; fi
# Composer (PHP)
if command -v composer &>/dev/null; then COMPOSER_ALLOW_SUPERUSER=1 $STD composer clear-cache || true; fi
if command -v yarn &>/dev/null; then
rm -rf /root/.cache/yarn /root/.yarn/cache 2>/dev/null || true
fi
if command -v pnpm &>/dev/null; then
pnpm store prune &>/dev/null || true
fi

# Go (only build cache, not modules)
if command -v go &>/dev/null; then
$STD go clean -cache 2>/dev/null || true
fi

# Rust (only registry cache, not build artifacts)
if command -v cargo &>/dev/null; then
rm -rf /root/.cargo/registry/cache /root/.cargo/.package-cache 2>/dev/null || true
fi

# Ruby
if command -v gem &>/dev/null; then
rm -rf /root/.gem/cache 2>/dev/null || true
fi

# PHP
if command -v composer &>/dev/null; then
rm -rf /root/.composer/cache 2>/dev/null || true
fi

msg_ok "Cleaned"
}
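The rewrite above converges on one pattern: probe for the tool, then delete its cache directory directly instead of calling the tool's own clean command (which, per the npm note, can fail). A condensed sketch of that pattern; `clean_cache` is an illustrative name, not a function from this diff:

```bash
clean_cache() {
  local tool="$1"; shift
  command -v "$tool" &>/dev/null || return 0  # tool absent: nothing to clean
  rm -rf "$@" 2>/dev/null || true             # best-effort; never break the install
}

clean_cache pip /root/.cache/pip
clean_cache npm /root/.npm/_cacache /root/.npm/_logs
```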
@@ -878,8 +924,95 @@ check_or_create_swap() {
fi
}

# ------------------------------------------------------------------------------
# Loads LOCAL_IP from persistent store or detects if missing.
#
# Description:
# - Loads from /run/local-ip.env or performs runtime lookup
# ------------------------------------------------------------------------------

function get_lxc_ip() {
local IP_FILE="/run/local-ip.env"
if [[ -f "$IP_FILE" ]]; then
# shellcheck disable=SC1090
source "$IP_FILE"
fi

if [[ -z "${LOCAL_IP:-}" ]]; then
get_current_ip() {
local ip

# Try direct interface lookup for eth0 FIRST (most reliable for LXC) - IPv4
ip=$(ip -4 addr show eth0 2>/dev/null | awk '/inet / {print $2}' | cut -d/ -f1 | head -n1)
if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "$ip"
return 0
fi

# Fallback: Try hostname -I (returns IPv4 first if available)
if command -v hostname >/dev/null 2>&1; then
ip=$(hostname -I 2>/dev/null | awk '{print $1}')
if [[ -n "$ip" && "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "$ip"
return 0
fi
fi

# Try routing table with IPv4 targets
local ipv4_targets=("8.8.8.8" "1.1.1.1" "default")
for target in "${ipv4_targets[@]}"; do
if [[ "$target" == "default" ]]; then
ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
else
ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
fi
if [[ -n "$ip" ]]; then
echo "$ip"
return 0
fi
done

# IPv6 fallback: Try direct interface lookup for eth0
ip=$(ip -6 addr show eth0 scope global 2>/dev/null | awk '/inet6 / {print $2}' | cut -d/ -f1 | head -n1)
if [[ -n "$ip" && "$ip" =~ : ]]; then
echo "$ip"
return 0
fi

# IPv6 fallback: Try hostname -I for IPv6
if command -v hostname >/dev/null 2>&1; then
ip=$(hostname -I 2>/dev/null | tr ' ' '\n' | grep -E ':' | head -n1)
if [[ -n "$ip" && "$ip" =~ : ]]; then
echo "$ip"
return 0
fi
fi

# IPv6 fallback: Use routing table with IPv6 targets
local ipv6_targets=("2001:4860:4860::8888" "2606:4700:4700::1111")
for target in "${ipv6_targets[@]}"; do
ip=$(ip -6 route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
if [[ -n "$ip" && "$ip" =~ : ]]; then
echo "$ip"
return 0
fi
done

return 1
}

LOCAL_IP="$(get_current_ip || true)"
if [[ -z "$LOCAL_IP" ]]; then
msg_error "Could not determine LOCAL_IP"
return 1
fi
fi

export LOCAL_IP
}

# ==============================================================================
# SIGNAL TRAPS
# ==============================================================================

trap 'stop_spinner' EXIT INT TERM
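A minimal sketch of how a script running inside the container might use the two new core.func helpers above:

```bash
ensure_profile_loaded   # re-source /etc/profile.d/*.sh for pct enter/exec non-login shells
get_lxc_ip              # loads /run/local-ip.env, else falls back to runtime detection
echo "Container reachable at ${LOCAL_IP}"
```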
@@ -37,6 +37,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/error-handler.func"
load_functions
catch_errors

# Get LXC IP address (must be called INSIDE container, after network is up)
get_lxc_ip

# ==============================================================================
# SECTION 2: NETWORK & CONNECTIVITY
# ==============================================================================
@@ -76,6 +79,13 @@ EOF
# ------------------------------------------------------------------------------
setting_up_container() {
msg_info "Setting up Container OS"

# Fix Debian 13 LXC template bug where / is owned by nobody
# Only attempt in privileged containers (unprivileged cannot chown /)
if [[ "$(stat -c '%U' /)" != "root" ]]; then
(chown root:root / 2>/dev/null) || true
fi

for ((i = RETRY_NUM; i > 0; i--)); do
if [ "$(hostname -I)" != "" ]; then
break

@@ -184,7 +184,10 @@ install_packages_with_retry() {
local retry=0

while [[ $retry -le $max_retries ]]; do
if $STD apt install -y "${packages[@]}" 2>/dev/null; then
if DEBIAN_FRONTEND=noninteractive $STD apt install -y \
-o Dpkg::Options::="--force-confdef" \
-o Dpkg::Options::="--force-confold" \
"${packages[@]}" 2>/dev/null; then
return 0
fi

@@ -211,7 +214,10 @@ upgrade_packages_with_retry() {
local retry=0

while [[ $retry -le $max_retries ]]; do
if $STD apt install --only-upgrade -y "${packages[@]}" 2>/dev/null; then
if DEBIAN_FRONTEND=noninteractive $STD apt install --only-upgrade -y \
-o Dpkg::Options::="--force-confdef" \
-o Dpkg::Options::="--force-confold" \
"${packages[@]}" 2>/dev/null; then
return 0
fi
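Both retry helpers now wrap apt the same way. The core of the change as a standalone sketch; the package names are placeholders:

```bash
# --force-confdef/--force-confold keep existing conffiles instead of prompting,
# and DEBIAN_FRONTEND=noninteractive suppresses any debconf dialogs.
DEBIAN_FRONTEND=noninteractive apt install -y \
  -o Dpkg::Options::="--force-confdef" \
  -o Dpkg::Options::="--force-confold" \
  curl jq
```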
@@ -568,7 +574,8 @@ EOF
msg_error "Failed to download PHP keyring"
return 1
}
dpkg -i /tmp/debsuryorg-archive-keyring.deb >/dev/null 2>&1 || {
# Don't use /dev/null redirection for dpkg as it may use background processes
dpkg -i /tmp/debsuryorg-archive-keyring.deb >>"$(get_active_logfile)" 2>&1 || {
msg_error "Failed to install PHP keyring"
rm -f /tmp/debsuryorg-archive-keyring.deb
return 1
@@ -1838,8 +1845,9 @@ function fetch_and_deploy_gh_release() {
}

chmod 644 "$tmpdir/$filename"
$STD apt install -y "$tmpdir/$filename" || {
$STD dpkg -i "$tmpdir/$filename" || {
# SYSTEMD_OFFLINE=1 prevents systemd-tmpfiles failures in unprivileged LXC (Debian 13+/systemd 257+)
SYSTEMD_OFFLINE=1 $STD apt install -y "$tmpdir/$filename" || {
SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
msg_error "Both apt and dpkg installation failed"
rm -rf "$tmpdir"
return 1
@@ -1894,7 +1902,7 @@ function fetch_and_deploy_gh_release() {
rm -rf "$tmpdir" "$unpack_tmp"
return 1
}
elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then
elif [[ "$filename" == *.tar.* || "$filename" == *.tgz || "$filename" == *.txz ]]; then
tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
msg_error "Failed to extract TAR archive"
rm -rf "$tmpdir" "$unpack_tmp"
@@ -1998,50 +2006,6 @@ function fetch_and_deploy_gh_release() {
rm -rf "$tmpdir"
}

# ------------------------------------------------------------------------------
# Loads LOCAL_IP from persistent store or detects if missing.
#
# Description:
# - Loads from /run/local-ip.env or performs runtime lookup
# ------------------------------------------------------------------------------

function import_local_ip() {
local IP_FILE="/run/local-ip.env"
if [[ -f "$IP_FILE" ]]; then
# shellcheck disable=SC1090
source "$IP_FILE"
fi

if [[ -z "${LOCAL_IP:-}" ]]; then
get_current_ip() {
local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
local ip

for target in "${targets[@]}"; do
if [[ "$target" == "default" ]]; then
ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
else
ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
fi
if [[ -n "$ip" ]]; then
echo "$ip"
return 0
fi
done

return 1
}

LOCAL_IP="$(get_current_ip || true)"
if [[ -z "$LOCAL_IP" ]]; then
msg_error "Could not determine LOCAL_IP"
return 1
fi
fi

export LOCAL_IP
}

# ------------------------------------------------------------------------------
# Installs Adminer (Debian/Ubuntu via APT, Alpine via direct download).
#
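The apt-then-dpkg fallback with SYSTEMD_OFFLINE=1 introduced above reduces to this shape; `install_deb` is an illustrative wrapper, not a function from the diff:

```bash
install_deb() {
  local deb="$1"  # absolute path to a downloaded .deb
  # SYSTEMD_OFFLINE=1 keeps systemd helpers (e.g. systemd-tmpfiles) from touching
  # the runtime in unprivileged LXC (Debian 13+/systemd 257+), per the comment above.
  SYSTEMD_OFFLINE=1 apt install -y "$deb" ||
    SYSTEMD_OFFLINE=1 dpkg -i "$deb" || {
      echo "Both apt and dpkg installation failed" >&2
      return 1
    }
}
```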
@@ -2669,6 +2633,7 @@ function setup_hwaccel() {
|
||||
# GPU Selection - Let user choose which GPU(s) to configure
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
local -a SELECTED_INDICES=()
|
||||
local install_nvidia_drivers="yes"
|
||||
|
||||
if [[ $gpu_count -eq 1 ]]; then
|
||||
# Single GPU - auto-select
|
||||
@@ -2677,7 +2642,7 @@ function setup_hwaccel() {
|
||||
else
|
||||
# Multiple GPUs - show selection menu
|
||||
echo ""
|
||||
msg_info "Multiple GPUs detected:"
|
||||
msg_custom "⚠" "${YW}" "Multiple GPUs detected:"
|
||||
echo ""
|
||||
for i in "${!GPU_LIST[@]}"; do
|
||||
local type_display="${GPU_TYPES[$i]}"
|
||||
@@ -2730,6 +2695,30 @@ function setup_hwaccel() {
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ask whether to install NVIDIA drivers in the container
|
||||
local nvidia_selected="no"
|
||||
for idx in "${SELECTED_INDICES[@]}"; do
|
||||
if [[ "${GPU_TYPES[$idx]}" == "NVIDIA" ]]; then
|
||||
nvidia_selected="yes"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ "$nvidia_selected" == "yes" ]]; then
|
||||
if [[ -n "${INSTALL_NVIDIA_DRIVERS:-}" ]]; then
|
||||
install_nvidia_drivers="${INSTALL_NVIDIA_DRIVERS}"
|
||||
else
|
||||
echo ""
|
||||
msg_custom "🎮" "${GN}" "NVIDIA GPU passthrough detected"
|
||||
local nvidia_reply=""
|
||||
read -r -t 60 -p "${TAB3}⚙️ Install NVIDIA driver libraries in the container? [Y/n] (auto-yes in 60s): " nvidia_reply || nvidia_reply=""
|
||||
case "${nvidia_reply,,}" in
|
||||
n | no) install_nvidia_drivers="no" ;;
|
||||
*) install_nvidia_drivers="yes" ;;
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# OS Detection
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
@@ -2790,7 +2779,11 @@ function setup_hwaccel() {
|
||||
# NVIDIA GPUs
|
||||
# ─────────────────────────────────────────────────────────────────────────
|
||||
NVIDIA)
|
||||
_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
|
||||
if [[ "$install_nvidia_drivers" == "yes" ]]; then
|
||||
_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
|
||||
else
|
||||
msg_warn "Skipping NVIDIA driver installation (user opted to install manually)"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
@@ -2920,8 +2913,15 @@ _setup_intel_legacy() {
|
||||
vainfo \
|
||||
intel-gpu-tools 2>/dev/null || msg_warn "Some Intel legacy packages failed"
|
||||
|
||||
# beignet provides OpenCL for older Intel GPUs (if available)
|
||||
$STD apt -y install beignet-opencl-icd 2>/dev/null || true
|
||||
# beignet provides OpenCL for older Intel GPUs (Sandy Bridge to Broadwell)
|
||||
# Note: beignet-opencl-icd was removed in Debian 12+ and Ubuntu 22.04+
|
||||
# Check if package is available before attempting installation
|
||||
if apt-cache show beignet-opencl-icd &>/dev/null; then
|
||||
$STD apt -y install beignet-opencl-icd 2>/dev/null || msg_warn "beignet-opencl-icd installation failed (optional)"
|
||||
else
|
||||
msg_warn "beignet-opencl-icd not available - OpenCL support for legacy Intel GPU limited"
|
||||
msg_warn "Note: Hardware video encoding/decoding (VA-API) still works without OpenCL"
|
||||
fi
|
||||
|
||||
msg_ok "Intel Legacy GPU configured"
|
||||
}
|
||||
@@ -2989,16 +2989,24 @@ _setup_nvidia_gpu() {
|
||||
|
||||
msg_info "Installing NVIDIA GPU drivers"
|
||||
|
||||
# Prevent interactive dialogs (e.g., "Mismatching nvidia kernel module" whiptail)
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
export NEEDRESTART_MODE=a
|
||||
|
||||
# Detect host driver version (passed through via /proc)
|
||||
# Format varies by driver type:
|
||||
# Proprietary: "NVRM version: NVIDIA UNIX x86_64 Kernel Module 550.54.14 Thu..."
|
||||
# Open: "NVRM version: NVIDIA UNIX Open Kernel Module for x86_64 590.48.01 Release..."
|
||||
# Use regex to extract version number (###.##.## pattern)
|
||||
local nvidia_host_version=""
|
||||
if [[ -f /proc/driver/nvidia/version ]]; then
|
||||
nvidia_host_version=$(grep "NVRM version:" /proc/driver/nvidia/version 2>/dev/null | awk '{print $8}')
|
||||
nvidia_host_version=$(grep -oP '\d{3,}\.\d+\.\d+' /proc/driver/nvidia/version 2>/dev/null | head -1)
|
||||
fi
|
||||
|
||||
if [[ -z "$nvidia_host_version" ]]; then
|
||||
msg_warn "NVIDIA host driver version not found in /proc/driver/nvidia/version"
|
||||
msg_warn "Ensure NVIDIA drivers are installed on host and GPU passthrough is enabled"
|
||||
$STD apt -y install va-driver-all vainfo 2>/dev/null || true
|
||||
$STD apt-get -y install va-driver-all vainfo 2>/dev/null || true
|
||||
return 0
|
||||
fi
|
||||
|
||||
@@ -3011,53 +3019,115 @@ _setup_nvidia_gpu() {
|
||||
sed -i -E 's/Components: (.*)$/Components: \1 contrib non-free non-free-firmware/g' /etc/apt/sources.list.d/debian.sources 2>/dev/null || true
|
||||
fi
|
||||
fi
|
||||
$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"
|
||||
|
||||
# Determine CUDA repository
|
||||
local cuda_repo="debian12"
|
||||
case "$os_codename" in
|
||||
bullseye) cuda_repo="debian11" ;;
|
||||
bookworm) cuda_repo="debian12" ;;
|
||||
trixie | sid) cuda_repo="debian12" ;; # Forward compatible
|
||||
esac
|
||||
# For Debian 13 Trixie/Sid: Use Debian's own nvidia packages first (better compatibility)
|
||||
# NVIDIA's CUDA repo targets Debian 12 and may not have amd64 packages for Trixie
|
||||
if [[ "$os_codename" == "trixie" || "$os_codename" == "sid" ]]; then
|
||||
msg_info "Debian ${os_codename}: Using Debian's NVIDIA packages"
|
||||
|
||||
# Add NVIDIA CUDA repository
|
||||
if [[ ! -f /usr/share/keyrings/cuda-archive-keyring.gpg ]]; then
|
||||
msg_info "Adding NVIDIA CUDA repository (${cuda_repo})"
|
||||
local cuda_keyring
|
||||
cuda_keyring="$(mktemp)"
|
||||
if curl -fsSL -o "$cuda_keyring" "https://developer.download.nvidia.com/compute/cuda/repos/${cuda_repo}/x86_64/cuda-keyring_1.1-1_all.deb" 2>/dev/null; then
|
||||
$STD dpkg -i "$cuda_keyring" 2>/dev/null || true
|
||||
# Extract major version for flexible matching (580.126.09 -> 580)
|
||||
local nvidia_major_version="${nvidia_host_version%%.*}"
|
||||
|
||||
# Check what versions are actually available
|
||||
local available_version=""
|
||||
available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)
|
||||
|
||||
if [[ -n "$available_version" ]]; then
|
||||
msg_info "Found available NVIDIA version: ${available_version}"
|
||||
local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
|
||||
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
|
||||
msg_ok "Installed NVIDIA libraries (${available_version})"
|
||||
else
|
||||
msg_warn "Failed to install NVIDIA ${available_version} - trying unversioned"
|
||||
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null || true
|
||||
fi
|
||||
else
|
||||
msg_warn "Failed to download NVIDIA CUDA keyring"
|
||||
# No matching major version - try latest available or unversioned
|
||||
msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x found in repos"
|
||||
available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | head -1 || true)
|
||||
if [[ -n "$available_version" ]]; then
|
||||
msg_info "Trying latest available: ${available_version} (may cause version mismatch)"
|
||||
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
|
||||
libcuda1="${available_version}" libnvcuvid1="${available_version}" \
|
||||
libnvidia-encode1="${available_version}" libnvidia-ml1="${available_version}" 2>/dev/null ||
|
||||
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
|
||||
libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
|
||||
msg_warn "NVIDIA library installation failed - GPU compute may not work"
|
||||
else
|
||||
msg_warn "No NVIDIA packages available in Debian repos - GPU support disabled"
|
||||
fi
|
||||
fi
|
||||
rm -f "$cuda_keyring"
|
||||
fi
|
||||
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends nvidia-smi 2>/dev/null || true
|
||||
|
||||
# Pin NVIDIA repo for version matching
|
||||
cat <<'NVIDIA_PIN' >/etc/apt/preferences.d/nvidia-cuda-pin
|
||||
else
|
||||
# Debian 11/12: Use NVIDIA CUDA repository for version matching
|
||||
local cuda_repo="debian12"
|
||||
case "$os_codename" in
|
||||
bullseye) cuda_repo="debian11" ;;
|
||||
bookworm) cuda_repo="debian12" ;;
|
||||
esac
|
||||
|
||||
# Add NVIDIA CUDA repository
|
||||
if [[ ! -f /usr/share/keyrings/cuda-archive-keyring.gpg ]]; then
|
||||
msg_info "Adding NVIDIA CUDA repository (${cuda_repo})"
|
||||
local cuda_keyring
|
||||
cuda_keyring="$(mktemp)"
|
||||
if curl -fsSL -o "$cuda_keyring" "https://developer.download.nvidia.com/compute/cuda/repos/${cuda_repo}/x86_64/cuda-keyring_1.1-1_all.deb" 2>/dev/null; then
|
||||
$STD dpkg -i "$cuda_keyring" 2>/dev/null || true
|
||||
else
|
||||
msg_warn "Failed to download NVIDIA CUDA keyring"
|
||||
fi
|
||||
rm -f "$cuda_keyring"
|
||||
fi
|
||||
|
||||
# Pin NVIDIA repo for version matching
|
||||
cat <<'NVIDIA_PIN' >/etc/apt/preferences.d/nvidia-cuda-pin
Package: *
Pin: origin developer.download.nvidia.com
Pin-Priority: 1001
NVIDIA_PIN

$STD apt -y update
$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"

# Install version-matched NVIDIA libraries
local nvidia_pkgs="libcuda1=${nvidia_host_version}* libnvcuvid1=${nvidia_host_version}* libnvidia-encode1=${nvidia_host_version}* libnvidia-ml1=${nvidia_host_version}*"
# Extract major version for flexible matching (580.126.09 -> 580)
local nvidia_major_version="${nvidia_host_version%%.*}"

msg_info "Installing NVIDIA libraries (version ${nvidia_host_version})"
if $STD apt -y install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
msg_ok "Installed version-matched NVIDIA libraries"
else
msg_warn "Version-pinned install failed - trying unpinned"
if $STD apt -y install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null; then
msg_warn "Installed NVIDIA libraries (unpinned) - version mismatch may occur"
# Check what versions are actually available in CUDA repo
local available_version=""
available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)

if [[ -n "$available_version" ]]; then
msg_info "Installing NVIDIA libraries (version ${available_version})"
local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
msg_ok "Installed version-matched NVIDIA libraries"
else
msg_warn "Version-pinned install failed - trying unpinned"
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
msg_warn "NVIDIA library installation failed"
fi
else
msg_warn "NVIDIA library installation failed"
msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x in CUDA repo (host: ${nvidia_host_version})"
# Try latest available version
available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | head -1 || true)
if [[ -n "$available_version" ]]; then
msg_info "Trying latest available: ${available_version} (version mismatch warning)"
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
libcuda1="${available_version}" libnvcuvid1="${available_version}" \
libnvidia-encode1="${available_version}" libnvidia-ml1="${available_version}" 2>/dev/null; then
msg_ok "Installed NVIDIA libraries (${available_version}) - version differs from host"
else
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends libcuda1 libnvcuvid1 libnvidia-encode1 libnvidia-ml1 2>/dev/null ||
msg_warn "NVIDIA library installation failed"
fi
else
msg_warn "No NVIDIA packages available in CUDA repo - GPU support disabled"
fi
fi
fi

$STD apt -y install --no-install-recommends nvidia-smi 2>/dev/null || true
$STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends nvidia-smi 2>/dev/null || true
fi

elif [[ "$os_id" == "ubuntu" ]]; then
# Ubuntu versioning
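
The pin file above gives the developer.download.nvidia.com origin priority 1001, so its packages win even over already-installed versions. A quick way to confirm the pin took effect, as a sketch using the same package name:

apt-cache policy libcuda1    # the developer.download.nvidia.com origin should show priority 1001
apt-cache madison libcuda1   # lists every candidate version per repository
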
@@ -3081,20 +3151,45 @@ NVIDIA_PIN
rm -f "$cuda_keyring"
fi

$STD apt -y update
$STD apt-get -y update 2>/dev/null || msg_warn "apt update failed - continuing anyway"

# Try version-matched install
local nvidia_pkgs="libcuda1=${nvidia_host_version}* libnvcuvid1=${nvidia_host_version}* libnvidia-encode1=${nvidia_host_version}* libnvidia-ml1=${nvidia_host_version}*"
if $STD apt -y install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
msg_ok "Installed version-matched NVIDIA libraries"
# Extract major version for flexible matching
local nvidia_major_version="${nvidia_host_version%%.*}"

# Check what versions are available
local available_version=""
available_version=$(apt-cache madison libcuda1 2>/dev/null | awk '{print $3}' | grep -E "^${nvidia_major_version}\." | head -1 || true)

if [[ -n "$available_version" ]]; then
msg_info "Installing NVIDIA libraries (version ${available_version})"
local nvidia_pkgs="libcuda1=${available_version} libnvcuvid1=${available_version} libnvidia-encode1=${available_version} libnvidia-ml1=${available_version}"
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends $nvidia_pkgs 2>/dev/null; then
msg_ok "Installed version-matched NVIDIA libraries"
else
# Fallback to Ubuntu repo packages with versioned nvidia-utils
msg_warn "CUDA repo install failed - trying Ubuntu native packages (nvidia-utils-${nvidia_major_version})"
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
libnvidia-decode-${nvidia_major_version} libnvidia-encode-${nvidia_major_version} nvidia-utils-${nvidia_major_version} 2>/dev/null; then
msg_ok "Installed Ubuntu NVIDIA packages (${nvidia_major_version})"
else
msg_warn "NVIDIA driver installation failed - please install manually: apt install nvidia-utils-${nvidia_major_version}"
fi
fi
else
# Fallback to Ubuntu repo packages
$STD apt -y install --no-install-recommends libnvidia-decode libnvidia-encode nvidia-utils 2>/dev/null || msg_warn "NVIDIA installation failed"
msg_warn "No NVIDIA packages for version ${nvidia_major_version}.x in CUDA repo"
# Fallback to Ubuntu repo packages with versioned nvidia-utils
msg_info "Trying Ubuntu native packages (nvidia-utils-${nvidia_major_version})"
if $STD apt-get -y -o Dpkg::Options::="--force-confold" install --no-install-recommends \
libnvidia-decode-${nvidia_major_version} libnvidia-encode-${nvidia_major_version} nvidia-utils-${nvidia_major_version} 2>/dev/null; then
msg_ok "Installed Ubuntu NVIDIA packages (${nvidia_major_version})"
else
msg_warn "NVIDIA driver installation failed - please install manually: apt install nvidia-utils-${nvidia_major_version}"
fi
fi
fi

# VA-API for hybrid setups (Intel + NVIDIA)
$STD apt -y install va-driver-all vainfo 2>/dev/null || true
$STD apt-get -y install va-driver-all vainfo 2>/dev/null || true

msg_ok "NVIDIA GPU configured"
}
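
Because the container's userspace libraries must match the host's kernel driver, a version check on both sides is the fastest diagnostic. A minimal sketch (run the first command on the Proxmox host, the second inside the container):

nvidia-smi --query-gpu=driver_version --format=csv,noheader   # host kernel driver, e.g. 580.126.09
dpkg-query -W -f='${Package} ${Version}\n' libnvidia-ml1      # userspace library inside the container
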
@@ -3496,10 +3591,11 @@ IP_FILE="/run/local-ip.env"
mkdir -p "$(dirname "$IP_FILE")"

get_current_ip() {
local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
local ip

for target in "${targets[@]}"; do
# Try IPv4 targets first
local ipv4_targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
for target in "${ipv4_targets[@]}"; do
if [[ "$target" == "default" ]]; then
ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
else
@@ -3511,6 +3607,23 @@ get_current_ip() {
fi
done

# IPv6 fallback: Try direct interface lookup for eth0
ip=$(ip -6 addr show eth0 scope global 2>/dev/null | awk '/inet6 / {print $2}' | cut -d/ -f1 | head -n1)
if [[ -n "$ip" && "$ip" =~ : ]]; then
echo "$ip"
return 0
fi

# IPv6 fallback: Use routing table with IPv6 targets (Google DNS, Cloudflare DNS)
local ipv6_targets=("2001:4860:4860::8888" "2606:4700:4700::1111")
for target in "${ipv6_targets[@]}"; do
ip=$(ip -6 route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
if [[ -n "$ip" && "$ip" =~ : ]]; then
echo "$ip"
return 0
fi
done

return 1
}
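
Both lookups lean on `ip route get`, which asks the kernel which source address it would pick for a destination without sending any traffic. Standalone, the same trick looks like this (targets match the ones used above):

ip route get 1 | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}'
ip -6 route get 2001:4860:4860::8888 | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}'
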
@@ -3549,58 +3662,145 @@ EOF
}

# ------------------------------------------------------------------------------
# Installs or updates MariaDB from official repo.
# Installs or updates MariaDB.
#
# Description:
# - Uses Debian/Ubuntu distribution packages by default (most reliable)
# - Only uses official MariaDB repository when a specific version is requested
# - Detects current MariaDB version and replaces it if necessary
# - Preserves existing database data
# - Dynamically determines latest GA version if "latest" is given
#
# Variables:
# MARIADB_VERSION - MariaDB version to install (e.g. 10.11, latest) (default: latest)
# MARIADB_VERSION - MariaDB version to install (optional)
# - Not set or "latest": Uses distribution packages (recommended)
# - Specific version (e.g. "11.4", "12.2"): Uses MariaDB official repo
# ------------------------------------------------------------------------------

setup_mariadb() {
local MARIADB_VERSION="${MARIADB_VERSION:-latest}"
local USE_DISTRO_PACKAGES=false

# Resolve "latest" to actual version
if [[ "$MARIADB_VERSION" == "latest" ]]; then
if ! curl -fsI --max-time 10 http://mirror.mariadb.org/repo/ >/dev/null 2>&1; then
msg_warn "MariaDB mirror not reachable - trying mariadb_repo_setup fallback"
# Try using official mariadb_repo_setup script as fallback
if curl -fsSL --max-time 15 https://r.mariadb.com/downloads/mariadb_repo_setup 2>/dev/null | bash -s -- --skip-verify >/dev/null 2>&1; then
msg_ok "MariaDB repository configured via mariadb_repo_setup"
# Extract version from configured repo
MARIADB_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+\.[0-9]+' /etc/apt/sources.list.d/mariadb.list 2>/dev/null | head -n1 || echo "12.2")
else
msg_warn "mariadb_repo_setup failed - using hardcoded fallback version"
MARIADB_VERSION="12.2"
fi
else
MARIADB_VERSION=$(curl -fsSL --max-time 15 http://mirror.mariadb.org/repo/ 2>/dev/null |
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
grep -vE 'rc/|rolling/' |
sed 's|/||' |
sort -Vr |
head -n1 || echo "")
# Ensure non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
export NEEDRESTART_MODE=a
export NEEDRESTART_SUSPEND=1

if [[ -z "$MARIADB_VERSION" ]]; then
msg_warn "Could not parse latest GA MariaDB version from mirror - trying mariadb_repo_setup"
if curl -fsSL --max-time 15 https://r.mariadb.com/downloads/mariadb_repo_setup 2>/dev/null | bash -s -- --skip-verify >/dev/null 2>&1; then
msg_ok "MariaDB repository configured via mariadb_repo_setup"
MARIADB_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+\.[0-9]+' /etc/apt/sources.list.d/mariadb.list 2>/dev/null | head -n1 || echo "12.2")
else
msg_warn "mariadb_repo_setup failed - using hardcoded fallback version"
MARIADB_VERSION="12.2"
fi
fi
fi
# Determine installation method:
# - "latest" or empty: Use distribution packages (avoids mirror issues)
# - Specific version: Use MariaDB official repository
if [[ "$MARIADB_VERSION" == "latest" || -z "$MARIADB_VERSION" ]]; then
USE_DISTRO_PACKAGES=true
msg_info "Setup MariaDB (distribution packages)"
else
msg_info "Setup MariaDB $MARIADB_VERSION (official repository)"
fi

# Get currently installed version
local CURRENT_VERSION=""
CURRENT_VERSION=$(is_tool_installed "mariadb" 2>/dev/null) || true

# Pre-configure debconf to prevent any interactive prompts during install/upgrade
debconf-set-selections <<EOF
mariadb-server mariadb-server/feedback boolean false
mariadb-server mariadb-server/root_password password
mariadb-server mariadb-server/root_password_again password
EOF

# If specific version requested, also configure version-specific debconf
if [[ "$USE_DISTRO_PACKAGES" == "false" ]]; then
local MARIADB_MAJOR_MINOR
MARIADB_MAJOR_MINOR=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
if [[ -n "$MARIADB_MAJOR_MINOR" ]]; then
debconf-set-selections <<EOF
mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/feedback boolean false
mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/root_password password
mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/root_password_again password
EOF
fi
fi

# ============================================================================
# DISTRIBUTION PACKAGES PATH (default, most reliable)
# ============================================================================
if [[ "$USE_DISTRO_PACKAGES" == "true" ]]; then
# Check if MariaDB was previously installed from official repo
local HAD_MARIADB_REPO=false
if [[ -f /etc/apt/sources.list.d/mariadb.sources ]] || [[ -f /etc/apt/sources.list.d/mariadb.list ]]; then
HAD_MARIADB_REPO=true
msg_info "Removing MariaDB official repository (switching to distribution packages)"
fi

# Clean up any existing MariaDB repository files to avoid conflicts
cleanup_old_repo_files "mariadb"

# If we had a repo, we need to refresh APT cache
if [[ "$HAD_MARIADB_REPO" == "true" ]]; then
$STD apt update || msg_warn "APT update had issues, continuing..."
fi

# Ensure APT is working
ensure_apt_working || return 1

# Check if installed version is from official repo and higher than distro version
# In this case, we keep the existing installation to avoid data issues
if [[ -n "$CURRENT_VERSION" ]]; then
# Get available distro version
local DISTRO_VERSION=""
DISTRO_VERSION=$(apt-cache policy mariadb-server 2>/dev/null | grep -E "Candidate:" | awk '{print $2}' | grep -oP '^\d+:\K\d+\.\d+\.\d+' || echo "")

if [[ -n "$DISTRO_VERSION" ]]; then
# Compare versions - if current is higher, keep it
local CURRENT_MAJOR DISTRO_MAJOR
CURRENT_MAJOR=$(echo "$CURRENT_VERSION" | awk -F. '{print $1}')
DISTRO_MAJOR=$(echo "$DISTRO_VERSION" | awk -F. '{print $1}')

if [[ "$CURRENT_MAJOR" -gt "$DISTRO_MAJOR" ]]; then
msg_warn "MariaDB $CURRENT_VERSION is already installed (higher than distro $DISTRO_VERSION)"
msg_warn "Keeping existing installation to preserve data integrity"
msg_warn "To use distribution packages, manually remove MariaDB first"
_setup_mariadb_runtime_dir
cache_installed_version "mariadb" "$CURRENT_VERSION"
msg_ok "Setup MariaDB $CURRENT_VERSION (existing installation kept)"
return 0
fi
fi
fi

# Install or upgrade MariaDB from distribution packages
if ! install_packages_with_retry "mariadb-server" "mariadb-client"; then
msg_error "Failed to install MariaDB packages from distribution"
return 1
fi

# Get installed version for caching
local INSTALLED_VERSION=""
INSTALLED_VERSION=$(mariadb --version 2>/dev/null | grep -oP '\d+\.\d+\.\d+' | head -n1 || echo "distro")

# Configure runtime directory and finish
_setup_mariadb_runtime_dir
cache_installed_version "mariadb" "$INSTALLED_VERSION"
msg_ok "Setup MariaDB $INSTALLED_VERSION (distribution packages)"
return 0
fi

# ============================================================================
# OFFICIAL REPOSITORY PATH (only when specific version requested)
# ============================================================================

# First, check if there's an old/broken repository that needs cleanup
if [[ -f /etc/apt/sources.list.d/mariadb.sources ]] || [[ -f /etc/apt/sources.list.d/mariadb.list ]]; then
local OLD_REPO_VERSION=""
OLD_REPO_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+(\.[0-9]+)?' /etc/apt/sources.list.d/mariadb.sources 2>/dev/null || \
grep -oP 'repo/\K[0-9]+\.[0-9]+(\.[0-9]+)?' /etc/apt/sources.list.d/mariadb.list 2>/dev/null || echo "")

# Check if old repo points to a different version
if [[ -n "$OLD_REPO_VERSION" ]] && [[ "${OLD_REPO_VERSION%.*}" != "${MARIADB_VERSION%.*}" ]]; then
msg_info "Cleaning up old MariaDB repository (was: $OLD_REPO_VERSION, requested: $MARIADB_VERSION)"
cleanup_old_repo_files "mariadb"
$STD apt update || msg_warn "APT update had issues, continuing..."
fi
fi

# Scenario 1: Already installed at target version - just update packages
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$MARIADB_VERSION" ]]; then
msg_info "Update MariaDB $MARIADB_VERSION"
@@ -3639,9 +3839,7 @@ setup_mariadb() {
remove_old_tool_version "mariadb"
fi

# Scenario 3: Fresh install or version change
msg_info "Setup MariaDB $MARIADB_VERSION"

# Scenario 3: Fresh install or version change with specific version
# Prepare repository (cleanup + validation)
prepare_repository_setup "mariadb" || {
msg_error "Failed to prepare MariaDB repository"
@@ -3667,31 +3865,39 @@ setup_mariadb() {
return 1
}

# Set debconf selections for all potential versions
local MARIADB_MAJOR_MINOR
MARIADB_MAJOR_MINOR=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
if [[ -n "$MARIADB_MAJOR_MINOR" ]]; then
echo "mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/feedback boolean false" | debconf-set-selections
fi

# Install packages with retry logic
export DEBIAN_FRONTEND=noninteractive
if ! install_packages_with_retry "mariadb-server" "mariadb-client"; then
# Fallback: try without specific version
msg_warn "Failed to install MariaDB packages from upstream repo, trying distro fallback..."
# Fallback: try distribution packages
msg_warn "Failed to install MariaDB $MARIADB_VERSION from official repo, falling back to distribution packages..."
cleanup_old_repo_files "mariadb"
$STD apt update || {
msg_warn "APT update also failed, continuing with cache"
}
install_packages_with_retry "mariadb-server" "mariadb-client" || {
msg_error "Failed to install MariaDB packages (both upstream and distro)"
if install_packages_with_retry "mariadb-server" "mariadb-client"; then
local FALLBACK_VERSION=""
FALLBACK_VERSION=$(mariadb --version 2>/dev/null | grep -oP '\d+\.\d+\.\d+' | head -n1 || echo "distro")
msg_warn "Installed MariaDB $FALLBACK_VERSION from distribution instead of requested $MARIADB_VERSION"
_setup_mariadb_runtime_dir
cache_installed_version "mariadb" "$FALLBACK_VERSION"
msg_ok "Setup MariaDB $FALLBACK_VERSION (fallback to distribution packages)"
return 0
else
msg_error "Failed to install MariaDB packages (both official repo and distribution)"
return 1
}
fi
fi

_setup_mariadb_runtime_dir
cache_installed_version "mariadb" "$MARIADB_VERSION"
msg_ok "Setup MariaDB $MARIADB_VERSION"
}
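
The whole branch structure is driven by MARIADB_VERSION; a minimal usage sketch of the two paths (version values are illustrative):

setup_mariadb                          # distribution packages (default / "latest")
MARIADB_VERSION="11.4" setup_mariadb   # pinned series from the official MariaDB repo
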
# ------------------------------------------------------------------------------
# Helper function: Configure MariaDB runtime directory persistence
# ------------------------------------------------------------------------------
_setup_mariadb_runtime_dir() {
# Configure tmpfiles.d to ensure /run/mysqld directory is created on boot
# This fixes the issue where MariaDB fails to start after container reboot
msg_info "Configuring MariaDB runtime directory persistence"

# Create tmpfiles.d configuration with error handling
if ! printf '# Ensure /run/mysqld directory exists with correct permissions for MariaDB\nd /run/mysqld 0755 mysql mysql -\n' >/etc/tmpfiles.d/mariadb.conf; then
@@ -3711,11 +3917,6 @@ setup_mariadb() {
msg_warn "mysql user not found - directory created with correct permissions but ownership not set"
fi
fi

msg_ok "Configured MariaDB runtime directory persistence"

cache_installed_version "mariadb" "$MARIADB_VERSION"
msg_ok "Setup MariaDB $MARIADB_VERSION"
}
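
tmpfiles.d rules normally apply at boot; to exercise the new rule immediately in a running container, systemd-tmpfiles can be invoked by hand (a verification sketch):

systemd-tmpfiles --create /etc/tmpfiles.d/mariadb.conf
ls -ld /run/mysqld    # expect drwxr-xr-x ... mysql mysql
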
# ------------------------------------------------------------------------------
@@ -3815,6 +4016,11 @@ function setup_mongodb() {
DISTRO_ID=$(get_os_info id)
DISTRO_CODENAME=$(get_os_info codename)

# Ensure non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
export NEEDRESTART_MODE=a
export NEEDRESTART_SUSPEND=1

# Check AVX support
if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
local major="${MONGO_VERSION%%.*}"
@@ -3933,6 +4139,11 @@ function setup_mysql() {
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

# Ensure non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
export NEEDRESTART_MODE=a
export NEEDRESTART_SUSPEND=1

# Get currently installed version
local CURRENT_VERSION=""
CURRENT_VERSION=$(is_tool_installed "mysql" 2>/dev/null) || true
@@ -4027,7 +4238,6 @@ EOF
ensure_apt_working || return 1

# Try multiple package names with retry logic
export DEBIAN_FRONTEND=noninteractive
local mysql_install_success=false

if apt-cache search "^mysql-server$" 2>/dev/null | grep -q . &&
@@ -4315,11 +4525,20 @@ EOF
return 1
}

manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
msg_error "Failed to setup PHP repository"
return 1
}

# Use different repository based on OS
if [[ "$DISTRO_ID" == "ubuntu" ]]; then
# Ubuntu: Use ondrej/php PPA
msg_info "Adding ondrej/php PPA for Ubuntu"
$STD apt install -y software-properties-common
# Don't use $STD for add-apt-repository as it uses background processes
add-apt-repository -y ppa:ondrej/php >>"$(get_active_logfile)" 2>&1
else
# Debian: Use Sury repository
manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
msg_error "Failed to setup PHP repository"
return 1
}
fi
ensure_apt_working || return 1
$STD apt update

@@ -4342,6 +4561,14 @@ EOF

if [[ "$PHP_FPM" == "YES" ]]; then
MODULE_LIST+=" php${PHP_VERSION}-fpm"
# Create systemd override for PHP-FPM to fix runtime directory issues in LXC containers
mkdir -p /etc/systemd/system/php${PHP_VERSION}-fpm.service.d/
cat <<EOF >/etc/systemd/system/php${PHP_VERSION}-fpm.service.d/override.conf
[Service]
RuntimeDirectory=php
RuntimeDirectoryMode=0755
EOF
$STD systemctl daemon-reload
fi
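
To confirm the drop-in was picked up after daemon-reload, systemd can print the merged unit and the resulting properties (php8.3-fpm stands in for whatever php${PHP_VERSION}-fpm resolves to):

systemctl cat php8.3-fpm                                                # override.conf should be listed
systemctl show php8.3-fpm -p RuntimeDirectory -p RuntimeDirectoryMode
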
# install apache2 with PHP support if requested
@@ -4466,6 +4693,11 @@ function setup_postgresql() {
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

# Ensure non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
export NEEDRESTART_MODE=a
export NEEDRESTART_SUSPEND=1

# Get currently installed version
local CURRENT_PG_VERSION=""
if command -v psql >/dev/null; then
@@ -4904,6 +5136,146 @@ function setup_ruby() {
msg_ok "Setup Ruby $RUBY_VERSION"
}

# ------------------------------------------------------------------------------
# Installs or updates MeiliSearch search engine.
#
# Description:
# - Fresh install: Downloads binary, creates config/service, starts
# - Update: Checks for new release, updates binary if available
# - Waits for service to be ready before returning
# - Exports API keys for use by caller
#
# Variables:
# MEILISEARCH_BIND - Bind address (default: 127.0.0.1:7700)
# MEILISEARCH_ENV - Environment: production/development (default: production)
# MEILISEARCH_DB_PATH - Database path (default: /var/lib/meilisearch/data)
#
# Exports:
# MEILISEARCH_MASTER_KEY - The master key for admin access
# MEILISEARCH_API_KEY - The default search API key
# MEILISEARCH_API_KEY_UID - The UID of the default API key
#
# Example (install script):
# setup_meilisearch
#
# Example (CT update_script):
# setup_meilisearch
# ------------------------------------------------------------------------------

function setup_meilisearch() {
local MEILISEARCH_BIND="${MEILISEARCH_BIND:-127.0.0.1:7700}"
local MEILISEARCH_ENV="${MEILISEARCH_ENV:-production}"
local MEILISEARCH_DB_PATH="${MEILISEARCH_DB_PATH:-/var/lib/meilisearch/data}"
local MEILISEARCH_DUMP_DIR="${MEILISEARCH_DUMP_DIR:-/var/lib/meilisearch/dumps}"
local MEILISEARCH_SNAPSHOT_DIR="${MEILISEARCH_SNAPSHOT_DIR:-/var/lib/meilisearch/snapshots}"

# Get bind address for health checks
local MEILISEARCH_HOST="${MEILISEARCH_BIND%%:*}"
local MEILISEARCH_PORT="${MEILISEARCH_BIND##*:}"
[[ "$MEILISEARCH_HOST" == "0.0.0.0" ]] && MEILISEARCH_HOST="127.0.0.1"

# Update mode: MeiliSearch already installed
if [[ -f /usr/bin/meilisearch ]]; then
if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
msg_info "Updating MeiliSearch"
systemctl stop meilisearch
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
systemctl start meilisearch
msg_ok "Updated MeiliSearch"
fi
return 0
fi

# Fresh install
msg_info "Setup MeiliSearch"

# Install binary
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary" || {
msg_error "Failed to install MeiliSearch binary"
return 1
}

# Download default config
curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml || {
msg_error "Failed to download MeiliSearch config"
return 1
}

# Generate master key
MEILISEARCH_MASTER_KEY=$(openssl rand -base64 12)
export MEILISEARCH_MASTER_KEY

# Configure
sed -i \
-e "s|^env =.*|env = \"${MEILISEARCH_ENV}\"|" \
-e "s|^# master_key =.*|master_key = \"${MEILISEARCH_MASTER_KEY}\"|" \
-e "s|^db_path =.*|db_path = \"${MEILISEARCH_DB_PATH}\"|" \
-e "s|^dump_dir =.*|dump_dir = \"${MEILISEARCH_DUMP_DIR}\"|" \
-e "s|^snapshot_dir =.*|snapshot_dir = \"${MEILISEARCH_SNAPSHOT_DIR}\"|" \
-e 's|^# no_analytics = true|no_analytics = true|' \
-e "s|^http_addr =.*|http_addr = \"${MEILISEARCH_BIND}\"|" \
/etc/meilisearch.toml

# Create data directories
mkdir -p "${MEILISEARCH_DB_PATH}" "${MEILISEARCH_DUMP_DIR}" "${MEILISEARCH_SNAPSHOT_DIR}"

# Create systemd service
cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF

# Enable and start service
systemctl daemon-reload
systemctl enable -q --now meilisearch

# Wait for MeiliSearch to be ready (up to 30 seconds)
for i in {1..30}; do
if curl -s -o /dev/null -w "%{http_code}" "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/health" 2>/dev/null | grep -q "200"; then
break
fi
sleep 1
done

# Verify service is running
if ! systemctl is-active --quiet meilisearch; then
msg_error "MeiliSearch service failed to start"
return 1
fi

# Get API keys with retry logic
MEILISEARCH_API_KEY=""
for i in {1..10}; do
MEILISEARCH_API_KEY=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
-H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//') || true
[[ -n "$MEILISEARCH_API_KEY" ]] && break
sleep 2
done

MEILISEARCH_API_KEY_UID=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
-H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//') || true

export MEILISEARCH_API_KEY
export MEILISEARCH_API_KEY_UID

# Cache version
local MEILISEARCH_VERSION
MEILISEARCH_VERSION=$(/usr/bin/meilisearch --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) || true
cache_installed_version "meilisearch" "${MEILISEARCH_VERSION:-unknown}"

msg_ok "Setup MeiliSearch ${MEILISEARCH_VERSION:-}"
}
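
Once the function returns, the exported keys work directly against the HTTP API; a smoke test under the default bind address:

curl -s http://127.0.0.1:7700/health    # {"status":"available"}
curl -s http://127.0.0.1:7700/keys -H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}"
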
# ------------------------------------------------------------------------------
# Installs or upgrades ClickHouse database server.
#
@@ -4923,6 +5295,11 @@ function setup_clickhouse() {
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

# Ensure non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
export NEEDRESTART_MODE=a
export NEEDRESTART_SUSPEND=1

# Resolve "latest" version
if [[ "$CLICKHOUSE_VERSION" == "latest" ]]; then
CLICKHOUSE_VERSION=$(curl -fsSL --max-time 15 https://packages.clickhouse.com/tgz/stable/ 2>/dev/null |
@@ -4985,7 +5362,6 @@ function setup_clickhouse() {
"main"

# Install packages with retry logic
export DEBIAN_FRONTEND=noninteractive
$STD apt update || {
msg_error "APT update failed for ClickHouse repository"
return 1
@@ -5628,4 +6004,4 @@ EOF
fi

msg_ok "Docker setup completed"
}
}
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
SCRIPT_DIR="$(dirname "$0")"
source "$SCRIPT_DIR/../core/build.func"
# Copyright (c) 2021-2025 tteck
# Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.debian.org/
@@ -40,5 +40,5 @@ start
build_container
description

msg_ok "Completed Successfully!\n"
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"

@@ -1,6 +1,6 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2025 tteck
# Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.debian.org/

@@ -1610,6 +1610,7 @@ class ScriptExecutionHandler {
// TerminalHandler removed - not used by current application

app.prepare().then(() => {
console.log('> Next.js app prepared successfully');
const httpServer = createServer(async (req, res) => {
try {
// Be sure to pass `true` as the second argument to `url.parse`.
@@ -1715,4 +1716,9 @@ app.prepare().then(() => {
autoSyncModule.setupGracefulShutdown();
}
});
}).catch((err) => {
console.error('> Failed to start server:', err.message);
console.error('> If you see "Could not find a production build", run: npm run build');
console.error('> Full error:', err);
process.exit(1);
});

@@ -58,6 +58,11 @@ export function ConfigurationModal({
// Advanced mode state
const [advancedVars, setAdvancedVars] = useState<EnvVars>({});

// Discovered SSH keys on the Proxmox host (advanced mode only)
const [discoveredSshKeys, setDiscoveredSshKeys] = useState<string[]>([]);
const [discoveredSshKeysLoading, setDiscoveredSshKeysLoading] = useState(false);
const [discoveredSshKeysError, setDiscoveredSshKeysError] = useState<string | null>(null);

// Validation errors
const [errors, setErrors] = useState<Record<string, string>>({});

@@ -104,6 +109,7 @@ export function ConfigurationModal({
var_mknod: 0,
var_mount_fs: '',
var_protection: 'no',
var_tun: 'no',

// System
var_timezone: '',
@@ -119,6 +125,38 @@ export function ConfigurationModal({
}
}, [actualScript, server, mode, resources, slug]);

// Discover SSH keys on the Proxmox host when advanced mode is open
useEffect(() => {
if (!server?.id || !isOpen || mode !== 'advanced') {
setDiscoveredSshKeys([]);
setDiscoveredSshKeysError(null);
return;
}
let cancelled = false;
setDiscoveredSshKeysLoading(true);
setDiscoveredSshKeysError(null);
fetch(`/api/servers/${server.id}/discover-ssh-keys`)
.then((res) => {
if (!res.ok) throw new Error(res.status === 404 ? 'Server not found' : res.statusText);
return res.json();
})
.then((data: { keys?: string[] }) => {
if (!cancelled && Array.isArray(data.keys)) setDiscoveredSshKeys(data.keys);
})
.catch((err) => {
if (!cancelled) {
setDiscoveredSshKeys([]);
setDiscoveredSshKeysError(err instanceof Error ? err.message : 'Could not detect keys');
}
})
.finally(() => {
if (!cancelled) setDiscoveredSshKeysLoading(false);
});
return () => {
cancelled = true;
};
}, [server?.id, isOpen, mode]);

// Validation functions
const validateIPv4 = (ip: string): boolean => {
if (!ip) return true; // Empty is allowed (auto)
@@ -275,6 +313,16 @@ export function ConfigurationModal({
if ((hasPassword || hasSSHKey) && envVars.var_ssh !== 'no') {
envVars.var_ssh = 'yes';
}

// Normalize var_tags: accept both comma and semicolon, output comma-separated
const rawTags = envVars.var_tags;
if (typeof rawTags === 'string' && rawTags.trim() !== '') {
envVars.var_tags = rawTags
.split(/[,;]/)
.map((s) => s.trim())
.filter(Boolean)
.join(',');
}
}

// Remove empty string values (but keep 0, false, etc.)
@@ -644,13 +692,13 @@ export function ConfigurationModal({
</div>
<div className="col-span-2">
<label className="block text-sm font-medium text-foreground mb-2">
Tags (comma-separated)
Tags (comma or semicolon separated)
</label>
<Input
type="text"
value={typeof advancedVars.var_tags === 'boolean' ? '' : String(advancedVars.var_tags ?? '')}
onChange={(e) => updateAdvancedVar('var_tags', e.target.value)}
placeholder="community-script"
placeholder="e.g. tag1; tag2"
/>
</div>
</div>
@@ -677,11 +725,40 @@ export function ConfigurationModal({
<label className="block text-sm font-medium text-foreground mb-2">
SSH Authorized Key
</label>
{discoveredSshKeysLoading && (
<p className="text-sm text-muted-foreground mb-2">Detecting SSH keys...</p>
)}
{discoveredSshKeysError && !discoveredSshKeysLoading && (
<p className="text-sm text-muted-foreground mb-2">Could not detect keys on host</p>
)}
{discoveredSshKeys.length > 0 && !discoveredSshKeysLoading && (
<div className="mb-2">
<label htmlFor="discover-ssh-key" className="sr-only">Use detected key</label>
<select
id="discover-ssh-key"
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm text-foreground focus:ring-2 focus:ring-ring focus:outline-none mb-2"
value=""
onChange={(e) => {
const idx = e.target.value;
if (idx === '') return;
const key = discoveredSshKeys[Number(idx)];
if (key) updateAdvancedVar('var_ssh_authorized_key', key);
}}
>
<option value="">— Select or paste below —</option>
{discoveredSshKeys.map((key, i) => (
<option key={i} value={i}>
{key.length > 44 ? `${key.slice(0, 44)}...` : key}
</option>
))}
</select>
</div>
)}
<Input
type="text"
value={typeof advancedVars.var_ssh_authorized_key === 'boolean' ? '' : String(advancedVars.var_ssh_authorized_key ?? '')}
onChange={(e) => updateAdvancedVar('var_ssh_authorized_key', e.target.value)}
placeholder="ssh-rsa AAAA..."
placeholder="Or paste a public key: ssh-rsa AAAA..."
/>
</div>
</div>
@@ -730,6 +807,20 @@ export function ConfigurationModal({
<option value={1}>Enabled</option>
</select>
</div>
<div>
<label className="block text-sm font-medium text-foreground mb-2">
TUN/TAP (VPN)
</label>
<select
value={typeof advancedVars.var_tun === 'boolean' ? (advancedVars.var_tun ? 'yes' : 'no') : String(advancedVars.var_tun ?? 'no')}
onChange={(e) => updateAdvancedVar('var_tun', e.target.value)}
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm text-foreground focus:ring-2 focus:ring-ring focus:outline-none"
>
<option value="no">No</option>
<option value="yes">Yes</option>
</select>
<p className="text-xs text-muted-foreground mt-1">For Tailscale, WireGuard, OpenVPN</p>
</div>
<div>
<label className="block text-sm font-medium text-foreground mb-2">
Mknod

@@ -8,7 +8,9 @@ import { ScriptDetailModal } from "./ScriptDetailModal";
import { CategorySidebar } from "./CategorySidebar";
import { FilterBar, type FilterState } from "./FilterBar";
import { ViewToggle } from "./ViewToggle";
import { ConfirmationModal } from "./ConfirmationModal";
import { Button } from "./ui/button";
import { RefreshCw } from "lucide-react";
import type { ScriptCard as ScriptCardType } from "~/types/script";
import type { Server } from "~/types/server";
import { getDefaultFilters, mergeFiltersWithDefaults } from "./filterUtils";
@@ -32,8 +34,15 @@ export function DownloadedScriptsTab({
const [filters, setFilters] = useState<FilterState>(getDefaultFilters());
const [saveFiltersEnabled, setSaveFiltersEnabled] = useState(false);
const [isLoadingFilters, setIsLoadingFilters] = useState(true);
const [updateAllConfirmOpen, setUpdateAllConfirmOpen] = useState(false);
const [updateResult, setUpdateResult] = useState<{
successCount: number;
failCount: number;
failed: { slug: string; error: string }[];
} | null>(null);
const gridRef = useRef<HTMLDivElement>(null);

const utils = api.useUtils();
const {
data: scriptCardsData,
isLoading: githubLoading,
@@ -50,6 +59,30 @@ export function DownloadedScriptsTab({
{ enabled: !!selectedSlug },
);

const loadMultipleScriptsMutation = api.scripts.loadMultipleScripts.useMutation({
onSuccess: (data) => {
void utils.scripts.getAllDownloadedScripts.invalidate();
void utils.scripts.getScriptCardsWithCategories.invalidate();
setUpdateResult({
successCount: data.successful?.length ?? 0,
failCount: data.failed?.length ?? 0,
failed: (data.failed ?? []).map((f) => ({
slug: f.slug,
error: f.error ?? "Unknown error",
})),
});
setTimeout(() => setUpdateResult(null), 8000);
},
onError: (error) => {
setUpdateResult({
successCount: 0,
failCount: 1,
failed: [{ slug: "Request failed", error: error.message }],
});
setTimeout(() => setUpdateResult(null), 8000);
},
});

// Load SAVE_FILTER setting, saved filters, and view mode on component mount
useEffect(() => {
const loadSettings = async () => {
@@ -416,6 +449,21 @@ export function DownloadedScriptsTab({
setSelectedSlug(null);
};

const handleUpdateAllClick = () => {
setUpdateResult(null);
setUpdateAllConfirmOpen(true);
};

const handleUpdateAllConfirm = () => {
setUpdateAllConfirmOpen(false);
const slugs = downloadedScripts
.map((s) => s.slug)
.filter((slug): slug is string => Boolean(slug));
if (slugs.length > 0) {
loadMultipleScriptsMutation.mutate({ slugs });
}
};

if (githubLoading || localLoading) {
return (
<div className="flex items-center justify-center py-12">
@@ -508,6 +556,43 @@ export function DownloadedScriptsTab({

{/* Main Content */}
<div className="order-1 min-w-0 flex-1 lg:order-2" ref={gridRef}>
{/* Update all downloaded scripts */}
<div className="mb-4 flex flex-wrap items-center gap-3">
<Button
onClick={handleUpdateAllClick}
disabled={loadMultipleScriptsMutation.isPending}
variant="secondary"
size="default"
className="flex items-center gap-2"
>
{loadMultipleScriptsMutation.isPending ? (
<>
<RefreshCw className="h-4 w-4 animate-spin" />
<span>Updating...</span>
</>
) : (
<>
<RefreshCw className="h-4 w-4" />
<span>Update all downloaded scripts</span>
</>
)}
</Button>
{updateResult && (
<span className="text-muted-foreground text-sm">
Updated {updateResult.successCount} successfully
{updateResult.failCount > 0
? `, ${updateResult.failCount} failed`
: ""}
.
{updateResult.failCount > 0 && updateResult.failed.length > 0 && (
<span className="ml-1" title={updateResult.failed.map((f) => `${f.slug}: ${f.error}`).join("\n")}>
(hover for details)
</span>
)}
</span>
)}
</div>

{/* Enhanced Filter Bar */}
<FilterBar
filters={filters}
@@ -621,6 +706,17 @@ export function DownloadedScriptsTab({
onClose={handleCloseModal}
onInstallScript={onInstallScript}
/>

<ConfirmationModal
isOpen={updateAllConfirmOpen}
onClose={() => setUpdateAllConfirmOpen(false)}
onConfirm={handleUpdateAllConfirm}
title="Update all downloaded scripts"
message={`Update all ${downloadedScripts.length} downloaded scripts? This may take several minutes.`}
variant="simple"
confirmButtonText="Update all"
cancelButtonText="Cancel"
/>
</div>
</div>
</div>

@@ -1617,7 +1617,7 @@ export function GeneralSettingsModal({
<Input
id="new-repo-url"
type="url"
placeholder="https://github.com/owner/repo"
placeholder="https://github.com/owner/repo or https://git.example.com/owner/repo"
value={newRepoUrl}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
setNewRepoUrl(e.target.value)
@@ -1626,8 +1626,9 @@ export function GeneralSettingsModal({
className="w-full"
/>
<p className="text-muted-foreground mt-1 text-xs">
Enter a GitHub repository URL (e.g.,
https://github.com/owner/repo)
Supported: GitHub, GitLab, Bitbucket, or custom Git
servers (e.g. https://github.com/owner/repo,
https://gitlab.com/owner/repo)
</p>
</div>
<div className="border-border flex items-center justify-between gap-3 rounded-lg border p-3">

96 src/app/api/servers/[id]/discover-ssh-keys/route.ts Normal file
@@ -0,0 +1,96 @@
import type { NextRequest } from 'next/server';
import { NextResponse } from 'next/server';
import { getDatabase } from '../../../../../server/database-prisma';
import { getSSHExecutionService } from '../../../../../server/ssh-execution-service';
import type { Server } from '~/types/server';

const DISCOVER_TIMEOUT_MS = 10_000;

/** Match lines that look like SSH public keys (same as build.func) */
const SSH_PUBKEY_RE = /^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-(ssh-ed25519|ecdsa-sha2-nistp256))\s+/;

/**
 * Run a command on the Proxmox host and return buffered stdout.
 * Resolves when the process exits or rejects on timeout/spawn error.
 */
function runRemoteCommand(
server: Server,
command: string,
timeoutMs: number
): Promise<{ stdout: string; exitCode: number }> {
const ssh = getSSHExecutionService();
return new Promise((resolve, reject) => {
const chunks: string[] = [];
let settled = false;

const finish = (stdout: string, exitCode: number) => {
if (settled) return;
settled = true;
clearTimeout(timer);
resolve({ stdout, exitCode });
};

const timer = setTimeout(() => {
if (settled) return;
settled = true;
reject(new Error('SSH discover keys timeout'));
}, timeoutMs);

ssh
.executeCommand(
server,
command,
(data: string) => chunks.push(data),
() => {},
(code: number) => finish(chunks.join(''), code)
)
.catch((err) => {
if (!settled) {
settled = true;
clearTimeout(timer);
reject(err);
}
});
});
}

export async function GET(
_request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
try {
const { id: idParam } = await params;
const id = parseInt(idParam);
if (isNaN(id)) {
return NextResponse.json({ error: 'Invalid server ID' }, { status: 400 });
}

const db = getDatabase();
const server = await db.getServerById(id) as Server | null;

if (!server) {
return NextResponse.json({ error: 'Server not found' }, { status: 404 });
}

// Same paths as native build.func ssh_discover_default_files()
// Note: no redirection inside the `for ... in` word list (that would be a
// bash syntax error); unreadable files are filtered by the -f/-r tests.
const remoteScript = `bash -c 'for f in /root/.ssh/authorized_keys /root/.ssh/authorized_keys2 /root/.ssh/*.pub /etc/ssh/authorized_keys /etc/ssh/authorized_keys.d/*; do [ -f "$f" ] && [ -r "$f" ] && grep -E "^(ssh-(rsa|ed25519)|ecdsa-sha2-nistp256|sk-)" "$f" 2>/dev/null; done | sort -u'`;

const { stdout } = await runRemoteCommand(server, remoteScript, DISCOVER_TIMEOUT_MS);

const keys = stdout
.split(/\r?\n/)
.map((line) => line.trim())
.filter((line) => line.length > 0 && SSH_PUBKEY_RE.test(line));

return NextResponse.json({ keys });
} catch (error) {
console.error('Error discovering SSH keys:', error);
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}
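
The route can be exercised by hand; a quick manual check (server ID 1 and the default Next.js dev port are assumptions):

curl -s http://localhost:3000/api/servers/1/discover-ssh-keys
# → {"keys":["ssh-ed25519 AAAA... root@pve", ...]}
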
@@ -23,8 +23,11 @@ export const env = createEnv({
ALLOWED_SCRIPT_PATHS: z.string().default("scripts/"),
// WebSocket Configuration
WEBSOCKET_PORT: z.string().default("3001"),
// GitHub Configuration
// Git provider tokens (optional, for private repos)
GITHUB_TOKEN: z.string().optional(),
GITLAB_TOKEN: z.string().optional(),
BITBUCKET_APP_PASSWORD: z.string().optional(),
BITBUCKET_TOKEN: z.string().optional(),
// Authentication Configuration
AUTH_USERNAME: z.string().optional(),
AUTH_PASSWORD_HASH: z.string().optional(),
@@ -62,8 +65,10 @@ export const env = createEnv({
ALLOWED_SCRIPT_PATHS: process.env.ALLOWED_SCRIPT_PATHS,
// WebSocket Configuration
WEBSOCKET_PORT: process.env.WEBSOCKET_PORT,
// GitHub Configuration
GITHUB_TOKEN: process.env.GITHUB_TOKEN,
GITLAB_TOKEN: process.env.GITLAB_TOKEN,
BITBUCKET_APP_PASSWORD: process.env.BITBUCKET_APP_PASSWORD,
BITBUCKET_TOKEN: process.env.BITBUCKET_TOKEN,
// Authentication Configuration
AUTH_USERNAME: process.env.AUTH_USERNAME,
AUTH_PASSWORD_HASH: process.env.AUTH_PASSWORD_HASH,

@@ -2068,32 +2068,72 @@ export const installedScriptsRouter = createTRPCRouter({
};
}

// Get the script's interface_port from metadata (prioritize metadata over existing database values)
let detectedPort = 80; // Default fallback

// Resolve app slug from /usr/bin/update (community-scripts) when available; else from hostname/suffix.
let slugFromUpdate: string | null = null;
try {
const updateCommand = `pct exec ${scriptData.container_id} -- cat /usr/bin/update 2>/dev/null`;
let updateOutput = '';
await new Promise<void>((resolve) => {
void sshExecutionService.executeCommand(
server as Server,
updateCommand,
(data: string) => { updateOutput += data; },
() => {},
() => resolve()
);
});
const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
if (ctSlugMatch?.[1]) {
slugFromUpdate = ctSlugMatch[1].trim().toLowerCase();
console.log('🔍 Slug from /usr/bin/update:', slugFromUpdate);
}
} catch {
// Container may not be from community-scripts; use hostname fallback
}

// Get the script's interface_port from metadata. Primary: slug from /usr/bin/update; fallback: hostname/suffix.
let detectedPort = 80; // Default fallback

try {
// Import localScriptsService to get script metadata
const { localScriptsService } = await import('~/server/services/localScripts');

// Get all scripts and find the one matching our script name
const allScripts = await localScriptsService.getAllScripts();

// Extract script slug from script_name (remove .sh extension)
const scriptSlug = scriptData.script_name.replace(/\.sh$/, '');
console.log('🔍 Looking for script with slug:', scriptSlug);

const scriptMetadata = allScripts.find(script => script.slug === scriptSlug);


const nameFromHostname = scriptData.script_name.replace(/\.sh$/, '').toLowerCase();

// Primary: slug from /usr/bin/update (community-scripts)
let scriptMetadata =
slugFromUpdate != null
? allScripts.find((s) => s.slug === slugFromUpdate)
: undefined;
if (scriptMetadata) {
console.log('🔍 Using slug from /usr/bin/update for metadata:', scriptMetadata.slug);
}

// Fallback: exact hostname then hostname ends with slug (longest wins)
if (!scriptMetadata) {
scriptMetadata = allScripts.find((script) => script.slug === nameFromHostname);
if (!scriptMetadata) {
const suffixMatches = allScripts.filter((script) => nameFromHostname.endsWith(script.slug));
scriptMetadata =
suffixMatches.length > 0
? suffixMatches.reduce((a, b) => (a.slug.length >= b.slug.length ? a : b))
: undefined;
if (scriptMetadata) {
console.log('🔍 Matched metadata by slug suffix in hostname:', scriptMetadata.slug);
}
}
}

if (scriptMetadata?.interface_port) {
detectedPort = scriptMetadata.interface_port;
console.log('📋 Found interface_port in metadata:', detectedPort);
} else {
console.log('📋 No interface_port found in metadata, using default port 80');
detectedPort = 80; // Default to port 80 if no metadata port found
detectedPort = 80;
}
} catch (error) {
console.log('⚠️ Error getting script metadata, using default port 80:', error);
detectedPort = 80; // Default to port 80 if metadata lookup fails
detectedPort = 80;
}

console.log('🎯 Final detected port:', detectedPort);
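
The primary lookup assumes community-scripts containers ship a /usr/bin/update helper that references their ct/<slug>.sh script; the same extraction can be reproduced on the Proxmox host (container ID 105 is illustrative):

pct exec 105 -- cat /usr/bin/update 2>/dev/null | grep -oE 'ct/[a-zA-Z0-9_.-]+\.sh'
# → e.g. ct/nginxproxymanager.sh, giving the slug "nginxproxymanager"
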
@@ -238,6 +238,27 @@ export const versionRouter = createTRPCRouter({
// Clear/create the log file
await writeFile(logPath, '', 'utf-8');

// Always fetch the latest update.sh from GitHub before running
// This ensures we always use the newest update script, avoiding
// the "chicken-and-egg" problem where old scripts can't update properly
const updateScriptUrl = 'https://raw.githubusercontent.com/community-scripts/ProxmoxVE-Local/main/update.sh';
try {
const response = await fetch(updateScriptUrl);
if (response.ok) {
const latestScript = await response.text();
await writeFile(updateScriptPath, latestScript, { mode: 0o755 });
// Log that we fetched the latest script
await writeFile(logPath, '[INFO] Fetched latest update.sh from GitHub\n', { flag: 'a' });
} else {
// If fetch fails, log warning but continue with local script
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh (HTTP ${response.status}), using local version\n`, { flag: 'a' });
}
} catch (fetchError) {
// If fetch fails, log warning but continue with local script
const errorMsg = fetchError instanceof Error ? fetchError.message : 'Unknown error';
await writeFile(logPath, `[WARNING] Could not fetch latest update.sh: ${errorMsg}, using local version\n`, { flag: 'a' });
}

// Spawn the update script as a detached process using nohup
// This allows it to run independently and kill the parent Node.js process
// Redirect output to log file
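
The fetch-then-overwrite bootstrap mirrors what you would do manually; a shell equivalent (the target path is illustrative):

curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE-Local/main/update.sh -o ./update.sh \
  && chmod 755 ./update.sh \
  || echo "[WARNING] Could not fetch latest update.sh, using local version"
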
55 src/server/lib/gitProvider/bitbucket.ts Normal file
@@ -0,0 +1,55 @@
import type { DirEntry, GitProvider } from './types';
import { parseRepoUrl } from '../repositoryUrlValidation';

export class BitbucketProvider implements GitProvider {
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
const { owner, repo } = parseRepoUrl(repoUrl);
const listUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${path}`;
const headers: Record<string, string> = {
'User-Agent': 'PVEScripts-Local/1.0',
};
const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
if (token) {
const auth = Buffer.from(`:${token}`).toString('base64');
headers.Authorization = `Basic ${auth}`;
}

const response = await fetch(listUrl, { headers });
if (!response.ok) {
throw new Error(`Bitbucket API error: ${response.status} ${response.statusText}`);
}

const body = (await response.json()) as { values?: { path: string; type: string }[] };
const data = body.values ?? (Array.isArray(body) ? body : []);
if (!Array.isArray(data)) {
throw new Error('Bitbucket API returned unexpected response');
}
return data.map((item: { path: string; type: string }) => {
const name = item.path.split('/').pop() ?? item.path;
return {
name,
path: item.path,
type: item.type === 'commit_directory' ? ('dir' as const) : ('file' as const),
};
});
}

async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
const { owner, repo } = parseRepoUrl(repoUrl);
const rawUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${filePath}`;
const headers: Record<string, string> = {
'User-Agent': 'PVEScripts-Local/1.0',
};
const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
if (token) {
const auth = Buffer.from(`:${token}`).toString('base64');
headers.Authorization = `Basic ${auth}`;
}

const response = await fetch(rawUrl, { headers });
if (!response.ok) {
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
}
return response.text();
}
}
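
Mirroring the provider above, the same Bitbucket endpoint can be probed from the shell; the token is sent as HTTP Basic auth with an empty username, exactly as the class builds its header (owner/repo/branch are placeholders):

curl -s -u ":${BITBUCKET_APP_PASSWORD}" \
  "https://api.bitbucket.org/2.0/repositories/owner/repo/src/main/scripts/"
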
44 src/server/lib/gitProvider/custom.ts Normal file
@@ -0,0 +1,44 @@
import type { DirEntry, GitProvider } from "./types";
import { parseRepoUrl } from "../repositoryUrlValidation";

export class CustomProvider implements GitProvider {
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
const { origin, owner, repo } = parseRepoUrl(repoUrl);
const apiUrl = `${origin}/api/v1/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
if (token) headers.Authorization = `token ${token}`;

const response = await fetch(apiUrl, { headers });
if (!response.ok) {
throw new Error(`Custom Git server: list directory failed (${response.status}).`);
}
const data = (await response.json()) as { type: string; name: string; path: string }[];
if (!Array.isArray(data)) {
const single = data as unknown as { type?: string; name?: string; path?: string };
if (single?.name) {
return [{ name: single.name, path: single.path ?? path, type: single.type === "dir" ? "dir" : "file" }];
}
throw new Error("Custom Git server returned unexpected response");
}
return data.map((item) => ({
name: item.name,
path: item.path,
type: item.type === "dir" ? ("dir" as const) : ("file" as const),
}));
}

async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
const { origin, owner, repo } = parseRepoUrl(repoUrl);
const rawUrl = `${origin}/${owner}/${repo}/raw/${encodeURIComponent(branch)}/${filePath}`;
const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
if (token) headers.Authorization = `token ${token}`;

const response = await fetch(rawUrl, { headers });
if (!response.ok) {
throw new Error(`Failed to download ${filePath} from custom Git server (${response.status}).`);
}
return response.text();
}
}
60 src/server/lib/gitProvider/github.ts Normal file
@@ -0,0 +1,60 @@
import type { DirEntry, GitProvider } from './types';
import { parseRepoUrl } from '../repositoryUrlValidation';

export class GitHubProvider implements GitProvider {
  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
    const { owner, repo } = parseRepoUrl(repoUrl);
    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
    const headers: Record<string, string> = {
      Accept: 'application/vnd.github.v3+json',
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITHUB_TOKEN;
    if (token) headers.Authorization = `token ${token}`;

    const response = await fetch(apiUrl, { headers });
    if (!response.ok) {
      if (response.status === 403) {
        const err = new Error(
          `GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN. Status: ${response.status} ${response.statusText}`
        );
        (err as Error & { name: string }).name = 'RateLimitError';
        throw err;
      }
      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
    }

    const data = (await response.json()) as { type: string; name: string; path: string }[];
    if (!Array.isArray(data)) {
      throw new Error('GitHub API returned unexpected response');
    }
    return data.map((item) => ({
      name: item.name,
      path: item.path,
      type: item.type === 'dir' ? ('dir' as const) : ('file' as const),
    }));
  }

  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
    const { owner, repo } = parseRepoUrl(repoUrl);
    const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${encodeURIComponent(branch)}/${filePath}`;
    const headers: Record<string, string> = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITHUB_TOKEN;
    if (token) headers.Authorization = `token ${token}`;

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      if (response.status === 403) {
        const err = new Error(
          `GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN.`
        );
        (err as Error & { name: string }).name = 'RateLimitError';
        throw err;
      }
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }
    return response.text();
  }
}
58 src/server/lib/gitProvider/gitlab.ts Normal file
@@ -0,0 +1,58 @@
import type { DirEntry, GitProvider } from './types';
import { parseRepoUrl } from '../repositoryUrlValidation';

export class GitLabProvider implements GitProvider {
  private getBaseUrl(repoUrl: string): string {
    const { origin } = parseRepoUrl(repoUrl);
    return origin;
  }

  private getProjectId(repoUrl: string): string {
    const { owner, repo } = parseRepoUrl(repoUrl);
    return encodeURIComponent(`${owner}/${repo}`);
  }

  async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
    const baseUrl = this.getBaseUrl(repoUrl);
    const projectId = this.getProjectId(repoUrl);
    const apiUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/tree?path=${encodeURIComponent(path)}&ref=${encodeURIComponent(branch)}&per_page=100`;
    const headers: Record<string, string> = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITLAB_TOKEN;
    if (token) headers['PRIVATE-TOKEN'] = token;

    const response = await fetch(apiUrl, { headers });
    if (!response.ok) {
      throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
    }

    const data = (await response.json()) as { type: string; name: string; path: string }[];
    if (!Array.isArray(data)) {
      throw new Error('GitLab API returned unexpected response');
    }
    return data.map((item) => ({
      name: item.name,
      path: item.path,
      type: item.type === 'tree' ? ('dir' as const) : ('file' as const),
    }));
  }

  async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
    const baseUrl = this.getBaseUrl(repoUrl);
    const projectId = this.getProjectId(repoUrl);
    const encodedPath = encodeURIComponent(filePath);
    const rawUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/files/${encodedPath}/raw?ref=${encodeURIComponent(branch)}`;
    const headers: Record<string, string> = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };
    const token = process.env.GITLAB_TOKEN;
    if (token) headers['PRIVATE-TOKEN'] = token;

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }
    return response.text();
  }
}
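Worth noting: GitLab's v4 API addresses a project by its URL-encoded `owner/repo` path rather than by separate path segments, which is exactly what `getProjectId` produces. A minimal sketch with hypothetical values:

    // Hypothetical values, showing how the tree URL above is assembled.
    const projectId = encodeURIComponent('example-group/example-repo'); // 'example-group%2Fexample-repo'
    const treeUrl = `https://gitlab.com/api/v4/projects/${projectId}/repository/tree?path=json&ref=main&per_page=100`;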
1 src/server/lib/gitProvider/index.js Normal file
@@ -0,0 +1 @@
export { listDirectory, downloadRawFile, getRepoProvider } from "./index.ts";
28 src/server/lib/gitProvider/index.ts Normal file
@@ -0,0 +1,28 @@
import type { DirEntry, GitProvider } from "./types";
import { getRepoProvider } from "../repositoryUrlValidation";
import { GitHubProvider } from "./github";
import { GitLabProvider } from "./gitlab";
import { BitbucketProvider } from "./bitbucket";
import { CustomProvider } from "./custom";

const providers: Record<string, GitProvider> = {
  github: new GitHubProvider(),
  gitlab: new GitLabProvider(),
  bitbucket: new BitbucketProvider(),
  custom: new CustomProvider(),
};

export type { DirEntry, GitProvider };
export { getRepoProvider };

export function getGitProvider(repoUrl: string): GitProvider {
  return providers[getRepoProvider(repoUrl)]!;
}

export async function listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
  return getGitProvider(repoUrl).listDirectory(repoUrl, path, branch);
}

export async function downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
  return getGitProvider(repoUrl).downloadRawFile(repoUrl, filePath, branch);
}
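With the registry in place, callers go through the facade and never construct a provider directly; `getRepoProvider` picks the implementation from the URL's host. A minimal usage sketch (the repository URL, folder, and branch are hypothetical):

    import { listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

    // Hypothetical repository; the host resolves to the GitHub provider.
    const repoUrl = 'https://github.com/example-owner/example-repo';
    const entries = await listDirectory(repoUrl, 'json', 'main');
    for (const entry of entries.filter((e) => e.type === 'file' && e.name.endsWith('.json'))) {
      const content = await downloadRawFile(repoUrl, entry.path, 'main');
      console.log(`${entry.name}: ${content.length} bytes`);
    }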
14 src/server/lib/gitProvider/types.ts Normal file
@@ -0,0 +1,14 @@
/**
 * Git provider interface for listing and downloading repository files.
 */

export type DirEntry = {
  name: string;
  path: string;
  type: 'file' | 'dir';
};

export interface GitProvider {
  listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]>;
  downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string>;
}
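Since the interface is this small, any backend that can list a directory and fetch a raw file can be plugged in. A sketch of a hypothetical in-memory implementation (e.g. for unit tests; not part of this change):

    import type { DirEntry, GitProvider } from './types';

    // Hypothetical test double: serves files from an in-memory map of path -> content.
    export class InMemoryProvider implements GitProvider {
      constructor(private readonly files: Record<string, string>) {}

      async listDirectory(_repoUrl: string, path: string, _branch: string): Promise<DirEntry[]> {
        return Object.keys(this.files)
          .filter((p) => p.startsWith(`${path}/`))
          .map((p) => ({ name: p.split('/').pop() ?? p, path: p, type: 'file' as const }));
      }

      async downloadRawFile(_repoUrl: string, filePath: string, _branch: string): Promise<string> {
        const content = this.files[filePath];
        if (content === undefined) throw new Error(`Not found: ${filePath}`);
        return content;
      }
    }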
37 src/server/lib/repositoryUrlValidation.js Normal file
@@ -0,0 +1,37 @@
/**
 * Repository URL validation (JS mirror for server.js).
 */
const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export function isValidRepositoryUrl(url) {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

export function getRepoProvider(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

export function parseRepoUrl(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    return {
      origin: u.origin,
      owner: pathParts[0] ?? '',
      repo: pathParts[1] ?? '',
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
57 src/server/lib/repositoryUrlValidation.ts Normal file
@@ -0,0 +1,57 @@
/**
 * Repository URL validation and provider detection.
 * Supports GitHub, GitLab, Bitbucket, and custom Git servers.
 */

const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export type RepoProvider = 'github' | 'gitlab' | 'bitbucket' | 'custom';

/**
 * Check if a string is a valid repository URL (format only).
 */
export function isValidRepositoryUrl(url: string): boolean {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

/**
 * Detect the Git provider from a repository URL.
 */
export function getRepoProvider(url: string): RepoProvider {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

/**
 * Parse owner and repo from a repository URL (path segments).
 * Works for GitHub, GitLab, Bitbucket, and custom (host/owner/repo).
 */
export function parseRepoUrl(url: string): { origin: string; owner: string; repo: string } {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    const owner = pathParts[0] ?? '';
    const repo = pathParts[1] ?? '';
    return {
      origin: u.origin,
      owner,
      repo,
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
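A quick sketch of how these helpers behave (the URLs are illustrative):

    import { getRepoProvider, parseRepoUrl } from './repositoryUrlValidation';

    getRepoProvider('https://github.com/example-owner/example-repo');  // 'github'
    getRepoProvider('https://git.example.org/team/tools');             // 'custom'

    parseRepoUrl('https://gitlab.com/example-group/example-project.git');
    // -> { origin: 'https://gitlab.com', owner: 'example-group', repo: 'example-project' }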
@@ -2,6 +2,7 @@
import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
import { join } from 'path';
import { repositoryService } from './repositoryService.js';
import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';

// Get environment variables
const getEnv = () => ({
@@ -28,76 +29,9 @@ class GitHubJsonService {
    }
  }

  getBaseUrl(repoUrl) {
    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
    if (!urlMatch) {
      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
    }

    const [, owner, repo] = urlMatch;
    return `https://api.github.com/repos/${owner}/${repo}`;
  }

  extractRepoPath(repoUrl) {
    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
    if (!match) {
      throw new Error('Invalid GitHub repository URL');
    }
    return `${match[1]}/${match[2]}`;
  }

  async fetchFromGitHub(repoUrl, endpoint) {
    const baseUrl = this.getBaseUrl(repoUrl);
    const env = getEnv();

    const headers = {
      'Accept': 'application/vnd.github.v3+json',
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(`${baseUrl}${endpoint}`, { headers });

    if (!response.ok) {
      if (response.status === 403) {
        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
    }

    return response.json();
  }

  async downloadJsonFile(repoUrl, filePath) {
    this.initializeConfig();
    const repoPath = this.extractRepoPath(repoUrl);
    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch}/${filePath}`;
    const env = getEnv();

    const headers = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      if (response.status === 403) {
        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits.`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }

    const content = await response.text();
    const content = await downloadRawFile(repoUrl, filePath, this.branch);
    const script = JSON.parse(content);
    script.repository_url = repoUrl;
    return script;
@@ -105,16 +39,13 @@ class GitHubJsonService {

  async getJsonFiles(repoUrl) {
    this.initializeConfig();

    try {
      const files = await this.fetchFromGitHub(
        repoUrl,
        `/contents/${this.jsonFolder}?ref=${this.branch}`
      );

      return files.filter(file => file.name.endsWith('.json'));
      const entries = await listDirectory(repoUrl, this.jsonFolder, this.branch);
      return entries
        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
        .map((e) => ({ name: e.name, path: e.path }));
    } catch (error) {
      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
      throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
    }
  }
@@ -3,6 +3,7 @@ import { join } from 'path';
import { env } from '../../env.js';
import type { Script, ScriptCard, GitHubFile } from '../../types/script';
import { repositoryService } from './repositoryService';
import { listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

export class GitHubJsonService {
  private branch: string | null = null;
@@ -22,96 +23,24 @@ export class GitHubJsonService {
    }
  }

  private getBaseUrl(repoUrl: string): string {
    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
    if (!urlMatch) {
      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
    }

    const [, owner, repo] = urlMatch;
    return `https://api.github.com/repos/${owner}/${repo}`;
  }

  private extractRepoPath(repoUrl: string): string {
    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
    if (!match) {
      throw new Error('Invalid GitHub repository URL');
    }
    return `${match[1]}/${match[2]}`;
  }

  private async fetchFromGitHub<T>(repoUrl: string, endpoint: string): Promise<T> {
    const baseUrl = this.getBaseUrl(repoUrl);

    const headers: HeadersInit = {
      'Accept': 'application/vnd.github.v3+json',
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    // Add GitHub token authentication if available
    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(`${baseUrl}${endpoint}`, { headers });

    if (!response.ok) {
      if (response.status === 403) {
        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
    }

    const data = await response.json();
    return data as T;
  }

  private async downloadJsonFile(repoUrl: string, filePath: string): Promise<Script> {
    this.initializeConfig();
    const repoPath = this.extractRepoPath(repoUrl);
    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch!}/${filePath}`;

    const headers: HeadersInit = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    // Add GitHub token authentication if available
    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      if (response.status === 403) {
        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }

    const content = await response.text();
    const content = await downloadRawFile(repoUrl, filePath, this.branch!);
    const script = JSON.parse(content) as Script;
    // Add repository_url to script
    script.repository_url = repoUrl;
    return script;
  }

  async getJsonFiles(repoUrl: string): Promise<GitHubFile[]> {
    this.initializeConfig();

    try {
      const files = await this.fetchFromGitHub<GitHubFile[]>(
        repoUrl,
        `/contents/${this.jsonFolder!}?ref=${this.branch!}`
      );

      // Filter for JSON files only
      return files.filter(file => file.name.endsWith('.json'));
      const entries = await listDirectory(repoUrl, this.jsonFolder!, this.branch!);
      const files: GitHubFile[] = entries
        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
        .map((e) => ({ name: e.name, path: e.path } as GitHubFile));
      return files;
    } catch (error) {
      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
      throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
    }
  }
@@ -233,8 +162,7 @@ export class GitHubJsonService {
    try {
      console.log(`Starting JSON sync from repository: ${repoUrl}`);

      // Get file list from GitHub
      console.log(`Fetching file list from GitHub (${repoUrl})...`);
      console.log(`Fetching file list from repository (${repoUrl})...`);
      const githubFiles = await this.getJsonFiles(repoUrl);
      console.log(`Found ${githubFiles.length} JSON files in repository ${repoUrl}`);
@@ -1,5 +1,6 @@
// JavaScript wrapper for repositoryService (for use with node server.js)
import { prisma } from '../db.js';
import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation.js';

class RepositoryService {
  /**
@@ -89,9 +90,8 @@
   * Create a new repository
   */
  async createRepository(data) {
    // Validate GitHub URL
    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
    if (!isValidRepositoryUrl(data.url)) {
      throw new Error(REPO_URL_ERROR_MESSAGE);
    }

    // Check for duplicates
@@ -122,10 +122,9 @@
   * Update repository
   */
  async updateRepository(id, data) {
    // If updating URL, validate it
    if (data.url) {
      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
      if (!isValidRepositoryUrl(data.url)) {
        throw new Error(REPO_URL_ERROR_MESSAGE);
      }

      // Check for duplicates (excluding current repo)
@@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/prefer-regexp-exec */
import { prisma } from '../db';
import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation';

export class RepositoryService {
  /**
@@ -93,9 +93,8 @@ export class RepositoryService {
    enabled?: boolean;
    priority?: number;
  }) {
    // Validate GitHub URL
    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
    if (!isValidRepositoryUrl(data.url)) {
      throw new Error(REPO_URL_ERROR_MESSAGE);
    }

    // Check for duplicates
@@ -130,10 +129,9 @@ export class RepositoryService {
    url?: string;
    priority?: number;
  }) {
    // If updating URL, validate it
    if (data.url) {
      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
      if (!isValidRepositoryUrl(data.url)) {
        throw new Error(REPO_URL_ERROR_MESSAGE);
      }

      // Check for duplicates (excluding current repo)
@@ -1,6 +1,7 @@
// Real JavaScript implementation for script downloading
import { join } from 'path';
import { writeFile, mkdir, access, readFile, unlink } from 'fs/promises';
import { downloadRawFile } from '../lib/gitProvider/index.js';

export class ScriptDownloaderService {
  constructor() {
@@ -82,51 +83,18 @@ export class ScriptDownloaderService {
  }

  /**
   * Extract repository path from GitHub URL
   * @param {string} repoUrl - The GitHub repository URL
   * @returns {string}
   */
  extractRepoPath(repoUrl) {
    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
    if (!match) {
      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
    }
    return `${match[1]}/${match[2]}`;
  }

  /**
   * Download a file from GitHub
   * @param {string} repoUrl - The GitHub repository URL
   * Download a file from the repository (GitHub, GitLab, Bitbucket, or custom)
   * @param {string} repoUrl - The repository URL
   * @param {string} filePath - The file path within the repository
   * @param {string} [branch] - The branch to download from
   * @returns {Promise<string>}
   */
  async downloadFileFromGitHub(repoUrl, filePath, branch = 'main') {
    this.initializeConfig();
  async downloadFileFromRepo(repoUrl, filePath, branch = 'main') {
    if (!repoUrl) {
      throw new Error('Repository URL is not set');
    }

    const repoPath = this.extractRepoPath(repoUrl);
    const url = `https://raw.githubusercontent.com/${repoPath}/${branch}/${filePath}`;

    /** @type {Record<string, string>} */
    const headers = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    // Add GitHub token authentication if available
    if (process.env.GITHUB_TOKEN) {
      headers.Authorization = `token ${process.env.GITHUB_TOKEN}`;
    }

    console.log(`Downloading from GitHub: ${url}`);
    const response = await fetch(url, { headers });
    if (!response.ok) {
      throw new Error(`Failed to download ${filePath} from ${repoUrl}: ${response.status} ${response.statusText}`);
    }

    return response.text();
    console.log(`Downloading from repository: ${repoUrl} (${filePath})`);
    return downloadRawFile(repoUrl, filePath, branch);
  }

  /**
@@ -184,9 +152,8 @@ export class ScriptDownloaderService {
      const fileName = scriptPath.split('/').pop();

      if (fileName) {
        // Download from GitHub using the script's repository URL
        console.log(`Downloading script file: ${scriptPath} from ${repoUrl}`);
        const content = await this.downloadFileFromGitHub(repoUrl, scriptPath, branch);
        const content = await this.downloadFileFromRepo(repoUrl, scriptPath, branch);

        // Determine target directory based on script path
        let targetDir;
@@ -250,7 +217,7 @@
      const installScriptName = `${script.slug}-install.sh`;
      try {
        console.log(`Downloading install script: install/${installScriptName} from ${repoUrl}`);
        const installContent = await this.downloadFileFromGitHub(repoUrl, `install/${installScriptName}`, branch);
        const installContent = await this.downloadFileFromRepo(repoUrl, `install/${installScriptName}`, branch);
        const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName);
        await writeFile(localInstallPath, installContent, 'utf-8');
        files.push(`install/${installScriptName}`);
@@ -274,7 +241,7 @@
      const alpineInstallScriptName = `alpine-${script.slug}-install.sh`;
      try {
        console.log(`[${script.slug}] Downloading alpine install script: install/${alpineInstallScriptName} from ${repoUrl}`);
        const alpineInstallContent = await this.downloadFileFromGitHub(repoUrl, `install/${alpineInstallScriptName}`, branch);
        const alpineInstallContent = await this.downloadFileFromRepo(repoUrl, `install/${alpineInstallScriptName}`, branch);
        const localAlpineInstallPath = join(this.scriptsDirectory, 'install', alpineInstallScriptName);
        await writeFile(localAlpineInstallPath, alpineInstallContent, 'utf-8');
        files.push(`install/${alpineInstallScriptName}`);
@@ -681,7 +648,7 @@
      console.log(`[Comparison] Local file size: ${localContent.length} bytes`);

      // Download remote content from the script's repository
      const remoteContent = await this.downloadFileFromGitHub(repoUrl, remotePath, branch);
      const remoteContent = await this.downloadFileFromRepo(repoUrl, remotePath, branch);
      console.log(`[Comparison] Remote file size: ${remoteContent.length} bytes`);

      // Apply modification only for CT scripts, not for other script types
@@ -739,7 +706,7 @@
      // Find the corresponding script path in install_methods
      const method = script.install_methods?.find(m => m.script === filePath);
      if (method?.script) {
        const downloadedContent = await this.downloadFileFromGitHub(repoUrl, method.script, branch);
        const downloadedContent = await this.downloadFileFromRepo(repoUrl, method.script, branch);
        remoteContent = this.modifyScriptContent(downloadedContent);
      }
    } catch {
@@ -756,7 +723,7 @@
    }

    try {
      remoteContent = await this.downloadFileFromGitHub(repoUrl, filePath, branch);
      remoteContent = await this.downloadFileFromRepo(repoUrl, filePath, branch);
    } catch {
      // Error downloading remote install script
    }
54 update.sh
@@ -710,11 +710,14 @@ install_and_build() {
    log "Building application..."
    # Set NODE_ENV to production for build
    export NODE_ENV=production
    # Unset TURBOPACK to prevent "Multiple bundler flags" error with --webpack
    unset TURBOPACK 2>/dev/null || true
    export TURBOPACK=''

    # Create temporary file for npm build output
    local build_log="/tmp/npm_build_$$.log"

    if ! npm run build >"$build_log" 2>&1; then
    if ! TURBOPACK='' npm run build >"$build_log" 2>&1; then
        log_error "Failed to build application"
        log_error "npm run build output:"
        cat "$build_log" | while read -r line; do
@@ -781,6 +784,23 @@ start_with_npm() {
    fi
}

# Re-enable the systemd service on failure to prevent users from being locked out
re_enable_service_on_failure() {
    if check_service; then
        log "Re-enabling systemd service after failure..."
        if systemctl enable pvescriptslocal.service 2>/dev/null; then
            log_success "Service re-enabled"
            if systemctl start pvescriptslocal.service 2>/dev/null; then
                log_success "Service started"
            else
                log_warning "Failed to start service - manual intervention may be required"
            fi
        else
            log_warning "Failed to re-enable service - manual intervention may be required"
        fi
    fi
}

# Rollback function
rollback() {
    log_warning "Rolling back to previous version..."
@@ -852,6 +872,9 @@ rollback() {
        log_error "No backup directory found for rollback"
    fi

    # Re-enable the service so users aren't locked out
    re_enable_service_on_failure

    log_error "Update failed. Please check the logs and try again."
    exit 1
}
@@ -870,14 +893,14 @@ check_node_version() {

    log "Detected Node.js version: $current"

    if ((major_version < 24)); then
    if ((major_version == 24)); then
        log_success "Node.js 24 already installed"
    elif ((major_version < 24)); then
        log_warning "Node.js < 24 detected → upgrading to Node.js 24 LTS..."
        upgrade_node_to_24
    elif ((major_version > 24)); then
    else
        log_warning "Node.js > 24 detected → script tested only up to Node 24"
        log "Continuing anyway…"
    else
        log_success "Node.js 24 already installed"
    fi
}

@@ -885,22 +908,39 @@ check_node_version() {
upgrade_node_to_24() {
    log "Preparing Node.js 24 upgrade…"

    # Remove old nodesource repo if it exists
    # Remove old nodesource repo files if they exist
    if [ -f /etc/apt/sources.list.d/nodesource.list ]; then
        log "Removing old nodesource.list file..."
        rm -f /etc/apt/sources.list.d/nodesource.list
    fi
    if [ -f /etc/apt/sources.list.d/nodesource.sources ]; then
        log "Removing old nodesource.sources file..."
        rm -f /etc/apt/sources.list.d/nodesource.sources
    fi

    # Update apt cache first
    log "Updating apt cache..."
    apt-get update >>"$LOG_FILE" 2>&1 || true

    # Install NodeSource repo for Node.js 24
    curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh
    log "Downloading Node.js 24 setup script..."
    if ! curl -fsSL https://deb.nodesource.com/setup_24.x -o /tmp/node24_setup.sh; then
        log_error "Failed to download Node.js 24 setup script"
        re_enable_service_on_failure
        exit 1
    fi

    if ! bash /tmp/node24_setup.sh >/tmp/node24_setup.log 2>&1; then
        log_error "Failed to configure Node.js 24 repository"
        tail -20 /tmp/node24_setup.log | while read -r line; do log_error "$line"; done
        re_enable_service_on_failure
        exit 1
    fi

    log "Installing Node.js 24…"
    if ! apt-get install -y nodejs >>"$LOG_FILE" 2>&1; then
        log_error "Failed to install Node.js 24"
        re_enable_service_on_failure
        exit 1
    fi