From 57293b9e5908a443beb240cf4eb6de82d9bcd09a Mon Sep 17 00:00:00 2001 From: Michel Roegl-Brunner Date: Wed, 10 Sep 2025 16:26:29 +0200 Subject: [PATCH] Fix script execution issues and improve container creation - Fixed syntax errors in build.func (duplicate export, unmatched quotes) - Fixed color variable initialization by calling load_functions in core.func - Replaced undefined function calls (post_to_api, post_update_to_api) with echo statements - Fixed install script execution by copying scripts into container first - Made create_lxc.sh executable - Improved error handling and script sourcing - Added missing core functions and tools - Enhanced script downloader and local script management --- .env.example | 12 +- README.md | 249 ++- scripts/core/build.func | 621 ++++++- scripts/core/core.func | 5 +- scripts/core/create_lxc.sh | 380 ++++ scripts/core/tools.func | 2047 +++++++++++++++++++++ scripts/ct/2fauth.sh | 81 - scripts/ct/actualbudget.sh | 68 - scripts/ct/adguard.sh | 42 - scripts/ct/alpine-adguard.sh | 47 - scripts/ct/debian.sh | 6 +- scripts/install/2fauth-install.sh | 104 -- scripts/install/actualbudget-install.sh | 97 - scripts/install/adguard-install.sh | 50 - scripts/install/debian-install.sh | 2 +- server.js | 3 - src/app/_components/DiffViewer.tsx | 152 ++ src/app/_components/RepoStatus.tsx | 117 -- src/app/_components/ScriptCard.tsx | 49 +- src/app/_components/ScriptDetailModal.tsx | 186 +- src/app/_components/ScriptsGrid.tsx | 167 +- src/app/_components/ScriptsList.tsx | 163 -- src/app/_components/Terminal.tsx | 9 - src/app/page.tsx | 38 +- src/server/api/routers/scripts.ts | 60 + src/server/api/trpc.ts | 24 +- src/server/api/websocket/handler.ts | 9 +- src/server/lib/git.ts | 26 +- src/server/lib/scripts.ts | 6 +- src/server/services/localScripts.ts | 1 - src/server/services/scriptDownloader.ts | 204 +- src/types/script.ts | 3 + 32 files changed, 4062 insertions(+), 966 deletions(-) create mode 100755 scripts/core/create_lxc.sh create mode 
100644 scripts/core/tools.func delete mode 100644 scripts/ct/2fauth.sh delete mode 100644 scripts/ct/actualbudget.sh delete mode 100644 scripts/ct/adguard.sh delete mode 100644 scripts/ct/alpine-adguard.sh mode change 100755 => 100644 scripts/ct/debian.sh delete mode 100644 scripts/install/2fauth-install.sh delete mode 100644 scripts/install/actualbudget-install.sh delete mode 100644 scripts/install/adguard-install.sh create mode 100644 src/app/_components/DiffViewer.tsx delete mode 100644 src/app/_components/RepoStatus.tsx delete mode 100644 src/app/_components/ScriptsList.tsx diff --git a/.env.example b/.env.example index 1c3a936..4828832 100644 --- a/.env.example +++ b/.env.example @@ -4,14 +4,18 @@ # Prisma # https://www.prisma.io/docs/reference/database-reference/connection-urls#env DATABASE_URL="postgresql://postgres:password@localhost:5432/pve-scripts-local" -REPO_URL="https://github.com/michelroegl-brunner/PVESciptslocal" +REPO_URL="https://github.com/community-scripts/ProxmoxVE" REPO_BRANCH="main" -SCRIPTS_DIRECTORY="scripts/ct" +SCRIPTS_DIRECTORY="scripts" ALLOWED_SCRIPT_EXTENSIONS=".sh" +CT_SCRIPT_FOLDER="ct" +INSTALL_SCRIPT_FOLDER="install" +JSON_FOLDER="frontend/public/json" + # Security -MAX_SCRIPT_EXECUTION_TIME="300000" +MAX_SCRIPT_EXECUTION_TIME="900000" ALLOWED_SCRIPT_PATHS="scripts/" # WebSocket Configuration -WEBSOCKET_PORT="3000" +WEBSOCKET_PORT="3001" diff --git a/README.md b/README.md index 5cd3ed8..a66ae01 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,249 @@ -# How to run this Alpha Software +# PVE Scripts Local 🚀 -You need npm and git installed on your PVE host +A modern web-based management interface for Proxmox VE (PVE) helper scripts. This tool provides a user-friendly way to discover, download, and execute community-sourced Proxmox scripts locally with real-time terminal output streaming. 
-```git clone git@github.com:michelroegl-brunner/PVESciptslocal.git``` +## 🌟 Features -Then you need to run ```npm install``` +- **Web-based Interface**: Modern React/Next.js frontend with real-time terminal emulation +- **Script Discovery**: Browse and search through community Proxmox scripts from GitHub +- **One-Click Execution**: Run scripts directly from the web interface with live output +- **Real-time Terminal**: Full terminal emulation with xterm.js for interactive script execution +- **Script Management**: Download, update, and manage local script collections +- **Security**: Sandboxed script execution with path validation and time limits +- **Database Integration**: PostgreSQL backend for script metadata and execution history +- **WebSocket Communication**: Real-time bidirectional communication for script execution -And to run the dev server on IP:3000 +## 🏗️ Architecture -```npm run dev``` +### Frontend +- **Next.js 15** with React 19 +- **TypeScript** for type safety +- **Tailwind CSS** for styling +- **xterm.js** for terminal emulation +- **tRPC** for type-safe API communication +### Backend +- **Node.js** server with WebSocket support +- **PostgreSQL** database with Prisma ORM +- **WebSocket Server** for real-time script execution +- **Script Downloader Service** for GitHub integration + +### Scripts +- **Core Functions**: Shared utilities and build functions +- **Container Scripts**: Pre-configured LXC container setups +- **Installation Scripts**: System setup and configuration tools + +## 📋 Prerequisites + +- **Node.js** 18+ and npm +- **PostgreSQL** database (or Docker/Podman for local development) +- **Git** for cloning the repository +- **Linux/Unix environment** (tested on Proxmox VE hosts) + +## 🚀 Installation + +### 1. Clone the Repository + +```bash +git clone https://github.com/michelroegl-brunner/PVESciptslocal.git +cd PVESciptslocal +``` + +### 2. Install Dependencies + +```bash +npm install +``` + +### 3. 
Environment Configuration + +Copy the example environment file and configure your settings: + +```bash +cp .env.example .env +``` + +Edit `.env` with your configuration: + +```env +# Database Configuration +DATABASE_URL="postgresql://postgres:password@localhost:5432/pve-scripts-local" + +# GitHub Repository Configuration +REPO_URL="https://github.com/community-scripts/ProxmoxVE" +REPO_BRANCH="main" +SCRIPTS_DIRECTORY="scripts/ct" + +# Security Settings +MAX_SCRIPT_EXECUTION_TIME="300000" +ALLOWED_SCRIPT_PATHS="scripts/" + +# WebSocket Configuration +WEBSOCKET_PORT="3000" +``` + + +### 4. Start the Application + + + +#### Production Mode +```bash +npm run build +npm start +``` + +The application will be available at `http://IP:3000` + +## 🎯 Usage + +### 1. Access the Web Interface + +Open your browser and navigate to `http://IP:3000` (or your configured host/port). + +### 2. Browse Available Scripts + +- The main page displays a grid of available Proxmox scripts +- Use the search functionality to find specific scripts +- Scripts are categorized by type (containers, installations, etc.) + +### 3. Download Scripts + +- Click on any script card to view details +- Use the "Download" button to fetch scripts from GitHub +- Downloaded scripts are stored locally in the `scripts/` directory + +### 4. Execute Scripts + +- Click "Run Script" on any downloaded script +- A terminal window will open with real-time output +- Interact with the script through the web terminal +- Use the close button to stop execution + +### 5. 
Script Management + +- View script execution history +- Update scripts to latest versions +- Manage local script collections + +## 📁 Project Structure + +``` +PVESciptslocal/ +├── scripts/ # Script collection +│ ├── core/ # Core utility functions +│ │ ├── build.func # Build system functions +│ │ ├── tools.func # Tool installation functions +│ │ └── create_lxc.sh # LXC container creation +│ ├── ct/ # Container templates +│ │ ├── 2fauth.sh # 2FA authentication app +│ │ ├── adguard.sh # AdGuard Home +│ │ └── debian.sh # Debian base container +│ └── install/ # Installation scripts +├── src/ # Source code +│ ├── app/ # Next.js app directory +│ │ ├── _components/ # React components +│ │ └── page.tsx # Main page +│ └── server/ # Server-side code +│ └── services/ # Business logic services +├── prisma/ # Database schema +├── public/ # Static assets +├── server.js # Main server file +└── package.json # Dependencies and scripts +``` + + +## 🔧 Configuration + +### Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `DATABASE_URL` | PostgreSQL connection string | Required | +| `REPO_URL` | GitHub repository URL | Required | +| `REPO_BRANCH` | Git branch to use | `main` | +| `SCRIPTS_DIRECTORY` | Local scripts directory | `scripts/ct` | +| `MAX_SCRIPT_EXECUTION_TIME` | Max execution time (ms) | `300000` | +| `ALLOWED_SCRIPT_PATHS` | Allowed script paths | `scripts/` | + +### Database Configuration + +The application uses PostgreSQL with Prisma ORM. 
The database stores: +- Script metadata and descriptions +- Execution history and logs +- User preferences and settings + +## 🚀 Development + +### Prerequisites for Development +- Node.js 18+ +- PostgreSQL or Docker +- Git + +### Development Commands + +```bash +# Install dependencies +npm install + +# Start development server +npm run dev + +# Start Next.js in development mode +npm run dev:next + +# Type checking +npm run typecheck + +# Linting +npm run lint +npm run lint:fix + +# Formatting +npm run format:write +npm run format:check + +# Database operations +npm run db:generate # Generate Prisma client +npm run db:migrate # Run migrations +npm run db:push # Push schema changes +npm run db:studio # Open Prisma Studio +``` + +### Project Structure for Developers + +- **Frontend**: React components in `src/app/_components/` +- **Backend**: Server logic in `src/server/` +- **API**: tRPC routers for type-safe API communication +- **Database**: Prisma schema in `prisma/schema.prisma` +- **Scripts**: Bash scripts in `scripts/` directory + +## 🤝 Contributing + +1. Fork the repository +2. Create a feature branch (`git checkout -b feature/amazing-feature`) +3. Commit your changes (`git commit -m 'Add some amazing feature'`) +4. Push to the branch (`git push origin feature/amazing-feature`) +5. Open a Pull Request + +### Adding New Scripts + +1. Create a new `.sh` file in the appropriate directory (`scripts/ct/` for containers) +2. Follow the existing script structure and include proper headers +3. Test the script thoroughly +4. Submit a pull request with the new script + +## 📝 License + +This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. + +### Logs + +- Server logs: Check console output or `server.log` +- Database logs: Check PostgreSQL logs +- Script execution: View in web terminal + + +--- + +**Note**: This is alpha software. 
Use with caution in production environments and always backup your Proxmox configuration before running scripts. \ No newline at end of file diff --git a/scripts/core/build.func b/scripts/core/build.func index 75ac19f..508d4bd 100755 --- a/scripts/core/build.func +++ b/scripts/core/build.func @@ -1,5 +1,7 @@ -# Copyright (c) 2021-2025 michelroegl-brunner -# Author: michelroegl-brunner +# Copyright (c) 2021-2025 tteck +# Author: tteck (tteckster) +# Co-Author: MickLesk +# Co-Author: michelroegl-brunner # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE variables() { @@ -7,14 +9,15 @@ variables() { var_install="${NSAPP}-install" # sets the var_install variable by appending "-install" to the value of NSAPP. INTEGER='^[0-9]+([.][0-9]+)?$' # it defines the INTEGER regular expression pattern. PVEHOST_NAME=$(hostname) # gets the Proxmox Hostname and sets it to Uppercase + DIAGNOSTICS="yes" # sets the DIAGNOSTICS variable to "yes", used for the API call. METHOD="default" # sets the METHOD variable to "default", used for the API call. + RANDOM_UUID="$(cat /proc/sys/kernel/random/uuid)" # generates a random UUID and sets it to the RANDOM_UUID variable. CT_TYPE=${var_unprivileged:-$CT_TYPE} } - - source "$(dirname "${BASH_SOURCE[0]}")/core.func" + # This function enables error handling in the script by setting options and defining a trap for the ERR signal. catch_errors() { set -Eeo pipefail @@ -23,7 +26,6 @@ catch_errors() { # This function is called when an error occurs. It receives the exit code, line number, and command that caused the error, and displays an error message. error_handler() { - printf "\e[?25h" local exit_code="$?" local line_number="$1" @@ -351,25 +353,574 @@ exit_script() { exit } +# This function allows the user to configure advanced settings for the script. +advanced_settings() { + whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox --title "Here is an instructional tip:" "To make a selection, use the Spacebar." 
8 58 + # Setting Default Tag for Advanced Settings + TAGS="community-script;${var_tags:-}" + CT_DEFAULT_TYPE="${CT_TYPE}" + CT_TYPE="" + while [ -z "$CT_TYPE" ]; do + if [ "$CT_DEFAULT_TYPE" == "1" ]; then + if CT_TYPE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "CONTAINER TYPE" --radiolist "Choose Type" 10 58 2 \ + "1" "Unprivileged" ON \ + "0" "Privileged" OFF \ + 3>&1 1>&2 2>&3); then + if [ -n "$CT_TYPE" ]; then + CT_TYPE_DESC="Unprivileged" + if [ "$CT_TYPE" -eq 0 ]; then + CT_TYPE_DESC="Privileged" + fi + echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}" + echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}" + echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}" + fi + else + exit_script + fi + fi + if [ "$CT_DEFAULT_TYPE" == "0" ]; then + if CT_TYPE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "CONTAINER TYPE" --radiolist "Choose Type" 10 58 2 \ + "1" "Unprivileged" OFF \ + "0" "Privileged" ON \ + 3>&1 1>&2 2>&3); then + if [ -n "$CT_TYPE" ]; then + CT_TYPE_DESC="Unprivileged" + if [ "$CT_TYPE" -eq 0 ]; then + CT_TYPE_DESC="Privileged" + fi + echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}" + echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}" + echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}" + fi + else + exit_script + fi + fi + done + + while true; do + if PW1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "\nSet Root Password (needed for root ssh access)" 9 58 --title "PASSWORD (leave blank for automatic login)" 3>&1 1>&2 2>&3); then + # Empty = Autologin + if [[ -z "$PW1" ]]; then + PW="" + PW1="Automatic Login" + echo -e "${VERIFYPW}${BOLD}${DGN}Root Password: ${BGN}$PW1${CL}" + break + fi + + # Invalid: contains spaces + if [[ "$PW1" == *" "* ]]; then + whiptail --msgbox "Password cannot contain spaces." 
8 58 + continue + fi + + # Invalid: too short + if ((${#PW1} < 5)); then + whiptail --msgbox "Password must be at least 5 characters." 8 58 + continue + fi + + # Confirm password + if PW2=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "\nVerify Root Password" 9 58 --title "PASSWORD VERIFICATION" 3>&1 1>&2 2>&3); then + if [[ "$PW1" == "$PW2" ]]; then + PW="-password $PW1" + echo -e "${VERIFYPW}${BOLD}${DGN}Root Password: ${BGN}********${CL}" + break + else + whiptail --msgbox "Passwords do not match. Please try again." 8 58 + fi + else + exit_script + fi + else + exit_script + fi + done + + if CT_ID=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Container ID" 8 58 "$NEXTID" --title "CONTAINER ID" 3>&1 1>&2 2>&3); then + if [ -z "$CT_ID" ]; then + CT_ID="$NEXTID" + echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}" + else + echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}" + fi + else + exit_script + fi + + while true; do + if CT_NAME=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Hostname" 8 58 "$NSAPP" --title "HOSTNAME" 3>&1 1>&2 2>&3); then + if [ -z "$CT_NAME" ]; then + HN="$NSAPP" + else + HN=$(echo "${CT_NAME,,}" | tr -d ' ') + fi + # Hostname validate (RFC 1123) + if [[ "$HN" =~ ^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ ]]; then + echo -e "${HOSTNAME}${BOLD}${DGN}Hostname: ${BGN}$HN${CL}" + break + else + whiptail --backtitle "Proxmox VE Helper Scripts" \ + --msgbox "❌ Invalid hostname: '$HN'\n\nOnly lowercase letters, digits and hyphens (-) are allowed.\nUnderscores (_) or other characters are not permitted!" 
10 70 + fi + else + exit_script + fi + done + + while true; do + DISK_SIZE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Disk Size in GB" 8 58 "$var_disk" --title "DISK SIZE" 3>&1 1>&2 2>&3) || exit_script + + if [ -z "$DISK_SIZE" ]; then + DISK_SIZE="$var_disk" + fi + + if [[ "$DISK_SIZE" =~ ^[1-9][0-9]*$ ]]; then + echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}" + break + else + whiptail --msgbox "Disk size must be a positive integer!" 8 58 + fi + done + + while true; do + CORE_COUNT=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --inputbox "Allocate CPU Cores" 8 58 "$var_cpu" --title "CORE COUNT" 3>&1 1>&2 2>&3) || exit_script + + if [ -z "$CORE_COUNT" ]; then + CORE_COUNT="$var_cpu" + fi + + if [[ "$CORE_COUNT" =~ ^[1-9][0-9]*$ ]]; then + echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}$CORE_COUNT${CL}" + break + else + whiptail --msgbox "CPU core count must be a positive integer!" 8 58 + fi + done + + while true; do + RAM_SIZE=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --inputbox "Allocate RAM in MiB" 8 58 "$var_ram" --title "RAM" 3>&1 1>&2 2>&3) || exit_script + + if [ -z "$RAM_SIZE" ]; then + RAM_SIZE="$var_ram" + fi + + if [[ "$RAM_SIZE" =~ ^[1-9][0-9]*$ ]]; then + echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}" + break + else + whiptail --msgbox "RAM size must be a positive integer!" 
8 58 + fi + done + + BRIDGES="" + IFACE_FILEPATH_LIST="/etc/network/interfaces"$'\n'$(find "/etc/network/interfaces.d/" -type f) + OLD_IFS=$IFS + IFS=$'\n' + + for iface_filepath in ${IFACE_FILEPATH_LIST}; do + iface_indexes_tmpfile=$(mktemp -q -u '.iface-XXXX') + + (grep -Pn '^\s*iface' "${iface_filepath}" | cut -d':' -f1 && wc -l "${iface_filepath}" | cut -d' ' -f1) | + awk 'FNR==1 {line=$0; next} {print line":"$0-1; line=$0}' >"${iface_indexes_tmpfile}" || true + + if [ -f "${iface_indexes_tmpfile}" ]; then + while read -r pair; do + start=$(echo "${pair}" | cut -d':' -f1) + end=$(echo "${pair}" | cut -d':' -f2) + + if awk "NR >= ${start} && NR <= ${end}" "${iface_filepath}" | grep -qP '^\s*(bridge[-_](ports|stp|fd|vlan-aware|vids)|ovs_type\s+OVSBridge)\b'; then + iface_name=$(sed "${start}q;d" "${iface_filepath}" | awk '{print $2}') + BRIDGES="${iface_name}"$'\n'"${BRIDGES}" + fi + + done <"${iface_indexes_tmpfile}" + rm -f "${iface_indexes_tmpfile}" + fi + + done + + IFS=$OLD_IFS + + BRIDGES=$(echo "$BRIDGES" | grep -v '^\s*$' | sort | uniq) + + if [[ -z "$BRIDGES" ]]; then + BRG="vmbr0" + echo -e "${BRIDGE}${BOLD}${DGN}Bridge: ${BGN}$BRG${CL}" + else + BRG=$(whiptail --backtitle "Proxmox VE Helper Scripts" --menu "Select network bridge:" 15 40 6 $(echo "$BRIDGES" | awk '{print $0, "Bridge"}') 3>&1 1>&2 2>&3) + if [ -z "$BRG" ]; then + exit_script + else + echo -e "${BRIDGE}${BOLD}${DGN}Bridge: ${BGN}$BRG${CL}" + fi + fi + + # IPv4 methods: dhcp, static, none + while true; do + IPV4_METHOD=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --title "IPv4 Address Management" \ + --menu "Select IPv4 Address Assignment Method:" 12 60 2 \ + "dhcp" "Automatic (DHCP, recommended)" \ + "static" "Static (manual entry)" \ + 3>&1 1>&2 2>&3) + + exit_status=$? 
+ if [ $exit_status -ne 0 ]; then + exit_script + fi + + case "$IPV4_METHOD" in + dhcp) + NET="dhcp" + GATE="" + echo -e "${NETWORK}${BOLD}${DGN}IPv4: DHCP${CL}" + break + ;; + static) + # Static: call and validate CIDR address + while true; do + NET=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --inputbox "Enter Static IPv4 CIDR Address (e.g. 192.168.100.50/24)" 8 58 "" \ + --title "IPv4 ADDRESS" 3>&1 1>&2 2>&3) + if [ -z "$NET" ]; then + whiptail --msgbox "IPv4 address must not be empty." 8 58 + continue + elif [[ "$NET" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$ ]]; then + echo -e "${NETWORK}${BOLD}${DGN}IPv4 Address: ${BGN}$NET${CL}" + break + else + whiptail --msgbox "$NET is not a valid IPv4 CIDR address. Please enter a correct value!" 8 58 + fi + done + + # call and validate Gateway + while true; do + GATE1=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --inputbox "Enter Gateway IP address for static IPv4" 8 58 "" \ + --title "Gateway IP" 3>&1 1>&2 2>&3) + if [ -z "$GATE1" ]; then + whiptail --msgbox "Gateway IP address cannot be empty." 8 58 + elif [[ ! "$GATE1" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then + whiptail --msgbox "Invalid Gateway IP address format." 8 58 + else + GATE=",gw=$GATE1" + echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE1${CL}" + break + fi + done + break + ;; + esac + done + + # IPv6 Address Management selection + while true; do + IPV6_METHOD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --menu \ + "Select IPv6 Address Management Type:" 15 58 4 \ + "auto" "SLAAC/AUTO (recommended, default)" \ + "dhcp" "DHCPv6" \ + "static" "Static (manual entry)" \ + "none" "Disabled" \ + --default-item "auto" 3>&1 1>&2 2>&3) + [ $? 
-ne 0 ] && exit_script + + case "$IPV6_METHOD" in + auto) + echo -e "${NETWORK}${BOLD}${DGN}IPv6: ${BGN}SLAAC/AUTO${CL}" + IPV6_ADDR="" + IPV6_GATE="" + break + ;; + dhcp) + echo -e "${NETWORK}${BOLD}${DGN}IPv6: ${BGN}DHCPv6${CL}" + IPV6_ADDR="dhcp" + IPV6_GATE="" + break + ;; + static) + # Ask for static IPv6 address (CIDR notation, e.g., 2001:db8::1234/64) + while true; do + IPV6_ADDR=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \ + "Set a static IPv6 CIDR address (e.g., 2001:db8::1234/64)" 8 58 "" \ + --title "IPv6 STATIC ADDRESS" 3>&1 1>&2 2>&3) || exit_script + if [[ "$IPV6_ADDR" =~ ^([0-9a-fA-F:]+:+)+[0-9a-fA-F]+(/[0-9]{1,3})$ ]]; then + echo -e "${NETWORK}${BOLD}${DGN}IPv6 Address: ${BGN}$IPV6_ADDR${CL}" + break + else + whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox \ + "$IPV6_ADDR is an invalid IPv6 CIDR address. Please enter a valid IPv6 CIDR address (e.g., 2001:db8::1234/64)" 8 58 + fi + done + # Optional: ask for IPv6 gateway for static config + while true; do + IPV6_GATE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox \ + "Enter IPv6 gateway address (optional, leave blank for none)" 8 58 "" --title "IPv6 GATEWAY" 3>&1 1>&2 2>&3) + if [ -z "$IPV6_GATE" ]; then + IPV6_GATE="" + break + elif [[ "$IPV6_GATE" =~ ^([0-9a-fA-F:]+:+)+[0-9a-fA-F]+$ ]]; then + break + else + whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox \ + "Invalid IPv6 gateway format." 
8 58 + + fi + done + break + ;; + none) + echo -e "${NETWORK}${BOLD}${DGN}IPv6: ${BGN}Disabled${CL}" + IPV6_ADDR="none" + IPV6_GATE="" + break + ;; + *) + exit_script + ;; + esac + done + + if [ "$var_os" == "alpine" ]; then + APT_CACHER="" + APT_CACHER_IP="" + else + if APT_CACHER_IP=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set APT-Cacher IP (leave blank for none)" 8 58 --title "APT-Cacher IP" 3>&1 1>&2 2>&3); then + APT_CACHER="${APT_CACHER_IP:+yes}" + echo -e "${NETWORK}${BOLD}${DGN}APT-Cacher IP Address: ${BGN}${APT_CACHER_IP:-Default}${CL}" + else + exit_script + fi + fi + + if MTU1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Interface MTU Size (leave blank for default [The MTU of your selected vmbr, default is 1500])" 8 58 --title "MTU SIZE" 3>&1 1>&2 2>&3); then + if [ -z "$MTU1" ]; then + MTU1="Default" + MTU="" + else + MTU=",mtu=$MTU1" + fi + echo -e "${DEFAULT}${BOLD}${DGN}Interface MTU Size: ${BGN}$MTU1${CL}" + else + exit_script + fi + + if SD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a DNS Search Domain (leave blank for HOST)" 8 58 --title "DNS Search Domain" 3>&1 1>&2 2>&3); then + if [ -z "$SD" ]; then + SX=Host + SD="" + else + SX=$SD + SD="-searchdomain=$SD" + fi + echo -e "${SEARCH}${BOLD}${DGN}DNS Search Domain: ${BGN}$SX${CL}" + else + exit_script + fi + + if NX=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a DNS Server IP (leave blank for HOST)" 8 58 --title "DNS SERVER IP" 3>&1 1>&2 2>&3); then + if [ -z "$NX" ]; then + NX=Host + NS="" + else + NS="-nameserver=$NX" + fi + echo -e "${NETWORK}${BOLD}${DGN}DNS Server IP Address: ${BGN}$NX${CL}" + else + exit_script + fi + + if MAC1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a MAC Address(leave blank for generated MAC)" 8 58 --title "MAC ADDRESS" 3>&1 1>&2 2>&3); then + if [ -z "$MAC1" ]; then + MAC1="Default" + MAC="" + else + MAC=",hwaddr=$MAC1" + echo -e 
"${MACADDRESS}${BOLD}${DGN}MAC Address: ${BGN}$MAC1${CL}" + fi + else + exit_script + fi + + if VLAN1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a Vlan(leave blank for no VLAN)" 8 58 --title "VLAN" 3>&1 1>&2 2>&3); then + if [ -z "$VLAN1" ]; then + VLAN1="Default" + VLAN="" + else + VLAN=",tag=$VLAN1" + fi + echo -e "${VLANTAG}${BOLD}${DGN}Vlan: ${BGN}$VLAN1${CL}" + else + exit_script + fi + + if ADV_TAGS=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Custom Tags?[If you remove all, there will be no tags!]" 8 58 "${TAGS}" --title "Advanced Tags" 3>&1 1>&2 2>&3); then + if [ -n "${ADV_TAGS}" ]; then + ADV_TAGS=$(echo "$ADV_TAGS" | tr -d '[:space:]') + TAGS="${ADV_TAGS}" + else + TAGS=";" + fi + echo -e "${NETWORK}${BOLD}${DGN}Tags: ${BGN}$TAGS${CL}" + else + exit_script + fi + + SSH_AUTHORIZED_KEY="$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "SSH Authorized key for root (leave empty for none)" 8 58 --title "SSH Key" 3>&1 1>&2 2>&3)" + + if [[ -z "${SSH_AUTHORIZED_KEY}" ]]; then + SSH_AUTHORIZED_KEY="" + fi + + if [[ "$PW" == -password* || -n "$SSH_AUTHORIZED_KEY" ]]; then + if (whiptail --backtitle "Proxmox VE Helper Scripts" --defaultno --title "SSH ACCESS" --yesno "Enable Root SSH Access?" 10 58); then + SSH="yes" + else + SSH="no" + fi + echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}" + else + SSH="no" + echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}" + fi + + if (whiptail --backtitle "Proxmox VE Helper Scripts" --defaultno --title "FUSE Support" --yesno "Enable FUSE support?\nRequired for tools like rclone, mergerfs, AppImage, etc." 10 58); then + ENABLE_FUSE="yes" + else + ENABLE_FUSE="no" + fi + echo -e "${FUSE}${BOLD}${DGN}Enable FUSE Support: ${BGN}$ENABLE_FUSE${CL}" + + if (whiptail --backtitle "Proxmox VE Helper Scripts" --defaultno --title "VERBOSE MODE" --yesno "Enable Verbose Mode?" 
10 58); then + VERBOSE="yes" + else + VERBOSE="no" + fi + echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}$VERBOSE${CL}" + + if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "ADVANCED SETTINGS COMPLETE" --yesno "Ready to create ${APP} LXC?" 10 58); then + echo -e "${CREATING}${BOLD}${RD}Creating a ${APP} LXC using the above advanced settings${CL}" + + # Strip prefixes from DNS parameters for config file storage + local SD_VALUE="$SD" + local NS_VALUE="$NS" + local MAC_VALUE="$MAC" + local VLAN_VALUE="$VLAN" + [[ "$SD" =~ ^-searchdomain= ]] && SD_VALUE="${SD#-searchdomain=}" + [[ "$NS" =~ ^-nameserver= ]] && NS_VALUE="${NS#-nameserver=}" + [[ "$MAC" =~ ^,hwaddr= ]] && MAC_VALUE="${MAC#,hwaddr=}" + [[ "$VLAN" =~ ^,tag= ]] && VLAN_VALUE="${VLAN#,tag=}" + + # Temporarily store original values + local SD_ORIG="$SD" + local NS_ORIG="$NS" + local MAC_ORIG="$MAC" + local VLAN_ORIG="$VLAN" + + # Set clean values for config file writing + SD="$SD_VALUE" + NS="$NS_VALUE" + MAC="$MAC_VALUE" + VLAN="$VLAN_VALUE" + + write_config + + # Restore original formatted values for container creation + SD="$SD_ORIG" + NS="$NS_ORIG" + MAC="$MAC_ORIG" + VLAN="$VLAN_ORIG" + else + clear + header_info + echo -e "${ADVANCED}${BOLD}${RD}Using Advanced Settings on node $PVEHOST_NAME${CL}" + advanced_settings + fi +} + +diagnostics_check() { + if ! [ -d "/usr/local/community-scripts" ]; then + mkdir -p /usr/local/community-scripts + fi + + if ! [ -f "/usr/local/community-scripts/diagnostics" ]; then + if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "DIAGNOSTICS" --yesno "Send Diagnostics of LXC Installation?\n\n(This only transmits data without user data, just RAM, CPU, LXC name, ...)" 10 58); then + cat </usr/local/community-scripts/diagnostics +DIAGNOSTICS=yes + +#This file is used to store the diagnostics settings for the Community-Scripts API. 
+#https://github.com/community-scripts/ProxmoxVE/discussions/1836 +#Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes. +#You can review the data at https://community-scripts.github.io/ProxmoxVE/data +#If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue. +#This will disable the diagnostics feature. +#To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue. +#This will enable the diagnostics feature. +#The following information will be sent: +#"ct_type" +#"disk_size" +#"core_count" +#"ram_size" +#"os_type" +#"os_version" +#"nsapp" +#"method" +#"pve_version" +#"status" +#If you have any concerns, please review the source code at /misc/build.func +EOF + DIAGNOSTICS="yes" + else + cat </usr/local/community-scripts/diagnostics +DIAGNOSTICS=no + +#This file is used to store the diagnostics settings for the Community-Scripts API. +#https://github.com/community-scripts/ProxmoxVE/discussions/1836 +#Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes. +#You can review the data at https://community-scripts.github.io/ProxmoxVE/data +#If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue. +#This will disable the diagnostics feature. +#To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue. +#This will enable the diagnostics feature. 
+#The following information will be sent: +#"ct_type" +#"disk_size" +#"core_count" +#"ram_size" +#"os_type" +#"os_version" +#"nsapp" +#"method" +#"pve_version" +#"status" +#If you have any concerns, please review the source code at /misc/build.func +EOF + DIAGNOSTICS="no" + fi + else + DIAGNOSTICS=$(awk -F '=' '/^DIAGNOSTICS/ {print $2}' /usr/local/community-scripts/diagnostics) + + fi + +} install_script() { pve_check shell_check root_check arch_check - #ssh_check maxkeys_check - - - + diagnostics_check if systemctl is-active -q ping-instances.service; then systemctl -q stop ping-instances.service fi NEXTID=$(pvesh get /cluster/nextid) timezone=$(cat /etc/timezone) - #header_info - echo "TEST" + header_info while true; do TMP_CHOICE=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ @@ -378,7 +929,9 @@ install_script() { "1" "Default Settings" \ "2" "Default Settings (with verbose)" \ "3" "Advanced Settings" \ - "4" "Exit" \ + "4" "Use Config File" \ + "5" "Diagnostic Settings" \ + "6" "Exit" \ --default-item "1" 3>&1 1>&2 2>&3) || true if [ -z "$TMP_CHOICE" ]; then @@ -416,6 +969,32 @@ install_script() { break ;; 4) + header_info + echo -e "${INFO}${HOLD} ${GN}Using Config File on node $PVEHOST_NAME${CL}" + METHOD="config_file" + source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/config-file.func) + config_file + break + ;; + 5) + if [[ $DIAGNOSTICS == "yes" ]]; then + if whiptail --backtitle "Proxmox VE Helper Scripts" --title "DIAGNOSTICS SETTINGS" --yesno "Send Diagnostics of LXC Installation?\n\nCurrent setting: ${DIAGNOSTICS}" 10 58 \ + --yes-button "No" --no-button "Back"; then + DIAGNOSTICS="no" + sed -i 's/^DIAGNOSTICS=.*/DIAGNOSTICS=no/' /usr/local/community-scripts/diagnostics + whiptail --backtitle "Proxmox VE Helper Scripts" --title "DIAGNOSTICS SETTINGS" --msgbox "Diagnostics settings changed to ${DIAGNOSTICS}." 
8 58 + fi + else + if whiptail --backtitle "Proxmox VE Helper Scripts" --title "DIAGNOSTICS SETTINGS" --yesno "Send Diagnostics of LXC Installation?\n\nCurrent setting: ${DIAGNOSTICS}" 10 58 \ + --yes-button "Yes" --no-button "Back"; then + DIAGNOSTICS="yes" + sed -i 's/^DIAGNOSTICS=.*/DIAGNOSTICS=yes/' /usr/local/community-scripts/diagnostics + whiptail --backtitle "Proxmox VE Helper Scripts" --title "DIAGNOSTICS SETTINGS" --msgbox "Diagnostics settings changed to ${DIAGNOSTICS}." 8 58 + fi + fi + + ;; + 6) echo -e "\n${CROSS}${RD}Script terminated. Have a great day!${CL}\n" exit 0 ;; @@ -464,9 +1043,10 @@ check_container_storage() { } start() { - source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func) + source "$(dirname "${BASH_SOURCE[0]}")/tools.func" if command -v pveversion >/dev/null 2>&1; then install_script + echo "TEST!!!" else CHOICE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "${APP} LXC Update/Setting" --menu \ "Support/Update functions for ${APP} LXC. Choose an option:" \ @@ -496,6 +1076,7 @@ start() { # This function collects user settings and integrates all the collected information. 
build_container() { + echo "TEST" # if [ "$VERBOSE" == "yes" ]; then set -x; fi NET_STRING="-net0 name=eth0,bridge=$BRG$MAC,ip=$NET$GATE$VLAN$MTU" @@ -518,13 +1099,16 @@ build_container() { FEATURES="$FEATURES,fuse=1" fi + if [[ $DIAGNOSTICS == "yes" ]]; then + echo "Diagnostics enabled (post_to_api function not available)" + fi TEMP_DIR=$(mktemp -d) pushd "$TEMP_DIR" >/dev/null if [ "$var_os" == "alpine" ]; then export FUNCTIONS_FILE_PATH="$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/alpine-install.func)" else - export FUNCTIONS_FILE_PATH="$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/install.func)" + export FUNCTIONS_FILE_PATH="$(dirname "${BASH_SOURCE[0]}")/install.func" fi export DIAGNOSTICS="$DIAGNOSTICS" @@ -559,7 +1143,7 @@ build_container() { $PW " # This executes create_lxc.sh and creates the container and .conf file - bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/create_lxc.sh)" $? + bash "$(dirname "${BASH_SOURCE[0]}")/create_lxc.sh" $? LXC_CONFIG="/etc/pve/lxc/${CTID}.conf" @@ -752,7 +1336,9 @@ EOF' fi msg_ok "Customized LXC Container" - lxc-attach -n "$CTID" -- bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/install/${var_install}.sh)" + # Copy the install script into the container and execute it + pct push "$CTID" "$(dirname "${BASH_SOURCE[0]}")/../install/${var_install}.sh" "/tmp/${var_install}.sh" + lxc-attach -n "$CTID" -- bash "/tmp/${var_install}.sh" } # This function sets the description of the container. 
@@ -797,5 +1383,6 @@ EOF if [[ -f /etc/systemd/system/ping-instances.service ]]; then systemctl start ping-instances.service fi - } + + diff --git a/scripts/core/core.func b/scripts/core/core.func index 584d509..16b7dec 100644 --- a/scripts/core/core.func +++ b/scripts/core/core.func @@ -406,4 +406,7 @@ check_or_create_swap() { fi } -trap 'stop_spinner' EXIT INT TERM \ No newline at end of file +trap 'stop_spinner' EXIT INT TERM + +# Initialize functions when core.func is sourced +load_functions \ No newline at end of file diff --git a/scripts/core/create_lxc.sh b/scripts/core/create_lxc.sh new file mode 100755 index 0000000..0e0e0ff --- /dev/null +++ b/scripts/core/create_lxc.sh @@ -0,0 +1,380 @@ +#!/usr/bin/env bash + +# Copyright (c) 2021-2025 tteck +# Author: tteck (tteckster) +# Co-Author: MickLesk +# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE + +# This sets verbose mode if the global variable is set to "yes" +# if [ "$VERBOSE" == "yes" ]; then set -x; fi + +source "$(dirname "$0")/core.func" + + +# This sets error handling options and defines the error_handler function to handle errors +set -Eeuo pipefail +trap 'error_handler $LINENO "$BASH_COMMAND"' ERR +trap on_exit EXIT +trap on_interrupt INT +trap on_terminate TERM + +function on_exit() { + local exit_code="$?" + [[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile" + exit "$exit_code" +} + +function error_handler() { + local exit_code="$?" 
+ local line_number="$1" + local command="$2" + printf "\e[?25h" + echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n" + exit "$exit_code" +} + +function on_interrupt() { + echo -e "\n${RD}Interrupted by user (SIGINT)${CL}" + exit 130 +} + +function on_terminate() { + echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}" + exit 143 +} + +function exit_script() { + clear + printf "\e[?25h" + echo -e "\n${CROSS}${RD}User exited script${CL}\n" + kill 0 + exit 1 +} + +function check_storage_support() { + local CONTENT="$1" + local -a VALID_STORAGES=() + while IFS= read -r line; do + local STORAGE_NAME + STORAGE_NAME=$(awk '{print $1}' <<<"$line") + [[ -z "$STORAGE_NAME" ]] && continue + VALID_STORAGES+=("$STORAGE_NAME") + done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1') + + [[ ${#VALID_STORAGES[@]} -gt 0 ]] +} + +# This function selects a storage pool for a given content type (e.g., rootdir, vztmpl). +function select_storage() { + local CLASS=$1 CONTENT CONTENT_LABEL + + case $CLASS in + container) + CONTENT='rootdir' + CONTENT_LABEL='Container' + ;; + template) + CONTENT='vztmpl' + CONTENT_LABEL='Container template' + ;; + iso) + CONTENT='iso' + CONTENT_LABEL='ISO image' + ;; + images) + CONTENT='images' + CONTENT_LABEL='VM Disk image' + ;; + backup) + CONTENT='backup' + CONTENT_LABEL='Backup' + ;; + snippets) + CONTENT='snippets' + CONTENT_LABEL='Snippets' + ;; + *) + msg_error "Invalid storage class '$CLASS'" + return 1 + ;; + esac + + # Check for preset STORAGE variable + if [ "$CONTENT" = "rootdir" ] && [ -n "${STORAGE:-}" ]; then + if pvesm status -content "$CONTENT" | awk 'NR>1 {print $1}' | grep -qx "$STORAGE"; then + STORAGE_RESULT="$STORAGE" + msg_info "Using preset storage: $STORAGE_RESULT for $CONTENT_LABEL" + return 0 + else + msg_error "Preset storage '$STORAGE' is not valid for content type '$CONTENT'." 
+ return 2 + fi + fi + + local -A STORAGE_MAP + local -a MENU + local COL_WIDTH=0 + + while read -r TAG TYPE _ TOTAL USED FREE _; do + [[ -n "$TAG" && -n "$TYPE" ]] || continue + local STORAGE_NAME="$TAG" + local DISPLAY="${STORAGE_NAME} (${TYPE})" + local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED") + local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE") + local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B" + STORAGE_MAP["$DISPLAY"]="$STORAGE_NAME" + MENU+=("$DISPLAY" "$INFO" "OFF") + ((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY} + done < <(pvesm status -content "$CONTENT" | awk 'NR>1') + + if [ ${#MENU[@]} -eq 0 ]; then + msg_error "No storage found for content type '$CONTENT'." + return 2 + fi + + if [ $((${#MENU[@]} / 3)) -eq 1 ]; then + STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}" + STORAGE_INFO="${MENU[1]}" + return 0 + fi + + local WIDTH=$((COL_WIDTH + 42)) + while true; do + local DISPLAY_SELECTED + DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --title "Storage Pools" \ + --radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \ + 16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3) + + # Cancel or ESC + [[ $? -ne 0 ]] && exit_script + + # Strip trailing whitespace or newline (important for storages like "storage (dir)") + DISPLAY_SELECTED=$(sed 's/[[:space:]]*$//' <<<"$DISPLAY_SELECTED") + + if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then + whiptail --msgbox "No valid storage selected. Please try again." 8 58 + continue + fi + + STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}" + for ((i = 0; i < ${#MENU[@]}; i += 3)); do + if [[ "${MENU[$i]}" == "$DISPLAY_SELECTED" ]]; then + STORAGE_INFO="${MENU[$i + 1]}" + break + fi + done + return 0 + done +} + +# Test if required variables are set +[[ "${CTID:-}" ]] || { + msg_error "You need to set 'CTID' variable." 
+ exit 203 +} +[[ "${PCT_OSTYPE:-}" ]] || { + msg_error "You need to set 'PCT_OSTYPE' variable." + exit 204 +} + +# Test if ID is valid +[ "$CTID" -ge "100" ] || { + msg_error "ID cannot be less than 100." + exit 205 +} + +# Test if ID is in use +if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then + echo -e "ID '$CTID' is already in use." + unset CTID + msg_error "Cannot use ID that is already in use." + exit 206 +fi + +# This checks for the presence of valid Container Storage and Template Storage locations +msg_info "Validating storage" +if ! check_storage_support "rootdir"; then + msg_error "No valid storage found for 'rootdir' [Container]" + exit 1 +fi +if ! check_storage_support "vztmpl"; then + msg_error "No valid storage found for 'vztmpl' [Template]" + exit 1 +fi + +#msg_info "Checking template storage" +while true; do + if select_storage template; then + TEMPLATE_STORAGE="$STORAGE_RESULT" + TEMPLATE_STORAGE_INFO="$STORAGE_INFO" + msg_ok "Storage ${BL}$TEMPLATE_STORAGE${CL} ($TEMPLATE_STORAGE_INFO) [Template]" + break + fi +done + +while true; do + if select_storage container; then + CONTAINER_STORAGE="$STORAGE_RESULT" + CONTAINER_STORAGE_INFO="$STORAGE_INFO" + msg_ok "Storage ${BL}$CONTAINER_STORAGE${CL} ($CONTAINER_STORAGE_INFO) [Container]" + break + fi +done + +# Check free space on selected container storage +STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }') +REQUIRED_KB=$((${PCT_DISK_SIZE:-8} * 1024 * 1024)) +if [ "$STORAGE_FREE" -lt "$REQUIRED_KB" ]; then + msg_error "Not enough space on '$CONTAINER_STORAGE'. Needed: ${PCT_DISK_SIZE:-8}G." + exit 214 +fi + +# Check Cluster Quorum if in Cluster +if [ -f /etc/pve/corosync.conf ]; then + msg_info "Checking cluster quorum" + if ! pvecm status | awk -F':' '/^Quorate/ { exit ($2 ~ /Yes/) ? 0 : 1 }'; then + + msg_error "Cluster is not quorate. Start all nodes or configure quorum device (QDevice)." 
+ exit 210 + fi + msg_ok "Cluster is quorate" +fi + +# Update LXC template list +TEMPLATE_SEARCH="${PCT_OSTYPE}-${PCT_OSVERSION:-}" +case "$PCT_OSTYPE" in +debian | ubuntu) + TEMPLATE_PATTERN="-standard_" + ;; +alpine | fedora | rocky | centos) + TEMPLATE_PATTERN="-default_" + ;; +*) + TEMPLATE_PATTERN="" + ;; +esac + +# 1. Check local templates first +msg_info "Searching for template '$TEMPLATE_SEARCH'" +mapfile -t TEMPLATES < <( + pveam list "$TEMPLATE_STORAGE" | + awk -v s="$TEMPLATE_SEARCH" -v p="$TEMPLATE_PATTERN" '$1 ~ s && $1 ~ p {print $1}' | + sed 's/.*\///' | sort -t - -k 2 -V +) + +if [ ${#TEMPLATES[@]} -gt 0 ]; then + TEMPLATE_SOURCE="local" +else + msg_info "No local template found, checking online repository" + pveam update >/dev/null 2>&1 + mapfile -t TEMPLATES < <( + pveam update >/dev/null 2>&1 && + pveam available -section system | + sed -n "s/.*\($TEMPLATE_SEARCH.*$TEMPLATE_PATTERN.*\)/\1/p" | + sort -t - -k 2 -V + ) + TEMPLATE_SOURCE="online" +fi + +TEMPLATE="${TEMPLATES[-1]}" +TEMPLATE_PATH="$(pvesm path $TEMPLATE_STORAGE:vztmpl/$TEMPLATE 2>/dev/null || + echo "/var/lib/vz/template/cache/$TEMPLATE")" +msg_ok "Template ${BL}$TEMPLATE${CL} [$TEMPLATE_SOURCE]" + +# 4. Validate template (exists & not corrupted) +TEMPLATE_VALID=1 + +if [ ! -s "$TEMPLATE_PATH" ]; then + TEMPLATE_VALID=0 +elif ! tar --use-compress-program=zstdcat -tf "$TEMPLATE_PATH" >/dev/null 2>&1; then + TEMPLATE_VALID=0 +fi + +if [ "$TEMPLATE_VALID" -eq 0 ]; then + msg_warn "Template $TEMPLATE is missing or corrupted. Re-downloading." + [[ -f "$TEMPLATE_PATH" ]] && rm -f "$TEMPLATE_PATH" + for attempt in {1..3}; do + msg_info "Attempt $attempt: Downloading LXC template..." + if pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null 2>&1; then + msg_ok "Template download successful." + break + fi + if [ $attempt -eq 3 ]; then + msg_error "Failed after 3 attempts. 
Please check network access or manually run:\n pveam download $TEMPLATE_STORAGE $TEMPLATE" + exit 208 + fi + sleep $((attempt * 5)) + done +fi + +msg_info "Creating LXC Container" +# Check and fix subuid/subgid +grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid +grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid + +# Combine all options +PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}}) +[[ " ${PCT_OPTIONS[@]} " =~ " -rootfs " ]] || PCT_OPTIONS+=(-rootfs "$CONTAINER_STORAGE:${PCT_DISK_SIZE:-8}") + +# Secure creation of the LXC container with lock and template check +lockfile="/tmp/template.${TEMPLATE}.lock" +exec 9>"$lockfile" || { + msg_error "Failed to create lock file '$lockfile'." + exit 200 +} +flock -w 60 9 || { + msg_error "Timeout while waiting for template lock" + exit 211 +} + +if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then + msg_error "Container creation failed. Checking if template is corrupted or incomplete." + + if [[ ! -s "$TEMPLATE_PATH" || "$(stat -c%s "$TEMPLATE_PATH")" -lt 1000000 ]]; then + msg_error "Template file too small or missing – re-downloading." + rm -f "$TEMPLATE_PATH" + elif ! zstdcat "$TEMPLATE_PATH" | tar -tf - &>/dev/null; then + msg_error "Template appears to be corrupted – re-downloading." + rm -f "$TEMPLATE_PATH" + else + msg_error "Template is valid, but container creation still failed." + exit 209 + fi + + # Retry download + for attempt in {1..3}; do + msg_info "Attempt $attempt: Re-downloading template..." + if timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null; then + msg_ok "Template re-download successful." + break + fi + if [ "$attempt" -eq 3 ]; then + msg_error "Three failed attempts. Aborting." + exit 208 + fi + sleep $((attempt * 5)) + done + + sleep 1 # I/O-Sync-Delay + msg_ok "Re-downloaded LXC Template" +fi + +if ! 
# ------------------------------------------------------------------------------
# Installs Node.js and optional global npm modules.
#
# Description:
#   - Installs the requested Node.js major version from the NodeSource APT repo
#   - Optionally installs or updates global npm modules
#
# Variables:
#   NODE_VERSION - Node.js major version to install (default: 22)
#   NODE_MODULE  - Comma-separated global modules (e.g. "yarn,@vue/cli@5.0.0")
# ------------------------------------------------------------------------------

function setup_nodejs() {
  local NODE_VERSION="${NODE_VERSION:-22}"
  local NODE_MODULE="${NODE_MODULE:-}"
  local CURRENT_NODE_VERSION=""
  local NEED_NODE_INSTALL=false

  # Detect an existing installation; only reinstall on a major-version mismatch.
  if command -v node >/dev/null; then
    CURRENT_NODE_VERSION="$(node -v | grep -oP '^v\K[0-9]+')"
    if [[ "$CURRENT_NODE_VERSION" != "$NODE_VERSION" ]]; then
      msg_info "Old Node.js $CURRENT_NODE_VERSION found, replacing with $NODE_VERSION"
      NEED_NODE_INSTALL=true
    fi
  else
    msg_info "Setup Node.js $NODE_VERSION"
    NEED_NODE_INSTALL=true
  fi

  # jq is used by sibling helpers in this file; make sure it is present.
  if ! command -v jq &>/dev/null; then
    $STD apt-get update
    $STD apt-get install -y jq || {
      msg_error "Failed to install jq"
      return 1
    }
  fi

  # Install Node.js if required
  if [[ "$NEED_NODE_INSTALL" == true ]]; then
    $STD apt-get purge -y nodejs
    rm -f /etc/apt/sources.list.d/nodesource.list /etc/apt/keyrings/nodesource.gpg

    mkdir -p /etc/apt/keyrings

    if ! curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key |
      gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg; then
      msg_error "Failed to download or import NodeSource GPG key"
      exit 1
    fi

    echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" \
      >/etc/apt/sources.list.d/nodesource.list

    # One retry: the repo is occasionally not ready right after being added.
    sleep 2
    if ! apt-get update >/dev/null 2>&1; then
      msg_warn "APT update failed – retrying in 5s"
      sleep 5
      if ! apt-get update >/dev/null 2>&1; then
        msg_error "Failed to update APT repositories after adding NodeSource"
        exit 1
      fi
    fi

    if ! apt-get install -y nodejs >/dev/null 2>&1; then
      msg_error "Failed to install Node.js ${NODE_VERSION} from NodeSource"
      exit 1
    fi

    # Update to latest npm (non-fatal: Node itself is already installed)
    $STD npm install -g npm@latest || {
      msg_error "Failed to update npm to latest version"
    }
    msg_ok "Setup Node.js ${NODE_VERSION}"
  fi

  export NODE_OPTIONS="--max-old-space-size=4096"

  # Ensure a valid working directory for npm (avoids the uv_cwd error when the
  # current directory no longer exists).
  if [[ ! -d /opt ]]; then
    mkdir -p /opt
  fi
  cd /opt || {
    msg_error "Failed to set safe working directory before npm install"
    exit 1
  }

  # Install global Node modules
  if [[ -n "$NODE_MODULE" ]]; then
    IFS=',' read -ra MODULES <<<"$NODE_MODULE"
    for mod in "${MODULES[@]}"; do
      local MODULE_NAME MODULE_REQ_VERSION MODULE_INSTALLED_VERSION
      if [[ "$mod" == @*/*@* ]]; then
        # Scoped package with version, e.g. @vue/cli@5.0.0
        MODULE_NAME="${mod%@*}"
        MODULE_REQ_VERSION="${mod##*@}"
      elif [[ "$mod" == @* ]]; then
        # FIX: scoped package WITHOUT a version (e.g. "@vue/cli") previously fell
        # through to the generic *"@"* branch and was split at its leading "@",
        # producing name "@vue" and version "cli". Treat it as "latest" instead.
        MODULE_NAME="$mod"
        MODULE_REQ_VERSION="latest"
      elif [[ "$mod" == *"@"* ]]; then
        # Unscoped package with version, e.g. yarn@latest
        MODULE_NAME="${mod%@*}"
        MODULE_REQ_VERSION="${mod##*@}"
      else
        # No version specified
        MODULE_NAME="$mod"
        MODULE_REQ_VERSION="latest"
      fi

      # Check if the module is already installed
      if npm list -g --depth=0 "$MODULE_NAME" >/dev/null 2>&1; then
        MODULE_INSTALLED_VERSION="$(npm list -g --depth=0 "$MODULE_NAME" | grep "$MODULE_NAME@" | awk -F@ '{print $2}' | tr -d '[:space:]')"
        if [[ "$MODULE_REQ_VERSION" != "latest" && "$MODULE_REQ_VERSION" != "$MODULE_INSTALLED_VERSION" ]]; then
          msg_info "Updating $MODULE_NAME from v$MODULE_INSTALLED_VERSION to v$MODULE_REQ_VERSION"
          if ! $STD npm install -g "${MODULE_NAME}@${MODULE_REQ_VERSION}"; then
            msg_error "Failed to update $MODULE_NAME to version $MODULE_REQ_VERSION"
            exit 1
          fi
        elif [[ "$MODULE_REQ_VERSION" == "latest" ]]; then
          msg_info "Updating $MODULE_NAME to latest version"
          if ! $STD npm install -g "${MODULE_NAME}@latest"; then
            msg_error "Failed to update $MODULE_NAME to latest version"
            exit 1
          fi
        fi
      else
        msg_info "Installing $MODULE_NAME@$MODULE_REQ_VERSION"
        if ! $STD npm install -g "${MODULE_NAME}@${MODULE_REQ_VERSION}"; then
          msg_error "Failed to install $MODULE_NAME@$MODULE_REQ_VERSION"
          exit 1
        fi
      fi
    done
    msg_ok "Installed Node.js modules: $NODE_MODULE"
  fi
}
# ------------------------------------------------------------------------------
# Installs or upgrades PostgreSQL and optional extensions/modules.
#
# Description:
#   - Detects existing PostgreSQL version
#   - Dumps all databases before upgrade and restores them afterwards
#   - Adds PGDG repo and installs specified version
#   - Installs optional PG_MODULES (e.g. postgis, contrib)
#
# Variables:
#   PG_VERSION - Major PostgreSQL version (e.g. 15, 16) (default: 16)
#   PG_MODULES - Comma-separated list of extensions (e.g. "postgis,contrib")
# ------------------------------------------------------------------------------
function setup_postgresql() {
  local PG_VERSION="${PG_VERSION:-16}"
  local PG_MODULES="${PG_MODULES:-}"
  local CURRENT_PG_VERSION=""
  local DISTRO
  local NEED_PG_INSTALL=false
  local BACKUP_FILE=""
  DISTRO="$(awk -F'=' '/^VERSION_CODENAME=/{ print $NF }' /etc/os-release)"

  if command -v psql >/dev/null; then
    CURRENT_PG_VERSION="$(psql -V | awk '{print $3}' | cut -d. -f1)"
    if [[ "$CURRENT_PG_VERSION" == "$PG_VERSION" ]]; then
      : # PostgreSQL is already at the desired version – no action needed
    else
      $STD msg_info "Detected PostgreSQL $CURRENT_PG_VERSION, preparing upgrade to $PG_VERSION"
      NEED_PG_INSTALL=true
    fi
  else
    NEED_PG_INSTALL=true
  fi

  if [[ "$NEED_PG_INSTALL" == true ]]; then
    if [[ -n "$CURRENT_PG_VERSION" ]]; then
      # FIX: compute the backup path exactly once. The original re-evaluated
      # $(date +%F) at restore time, which points at a nonexistent file when
      # the dump and the restore straddle midnight.
      BACKUP_FILE="/var/lib/postgresql/backup_$(date +%F)_v${CURRENT_PG_VERSION}.sql"
      $STD msg_info "Dumping PostgreSQL $CURRENT_PG_VERSION data"
      su - postgres -c "pg_dumpall > $BACKUP_FILE"
      $STD msg_ok "Data dump completed"

      systemctl stop postgresql
    fi

    rm -f /etc/apt/sources.list.d/pgdg.list /etc/apt/trusted.gpg.d/postgresql.gpg

    $STD msg_info "Adding PostgreSQL PGDG repository"
    curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc |
      gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg

    echo "deb https://apt.postgresql.org/pub/repos/apt ${DISTRO}-pgdg main" \
      >/etc/apt/sources.list.d/pgdg.list

    $STD apt-get update
    $STD msg_ok "Repository added"

    msg_info "Setup PostgreSQL $PG_VERSION"
    $STD apt-get install -y "postgresql-${PG_VERSION}" "postgresql-client-${PG_VERSION}"

    # Remove the old server packages only after the new ones are in place.
    if [[ -n "$CURRENT_PG_VERSION" ]]; then
      $STD apt-get purge -y "postgresql-${CURRENT_PG_VERSION}" "postgresql-client-${CURRENT_PG_VERSION}" || true
    fi

    systemctl enable -q --now postgresql

    if [[ -n "$CURRENT_PG_VERSION" ]]; then
      $STD msg_info "Restoring dumped data"
      su - postgres -c "psql < $BACKUP_FILE"
      $STD msg_ok "Data restored"
    fi

    $STD msg_ok "PostgreSQL $PG_VERSION installed"
  fi

  # Install optional PostgreSQL modules
  if [[ -n "$PG_MODULES" ]]; then
    IFS=',' read -ra MODULES <<<"$PG_MODULES"
    for module in "${MODULES[@]}"; do
      local pkg="postgresql-${PG_VERSION}-${module}"
      $STD msg_info "Setup PostgreSQL module/s: $pkg"
      $STD apt-get install -y "$pkg" || {
        msg_error "Failed to install $pkg"
        continue
      }
    done
    $STD msg_ok "Setup PostgreSQL modules"
  fi
}
# ------------------------------------------------------------------------------
# Installs or updates MariaDB from the official mirror.
#
# Variables:
#   MARIADB_VERSION - MariaDB version to install (e.g. 10.11, latest)
#                     (default: latest GA release on the mirror)
# ------------------------------------------------------------------------------

setup_mariadb() {
  local MARIADB_VERSION="${MARIADB_VERSION:-latest}"
  local DISTRO_CODENAME
  DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
  CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"

  # Bail out early if the mirror cannot be reached at all.
  curl -fsI http://mirror.mariadb.org/repo/ >/dev/null || {
    msg_error "MariaDB mirror not reachable"
    return 1
  }

  msg_info "Setting up MariaDB $MARIADB_VERSION"

  # "latest" -> resolve the newest GA release directory listed on the mirror.
  if [[ "$MARIADB_VERSION" == "latest" ]]; then
    MARIADB_VERSION=$(
      curl -fsSL http://mirror.mariadb.org/repo/ |
        grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
        grep -vE 'rc/|rolling/' |
        sed 's|/||' |
        sort -Vr |
        head -n1
    )
    [[ -n "$MARIADB_VERSION" ]] || {
      msg_error "Could not determine latest GA MariaDB version"
      return 1
    }
  fi

  local installed_ver=""
  if command -v mariadb >/dev/null; then
    installed_ver=$(mariadb --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
  fi

  # Requested version already present: just pull in pending package updates.
  if [[ "$installed_ver" == "$MARIADB_VERSION" ]]; then
    $STD msg_info "MariaDB $MARIADB_VERSION, upgrading"
    $STD apt-get update
    $STD apt-get install --only-upgrade -y mariadb-server mariadb-client
    $STD msg_ok "MariaDB upgraded to $MARIADB_VERSION"
    return 0
  fi

  # Different version installed: remove it (data directories are preserved
  # by apt purge of the server packages alone).
  if [[ -n "$installed_ver" ]]; then
    $STD msg_info "Upgrading MariaDB $installed_ver to $MARIADB_VERSION"
    $STD systemctl stop mariadb >/dev/null 2>&1 || true
    $STD apt-get purge -y 'mariadb*' || true
    rm -f /etc/apt/sources.list.d/mariadb.list /etc/apt/trusted.gpg.d/mariadb.gpg
  else
    $STD msg_info "Setup MariaDB $MARIADB_VERSION"
  fi

  curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
    gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg

  echo "deb [signed-by=/etc/apt/trusted.gpg.d/mariadb.gpg] http://mirror.mariadb.org/repo/${MARIADB_VERSION}/${CURRENT_OS} ${DISTRO_CODENAME} main" \
    >/etc/apt/sources.list.d/mariadb.list

  $STD apt-get update

  # Pre-seed the debconf "feedback" question so the install stays silent.
  local major_minor
  major_minor=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
  if [[ -n "$major_minor" ]]; then
    echo "mariadb-server-$major_minor mariadb-server/feedback boolean false" | debconf-set-selections
  else
    for ver in 12.1 12.0 11.4 11.3 11.2 11.1 11.0 10.11 10.6 10.5 10.4 10.3; do
      echo "mariadb-server-$ver mariadb-server/feedback boolean false" | debconf-set-selections
    done
  fi
  DEBIAN_FRONTEND=noninteractive $STD apt-get install -y mariadb-server mariadb-client

  msg_ok "Setup MariaDB $MARIADB_VERSION"
}

# ------------------------------------------------------------------------------
# Installs or upgrades MySQL and configures its APT repo.
#
# Variables:
#   MYSQL_VERSION - MySQL version to install (e.g. 5.7, 8.0) (default: 8.0)
# ------------------------------------------------------------------------------

function setup_mysql() {
  local MYSQL_VERSION="${MYSQL_VERSION:-8.0}"
  local have_ver=""
  local must_install=false
  CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"

  if command -v mysql >/dev/null; then
    have_ver="$(mysql --version | grep -oP 'Distrib\s+\K[0-9]+\.[0-9]+')"
    if [[ "$have_ver" == "$MYSQL_VERSION" ]]; then
      # Same major version: only apply patch-level updates when available.
      if apt list --upgradable 2>/dev/null | grep -q '^mysql-server/'; then
        $STD msg_info "MySQL $have_ver available for upgrade"
        $STD apt-get update
        $STD apt-get install --only-upgrade -y mysql-server
        $STD msg_ok "MySQL upgraded"
      fi
      return
    fi
    $STD msg_info "MySQL $have_ver will be upgraded to $MYSQL_VERSION"
    must_install=true
  else
    msg_info "Setup MySQL $MYSQL_VERSION"
    must_install=true
  fi

  if [[ "$must_install" == true ]]; then
    # Purge any conflicting packages before switching repos/versions.
    $STD systemctl stop mysql || true
    $STD apt-get purge -y "^mysql-server.*" "^mysql-client.*" "^mysql-common.*" || true
    rm -f /etc/apt/sources.list.d/mysql.list /etc/apt/trusted.gpg.d/mysql.gpg

    local DISTRO_CODENAME
    DISTRO_CODENAME="$(awk -F= '/VERSION_CODENAME/ { print $2 }' /etc/os-release)"
    curl -fsSL https://repo.mysql.com/RPM-GPG-KEY-mysql-2023 | gpg --dearmor -o /etc/apt/trusted.gpg.d/mysql.gpg
    echo "deb [signed-by=/etc/apt/trusted.gpg.d/mysql.gpg] https://repo.mysql.com/apt/${CURRENT_OS}/ ${DISTRO_CODENAME} mysql-${MYSQL_VERSION}" \
      >/etc/apt/sources.list.d/mysql.list

    export DEBIAN_FRONTEND=noninteractive
    $STD apt-get update
    $STD apt-get install -y mysql-server
    msg_ok "Setup MySQL $MYSQL_VERSION"
  fi
}
# ------------------------------------------------------------------------------
# Installs PHP with selected modules and configures Apache/FPM support.
#
# Description:
#   - Adds the Sury PHP repo if needed
#   - Installs default and user-defined modules
#   - Patches php.ini for CLI, Apache, and FPM as needed
#
# Variables:
#   PHP_VERSION             - PHP version to install (default: 8.4)
#   PHP_MODULE              - Additional comma-separated modules
#   PHP_APACHE              - Set YES to enable PHP with Apache
#   PHP_FPM                 - Set YES to enable PHP-FPM
#   PHP_MEMORY_LIMIT        - (default: 512M)
#   PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
#   PHP_POST_MAX_SIZE       - (default: 128M)
#   PHP_MAX_EXECUTION_TIME  - (default: 300)
# ------------------------------------------------------------------------------

function setup_php() {
  local PHP_VERSION="${PHP_VERSION:-8.4}"
  local PHP_MODULE="${PHP_MODULE:-}"
  local PHP_APACHE="${PHP_APACHE:-NO}"
  local PHP_FPM="${PHP_FPM:-NO}"
  local DISTRO_CODENAME
  DISTRO_CODENAME=$(awk -F= '/VERSION_CODENAME/ { print $2 }' /etc/os-release)

  local DEFAULT_MODULES="bcmath,cli,curl,gd,intl,mbstring,opcache,readline,xml,zip"
  local COMBINED_MODULES

  local PHP_MEMORY_LIMIT="${PHP_MEMORY_LIMIT:-512M}"
  local PHP_UPLOAD_MAX_FILESIZE="${PHP_UPLOAD_MAX_FILESIZE:-128M}"
  local PHP_POST_MAX_SIZE="${PHP_POST_MAX_SIZE:-128M}"
  local PHP_MAX_EXECUTION_TIME="${PHP_MAX_EXECUTION_TIME:-300}"

  # Merge default + user-defined modules
  if [[ -n "$PHP_MODULE" ]]; then
    COMBINED_MODULES="${DEFAULT_MODULES},${PHP_MODULE}"
  else
    COMBINED_MODULES="${DEFAULT_MODULES}"
  fi

  # Deduplicate
  COMBINED_MODULES=$(echo "$COMBINED_MODULES" | tr ',' '\n' | awk '!seen[$0]++' | paste -sd, -)

  # Get current PHP-CLI version
  local CURRENT_PHP=""
  if command -v php >/dev/null 2>&1; then
    CURRENT_PHP=$(php -v 2>/dev/null | awk '/^PHP/{print $2}' | cut -d. -f1,2)
  fi

  if [[ -z "$CURRENT_PHP" ]]; then
    msg_info "Setup PHP $PHP_VERSION"
  elif [[ "$CURRENT_PHP" != "$PHP_VERSION" ]]; then
    msg_info "Old PHP $CURRENT_PHP detected, Setup new PHP $PHP_VERSION"
    $STD apt-get purge -y "php${CURRENT_PHP//./}"* || true
  fi

  # Ensure Sury repo is available
  if [[ ! -f /etc/apt/sources.list.d/php.list ]]; then
    $STD curl -fsSLo /tmp/debsuryorg-archive-keyring.deb https://packages.sury.org/debsuryorg-archive-keyring.deb
    $STD dpkg -i /tmp/debsuryorg-archive-keyring.deb
    rm -f /tmp/debsuryorg-archive-keyring.deb # FIX: keyring installer was left behind in /tmp
    echo "deb [signed-by=/usr/share/keyrings/deb.sury.org-php.gpg] https://packages.sury.org/php/ ${DISTRO_CODENAME} main" \
      >/etc/apt/sources.list.d/php.list
    $STD apt-get update
  fi

  # Build module list, skipping modules the repo does not provide
  local MODULE_LIST="php${PHP_VERSION}"
  IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
  for mod in "${MODULES[@]}"; do
    if apt-cache show "php${PHP_VERSION}-${mod}" >/dev/null 2>&1; then
      MODULE_LIST+=" php${PHP_VERSION}-${mod}"
    else
      msg_warn "PHP-Module ${mod} for PHP ${PHP_VERSION} not found – skipping"
    fi
  done
  if [[ "$PHP_FPM" == "YES" ]]; then
    MODULE_LIST+=" php${PHP_VERSION}-fpm"
  fi

  # install apache2 with PHP support if requested
  if [[ "$PHP_APACHE" == "YES" ]]; then
    if ! dpkg -l | grep -q "libapache2-mod-php${PHP_VERSION}"; then
      msg_info "Installing Apache with PHP${PHP_VERSION} support"
      $STD apt-get install -y apache2 libapache2-mod-php${PHP_VERSION}
    else
      msg_info "Apache with PHP${PHP_VERSION} already installed – skipping install"
    fi
  fi

  # setup / update PHP modules (word splitting of MODULE_LIST is intentional)
  $STD apt-get install -y $MODULE_LIST
  msg_ok "Setup PHP $PHP_VERSION"

  # optional stop old PHP-FPM service
  if [[ "$PHP_FPM" == "YES" && -n "$CURRENT_PHP" && "$CURRENT_PHP" != "$PHP_VERSION" ]]; then
    $STD systemctl stop php"${CURRENT_PHP}"-fpm || true
    $STD systemctl disable php"${CURRENT_PHP}"-fpm || true
  fi

  # Patch all relevant php.ini files
  local PHP_INI_PATHS=("/etc/php/${PHP_VERSION}/cli/php.ini")
  [[ "$PHP_FPM" == "YES" ]] && PHP_INI_PATHS+=("/etc/php/${PHP_VERSION}/fpm/php.ini")
  [[ "$PHP_APACHE" == "YES" ]] && PHP_INI_PATHS+=("/etc/php/${PHP_VERSION}/apache2/php.ini")
  for ini in "${PHP_INI_PATHS[@]}"; do
    if [[ -f "$ini" ]]; then
      $STD msg_info "Patching $ini"
      sed -i "s|^memory_limit = .*|memory_limit = ${PHP_MEMORY_LIMIT}|" "$ini"
      sed -i "s|^upload_max_filesize = .*|upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}|" "$ini"
      sed -i "s|^post_max_size = .*|post_max_size = ${PHP_POST_MAX_SIZE}|" "$ini"
      sed -i "s|^max_execution_time = .*|max_execution_time = ${PHP_MAX_EXECUTION_TIME}|" "$ini"
      $STD msg_ok "Patched $ini"
    fi
  done

  # patch Apache configuration if needed: disable other php modules, enable ours
  if [[ "$PHP_APACHE" == "YES" ]]; then
    for mod in $(ls /etc/apache2/mods-enabled/ 2>/dev/null | grep -E '^php[0-9]\.[0-9]\.conf$' | sed 's/\.conf//'); do
      if [[ "$mod" != "php${PHP_VERSION}" ]]; then
        $STD a2dismod "$mod" || true
      fi
    done
    $STD a2enmod mpm_prefork
    $STD a2enmod "php${PHP_VERSION}"
    $STD systemctl restart apache2 || true
  fi

  # enable and restart PHP-FPM if requested
  if [[ "$PHP_FPM" == "YES" ]]; then
    if systemctl list-unit-files | grep -q "php${PHP_VERSION}-fpm.service"; then
      $STD systemctl enable php${PHP_VERSION}-fpm
      $STD systemctl restart php${PHP_VERSION}-fpm
    else
      msg_warn "FPM requested but service php${PHP_VERSION}-fpm not found"
    fi
  fi
}

# ------------------------------------------------------------------------------
# Installs or updates Composer globally (robust, idempotent).
#
#   - Installs to /usr/local/bin/composer
#   - Removes old binaries/symlinks in /usr/bin, /bin, /root/.composer, etc.
#   - Ensures /usr/local/bin is in PATH (permanent)
# ------------------------------------------------------------------------------

function setup_composer() {
  local COMPOSER_BIN="/usr/local/bin/composer"
  export COMPOSER_ALLOW_SUPERUSER=1

  # Clean up old Composer binaries/symlinks (if any)
  for old in /usr/bin/composer /bin/composer /root/.composer/vendor/bin/composer; do
    [[ -e "$old" && "$old" != "$COMPOSER_BIN" ]] && rm -f "$old"
  done

  # Ensure /usr/local/bin is in PATH for future logins (and current shell)
  ensure_usr_local_bin_persist
  export PATH="/usr/local/bin:$PATH"

  # Check if composer is already installed
  if [[ -x "$COMPOSER_BIN" ]]; then
    local CURRENT_VERSION
    CURRENT_VERSION=$("$COMPOSER_BIN" --version | awk '{print $3}')
    $STD msg_info "Old Composer $CURRENT_VERSION found, updating to latest"
  else
    msg_info "Installing Composer"
  fi

  # Download and install latest Composer
  curl -fsSL https://getcomposer.org/installer -o /tmp/composer-setup.php
  php /tmp/composer-setup.php --install-dir=/usr/local/bin --filename=composer >/dev/null 2>&1
  rm -f /tmp/composer-setup.php # FIX: installer script was left behind in /tmp

  if [[ ! -x "$COMPOSER_BIN" ]]; then
    msg_error "Composer was not successfully installed (no binary at $COMPOSER_BIN)"
    return 1
  fi

  chmod +x "$COMPOSER_BIN"
  $STD "$COMPOSER_BIN" self-update --no-interaction || true # safe if already latest
  $STD "$COMPOSER_BIN" diagnose
  msg_ok "Composer is ready at $COMPOSER_BIN"
}
# ------------------------------------------------------------------------------
# Installs Go (Golang) from the official tarball.
#
# Description:
#   - Determines system architecture
#   - Downloads the latest version if GO_VERSION is not set
#
# Variables:
#   GO_VERSION - Version to install (e.g. 1.22.2 or latest)
# ------------------------------------------------------------------------------

function setup_go() {
  local ARCH
  case "$(uname -m)" in
  x86_64) ARCH="amd64" ;;
  aarch64) ARCH="arm64" ;;
  *)
    msg_error "Unsupported architecture: $(uname -m)"
    return 1
    ;;
  esac

  # Determine version ("latest" resolves via the official VERSION endpoint)
  if [[ -z "${GO_VERSION:-}" || "${GO_VERSION}" == "latest" ]]; then
    GO_VERSION=$(curl -fsSL https://go.dev/VERSION?m=text | head -n1 | sed 's/^go//')
    if [[ -z "$GO_VERSION" ]]; then
      msg_error "Could not determine latest Go version"
      return 1
    fi
  fi

  local GO_BIN="/usr/local/bin/go"
  local GO_INSTALL_DIR="/usr/local/go"

  if [[ -x "$GO_BIN" ]]; then
    local CURRENT_VERSION
    CURRENT_VERSION=$("$GO_BIN" version | awk '{print $3}' | sed 's/go//')
    if [[ "$CURRENT_VERSION" == "$GO_VERSION" ]]; then
      return 0
    fi
    $STD msg_info "Old Go Installation ($CURRENT_VERSION) found, upgrading to $GO_VERSION"
    rm -rf "$GO_INSTALL_DIR"
  else
    msg_info "Setup Go $GO_VERSION"
  fi

  local TARBALL="go${GO_VERSION}.linux-${ARCH}.tar.gz"
  local URL="https://go.dev/dl/${TARBALL}"
  local TMP_TAR
  # FIX: declare and assign separately — `local VAR=$(cmd)` masks the command's
  # exit status.
  TMP_TAR=$(mktemp)

  if ! curl -fsSL "$URL" -o "$TMP_TAR"; then
    msg_error "Failed to download $TARBALL"
    rm -f "$TMP_TAR" # FIX: temp file previously leaked on download failure
    return 1
  fi

  # FIX: extraction was unchecked — a failed tar still reported success.
  if ! tar -C /usr/local -xzf "$TMP_TAR"; then
    msg_error "Failed to extract $TARBALL"
    rm -f "$TMP_TAR"
    return 1
  fi

  ln -sf /usr/local/go/bin/go /usr/local/bin/go
  ln -sf /usr/local/go/bin/gofmt /usr/local/bin/gofmt
  rm -f "$TMP_TAR"

  msg_ok "Setup Go $GO_VERSION"
}
# ------------------------------------------------------------------------------
# Installs Temurin JDK via the Adoptium APT repository.
#
# Description:
#   - Removes a previous JDK on version mismatch
#   - Installs or upgrades to the requested JAVA_VERSION
#
# Variables:
#   JAVA_VERSION - Temurin JDK version to install (e.g. 17, 21) (default: 21)
# ------------------------------------------------------------------------------

function setup_java() {
  local JAVA_VERSION="${JAVA_VERSION:-21}"
  local DISTRO_CODENAME
  DISTRO_CODENAME=$(awk -F= '/VERSION_CODENAME/ { print $2 }' /etc/os-release)
  local DESIRED_PACKAGE="temurin-${JAVA_VERSION}-jdk"

  # Register the Adoptium repository on first use.
  if [[ ! -f /etc/apt/sources.list.d/adoptium.list ]]; then
    $STD msg_info "Setting up Adoptium Repository"
    mkdir -p /etc/apt/keyrings
    curl -fsSL "https://packages.adoptium.net/artifactory/api/gpg/key/public" | gpg --dearmor -o /etc/apt/trusted.gpg.d/adoptium.gpg
    echo "deb [signed-by=/etc/apt/trusted.gpg.d/adoptium.gpg] https://packages.adoptium.net/artifactory/deb ${DISTRO_CODENAME} main" \
      >/etc/apt/sources.list.d/adoptium.list
    $STD apt-get update
    $STD msg_ok "Set up Adoptium Repository"
  fi

  # Detect whichever Temurin JDK is currently installed, if any.
  local have_ver=""
  if dpkg -l | grep -q "temurin-.*-jdk"; then
    have_ver=$(dpkg -l | awk '/temurin-.*-jdk/{print $2}' | grep -oP 'temurin-\K[0-9]+')
  fi

  if [[ "$have_ver" == "$JAVA_VERSION" ]]; then
    # Right version already present — only pull in package updates.
    $STD msg_info "Upgrading Temurin JDK $JAVA_VERSION"
    $STD apt-get update
    $STD apt-get install --only-upgrade -y "$DESIRED_PACKAGE"
    $STD msg_ok "Upgraded Temurin JDK $JAVA_VERSION"
    return
  fi

  # Different version present — remove it before installing the desired one.
  if [[ -n "$have_ver" ]]; then
    $STD msg_info "Removing Temurin JDK $have_ver"
    $STD apt-get purge -y "temurin-${have_ver}-jdk"
  fi

  msg_info "Setup Temurin JDK $JAVA_VERSION"
  $STD apt-get install -y "$DESIRED_PACKAGE"
  msg_ok "Setup Temurin JDK $JAVA_VERSION"
}
# ------------------------------------------------------------------------------
# Installs or updates MongoDB to the requested major version.
#
# Description:
#   - Preserves data across installations
#   - Adds the official MongoDB repo
#
# Variables:
#   MONGO_VERSION - MongoDB major version to install (e.g. 7.0, 8.0)
#                   (default: 8.0)
# ------------------------------------------------------------------------------

function setup_mongodb() {
  local MONGO_VERSION="${MONGO_VERSION:-8.0}"
  local DISTRO_ID DISTRO_CODENAME MONGO_BASE_URL
  DISTRO_ID=$(awk -F= '/^ID=/{ gsub(/"/,"",$2); print $2 }' /etc/os-release)
  DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{ print $2 }' /etc/os-release)

  # MongoDB > 5.x requires AVX; refuse early on CPUs without it.
  if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
    local major="${MONGO_VERSION%%.*}"
    if ((major > 5)); then
      msg_error "MongoDB ${MONGO_VERSION} requires AVX support, which is not available on this system."
      return 1
    fi
  fi

  # Pick repo URL and component by distribution.
  if [[ "$DISTRO_ID" == "ubuntu" ]]; then
    MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu"
    REPO_COMPONENT="multiverse"
  elif [[ "$DISTRO_ID" == "debian" ]]; then
    MONGO_BASE_URL="https://repo.mongodb.org/apt/debian"
    REPO_COMPONENT="main"
  else
    msg_error "Unsupported distribution: $DISTRO_ID"
    return 1
  fi

  local REPO_LIST="/etc/apt/sources.list.d/mongodb-org-${MONGO_VERSION}.list"

  local have_ver=""
  if command -v mongod >/dev/null; then
    have_ver=$(mongod --version | awk '/db version/{print $3}' | cut -d. -f1,2)
  fi

  # Requested version already installed — just apply package upgrades.
  if [[ "$have_ver" == "$MONGO_VERSION" ]]; then
    $STD msg_info "Upgrading MongoDB $MONGO_VERSION"
    $STD apt-get update
    $STD apt-get install --only-upgrade -y mongodb-org
    $STD msg_ok "Upgraded MongoDB $MONGO_VERSION"
    return 0
  fi

  # Different version installed — remove packages/repos; data dir is kept.
  if [[ -n "$have_ver" ]]; then
    $STD systemctl stop mongod || true
    $STD apt-get purge -y mongodb-org || true
    rm -f /etc/apt/sources.list.d/mongodb-org-*.list
    rm -f /etc/apt/trusted.gpg.d/mongodb-*.gpg
  else
    msg_info "Setup MongoDB $MONGO_VERSION"
  fi

  curl -fsSL "https://pgp.mongodb.com/server-${MONGO_VERSION}.asc" | gpg --dearmor -o "/etc/apt/trusted.gpg.d/mongodb-${MONGO_VERSION}.gpg"
  echo "deb [signed-by=/etc/apt/trusted.gpg.d/mongodb-${MONGO_VERSION}.gpg] ${MONGO_BASE_URL} ${DISTRO_CODENAME}/mongodb-org/${MONGO_VERSION} ${REPO_COMPONENT}" \
    >"$REPO_LIST"

  $STD apt-get update || {
    msg_error "APT update failed — invalid MongoDB repo for ${DISTRO_ID}-${DISTRO_CODENAME}?"
    return 1
  }

  $STD apt-get install -y mongodb-org

  mkdir -p /var/lib/mongodb
  chown -R mongodb:mongodb /var/lib/mongodb

  $STD systemctl enable mongod
  $STD systemctl start mongod
  msg_ok "Setup MongoDB $MONGO_VERSION"
}
+# +# Parameters: +# $1 APP - Application name (used for install path and version file) +# $2 REPO - GitHub repository in form user/repo +# $3 MODE - Release type: +# tarball → source tarball (.tar.gz) +# binary → .deb file (auto-arch matched) +# prebuild → prebuilt archive (e.g. tar.gz) +# singlefile→ standalone binary (chmod +x) +# $4 VERSION - Optional release tag (default: latest) +# $5 TARGET_DIR - Optional install path (default: /opt/) +# $6 ASSET_FILENAME - Required for: +# - prebuild → archive filename or pattern +# - singlefile→ binary filename or pattern +# +# Optional: +# - Set GITHUB_TOKEN env var to increase API rate limit (recommended for CI/CD). +# +# Examples: +# # 1. Minimal: Fetch and deploy source tarball +# fetch_and_deploy_gh_release "myapp" "myuser/myapp" +# +# # 2. Binary install via .deb asset (architecture auto-detected) +# fetch_and_deploy_gh_release "myapp" "myuser/myapp" "binary" +# +# # 3. Prebuilt archive (.tar.gz) with asset filename match +# fetch_and_deploy_gh_release "hanko" "teamhanko/hanko" "prebuild" "latest" "/opt/hanko" "hanko_Linux_x86_64.tar.gz" +# +# # 4. Single binary (chmod +x) like Argus, Promtail etc. +# fetch_and_deploy_gh_release "argus" "release-argus/Argus" "singlefile" "0.26.3" "/opt/argus" "Argus-.*linux-amd64" +# ------------------------------------------------------------------------------ + +function fetch_and_deploy_gh_release() { + local app="$1" + local repo="$2" + local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile + local version="${4:-latest}" + local target="${5:-/opt/$app}" + local asset_pattern="${6:-}" + + local app_lc=$(echo "${app,,}" | tr -d ' ') + local version_file="$HOME/.${app_lc}" + + local api_timeout="--connect-timeout 10 --max-time 60" + local download_timeout="--connect-timeout 15 --max-time 900" + + local current_version="" + [[ -f "$version_file" ]] && current_version=$(<"$version_file") + + if ! 
command -v jq &>/dev/null; then + $STD apt-get install -y jq &>/dev/null + fi + + local api_url="https://api.github.com/repos/$repo/releases" + [[ "$version" != "latest" ]] && api_url="$api_url/tags/$version" || api_url="$api_url/latest" + local header=() + [[ -n "${GITHUB_TOKEN:-}" ]] && header=(-H "Authorization: token $GITHUB_TOKEN") + + # dns pre check + local gh_host + gh_host=$(awk -F/ '{print $3}' <<<"$api_url") + if ! getent hosts "$gh_host" &>/dev/null; then + msg_error "DNS resolution failed for $gh_host – check /etc/resolv.conf or networking" + return 1 + fi + + local max_retries=3 retry_delay=2 attempt=1 success=false resp http_code + + while ((attempt <= max_retries)); do + resp=$(curl $api_timeout -fsSL -w "%{http_code}" -o /tmp/gh_rel.json "${header[@]}" "$api_url") && success=true && break + sleep "$retry_delay" + ((attempt++)) + done + + if ! $success; then + msg_error "Failed to fetch release metadata from $api_url after $max_retries attempts" + return 1 + fi + + http_code="${resp:(-3)}" + [[ "$http_code" != "200" ]] && { + msg_error "GitHub API returned HTTP $http_code" + return 1 + } + + local json tag_name + json=$(/dev/null || uname -m) + [[ "$arch" == "x86_64" ]] && arch="amd64" + [[ "$arch" == "aarch64" ]] && arch="arm64" + + local assets url_match="" + assets=$(echo "$json" | jq -r '.assets[].browser_download_url') + + # If explicit filename pattern is provided (param $6), match that first + if [[ -n "$asset_pattern" ]]; then + for u in $assets; do + case "${u##*/}" in + $asset_pattern) + url_match="$u" + break + ;; + esac + done + fi + + # If no match via explicit pattern, fall back to architecture heuristic + if [[ -z "$url_match" ]]; then + for u in $assets; do + if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then + url_match="$u" + break + fi + done + fi + + # Fallback: any .deb file + if [[ -z "$url_match" ]]; then + for u in $assets; do + [[ "$u" =~ \.deb$ ]] && url_match="$u" && break + done + fi + + if [[ -z 
"$url_match" ]]; then + msg_error "No suitable .deb asset found for $app" + rm -rf "$tmpdir" + return 1 + fi + + filename="${url_match##*/}" + curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || { + msg_error "Download failed: $url_match" + rm -rf "$tmpdir" + return 1 + } + + chmod 644 "$tmpdir/$filename" + $STD apt-get install -y "$tmpdir/$filename" || { + $STD dpkg -i "$tmpdir/$filename" || { + msg_error "Both apt and dpkg installation failed" + rm -rf "$tmpdir" + return 1 + } + } + + ### Prebuild Mode ### + elif [[ "$mode" == "prebuild" ]]; then + local pattern="${6%\"}" + pattern="${pattern#\"}" + [[ -z "$pattern" ]] && { + msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)" + rm -rf "$tmpdir" + return 1 + } + + local asset_url="" + for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do + filename_candidate="${u##*/}" + case "$filename_candidate" in + $pattern) + asset_url="$u" + break + ;; + esac + done + + [[ -z "$asset_url" ]] && { + msg_error "No asset matching '$pattern' found" + rm -rf "$tmpdir" + return 1 + } + + filename="${asset_url##*/}" + curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || { + msg_error "Download failed: $asset_url" + rm -rf "$tmpdir" + return 1 + } + + local unpack_tmp + unpack_tmp=$(mktemp -d) + mkdir -p "$target" + + if [[ "$filename" == *.zip ]]; then + if ! 
command -v unzip &>/dev/null; then + $STD apt-get install -y unzip + fi + unzip -q "$tmpdir/$filename" -d "$unpack_tmp" + elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then + tar -xf "$tmpdir/$filename" -C "$unpack_tmp" + else + msg_error "Unsupported archive format: $filename" + rm -rf "$tmpdir" "$unpack_tmp" + return 1 + fi + + local top_dirs + top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l) + local top_entries inner_dir + top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1) + if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then + # Strip leading folder + inner_dir="$top_entries" + shopt -s dotglob nullglob + if compgen -G "$inner_dir/*" >/dev/null; then + cp -r "$inner_dir"/* "$target/" || { + msg_error "Failed to copy contents from $inner_dir to $target" + rm -rf "$tmpdir" "$unpack_tmp" + return 1 + } + else + msg_error "Inner directory is empty: $inner_dir" + rm -rf "$tmpdir" "$unpack_tmp" + return 1 + fi + shopt -u dotglob nullglob + else + # Copy all contents + shopt -s dotglob nullglob + if compgen -G "$unpack_tmp/*" >/dev/null; then + cp -r "$unpack_tmp"/* "$target/" || { + msg_error "Failed to copy contents to $target" + rm -rf "$tmpdir" "$unpack_tmp" + return 1 + } + else + msg_error "Unpacked archive is empty" + rm -rf "$tmpdir" "$unpack_tmp" + return 1 + fi + shopt -u dotglob nullglob + fi + + ### Singlefile Mode ### + elif [[ "$mode" == "singlefile" ]]; then + local pattern="${6%\"}" + pattern="${pattern#\"}" + [[ -z "$pattern" ]] && { + msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)" + rm -rf "$tmpdir" + return 1 + } + + local asset_url="" + for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do + filename_candidate="${u##*/}" + case "$filename_candidate" in + $pattern) + asset_url="$u" + break + ;; + esac + done + + [[ -z "$asset_url" ]] && { + msg_error "No asset matching '$pattern' found" + rm -rf "$tmpdir" + return 1 + } + + 
filename="${asset_url##*/}" + mkdir -p "$target" + + local use_filename="${USE_ORIGINAL_FILENAME:-false}" + local target_file="$app" + [[ "$use_filename" == "true" ]] && target_file="$filename" + + curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || { + msg_error "Download failed: $asset_url" + rm -rf "$tmpdir" + return 1 + } + + if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then + chmod +x "$target/$target_file" + fi + + else + msg_error "Unknown mode: $mode" + rm -rf "$tmpdir" + return 1 + fi + + echo "$version" >"$version_file" + msg_ok "Deployed: $app ($version)" + rm -rf "$tmpdir" +} + +# ------------------------------------------------------------------------------ +# Installs a local IP updater script using networkd-dispatcher. +# +# Description: +# - Stores current IP in /run/local-ip.env +# - Automatically runs on network changes +# ------------------------------------------------------------------------------ + +function setup_local_ip_helper() { + local BASE_DIR="/usr/local/community-scripts/ip-management" + local SCRIPT_PATH="$BASE_DIR/update_local_ip.sh" + local IP_FILE="/run/local-ip.env" + local DISPATCHER_SCRIPT="/etc/networkd-dispatcher/routable.d/10-update-local-ip.sh" + + mkdir -p "$BASE_DIR" + + # Install networkd-dispatcher if not present + if ! 
dpkg -s networkd-dispatcher >/dev/null 2>&1; then
+    $STD apt-get update
+    $STD apt-get install -y networkd-dispatcher
+  fi
+
+  # Write update_local_ip.sh
+  cat <<'EOF' >"$SCRIPT_PATH"
+#!/bin/bash
+set -euo pipefail
+
+IP_FILE="/run/local-ip.env"
+mkdir -p "$(dirname "$IP_FILE")"
+
+get_current_ip() {
+  local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
+  local ip
+
+  for target in "${targets[@]}"; do
+    if [[ "$target" == "default" ]]; then
+      ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+    else
+      ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
+    fi
+    if [[ -n "$ip" ]]; then
+      echo "$ip"
+      return 0
+    fi
+  done
+
+  return 1
+}
+
+current_ip="$(get_current_ip)"
+
+if [[ -z "$current_ip" ]]; then
+  echo "[ERROR] Could not detect local IP" >&2
+  exit 1
+fi
+
+if [[ -f "$IP_FILE" ]]; then
+  source "$IP_FILE"
+  [[ "$LOCAL_IP" == "$current_ip" ]] && exit 0
+fi
+
+echo "LOCAL_IP=$current_ip" > "$IP_FILE"
+echo "[INFO] LOCAL_IP updated to $current_ip"
+EOF
+
+  chmod +x "$SCRIPT_PATH"
+
+  # Install dispatcher hook (heredoc WRITES the hook; 'cat <file' would read it)
+  mkdir -p "$(dirname "$DISPATCHER_SCRIPT")"
+  cat <<EOF >"$DISPATCHER_SCRIPT"
+#!/bin/bash
+$SCRIPT_PATH
+EOF
+
+  chmod +x "$DISPATCHER_SCRIPT"
+  systemctl enable -q --now networkd-dispatcher.service
+}
+
+# ------------------------------------------------------------------------------
+# Loads LOCAL_IP from persistent store or detects if missing.
+# +# Description: +# - Loads from /run/local-ip.env or performs runtime lookup +# ------------------------------------------------------------------------------ + +function import_local_ip() { + local IP_FILE="/run/local-ip.env" + if [[ -f "$IP_FILE" ]]; then + # shellcheck disable=SC1090 + source "$IP_FILE" + fi + + if [[ -z "${LOCAL_IP:-}" ]]; then + get_current_ip() { + local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default") + local ip + + for target in "${targets[@]}"; do + if [[ "$target" == "default" ]]; then + ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}') + else + ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}') + fi + if [[ -n "$ip" ]]; then + echo "$ip" + return 0 + fi + done + + return 1 + } + + LOCAL_IP="$(get_current_ip || true)" + if [[ -z "$LOCAL_IP" ]]; then + msg_error "Could not determine LOCAL_IP" + return 1 + fi + fi + + export LOCAL_IP +} + +# ------------------------------------------------------------------------------ +# Downloads file with optional progress indicator using pv. +# +# Arguments: +# $1 - URL +# $2 - Destination path +# ------------------------------------------------------------------------------ + +function download_with_progress() { + local url="$1" + local output="$2" + if [ -n "$SPINNER_PID" ] && ps -p "$SPINNER_PID" >/dev/null; then kill "$SPINNER_PID" >/dev/null; fi + + if ! command -v pv &>/dev/null; then + $STD apt-get install -y pv + fi + set -o pipefail + + # Content-Length aus HTTP-Header holen + local content_length + content_length=$(curl -fsSLI "$url" | awk '/Content-Length/ {print $2}' | tr -d '\r' || true) + + if [[ -z "$content_length" ]]; then + if ! curl -fL# -o "$output" "$url"; then + msg_error "Download failed" + return 1 + fi + else + if ! 
curl -fsSL "$url" | pv -s "$content_length" >"$output"; then + msg_error "Download failed" + return 1 + fi + fi +} + +# ------------------------------------------------------------------------------ +# Installs or upgrades uv (Python package manager) from GitHub releases. +# - Downloads platform-specific tarball (no install.sh!) +# - Extracts uv binary +# - Places it in /usr/local/bin +# - Optionally installs a specific Python version via uv +# ------------------------------------------------------------------------------ + +function setup_uv() { + local UV_BIN="/usr/local/bin/uv" + local TMP_DIR + TMP_DIR=$(mktemp -d) + + # Determine system architecture + local ARCH + ARCH=$(uname -m) + local UV_TAR + + case "$ARCH" in + x86_64) + if grep -qi "alpine" /etc/os-release; then + UV_TAR="uv-x86_64-unknown-linux-musl.tar.gz" + else + UV_TAR="uv-x86_64-unknown-linux-gnu.tar.gz" + fi + ;; + aarch64) + if grep -qi "alpine" /etc/os-release; then + UV_TAR="uv-aarch64-unknown-linux-musl.tar.gz" + else + UV_TAR="uv-aarch64-unknown-linux-gnu.tar.gz" + fi + ;; + *) + msg_error "Unsupported architecture: $ARCH" + rm -rf "$TMP_DIR" + return 1 + ;; + esac + + # Get latest version from GitHub + local LATEST_VERSION + LATEST_VERSION=$(curl -fsSL https://api.github.com/repos/astral-sh/uv/releases/latest | + grep '"tag_name":' | cut -d '"' -f4 | sed 's/^v//') + + if [[ -z "$LATEST_VERSION" ]]; then + msg_error "Could not fetch latest uv version from GitHub." 
+ rm -rf "$TMP_DIR" + return 1 + fi + + # Check if uv is already up to date + if [[ -x "$UV_BIN" ]]; then + local INSTALLED_VERSION + INSTALLED_VERSION=$($UV_BIN -V | awk '{print $2}') + if [[ "$INSTALLED_VERSION" == "$LATEST_VERSION" ]]; then + rm -rf "$TMP_DIR" + [[ ":$PATH:" != *":/usr/local/bin:"* ]] && export PATH="/usr/local/bin:$PATH" + return 0 + else + msg_info "Updating uv from $INSTALLED_VERSION to $LATEST_VERSION" + fi + else + msg_info "Setup uv $LATEST_VERSION" + fi + + # Download and install manually + local UV_URL="https://github.com/astral-sh/uv/releases/latest/download/${UV_TAR}" + if ! curl -fsSL "$UV_URL" -o "$TMP_DIR/uv.tar.gz"; then + msg_error "Failed to download $UV_URL" + rm -rf "$TMP_DIR" + return 1 + fi + + if ! tar -xzf "$TMP_DIR/uv.tar.gz" -C "$TMP_DIR"; then + msg_error "Failed to extract uv archive" + rm -rf "$TMP_DIR" + return 1 + fi + + install -m 755 "$TMP_DIR"/*/uv "$UV_BIN" || { + msg_error "Failed to install uv binary" + rm -rf "$TMP_DIR" + return 1 + } + + if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then + export PATH="/usr/local/bin:$PATH" + fi + ensure_usr_local_bin_persist + if ! $STD uv python update-shell; then + msg_error "uv shell integration failed – continuing anyway" + fi + msg_ok "Setup uv $LATEST_VERSION" + + # Optional: install specific Python version + if [[ -n "${PYTHON_VERSION:-}" ]]; then + local VERSION_MATCH + VERSION_MATCH=$(uv python list --only-downloads | + grep -E "^cpython-${PYTHON_VERSION//./\\.}\.[0-9]+-linux" | + cut -d'-' -f2 | sort -V | tail -n1) + + if [[ -z "$VERSION_MATCH" ]]; then + msg_error "No matching Python $PYTHON_VERSION.x version found via uv" + return 1 + fi + + if ! uv python list | grep -q "cpython-${VERSION_MATCH}-linux.*uv/python"; then + if ! 
$STD uv python install "$VERSION_MATCH"; then + msg_error "Failed to install Python $VERSION_MATCH via uv" + return 1 + fi + msg_ok "Setup Python $VERSION_MATCH via uv" + fi + fi +} + +# ------------------------------------------------------------------------------ +# Ensures /usr/local/bin is permanently in system PATH. +# +# Description: +# - Adds to /etc/profile.d if not present +# ------------------------------------------------------------------------------ + +function ensure_usr_local_bin_persist() { + local PROFILE_FILE="/etc/profile.d/custom_path.sh" + + if [[ ! -f "$PROFILE_FILE" ]] && ! command -v pveversion &>/dev/null; then + echo 'export PATH="/usr/local/bin:$PATH"' >"$PROFILE_FILE" + chmod +x "$PROFILE_FILE" + fi +} + +# ------------------------------------------------------------------------------ +# Installs or updates Ghostscript (gs) from source. +# +# Description: +# - Fetches latest release +# - Builds and installs system-wide +# ------------------------------------------------------------------------------ + +function setup_gs() { + mkdir -p /tmp + TMP_DIR=$(mktemp -d) + CURRENT_VERSION=$(gs --version 2>/dev/null || echo "0") + + RELEASE_JSON=$(curl -fsSL https://api.github.com/repos/ArtifexSoftware/ghostpdl-downloads/releases/latest) + LATEST_VERSION=$(echo "$RELEASE_JSON" | grep '"tag_name":' | head -n1 | cut -d '"' -f4 | sed 's/^gs//') + LATEST_VERSION_DOTTED=$(echo "$RELEASE_JSON" | grep '"name":' | head -n1 | grep -o '[0-9]\+\.[0-9]\+\.[0-9]\+') + + if [[ -z "$LATEST_VERSION" ]]; then + msg_error "Could not determine latest Ghostscript version from GitHub." 
+    rm -rf "$TMP_DIR"
+    return
+  fi
+
+  if dpkg --compare-versions "$CURRENT_VERSION" ge "$LATEST_VERSION_DOTTED"; then
+    rm -rf "$TMP_DIR"
+    return
+  fi
+
+  msg_info "Setup Ghostscript $LATEST_VERSION_DOTTED"
+  curl -fsSL "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs${LATEST_VERSION}/ghostscript-${LATEST_VERSION_DOTTED}.tar.gz" -o "$TMP_DIR/ghostscript.tar.gz"
+
+  if ! tar -xzf "$TMP_DIR/ghostscript.tar.gz" -C "$TMP_DIR"; then
+    msg_error "Failed to extract Ghostscript archive."
+    rm -rf "$TMP_DIR"
+    return
+  fi
+
+  cd "$TMP_DIR/ghostscript-${LATEST_VERSION_DOTTED}" || {
+    msg_error "Failed to enter Ghostscript source directory."
+    rm -rf "$TMP_DIR"; return 1 # must abort: building below in the wrong cwd
+  }
+  $STD apt-get install -y build-essential libpng-dev zlib1g-dev
+  $STD ./configure >/dev/null
+  $STD make
+  $STD sudo make install
+  local EXIT_CODE=$?
+  hash -r
+  if [[ ! -x "$(command -v gs)" ]]; then
+    if [[ -x /usr/local/bin/gs ]]; then
+      ln -sf /usr/local/bin/gs /usr/bin/gs
+    fi
+  fi
+
+  rm -rf "$TMP_DIR"
+
+  if [[ $EXIT_CODE -eq 0 ]]; then
+    msg_ok "Setup Ghostscript $LATEST_VERSION_DOTTED"
+  else
+    msg_error "Ghostscript installation failed"
+  fi
+}
+
+# ------------------------------------------------------------------------------
+# Installs rbenv and ruby-build, installs Ruby and optionally Rails.
+# +# Description: +# - Downloads rbenv and ruby-build from GitHub +# - Compiles and installs target Ruby version +# - Optionally installs Rails via gem +# +# Variables: +# RUBY_VERSION - Ruby version to install (default: 3.4.4) +# RUBY_INSTALL_RAILS - true/false to install Rails (default: true) +# ------------------------------------------------------------------------------ + +function setup_ruby() { + local RUBY_VERSION="${RUBY_VERSION:-3.4.4}" + local RUBY_INSTALL_RAILS="${RUBY_INSTALL_RAILS:-true}" + + local RBENV_DIR="$HOME/.rbenv" + local RBENV_BIN="$RBENV_DIR/bin/rbenv" + local PROFILE_FILE="$HOME/.profile" + local TMP_DIR + TMP_DIR=$(mktemp -d) + + msg_info "Setup Ruby $RUBY_VERSION" + + local RBENV_RELEASE + RBENV_RELEASE=$(curl -fsSL https://api.github.com/repos/rbenv/rbenv/releases/latest | grep '"tag_name":' | cut -d '"' -f4 | sed 's/^v//') + if [[ -z "$RBENV_RELEASE" ]]; then + msg_error "Failed to fetch latest rbenv version" + rm -rf "$TMP_DIR" + return 1 + fi + + curl -fsSL "https://github.com/rbenv/rbenv/archive/refs/tags/v${RBENV_RELEASE}.tar.gz" -o "$TMP_DIR/rbenv.tar.gz" + tar -xzf "$TMP_DIR/rbenv.tar.gz" -C "$TMP_DIR" + mkdir -p "$RBENV_DIR" + cp -r "$TMP_DIR/rbenv-${RBENV_RELEASE}/." "$RBENV_DIR/" + cd "$RBENV_DIR" && src/configure && $STD make -C src + + local RUBY_BUILD_RELEASE + RUBY_BUILD_RELEASE=$(curl -fsSL https://api.github.com/repos/rbenv/ruby-build/releases/latest | grep '"tag_name":' | cut -d '"' -f4 | sed 's/^v//') + if [[ -z "$RUBY_BUILD_RELEASE" ]]; then + msg_error "Failed to fetch latest ruby-build version" + rm -rf "$TMP_DIR" + return 1 + fi + + curl -fsSL "https://github.com/rbenv/ruby-build/archive/refs/tags/v${RUBY_BUILD_RELEASE}.tar.gz" -o "$TMP_DIR/ruby-build.tar.gz" + tar -xzf "$TMP_DIR/ruby-build.tar.gz" -C "$TMP_DIR" + mkdir -p "$RBENV_DIR/plugins/ruby-build" + cp -r "$TMP_DIR/ruby-build-${RUBY_BUILD_RELEASE}/." 
"$RBENV_DIR/plugins/ruby-build/" + echo "$RUBY_BUILD_RELEASE" >"$RBENV_DIR/plugins/ruby-build/RUBY_BUILD_version.txt" + + if ! grep -q 'rbenv init' "$PROFILE_FILE"; then + echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >>"$PROFILE_FILE" + echo 'eval "$(rbenv init -)"' >>"$PROFILE_FILE" + fi + + export PATH="$RBENV_DIR/bin:$PATH" + eval "$("$RBENV_BIN" init - bash)" + + if ! "$RBENV_BIN" versions --bare | grep -qx "$RUBY_VERSION"; then + $STD "$RBENV_BIN" install "$RUBY_VERSION" + fi + + "$RBENV_BIN" global "$RUBY_VERSION" + hash -r + + if [[ "$RUBY_INSTALL_RAILS" == "true" ]]; then + msg_info "Setup Rails via gem" + gem install rails + msg_ok "Setup Rails $(rails -v)" + fi + + rm -rf "$TMP_DIR" + msg_ok "Setup Ruby $RUBY_VERSION" +} + +# ------------------------------------------------------------------------------ +# Creates and installs self-signed certificates. +# +# Description: +# - Create a self-signed certificate with option to override application name +# +# Variables: +# APP - Application name (default: $APPLICATION variable) +# ------------------------------------------------------------------------------ +function create_selfsigned_certs() { + local app=${APP:-$(echo "${APPLICATION,,}" | tr -d ' ')} + $STD openssl req -x509 -nodes -days 365 -newkey rsa:4096 \ + -keyout /etc/ssl/private/"$app"-selfsigned.key \ + -out /etc/ssl/certs/"$app"-selfsigned.crt \ + -subj "/C=US/O=$app/OU=Domain Control Validated/CN=localhost" +} + +# ------------------------------------------------------------------------------ +# Installs Rust toolchain and optional global crates via cargo. 
+# +# Description: +# - Installs rustup (if missing) +# - Installs or updates desired Rust toolchain (stable, nightly, or versioned) +# - Installs or updates specified global crates using `cargo install` +# +# Notes: +# - Skips crate install if exact version is already present +# - Updates crate if newer version or different version is requested +# +# Variables: +# RUST_TOOLCHAIN - Rust toolchain to install (default: stable) +# RUST_CRATES - Comma-separated list of crates (e.g. "cargo-edit,wasm-pack@0.12.1") +# ------------------------------------------------------------------------------ + +function setup_rust() { + local RUST_TOOLCHAIN="${RUST_TOOLCHAIN:-stable}" + local RUST_CRATES="${RUST_CRATES:-}" + local CARGO_BIN="${HOME}/.cargo/bin" + + # rustup & toolchain + if ! command -v rustup &>/dev/null; then + msg_info "Setup Rust" + curl -fsSL https://sh.rustup.rs | $STD sh -s -- -y --default-toolchain "$RUST_TOOLCHAIN" + export PATH="$CARGO_BIN:$PATH" + echo 'export PATH="$HOME/.cargo/bin:$PATH"' >>"$HOME/.profile" + msg_ok "Setup Rust" + else + $STD rustup install "$RUST_TOOLCHAIN" + $STD rustup default "$RUST_TOOLCHAIN" + $STD rustup update "$RUST_TOOLCHAIN" + msg_ok "Rust toolchain set to $RUST_TOOLCHAIN" + fi + + # install/update crates + if [[ -n "$RUST_CRATES" ]]; then + IFS=',' read -ra CRATES <<<"$RUST_CRATES" + for crate in "${CRATES[@]}"; do + local NAME VER INSTALLED_VER + if [[ "$crate" == *"@"* ]]; then + NAME="${crate%@*}" + VER="${crate##*@}" + else + NAME="$crate" + VER="" + fi + + INSTALLED_VER=$(cargo install --list 2>/dev/null | awk "/^$NAME v[0-9]/ {print \$2}" | tr -d 'v') + + if [[ -n "$INSTALLED_VER" ]]; then + if [[ -n "$VER" && "$VER" != "$INSTALLED_VER" ]]; then + msg_info "Update $NAME: $INSTALLED_VER → $VER" + $STD cargo install "$NAME" --version "$VER" --force + msg_ok "Updated $NAME to $VER" + elif [[ -z "$VER" ]]; then + msg_info "Update $NAME: $INSTALLED_VER → latest" + $STD cargo install "$NAME" --force + msg_ok "Updated $NAME to 
latest" + fi + else + msg_info "Setup $NAME ${VER:+($VER)}" + $STD cargo install "$NAME" ${VER:+--version "$VER"} + msg_ok "Setup $NAME ${VER:-latest}" + fi + done + msg_ok "Setup Rust" + fi +} + +# ------------------------------------------------------------------------------ +# Installs Adminer (Debian/Ubuntu via APT, Alpine via direct download). +# +# Description: +# - Adds Adminer to Apache or web root +# - Supports Alpine and Debian-based systems +# ------------------------------------------------------------------------------ + +function setup_adminer() { + if grep -qi alpine /etc/os-release; then + msg_info "Setup Adminer (Alpine)" + mkdir -p /var/www/localhost/htdocs/adminer + if ! curl -fsSL https://github.com/vrana/adminer/releases/latest/download/adminer.php \ + -o /var/www/localhost/htdocs/adminer/index.php; then + msg_error "Failed to download Adminer" + return 1 + fi + msg_ok "Adminer available at /adminer (Alpine)" + else + msg_info "Setup Adminer (Debian/Ubuntu)" + $STD apt-get install -y adminer + $STD a2enconf adminer + $STD systemctl reload apache2 + msg_ok "Adminer available at /adminer (Debian/Ubuntu)" + fi +} + +# ------------------------------------------------------------------------------ +# Installs or updates yq (mikefarah/yq - Go version). +# +# Description: +# - Checks if yq is installed and from correct source +# - Compares with latest release on GitHub +# - Updates if outdated or wrong implementation +# ------------------------------------------------------------------------------ + +function setup_yq() { + local TMP_DIR + TMP_DIR=$(mktemp -d) + local CURRENT_VERSION="" + local BINARY_PATH="/usr/local/bin/yq" + local GITHUB_REPO="mikefarah/yq" + + if ! command -v jq &>/dev/null; then + $STD apt-get update + $STD apt-get install -y jq || { + msg_error "Failed to install jq" + rm -rf "$TMP_DIR" + return 1 + } + fi + + if command -v yq &>/dev/null; then + if ! 
yq --version 2>&1 | grep -q 'mikefarah'; then + rm -f "$(command -v yq)" + else + CURRENT_VERSION=$(yq --version | awk '{print $NF}' | sed 's/^v//') + fi + fi + + local RELEASE_JSON + RELEASE_JSON=$(curl -fsSL "https://api.github.com/repos/${GITHUB_REPO}/releases/latest") + local LATEST_VERSION + LATEST_VERSION=$(echo "$RELEASE_JSON" | jq -r '.tag_name' | sed 's/^v//') + + if [[ -z "$LATEST_VERSION" ]]; then + msg_error "Could not determine latest yq version from GitHub." + rm -rf "$TMP_DIR" + return 1 + fi + + if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$LATEST_VERSION" ]]; then + return + fi + + msg_info "Setup yq ($LATEST_VERSION)" + curl -fsSL "https://github.com/${GITHUB_REPO}/releases/download/v${LATEST_VERSION}/yq_linux_amd64" -o "$TMP_DIR/yq" + chmod +x "$TMP_DIR/yq" + mv "$TMP_DIR/yq" "$BINARY_PATH" + + if [[ ! -x "$BINARY_PATH" ]]; then + msg_error "Failed to install yq to $BINARY_PATH" + rm -rf "$TMP_DIR" + return 1 + fi + + rm -rf "$TMP_DIR" + hash -r + + local FINAL_VERSION + FINAL_VERSION=$("$BINARY_PATH" --version 2>/dev/null | awk '{print $NF}') + if [[ "$FINAL_VERSION" == "v$LATEST_VERSION" ]]; then + msg_ok "Setup yq ($LATEST_VERSION)" + else + msg_error "yq installation incomplete or version mismatch" + fi +} + +# ------------------------------------------------------------------------------ +# Installs ImageMagick 7 from source (Debian/Ubuntu only). +# +# Description: +# - Downloads the latest ImageMagick source tarball +# - Builds and installs ImageMagick to /usr/local +# - Configures dynamic linker (ldconfig) +# +# Notes: +# - Requires: build-essential, libtool, libjpeg-dev, libpng-dev, etc. 
+# ------------------------------------------------------------------------------ +function setup_imagemagick() { + local TMP_DIR + TMP_DIR=$(mktemp -d) + local VERSION="" + local BINARY_PATH="/usr/local/bin/magick" + + if command -v magick &>/dev/null; then + VERSION=$(magick -version | awk '/^Version/ {print $3}') + msg_ok "ImageMagick already installed ($VERSION)" + return 0 + fi + + msg_info "Setup ImageMagick (Patience)" + $STD apt-get update + $STD apt-get install -y \ + build-essential \ + libtool \ + libjpeg-dev \ + libpng-dev \ + libtiff-dev \ + libwebp-dev \ + libheif-dev \ + libde265-dev \ + libopenjp2-7-dev \ + libxml2-dev \ + liblcms2-dev \ + libfreetype6-dev \ + libraw-dev \ + libfftw3-dev \ + liblqr-1-0-dev \ + libgsl-dev \ + pkg-config \ + ghostscript + + curl -fsSL https://imagemagick.org/archive/ImageMagick.tar.gz -o "$TMP_DIR/ImageMagick.tar.gz" + tar -xzf "$TMP_DIR/ImageMagick.tar.gz" -C "$TMP_DIR" + cd "$TMP_DIR"/ImageMagick-* || { + msg_error "Source extraction failed" + rm -rf "$TMP_DIR" + return 1 + } + + ./configure --disable-static >/dev/null + $STD make + $STD make install + $STD ldconfig /usr/local/lib + + if [[ ! -x "$BINARY_PATH" ]]; then + msg_error "ImageMagick installation failed" + rm -rf "$TMP_DIR" + return 1 + fi + + VERSION=$("$BINARY_PATH" -version | awk '/^Version/ {print $3}') + rm -rf "$TMP_DIR" + ensure_usr_local_bin_persist + msg_ok "Setup ImageMagick $VERSION" +} + +# ------------------------------------------------------------------------------ +# Installs FFmpeg from source or prebuilt binary (Debian/Ubuntu only). +# +# Description: +# - Downloads and builds FFmpeg from GitHub (https://github.com/FFmpeg/FFmpeg) +# - Supports specific version override via FFMPEG_VERSION (e.g. 
n7.1.1) +# - Supports build profile via FFMPEG_TYPE: +# - minimal : x264, vpx, mp3 only +# - medium : adds subtitles, fonts, opus, vorbis +# - full : adds dav1d, svt-av1, zlib, numa +# - binary : downloads static build (johnvansickle.com) +# - Defaults to latest stable version and full feature set +# +# Notes: +# - Requires: curl, jq, build-essential, and matching codec libraries +# - Result is installed to /usr/local/bin/ffmpeg +# ------------------------------------------------------------------------------ + +function setup_ffmpeg() { + local TMP_DIR + TMP_DIR=$(mktemp -d) + local GITHUB_REPO="FFmpeg/FFmpeg" + local VERSION="${FFMPEG_VERSION:-latest}" + local TYPE="${FFMPEG_TYPE:-full}" + local BIN_PATH="/usr/local/bin/ffmpeg" + + # Binary fallback mode + if [[ "$TYPE" == "binary" ]]; then + msg_info "Installing FFmpeg (static binary)" + curl -fsSL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o "$TMP_DIR/ffmpeg.tar.xz" + tar -xf "$TMP_DIR/ffmpeg.tar.xz" -C "$TMP_DIR" + local EXTRACTED_DIR + EXTRACTED_DIR=$(find "$TMP_DIR" -maxdepth 1 -type d -name "ffmpeg-*") + cp "$EXTRACTED_DIR/ffmpeg" "$BIN_PATH" + cp "$EXTRACTED_DIR/ffprobe" /usr/local/bin/ffprobe + chmod +x "$BIN_PATH" /usr/local/bin/ffprobe + rm -rf "$TMP_DIR" + msg_ok "Installed FFmpeg binary ($($BIN_PATH -version | head -n1))" + return + fi + + if ! 
command -v jq &>/dev/null; then + $STD apt-get update + $STD apt-get install -y jq + fi + + # Auto-detect latest stable version if none specified + if [[ "$VERSION" == "latest" || -z "$VERSION" ]]; then + msg_info "Resolving latest FFmpeg tag" + VERSION=$(curl -fsSL "https://api.github.com/repos/${GITHUB_REPO}/tags" | + jq -r '.[].name' | + grep -E '^n[0-9]+\.[0-9]+\.[0-9]+$' | + sort -V | tail -n1) + fi + + if [[ -z "$VERSION" ]]; then + msg_error "Could not determine FFmpeg version" + rm -rf "$TMP_DIR" + return 1 + fi + + msg_info "Installing FFmpeg ${VERSION} ($TYPE)" + + # Dependency selection + local DEPS=(build-essential yasm nasm pkg-config) + case "$TYPE" in + minimal) + DEPS+=(libx264-dev libvpx-dev libmp3lame-dev) + ;; + medium) + DEPS+=(libx264-dev libvpx-dev libmp3lame-dev libfreetype6-dev libass-dev libopus-dev libvorbis-dev) + ;; + full) + DEPS+=( + libx264-dev libx265-dev libvpx-dev libmp3lame-dev + libfreetype6-dev libass-dev libopus-dev libvorbis-dev + libdav1d-dev libsvtav1-dev zlib1g-dev libnuma-dev + libva-dev libdrm-dev + ) + ;; + *) + msg_error "Invalid FFMPEG_TYPE: $TYPE" + rm -rf "$TMP_DIR" + return 1 + ;; + esac + + $STD apt-get update + $STD apt-get install -y "${DEPS[@]}" + + curl -fsSL "https://github.com/${GITHUB_REPO}/archive/refs/tags/${VERSION}.tar.gz" -o "$TMP_DIR/ffmpeg.tar.gz" + tar -xzf "$TMP_DIR/ffmpeg.tar.gz" -C "$TMP_DIR" + cd "$TMP_DIR/FFmpeg-"* || { + msg_error "Source extraction failed" + rm -rf "$TMP_DIR" + return 1 + } + + local args=( + --enable-gpl + --enable-shared + --enable-nonfree + --disable-static + --enable-libx264 + --enable-libvpx + --enable-libmp3lame + ) + + if [[ "$TYPE" != "minimal" ]]; then + args+=(--enable-libfreetype --enable-libass --enable-libopus --enable-libvorbis) + fi + + if [[ "$TYPE" == "full" ]]; then + args+=(--enable-libx265 --enable-libdav1d --enable-zlib) + args+=(--enable-vaapi --enable-libdrm) + fi + + if [[ ${#args[@]} -eq 0 ]]; then + msg_error "FFmpeg configure args array is empty – 
aborting." + rm -rf "$TMP_DIR" + return 1 + fi + + ./configure "${args[@]}" >"$TMP_DIR/configure.log" 2>&1 || { + msg_error "FFmpeg ./configure failed (see $TMP_DIR/configure.log)" + cat "$TMP_DIR/configure.log" | tail -n 20 + rm -rf "$TMP_DIR" + return 1 + } + + $STD make -j"$(nproc)" + $STD make install + echo "/usr/local/lib" >/etc/ld.so.conf.d/ffmpeg.conf + ldconfig + + ldconfig -p | grep libavdevice >/dev/null || { + msg_error "libavdevice not registered with dynamic linker" + return 1 + } + + if ! command -v ffmpeg &>/dev/null; then + msg_error "FFmpeg installation failed" + rm -rf "$TMP_DIR" + return 1 + fi + + local FINAL_VERSION + FINAL_VERSION=$(ffmpeg -version | head -n1 | awk '{print $3}') + rm -rf "$TMP_DIR" + ensure_usr_local_bin_persist + msg_ok "Setup FFmpeg $FINAL_VERSION" +} + +# ------------------------------------------------------------------------------ +# Checks for new GitHub release (latest tag). +# +# Description: +# - Queries the GitHub API for the latest release tag +# - Compares it to a local cached version (~/.) +# - If newer, sets global CHECK_UPDATE_RELEASE and returns 0 +# +# Usage: +# if check_for_gh_release "flaresolverr" "FlareSolverr/FlareSolverr" [optional] "v1.1.1"; then +# # trigger update... +# fi +# exit 0 +# } (end of update_script not from the function) +# +# Notes: +# - Requires `jq` (auto-installed if missing) +# - Does not modify anything, only checks version state +# - Does not support pre-releases +# ------------------------------------------------------------------------------ +check_for_gh_release() { + local app="$1" + local source="$2" + local pinned_version_in="${3:-}" # optional + local app_lc="${app,,}" + local current_file="$HOME/.${app_lc}" + + msg_info "Checking for update: ${app}" + + # DNS check + if ! getent hosts api.github.com >/dev/null 2>&1; then + msg_error "Network error: cannot resolve api.github.com" + return 1 + fi + + # jq check + if ! 
command -v jq &>/dev/null; then + $STD apt-get update -qq + $STD apt-get install -y jq || { + msg_error "Failed to install jq" + return 1 + } + fi + + # Fetch releases and exclude drafts/prereleases + local releases_json + releases_json=$(curl -fsSL --max-time 20 \ + -H 'Accept: application/vnd.github+json' \ + -H 'X-GitHub-Api-Version: 2022-11-28' \ + "https://api.github.com/repos/${source}/releases") || { + msg_error "Unable to fetch releases for ${app}" + return 1 + } + + mapfile -t raw_tags < <(jq -r '.[] | select(.draft==false and .prerelease==false) | .tag_name' <<<"$releases_json") + if ((${#raw_tags[@]} == 0)); then + msg_error "No stable releases found for ${app}" + return 1 + fi + + local clean_tags=() + for t in "${raw_tags[@]}"; do + clean_tags+=("${t#v}") + done + + local latest_raw="${raw_tags[0]}" + local latest_clean="${clean_tags[0]}" + + # current installed (stored without v) + local current="" + if [[ -f "$current_file" ]]; then + current="$(<"$current_file")" + else + # Migration: search for any /opt/*_version.txt + local legacy_files + mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null) + if ((${#legacy_files[@]} == 1)); then + current="$(<"${legacy_files[0]}")" + echo "${current#v}" >"$current_file" + rm -f "${legacy_files[0]}" + fi + fi + current="${current#v}" + + # Pinned version handling + if [[ -n "$pinned_version_in" ]]; then + local pin_clean="${pinned_version_in#v}" + local match_raw="" + for i in "${!clean_tags[@]}"; do + if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then + match_raw="${raw_tags[$i]}" + break + fi + done + + if [[ -z "$match_raw" ]]; then + msg_error "Pinned version ${pinned_version_in} not found upstream" + return 1 + fi + + if [[ "$current" != "$pin_clean" ]]; then + msg_info "${app} pinned to ${pinned_version_in} (installed ${current:-none}) → update required" + CHECK_UPDATE_RELEASE="$match_raw" + return 0 + fi + + if [[ "$pin_clean" == "$latest_clean" ]]; then + msg_ok 
"${app} pinned to ${pinned_version_in} (up to date)" + else + msg_ok "${app} pinned to ${pinned_version_in} (already installed, upstream ${latest_raw})" + fi + return 1 + fi + + # No pinning → use latest + if [[ -z "$current" || "$current" != "$latest_clean" ]]; then + CHECK_UPDATE_RELEASE="$latest_raw" + msg_info "New release available: ${latest_raw} (current: v${current:-none})" + return 0 + fi + + msg_ok "${app} is up to date (${latest_raw})" + return 1 +} \ No newline at end of file diff --git a/scripts/ct/2fauth.sh b/scripts/ct/2fauth.sh deleted file mode 100644 index ea78683..0000000 --- a/scripts/ct/2fauth.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env bash -SCRIPT_DIR="$(dirname "$0")" -source "$SCRIPT_DIR/../core/build.func" -# Copyright (c) 2021-2025 community-scripts ORG -# Author: jkrgr0 -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://docs.2fauth.app/ - -APP="2FAuth" -var_tags="${var_tags:-2fa;authenticator}" -var_cpu="${var_cpu:-1}" -var_ram="${var_ram:-512}" -var_disk="${var_disk:-2}" -var_os="${var_os:-debian}" -var_version="${var_version:-12}" -var_unprivileged="${var_unprivileged:-1}" - -header_info "$APP" -variables -color -catch_errors - -function update_script() { - header_info - check_container_storage - check_container_resources - - if [[ ! -d "/opt/2fauth" ]]; then - msg_error "No ${APP} Installation Found!" - exit - fi - if check_for_gh_release "2fauth" "Bubka/2FAuth"; then - $STD apt-get update - $STD apt-get -y upgrade - - msg_info "Creating Backup" - mv "/opt/2fauth" "/opt/2fauth-backup" - if ! dpkg -l | grep -q 'php8.3'; then - cp /etc/nginx/conf.d/2fauth.conf /etc/nginx/conf.d/2fauth.conf.bak - fi - msg_ok "Backup Created" - - if ! 
dpkg -l | grep -q 'php8.3'; then - $STD apt-get install -y \ - lsb-release \ - gnupg2 - PHP_VERSION="8.3" PHP_MODULE="common,ctype,fileinfo,mysql,cli" PHP_FPM="YES" setup_php - sed -i 's/php8.2/php8.3/g' /etc/nginx/conf.d/2fauth.conf - fi - fetch_and_deploy_gh_release "2fauth" "Bubka/2FAuth" - setup_composer - mv "/opt/2fauth-backup/.env" "/opt/2fauth/.env" - mv "/opt/2fauth-backup/storage" "/opt/2fauth/storage" - cd "/opt/2fauth" || return - chown -R www-data: "/opt/2fauth" - chmod -R 755 "/opt/2fauth" - export COMPOSER_ALLOW_SUPERUSER=1 - $STD composer install --no-dev --prefer-source - php artisan 2fauth:install - $STD systemctl restart nginx - - msg_info "Cleaning Up" - if dpkg -l | grep -q 'php8.2'; then - $STD apt-get remove --purge -y php8.2* - fi - $STD apt-get -y autoremove - $STD apt-get -y autoclean - msg_ok "Cleanup Completed" - msg_ok "Updated Successfully" - fi - exit -} - -start -build_container -description - -msg_ok "Completed Successfully!\n" -echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" -echo -e "${INFO}${YW} Access it using the following URL:${CL}" -echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:80${CL}" diff --git a/scripts/ct/actualbudget.sh b/scripts/ct/actualbudget.sh deleted file mode 100644 index 1c24cdb..0000000 --- a/scripts/ct/actualbudget.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env bash -SCRIPT_DIR="$(dirname "$0")" -source "$SCRIPT_DIR/../core/build.func" -# Copyright (c) 2021-2025 community-scripts ORG -# Author: MickLesk (CanbiZ) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://actualbudget.org/ - -APP="Actual Budget" -var_tags="${var_tags:-finance}" -var_cpu="${var_cpu:-2}" -var_ram="${var_ram:-2048}" -var_disk="${var_disk:-4}" -var_os="${var_os:-debian}" -var_version="${var_version:-12}" -var_unprivileged="${var_unprivileged:-1}" - -header_info "$APP" -variables -color -catch_errors - -function update_script() { - header_info - 
check_container_storage - check_container_resources - - if [[ ! -f /opt/actualbudget_version.txt ]]; then - msg_error "No ${APP} Installation Found!" - exit - fi - NODE_VERSION="22" - setup_nodejs - RELEASE=$(curl -fsSL https://api.github.com/repos/actualbudget/actual/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }') - if [[ -f /opt/actualbudget-data/config.json ]]; then - if [[ ! -f /opt/actualbudget_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/actualbudget_version.txt)" ]]; then - msg_info "Stopping ${APP}" - systemctl stop actualbudget - msg_ok "${APP} Stopped" - - msg_info "Updating ${APP} to ${RELEASE}" - $STD npm update -g @actual-app/sync-server - echo "${RELEASE}" >/opt/actualbudget_version.txt - msg_ok "Updated ${APP} to ${RELEASE}" - - msg_info "Starting ${APP}" - systemctl start actualbudget - msg_ok "Restarted ${APP}" - else - msg_info "${APP} is already up to date" - fi - else - msg_info "Old Installation Found, you need to migrate your data and recreate to a new container" - msg_info "Please follow the instructions on the ${APP} website to migrate your data" - msg_info "https://actualbudget.org/docs/backup-restore/backup" - exit 1 - fi - exit -} - -start -build_container -description - -msg_ok "Completed Successfully!\n" -echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" -echo -e "${INFO}${YW} Access it using the following URL:${CL}" -echo -e "${TAB}${GATEWAY}${BGN}https://${IP}:5006${CL}" diff --git a/scripts/ct/adguard.sh b/scripts/ct/adguard.sh deleted file mode 100644 index af1ec23..0000000 --- a/scripts/ct/adguard.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash -SCRIPT_DIR="$(dirname "$0")" -source "$SCRIPT_DIR/../core/build.func" -# Copyright (c) 2021-2025 tteck -# Author: tteck (tteckster) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://adguard.com/ - -APP="Adguard" -var_tags="${var_tags:-adblock}" -var_cpu="${var_cpu:-1}" 
-var_ram="${var_ram:-512}" -var_disk="${var_disk:-2}" -var_os="${var_os:-debian}" -var_version="${var_version:-12}" -var_unprivileged="${var_unprivileged:-1}" - -header_info "$APP" -variables -color -catch_errors - -function update_script() { - header_info - check_container_storage - check_container_resources - if [[ ! -d /opt/AdGuardHome ]]; then - msg_error "No ${APP} Installation Found!" - exit - fi - msg_error "Adguard Home should be updated via the user interface." - exit -} - -start -build_container -description - -msg_ok "Completed Successfully!\n" -echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" -echo -e "${INFO}${YW} Access it using the following URL:${CL}" -echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}" \ No newline at end of file diff --git a/scripts/ct/alpine-adguard.sh b/scripts/ct/alpine-adguard.sh deleted file mode 100644 index a9837bd..0000000 --- a/scripts/ct/alpine-adguard.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash -SCRIPT_DIR="$(dirname "$0")" -source "$SCRIPT_DIR/../core/build.func" -# Copyright (c) 2021-2025 community-scripts ORG -# Author: MickLesk (CanbiZ) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://adguardhome.com/ - -APP="Alpine-AdGuard" -var_tags="${var_tags:-alpine;adblock}" -var_cpu="${var_cpu:-1}" -var_ram="${var_ram:-256}" -var_disk="${var_disk:-1}" -var_os="${var_os:-alpine}" -var_version="${var_version:-3.22}" -var_unprivileged="${var_unprivileged:-1}" - -header_info "$APP" -variables -color -catch_errors - -function update_script() { - header_info - msg_info "Updating Alpine Packages" - $STD apk -U upgrade - msg_ok "Updated Alpine Packages" - - msg_info "Updating AdGuard Home" - $STD /opt/AdGuardHome/AdGuardHome --update - msg_ok "Updated AdGuard Home" - - msg_info "Restarting AdGuard Home" - $STD rc-service adguardhome restart - msg_ok "Restarted AdGuard Home" - - exit 0 -} - -start -build_container -description - -msg_ok 
"Completed Successfully!\n" -echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" -echo -e "${INFO}${YW} Access it using the following URL:${CL}" -echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}" diff --git a/scripts/ct/debian.sh b/scripts/ct/debian.sh old mode 100755 new mode 100644 index 3da7ac3..57c4b3d --- a/scripts/ct/debian.sh +++ b/scripts/ct/debian.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -SCRIPT_DIR="$(dirname "$0")" +SCRIPT_DIR="$(dirname "$0")" source "$SCRIPT_DIR/../core/build.func" # Copyright (c) 2021-2025 tteck # Author: tteck (tteckster) @@ -37,7 +37,7 @@ function update_script() { start build_container -description + msg_ok "Completed Successfully!\n" -echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" \ No newline at end of file +echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" diff --git a/scripts/install/2fauth-install.sh b/scripts/install/2fauth-install.sh deleted file mode 100644 index 8b190d7..0000000 --- a/scripts/install/2fauth-install.sh +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2021-2025 community-scripts ORG -# Author: jkrgr0 -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://docs.2fauth.app/ - -source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" -color -verb_ip6 -catch_errors -setting_up_container -network_check -update_os - -msg_info "Installing Dependencies" -$STD apt-get install -y \ - lsb-release \ - nginx -msg_ok "Installed Dependencies" - -PHP_VERSION="8.3" PHP_MODULE="common,ctype,fileinfo,mysql,cli" PHP_FPM="YES" setup_php -setup_composer -setup_mariadb - -msg_info "Setting up Database" -DB_NAME=2fauth_db -DB_USER=2fauth -DB_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13) -$STD mariadb -u root -e "CREATE DATABASE $DB_NAME;" -$STD mariadb -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';" -$STD mariadb -u root -e "GRANT ALL ON 
$DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;" -{ - echo "2FAuth Credentials" - echo "Database User: $DB_USER" - echo "Database Password: $DB_PASS" - echo "Database Name: $DB_NAME" -} >>~/2FAuth.creds -msg_ok "Set up Database" - -fetch_and_deploy_gh_release "2fauth" "Bubka/2FAuth" - -msg_info "Setup 2FAuth" -cd /opt/2fauth -cp .env.example .env -IPADDRESS=$(hostname -I | awk '{print $1}') -sed -i -e "s|^APP_URL=.*|APP_URL=http://$IPADDRESS|" \ - -e "s|^DB_CONNECTION=$|DB_CONNECTION=mysql|" \ - -e "s|^DB_DATABASE=$|DB_DATABASE=$DB_NAME|" \ - -e "s|^DB_HOST=$|DB_HOST=127.0.0.1|" \ - -e "s|^DB_PORT=$|DB_PORT=3306|" \ - -e "s|^DB_USERNAME=$|DB_USERNAME=$DB_USER|" \ - -e "s|^DB_PASSWORD=$|DB_PASSWORD=$DB_PASS|" .env -export COMPOSER_ALLOW_SUPERUSER=1 -$STD composer update --no-plugins --no-scripts -$STD composer install --no-dev --prefer-source --no-plugins --no-scripts -$STD php artisan key:generate --force -$STD php artisan migrate:refresh -$STD php artisan passport:install -q -n -$STD php artisan storage:link -$STD php artisan config:cache -chown -R www-data: /opt/2fauth -chmod -R 755 /opt/2fauth -msg_ok "Setup 2fauth" - -msg_info "Configure Service" -cat </etc/nginx/conf.d/2fauth.conf -server { - listen 80; - root /opt/2fauth/public; - server_name $IPADDRESS; - index index.php; - charset utf-8; - - location / { - try_files \$uri \$uri/ /index.php?\$query_string; - } - - location = /favicon.ico { access_log off; log_not_found off; } - location = /robots.txt { access_log off; log_not_found off; } - - error_page 404 /index.php; - - location ~ \.php\$ { - fastcgi_pass unix:/var/run/php/php8.3-fpm.sock; - fastcgi_param SCRIPT_FILENAME \$realpath_root\$fastcgi_script_name; - include fastcgi_params; - } - - location ~ /\.(?!well-known).* { - deny all; - } -} -EOF -systemctl reload nginx -msg_ok "Configured Service" - -motd_ssh -customize - -msg_info "Cleaning up" -$STD apt-get -y autoremove -$STD apt-get -y autoclean -msg_ok "Cleaned" diff --git 
a/scripts/install/actualbudget-install.sh b/scripts/install/actualbudget-install.sh deleted file mode 100644 index b9d1d8f..0000000 --- a/scripts/install/actualbudget-install.sh +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2021-2025 community-scripts ORG -# Author: MickLesk (CanbiZ) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://actualbudget.org/ - -source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" -color -verb_ip6 -catch_errors -setting_up_container -network_check -update_os - -msg_info "Installing Dependencies" -$STD apt-get install -y \ - make \ - g++ -msg_ok "Installed Dependencies" - -msg_info "Installing Actual Budget" -cd /opt -RELEASE=$(curl -fsSL https://api.github.com/repos/actualbudget/actual/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }') -NODE_VERSION="22" -setup_nodejs -mkdir -p /opt/actualbudget-data/{server-files,upload,migrate,user-files,migrations,config} -chown -R root:root /opt/actualbudget-data -chmod -R 755 /opt/actualbudget-data - -cat </opt/actualbudget-data/config.json -{ - "port": 5006, - "hostname": "::", - "serverFiles": "/opt/actualbudget-data/server-files", - "userFiles": "/opt/actualbudget-data/user-files", - "trustedProxies": [ - "10.0.0.0/8", - "172.16.0.0/12", - "192.168.0.0/16", - "127.0.0.0/8", - "::1/128", - "fc00::/7" - ], - "https": { - "key": "/opt/actualbudget/selfhost.key", - "cert": "/opt/actualbudget/selfhost.crt" - } -} -EOF - -mkdir -p /opt/actualbudget -cd /opt/actualbudget -$STD npm install --location=global @actual-app/sync-server -$STD openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout selfhost.key -out selfhost.crt <"/opt/actualbudget_version.txt" -msg_ok "Installed Actual Budget" - -msg_info "Creating Service" -cat </etc/systemd/system/actualbudget.service -[Unit] -Description=Actual Budget Service -After=network.target - -[Service] -Type=simple -User=root -Group=root 
-WorkingDirectory=/opt/actualbudget -Environment=ACTUAL_UPLOAD_FILE_SIZE_LIMIT_MB=20 -Environment=ACTUAL_UPLOAD_SYNC_ENCRYPTED_FILE_SYNC_SIZE_LIMIT_MB=50 -Environment=ACTUAL_UPLOAD_FILE_SYNC_SIZE_LIMIT_MB=20 -ExecStart=/usr/bin/actual-server --config /opt/actualbudget-data/config.json -Restart=always -RestartSec=10 - -[Install] -WantedBy=multi-user.target -EOF -systemctl enable -q --now actualbudget -msg_ok "Created Service" - -motd_ssh -customize - -msg_info "Cleaning up" -$STD apt-get -y autoremove -$STD apt-get -y autoclean -msg_ok "Cleaned" diff --git a/scripts/install/adguard-install.sh b/scripts/install/adguard-install.sh deleted file mode 100644 index 9d8e3b7..0000000 --- a/scripts/install/adguard-install.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2021-2025 tteck -# Author: tteck (tteckster) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://adguard.com/ - -source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" -color -verb_ip6 -catch_errors -setting_up_container -network_check -update_os - -msg_info "Installing AdGuard Home" -$STD tar zxvf <(curl -fsSL https://static.adtidy.org/adguardhome/release/AdGuardHome_linux_amd64.tar.gz) -C /opt -msg_ok "Installed AdGuard Home" - -msg_info "Creating Service" -cat </etc/systemd/system/AdGuardHome.service -[Unit] -Description=AdGuard Home: Network-level blocker -ConditionFileIsExecutable=/opt/AdGuardHome/AdGuardHome -After=syslog.target network-online.target - -[Service] -StartLimitInterval=5 -StartLimitBurst=10 -ExecStart=/opt/AdGuardHome/AdGuardHome "-s" "run" -WorkingDirectory=/opt/AdGuardHome -StandardOutput=file:/var/log/AdGuardHome.out -StandardError=file:/var/log/AdGuardHome.err -Restart=always -RestartSec=10 -EnvironmentFile=-/etc/sysconfig/AdGuardHome - -[Install] -WantedBy=multi-user.target -EOF -systemctl enable -q --now AdGuardHome -msg_ok "Created Service" - -motd_ssh -customize - -msg_info "Cleaning up" -$STD apt-get -y autoremove 
-$STD apt-get -y autoclean -msg_ok "Cleaned" diff --git a/scripts/install/debian-install.sh b/scripts/install/debian-install.sh index 389ef7a..e68ea94 100755 --- a/scripts/install/debian-install.sh +++ b/scripts/install/debian-install.sh @@ -19,4 +19,4 @@ customize msg_info "Cleaning up" $STD apt-get -y autoremove $STD apt-get -y autoclean -msg_ok "Cleaned" \ No newline at end of file +msg_ok "Cleaned" diff --git a/server.js b/server.js index 5869cc1..4d4c744 100644 --- a/server.js +++ b/server.js @@ -66,15 +66,12 @@ class ScriptExecutionHandler { async handleMessage(ws, message) { const { action, scriptPath, executionId, input } = message; - console.log('Handling message:', { action, scriptPath, executionId }); switch (action) { case 'start': if (scriptPath && executionId) { - console.log('Starting script execution for:', scriptPath); await this.startScriptExecution(ws, scriptPath, executionId); } else { - console.log('Missing scriptPath or executionId'); this.sendMessage(ws, { type: 'error', data: 'Missing scriptPath or executionId', diff --git a/src/app/_components/DiffViewer.tsx b/src/app/_components/DiffViewer.tsx new file mode 100644 index 0000000..a169241 --- /dev/null +++ b/src/app/_components/DiffViewer.tsx @@ -0,0 +1,152 @@ +'use client'; + +import { useState } from 'react'; +import { api } from '~/trpc/react'; + +interface DiffViewerProps { + scriptSlug: string; + filePath: string; + isOpen: boolean; + onClose: () => void; +} + +export function DiffViewer({ scriptSlug, filePath, isOpen, onClose }: DiffViewerProps) { + const [isLoading, setIsLoading] = useState(false); + + // Get diff content + const { data: diffData, refetch } = api.scripts.getScriptDiff.useQuery( + { slug: scriptSlug, filePath }, + { enabled: isOpen && !!scriptSlug && !!filePath } + ); + + const handleBackdropClick = (e: React.MouseEvent) => { + if (e.target === e.currentTarget) { + onClose(); + } + }; + + const handleRefresh = async () => { + setIsLoading(true); + await refetch(); + 
setIsLoading(false); + }; + + if (!isOpen) return null; + + const renderDiffLine = (line: string, index: number) => { + const lineNumber = line.match(/^([+-]?\d+):/)?.[1]; + const content = line.replace(/^[+-]?\d+:\s*/, ''); + const isAdded = line.startsWith('+'); + const isRemoved = line.startsWith('-'); + const isContext = line.startsWith(' '); + + return ( +
+
+ {lineNumber} +
+
+ + {isAdded ? '+' : isRemoved ? '-' : ' '} + + {content} +
+
+ ); + }; + + return ( +
+
+ {/* Header */} +
+
+

Script Diff

+

{filePath}

+
+
+ + +
+
+ + {/* Legend */} +
+
+
+
+ Added (Remote) +
+
+
+ Removed (Local) +
+
+
+ Unchanged +
+
+
+ + {/* Diff Content */} +
+ {diffData?.success ? ( + diffData.diff ? ( +
+ {diffData.diff.split('\n').map((line, index) => + line.trim() ? renderDiffLine(line, index) : null + )} +
+ ) : ( +
+ + + +

No differences found

+

The local and remote files are identical

+
+ ) + ) : diffData?.error ? ( +
+ + + +

Error loading diff

+

{diffData.error}

+
+ ) : ( +
+
+

Loading diff...

+
+ )} +
+
+
+ ); +} diff --git a/src/app/_components/RepoStatus.tsx b/src/app/_components/RepoStatus.tsx deleted file mode 100644 index ec02cea..0000000 --- a/src/app/_components/RepoStatus.tsx +++ /dev/null @@ -1,117 +0,0 @@ -'use client'; - -import { api } from '~/trpc/react'; -import { useState } from 'react'; - -export function RepoStatus() { - const [isUpdating, setIsUpdating] = useState(false); - - const { data: repoStatus, isLoading: statusLoading, refetch: refetchStatus } = api.scripts.getRepoStatus.useQuery(); - const updateRepoMutation = api.scripts.updateRepo.useMutation({ - onSuccess: () => { - setIsUpdating(false); - refetchStatus(); - }, - onError: () => { - setIsUpdating(false); - } - }); - - const handleUpdate = async () => { - setIsUpdating(true); - updateRepoMutation.mutate(); - }; - - if (statusLoading) { - return ( -
-
-
Loading repository status...
-
-
- ); - } - - if (!repoStatus) { - return ( -
-
Failed to load repository status
-
- ); - } - - const getStatusColor = () => { - if (!repoStatus.isRepo) return 'text-gray-500'; - if (repoStatus.isBehind) return 'text-yellow-600'; - return 'text-green-600'; - }; - - const getStatusText = () => { - if (!repoStatus.isRepo) return 'No repository found'; - if (repoStatus.isBehind) return 'Behind remote'; - return 'Up to date'; - }; - - const getStatusIcon = () => { - if (!repoStatus.isRepo) return '❓'; - if (repoStatus.isBehind) return '⚠️'; - return '✅'; - }; - - return ( -
-
-
- {getStatusIcon()} -
-

Repository Status

-
- {getStatusText()} -
- {repoStatus.isRepo && ( -
-

Branch: {repoStatus.branch || 'Unknown'}

- {repoStatus.lastCommit && ( -

Last commit: {repoStatus.lastCommit.substring(0, 8)}

- )} -
- )} -
-
- -
- - - -
-
- - {updateRepoMutation.isSuccess && ( -
- ✅ {updateRepoMutation.data?.message} -
- )} - - {updateRepoMutation.isError && ( -
- ❌ {updateRepoMutation.error?.message || 'Failed to update repository'} -
- )} -
- ); -} diff --git a/src/app/_components/ScriptCard.tsx b/src/app/_components/ScriptCard.tsx index d87b265..b50b683 100644 --- a/src/app/_components/ScriptCard.tsx +++ b/src/app/_components/ScriptCard.tsx @@ -17,10 +17,10 @@ export function ScriptCard({ script, onClick }: ScriptCardProps) { return (
onClick(script)} > -
+
{/* Header with logo and name */}
@@ -43,31 +43,48 @@ export function ScriptCard({ script, onClick }: ScriptCardProps) {

{script.name || 'Unnamed Script'}

-
- - {script.type?.toUpperCase() || 'UNKNOWN'} - - {script.updateable && ( - - Updateable +
+ {/* Type and Updateable status on first row */} +
+ + {script.type?.toUpperCase() || 'UNKNOWN'} - )} + {script.updateable && ( + + Updateable + + )} +
+ + {/* Download Status */} +
+
+ + {script.isDownloaded ? 'Downloaded' : 'Not Downloaded'} + +
{/* Description */} -

+

{script.description || 'No description available'}

{/* Footer with website link */} {script.website && ( -
+
(null); + const [diffViewerOpen, setDiffViewerOpen] = useState(false); + const [selectedDiffFile, setSelectedDiffFile] = useState(null); + const [textViewerOpen, setTextViewerOpen] = useState(false); // Check if script files exist locally const { data: scriptFilesData, refetch: refetchScriptFiles } = api.scripts.checkScriptFiles.useQuery( @@ -22,6 +27,12 @@ export function ScriptDetailModal({ script, isOpen, onClose, onInstallScript }: { enabled: !!script && isOpen } ); + // Compare local and remote script content + const { data: comparisonData, refetch: refetchComparison } = api.scripts.compareScriptContent.useQuery( + { slug: script?.slug ?? '' }, + { enabled: !!script && isOpen && scriptFilesData?.success && (scriptFilesData.ctExists || scriptFilesData.installExists) } + ); + // Load script mutation const loadScriptMutation = api.scripts.loadScript.useMutation({ onSuccess: (data) => { @@ -29,8 +40,9 @@ export function ScriptDetailModal({ script, isOpen, onClose, onInstallScript }: if (data.success) { const message = 'message' in data ? data.message : 'Script loaded successfully'; setLoadMessage(`✅ ${message}`); - // Refetch script files status to update the UI + // Refetch script files status and comparison data to update the UI refetchScriptFiles(); + refetchComparison(); } else { const error = 'error' in data ? data.error : 'Failed to load script'; setLoadMessage(`❌ ${error}`); @@ -78,6 +90,15 @@ export function ScriptDetailModal({ script, isOpen, onClose, onInstallScript }: } }; + const handleShowDiff = (filePath: string) => { + setSelectedDiffFile(filePath); + setDiffViewerOpen(true); + }; + + const handleViewScript = () => { + setTextViewerOpen(true); + }; + return (
Install )} + + {/* View Button - only show if script files exist */} + {scriptFilesData?.success && (scriptFilesData.ctExists || scriptFilesData.installExists) && ( + + )} - {/* Load Script Button */} - + {/* Load/Update Script Button */} + {(() => { + const hasLocalFiles = scriptFilesData?.success && (scriptFilesData.ctExists || scriptFilesData.installExists); + const hasDifferences = comparisonData?.success && comparisonData.hasDifferences; + const isUpToDate = hasLocalFiles && !hasDifferences; + + if (!hasLocalFiles) { + // No local files - show Load Script button + return ( + + ); + } else if (isUpToDate) { + // Local files exist and are up to date - show disabled Update button + return ( + + ); + } else { + // Local files exist but have differences - show Update button + return ( + + ); + } + })()}
Install Script: {scriptFilesData.installExists ? 'Available' : 'Not loaded'}
+ {scriptFilesData?.success && (scriptFilesData.ctExists || scriptFilesData.installExists) && comparisonData?.success && ( +
+
+ Status: {comparisonData.hasDifferences ? 'Update available' : 'Up to date'} +
+ )}
{scriptFilesData.files.length > 0 && (
Files: {scriptFilesData.files.join(', ')}
)} + {scriptFilesData?.success && (scriptFilesData.ctExists || scriptFilesData.installExists) && + comparisonData?.success && comparisonData.hasDifferences && comparisonData.differences.length > 0 && ( +
+
+ Differences in: {comparisonData.differences.join(', ')} +
+
+ {comparisonData.differences.map((filePath, index) => ( + + ))} +
+
+ )}
)} @@ -372,6 +486,28 @@ export function ScriptDetailModal({ script, isOpen, onClose, onInstallScript }: )}
+ + {/* Diff Viewer Modal */} + {selectedDiffFile && ( + { + setDiffViewerOpen(false); + setSelectedDiffFile(null); + }} + /> + )} + + {/* Text Viewer Modal */} + {script && ( + method.script?.startsWith('ct/'))?.script?.split('/').pop() || `${script.slug}.sh`} + isOpen={textViewerOpen} + onClose={() => setTextViewerOpen(false)} + /> + )}
); } diff --git a/src/app/_components/ScriptsGrid.tsx b/src/app/_components/ScriptsGrid.tsx index 1e0fe63..1439488 100644 --- a/src/app/_components/ScriptsGrid.tsx +++ b/src/app/_components/ScriptsGrid.tsx @@ -1,10 +1,10 @@ 'use client'; -import { useState } from 'react'; +import React, { useState } from 'react'; import { api } from '~/trpc/react'; import { ScriptCard } from './ScriptCard'; import { ScriptDetailModal } from './ScriptDetailModal'; -import type { ScriptCard as ScriptCardType, Script } from '~/types/script'; + interface ScriptsGridProps { onInstallScript?: (scriptPath: string, scriptName: string) => void; @@ -13,22 +13,86 @@ interface ScriptsGridProps { export function ScriptsGrid({ onInstallScript }: ScriptsGridProps) { const [selectedSlug, setSelectedSlug] = useState(null); const [isModalOpen, setIsModalOpen] = useState(false); + const [searchQuery, setSearchQuery] = useState(''); - const { data: scriptCardsData, isLoading, error, refetch } = api.scripts.getScriptCards.useQuery(); + const { data: scriptCardsData, isLoading: githubLoading, error: githubError, refetch } = api.scripts.getScriptCards.useQuery(); + const { data: localScriptsData, isLoading: localLoading, error: localError } = api.scripts.getCtScripts.useQuery(); const { data: scriptData } = api.scripts.getScriptBySlug.useQuery( { slug: selectedSlug ?? '' }, { enabled: !!selectedSlug } ); - // Debug logging - console.log('ScriptsGrid render:', { - isLoading, - error: error?.message, - scriptCardsData: scriptCardsData?.success, - cardsCount: scriptCardsData?.cards?.length - }); + // Get GitHub scripts with download status + const combinedScripts = React.useMemo(() => { + const githubScripts = scriptCardsData?.success ? 
scriptCardsData.cards + .filter(script => script && script.name) // Filter out invalid scripts + .map(script => ({ + ...script, + source: 'github' as const, + isDownloaded: false, // Will be updated by status check + isUpToDate: false, // Will be updated by status check + })) : []; - const handleCardClick = (scriptCard: ScriptCardType) => { + return githubScripts; + }, [scriptCardsData]); + + + // Update scripts with download status + const scriptsWithStatus = React.useMemo(() => { + return combinedScripts.map(script => { + if (!script || !script.name) { + return script; // Return as-is if invalid + } + + // Check if there's a corresponding local script + const hasLocalVersion = localScriptsData?.scripts?.some(local => { + if (!local || !local.name) return false; + const localName = local.name.replace(/\.sh$/, ''); + return localName.toLowerCase() === script.name.toLowerCase() || + localName.toLowerCase() === (script.slug || '').toLowerCase(); + }) ?? false; + + return { + ...script, + isDownloaded: hasLocalVersion, + // Removed isUpToDate - only show in modal for detailed comparison + }; + }); + }, [combinedScripts, localScriptsData]); + + // Filter scripts based on search query (name and slug only) + const filteredScripts = React.useMemo(() => { + if (!searchQuery || !searchQuery.trim()) { + return scriptsWithStatus; + } + + const query = searchQuery.toLowerCase().trim(); + + // If query is too short, don't filter + if (query.length < 1) { + return scriptsWithStatus; + } + + const filtered = scriptsWithStatus.filter(script => { + // Ensure script exists and has required properties + if (!script || typeof script !== 'object') { + return false; + } + + const name = (script.name || '').toLowerCase(); + const slug = (script.slug || '').toLowerCase(); + + const matches = name.includes(query) || slug.includes(query); + + return matches; + }); + + return filtered; + }, [scriptsWithStatus, searchQuery]); + + + const handleCardClick = (scriptCard: any) => { + // All 
scripts are GitHub scripts, open modal setSelectedSlug(scriptCard.slug); setIsModalOpen(true); }; @@ -38,7 +102,7 @@ export function ScriptsGrid({ onInstallScript }: ScriptsGridProps) { setSelectedSlug(null); }; - if (isLoading) { + if (githubLoading || localLoading) { return (
@@ -47,7 +111,7 @@ export function ScriptsGrid({ onInstallScript }: ScriptsGridProps) { ); } - if (error || !scriptCardsData?.success) { + if (githubError || localError) { return (
@@ -56,12 +120,8 @@ export function ScriptsGrid({ onInstallScript }: ScriptsGridProps) {

Failed to load scripts

- {scriptCardsData?.error || 'Unknown error occurred'} + {githubError?.message || localError?.message || 'Unknown error occurred'}

-
-

No JSON files found in scripts/json directory.

-

Use the "Resync Scripts" button to download from GitHub.

-
+ )} +
+ {searchQuery && ( +
+ {filteredScripts.length === 0 ? ( + No scripts found matching "{searchQuery}" + ) : ( + Found {filteredScripts.length} script{filteredScripts.length !== 1 ? 's' : ''} matching "{searchQuery}" + )} +
+ )} +
+ + {/* Scripts Grid */} + {filteredScripts.length === 0 && searchQuery ? ( +
+
+ + + +

No matching scripts found

+

+ Try adjusting your search terms or clear the search to see all scripts. +

+ +
+
+ ) : ( +
+ {filteredScripts.map((script, index) => { // Add validation to ensure script has required properties if (!script || typeof script !== 'object') { - console.warn('Invalid script object at index', index, script); return null; } @@ -109,7 +223,8 @@ export function ScriptsGrid({ onInstallScript }: ScriptsGridProps) { /> ); })} -
+ + )} void; -} - -export function ScriptsList({ onRunScript }: ScriptsListProps) { - const { data, isLoading, error, refetch } = api.scripts.getCtScripts.useQuery(); - const [selectedScript, setSelectedScript] = useState(null); - const [viewerScript, setViewerScript] = useState(null); - const [isViewerOpen, setIsViewerOpen] = useState(false); - - if (isLoading) { - return ( -
-
Loading scripts...
-
- ); - } - - if (error) { - return ( -
-
- Error loading scripts: {error.message} -
-
- ); - } - - if (!data?.scripts || data.scripts.length === 0) { - return ( -
-
No scripts found in the scripts directory
-
- ); - } - - const formatFileSize = (bytes: number): string => { - if (bytes === 0) return '0 Bytes'; - const k = 1024; - const sizes = ['Bytes', 'KB', 'MB', 'GB']; - const i = Math.floor(Math.log(bytes) / Math.log(k)); - return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]; - }; - - const formatDate = (date: Date): string => { - return new Date(date).toLocaleString(); - }; - - const getFileIcon = (extension: string): string => { - switch (extension) { - case '.sh': - case '.bash': - return '🐚'; - case '.py': - return '🐍'; - case '.js': - return '📜'; - case '.ts': - return '🔷'; - default: - return '📄'; - } - }; - - return ( -
-
-

Available Scripts

-

- Found {data.scripts.length} script(s) in {data.directoryInfo.path} -

-
-

Allowed extensions: {data.directoryInfo.allowedExtensions.join(', ')}

-

Max execution time: {Math.round(data.directoryInfo.maxExecutionTime / 1000)}s

-
-
- -
- {data.scripts.map((script) => ( -
-
-
- {script.logo ? ( - {`${script.name} { - // Fallback to file icon if logo fails to load - e.currentTarget.style.display = 'none'; - const nextElement = e.currentTarget.nextElementSibling as HTMLElement; - if (nextElement) { - nextElement.style.display = 'block'; - } - }} - /> - ) : null} - - {getFileIcon(script.extension)} - -
-

{script.name}

-
-

Size: {formatFileSize(script.size)}

-

Modified: {formatDate(script.lastModified)}

-

Extension: {script.extension}

-
-
-
- -
- - -
-
-
- ))} -
- - {selectedScript && ( -
- setSelectedScript(null)} - /> -
- )} - - { - setIsViewerOpen(false); - setViewerScript(null); - }} - /> -
- ); -} diff --git a/src/app/_components/Terminal.tsx b/src/app/_components/Terminal.tsx index 029e77d..ddd125d 100644 --- a/src/app/_components/Terminal.tsx +++ b/src/app/_components/Terminal.tsx @@ -112,13 +112,11 @@ export function Terminal({ scriptPath, onClose }: TerminalProps) { useEffect(() => { // Prevent multiple connections in React Strict Mode if (hasConnectedRef.current || isConnectingRef.current || (wsRef.current && wsRef.current.readyState === WebSocket.OPEN)) { - console.log('WebSocket already connected, connecting, or has connected, skipping...'); return; } // Close any existing connection first if (wsRef.current) { - console.log('Closing existing WebSocket connection'); wsRef.current.close(); wsRef.current = null; } @@ -132,14 +130,10 @@ export function Terminal({ scriptPath, onClose }: TerminalProps) { const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; const wsUrl = `${protocol}//${window.location.host}/ws/script-execution`; - console.log('Connecting to WebSocket:', wsUrl); const ws = new WebSocket(wsUrl); wsRef.current = ws; ws.onopen = () => { - console.log('WebSocket connected successfully'); - console.log('Script path:', scriptPath); - console.log('Execution ID:', executionId); setIsConnected(true); isConnectingRef.current = false; @@ -154,7 +148,6 @@ export function Terminal({ scriptPath, onClose }: TerminalProps) { ws.onmessage = (event) => { try { const message: TerminalMessage = JSON.parse(event.data); - console.log('Received message:', message); handleMessage(message); } catch (error) { console.error('Error parsing WebSocket message:', error); @@ -162,7 +155,6 @@ export function Terminal({ scriptPath, onClose }: TerminalProps) { }; ws.onclose = (event) => { - console.log('WebSocket disconnected:', event.code, event.reason); setIsConnected(false); setIsRunning(false); isConnectingRef.current = false; @@ -184,7 +176,6 @@ export function Terminal({ scriptPath, onClose }: TerminalProps) { isConnectingRef.current = false; 
hasConnectedRef.current = false; if (wsRef.current && (wsRef.current.readyState === WebSocket.OPEN || wsRef.current.readyState === WebSocket.CONNECTING)) { - console.log('Cleaning up WebSocket connection'); wsRef.current.close(); } }; diff --git a/src/app/page.tsx b/src/app/page.tsx index 4921ba9..3b22e21 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -2,15 +2,12 @@ 'use client'; import { useState } from 'react'; -import { ScriptsList } from './_components/ScriptsList'; import { ScriptsGrid } from './_components/ScriptsGrid'; -import { RepoStatus } from './_components/RepoStatus'; import { ResyncButton } from './_components/ResyncButton'; import { Terminal } from './_components/Terminal'; export default function Home() { const [runningScript, setRunningScript] = useState<{ path: string; name: string } | null>(null); - const [activeTab, setActiveTab] = useState<'local' | 'github'>('github'); const handleRunScript = (scriptPath: string, scriptName: string) => { setRunningScript({ path: scriptPath, name: scriptName }); @@ -26,39 +23,18 @@ export default function Home() { {/* Header */}

- 🚀 PVE Scripts Local Management + 🚀 PVE Scripts Management

Manage and execute Proxmox helper scripts locally with live output streaming

- {/* Script Source Tabs */} + {/* Resync Button */}
-
- - -
- {activeTab === 'github' && } +
+
@@ -73,11 +49,7 @@ export default function Home() { )} {/* Scripts List */} - {activeTab === 'github' ? ( - - ) : ( - - )} + ); diff --git a/src/server/api/routers/scripts.ts b/src/server/api/routers/scripts.ts index 4a6c785..bf074dd 100644 --- a/src/server/api/routers/scripts.ts +++ b/src/server/api/routers/scripts.ts @@ -219,5 +219,65 @@ export const scriptsRouter = createTRPCRouter({ files: [] }; } + }), + + // Compare local and remote script content + compareScriptContent: publicProcedure + .input(z.object({ slug: z.string() })) + .query(async ({ input }) => { + try { + const script = await localScriptsService.getScriptBySlug(input.slug); + if (!script) { + return { + success: false, + error: 'Script not found', + hasDifferences: false, + differences: [] + }; + } + + const result = await scriptDownloaderService.compareScriptContent(script); + return { + success: true, + ...result + }; + } catch (error) { + console.error('Error in compareScriptContent:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to compare script content', + hasDifferences: false, + differences: [] + }; + } + }), + + // Get diff content for a specific script file + getScriptDiff: publicProcedure + .input(z.object({ slug: z.string(), filePath: z.string() })) + .query(async ({ input }) => { + try { + const script = await localScriptsService.getScriptBySlug(input.slug); + if (!script) { + return { + success: false, + error: 'Script not found', + diff: null + }; + } + + const result = await scriptDownloaderService.getScriptDiff(script, input.filePath); + return { + success: true, + ...result + }; + } catch (error) { + console.error('Error in getScriptDiff:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to get script diff', + diff: null + }; + } }) }); diff --git a/src/server/api/trpc.ts b/src/server/api/trpc.ts index 4e24ba4..94c98f8 100644 --- a/src/server/api/trpc.ts +++ b/src/server/api/trpc.ts @@ -73,28 +73,6 @@ export const createCallerFactory = t.createCallerFactory; */ export const createTRPCRouter = t.router; -/** - * Middleware for timing procedure execution and adding an artificial delay in development. - * - * You can remove this if you don't like it, but it can help catch unwanted waterfalls by simulating - * network latency that would occur in production but not in local development. - */ -const timingMiddleware = t.middleware(async ({ next, path }) => { - const start = Date.now(); - - if (t._config.isDev) { - // artificial delay in dev - const waitMs = Math.floor(Math.random() * 400) + 100; - await new Promise((resolve) => setTimeout(resolve, waitMs)); - } - - const result = await next(); - - const end = Date.now(); - console.log(`[TRPC] ${path} took ${end - start}ms to execute`); - - return result; -}); /** * Public (unauthenticated) procedure @@ -103,4 +81,4 @@ const timingMiddleware = t.middleware(async ({ next, path }) => { * guarantee that a user querying is authorized, but you can still access user session data if they * are logged in. 
*/ -export const publicProcedure = t.procedure.use(timingMiddleware); +export const publicProcedure = t.procedure; diff --git a/src/server/api/websocket/handler.ts b/src/server/api/websocket/handler.ts index b899f60..9b8ebc0 100644 --- a/src/server/api/websocket/handler.ts +++ b/src/server/api/websocket/handler.ts @@ -1,7 +1,6 @@ -import { WebSocketServer, WebSocket } from 'ws'; -import { IncomingMessage } from 'http'; +import { WebSocketServer, type WebSocket } from 'ws'; +import type { IncomingMessage } from 'http'; import { scriptManager } from '~/server/lib/scripts'; -import { env } from '~/env.js'; interface ScriptExecutionMessage { type: 'start' | 'output' | 'error' | 'end'; @@ -22,8 +21,7 @@ export class ScriptExecutionHandler { this.wss.on('connection', this.handleConnection.bind(this)); } - private handleConnection(ws: WebSocket, request: IncomingMessage) { - console.log('New WebSocket connection for script execution'); + private handleConnection(ws: WebSocket, _request: IncomingMessage) { ws.on('message', (data) => { try { @@ -40,7 +38,6 @@ export class ScriptExecutionHandler { }); ws.on('close', () => { - console.log('WebSocket connection closed'); // Clean up any active executions for this connection this.cleanupActiveExecutions(ws); }); diff --git a/src/server/lib/git.ts b/src/server/lib/git.ts index 4df9cdb..83a2a9d 100644 --- a/src/server/lib/git.ts +++ b/src/server/lib/git.ts @@ -1,4 +1,4 @@ -import { simpleGit, SimpleGit } from 'simple-git'; +import { simpleGit, type SimpleGit } from 'simple-git'; import { env } from '~/env.js'; import { join } from 'path'; @@ -77,14 +77,13 @@ export class GitManager { return { success: false, message: 'No repository URL configured' }; } - console.log(`Cloning repository from ${env.REPO_URL}...`); // Clone the repository - await this.git.clone(env.REPO_URL, this.repoPath, { - '--branch': env.REPO_BRANCH, - '--single-branch': true, - '--depth': 1 - }); + await this.git.clone(env.REPO_URL, this.repoPath, [ + 
'--branch', env.REPO_BRANCH, + '--single-branch', + '--depth', '1' + ]); return { success: true, @@ -105,32 +104,23 @@ export class GitManager { async initializeRepository(): Promise { try { if (!env.REPO_URL) { - console.log('No remote repository configured, skipping initialization'); return; } const isRepo = await this.git.checkIsRepo(); if (!isRepo) { - console.log('Repository not found, cloning...'); const result = await this.cloneRepository(); if (result.success) { - console.log('Repository initialized successfully'); } else { - console.error('Failed to initialize repository:', result.message); } } else { - console.log('Repository already exists, checking for updates...'); const behind = await this.isBehindRemote(); if (behind) { - console.log('Repository is behind remote, pulling updates...'); const result = await this.pullUpdates(); if (result.success) { - console.log('Repository updated successfully'); } else { - console.error('Failed to update repository:', result.message); } } else { - console.log('Repository is up to date'); } } } catch (error) { @@ -160,8 +150,8 @@ export class GitManager { return { isRepo: true, isBehind, - lastCommit: log.latest?.hash, - branch: status.current + lastCommit: log.latest?.hash || undefined, + branch: status.current || undefined }; } catch (error) { console.error('Error getting repository status:', error); diff --git a/src/server/lib/scripts.ts b/src/server/lib/scripts.ts index 0285767..705614a 100644 --- a/src/server/lib/scripts.ts +++ b/src/server/lib/scripts.ts @@ -1,7 +1,7 @@ -import { readdir, stat, access } from 'fs/promises'; +import { readdir, stat } from 'fs/promises'; import { join, resolve, extname } from 'path'; import { env } from '~/env.js'; -import { spawn, ChildProcess } from 'child_process'; +import { spawn, type ChildProcess } from 'child_process'; import { localScriptsService } from '~/server/services/localScripts'; export interface ScriptInfo { @@ -98,7 +98,6 @@ export class ScriptManager { logo = 
scriptData?.logo || undefined; } catch (error) { // JSON file might not exist, that's okay - console.log(`No JSON data found for ${slug}:`, error); } scripts.push({ @@ -226,7 +225,6 @@ export class ScriptManager { const timeout = setTimeout(() => { if (!childProcess.killed) { childProcess.kill('SIGTERM'); - console.log(`Script execution timed out after ${this.maxExecutionTime}ms`); } }, this.maxExecutionTime); diff --git a/src/server/services/localScripts.ts b/src/server/services/localScripts.ts index 871b387..44cfc59 100644 --- a/src/server/services/localScripts.ts +++ b/src/server/services/localScripts.ts @@ -94,7 +94,6 @@ export class LocalScriptsService { await writeFile(filePath, content, 'utf-8'); } - console.log(`Successfully saved ${scripts.length} scripts to ${this.scriptsDirectory}`); } catch (error) { console.error('Error saving scripts from GitHub:', error); throw new Error('Failed to save scripts from GitHub'); diff --git a/src/server/services/scriptDownloader.ts b/src/server/services/scriptDownloader.ts index 6e29e66..0eaad6a 100644 --- a/src/server/services/scriptDownloader.ts +++ b/src/server/services/scriptDownloader.ts @@ -93,7 +93,6 @@ export class ScriptDownloaderService { files.push(`install/${installScriptName}`); } catch (error) { // Install script might not exist, that's okay - console.log(`Install script not found for ${script.slug}: ${error}`); } return { @@ -153,6 +152,209 @@ export class ScriptDownloaderService { return { ctExists: false, installExists: false, files: [] }; } } + + async compareScriptContent(script: Script): Promise<{ hasDifferences: boolean; differences: string[] }> { + const differences: string[] = []; + let hasDifferences = false; + + try { + // First check if any local files exist + const localFilesExist = await this.checkScriptExists(script); + if (!localFilesExist.ctExists && !localFilesExist.installExists) { + // No local files exist, so no comparison needed + return { hasDifferences: false, differences: [] }; + } 
+ + // Compare CT script only if it exists locally + if (localFilesExist.ctExists && script.install_methods && script.install_methods.length > 0) { + for (const method of script.install_methods) { + if (method.script && method.script.startsWith('ct/')) { + const fileName = method.script.split('/').pop(); + if (fileName) { + const localPath = join(this.scriptsDirectory, 'ct', fileName); + try { + // Read local content + const localContent = await readFile(localPath, 'utf-8'); + + // Download remote content + const remoteContent = await this.downloadFileFromGitHub(method.script); + + // Apply the same modification that would be applied during load + const modifiedRemoteContent = this.modifyScriptContent(remoteContent); + + // Compare content + if (localContent !== modifiedRemoteContent) { + hasDifferences = true; + differences.push(`ct/${fileName}`); + } + } catch (error) { + console.error(`Error comparing CT script ${fileName}:`, error); + // Don't add to differences if there's an error reading files + } + } + } + } + } + + // Compare install script only if it exists locally + if (localFilesExist.installExists) { + const installScriptName = `${script.slug}-install.sh`; + const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName); + try { + // Read local content + const localContent = await readFile(localInstallPath, 'utf-8'); + + // Download remote content + const remoteContent = await this.downloadFileFromGitHub(`install/${installScriptName}`); + + // Apply the same modification that would be applied during load + const modifiedRemoteContent = this.modifyScriptContent(remoteContent); + + // Compare content + if (localContent !== modifiedRemoteContent) { + hasDifferences = true; + differences.push(`install/${installScriptName}`); + } + } catch (error) { + console.error(`Error comparing install script ${installScriptName}:`, error); + // Don't add to differences if there's an error reading files + } + } + + return { hasDifferences, differences 
}; + } catch (error) { + console.error('Error comparing script content:', error); + return { hasDifferences: false, differences: [] }; + } + } + + async getScriptDiff(script: Script, filePath: string): Promise<{ diff: string | null; localContent: string | null; remoteContent: string | null }> { + try { + let localContent: string | null = null; + let remoteContent: string | null = null; + + if (filePath.startsWith('ct/')) { + // Handle CT script + const fileName = filePath.split('/').pop(); + if (fileName) { + const localPath = join(this.scriptsDirectory, 'ct', fileName); + try { + localContent = await readFile(localPath, 'utf-8'); + } catch (error) { + console.error('Error reading local CT script:', error); + } + + try { + // Find the corresponding script path in install_methods + const method = script.install_methods?.find(m => m.script === filePath); + if (method?.script) { + const downloadedContent = await this.downloadFileFromGitHub(method.script); + remoteContent = this.modifyScriptContent(downloadedContent); + } + } catch (error) { + console.error('Error downloading remote CT script:', error); + } + } + } else if (filePath.startsWith('install/')) { + // Handle install script + const localPath = join(this.scriptsDirectory, filePath); + try { + localContent = await readFile(localPath, 'utf-8'); + } catch (error) { + console.error('Error reading local install script:', error); + } + + try { + remoteContent = await this.downloadFileFromGitHub(filePath); + } catch (error) { + console.error('Error downloading remote install script:', error); + } + } + + if (!localContent || !remoteContent) { + return { diff: null, localContent, remoteContent }; + } + + // Generate diff using a simple line-by-line comparison + const diff = this.generateDiff(localContent, remoteContent); + return { diff, localContent, remoteContent }; + } catch (error) { + console.error('Error getting script diff:', error); + return { diff: null, localContent: null, remoteContent: null }; + } + } + + 
private generateDiff(localContent: string, remoteContent: string): string { + const localLines = localContent.split('\n'); + const remoteLines = remoteContent.split('\n'); + + let diff = ''; + let i = 0; + let j = 0; + + while (i < localLines.length || j < remoteLines.length) { + const localLine = localLines[i]; + const remoteLine = remoteLines[j]; + + if (i >= localLines.length) { + // Only remote lines left + diff += `+${j + 1}: ${remoteLine}\n`; + j++; + } else if (j >= remoteLines.length) { + // Only local lines left + diff += `-${i + 1}: ${localLine}\n`; + i++; + } else if (localLine === remoteLine) { + // Lines are the same + diff += ` ${i + 1}: ${localLine}\n`; + i++; + j++; + } else { + // Lines are different - find the best match + let found = false; + for (let k = j + 1; k < Math.min(j + 10, remoteLines.length); k++) { + if (localLine === remoteLines[k]) { + // Found match in remote, local line was removed + for (let l = j; l < k; l++) { + diff += `+${l + 1}: ${remoteLines[l]}\n`; + } + diff += ` ${i + 1}: ${localLine}\n`; + i++; + j = k + 1; + found = true; + break; + } + } + + if (!found) { + for (let k = i + 1; k < Math.min(i + 10, localLines.length); k++) { + if (remoteLine === localLines[k]) { + // Found match in local, remote line was added + diff += `-${i + 1}: ${localLine}\n`; + for (let l = i + 1; l < k; l++) { + diff += `-${l + 1}: ${localLines[l]}\n`; + } + diff += `+${j + 1}: ${remoteLine}\n`; + i = k + 1; + j++; + found = true; + break; + } + } + } + + if (!found) { + // No match found, lines are different + diff += `-${i + 1}: ${localLine}\n`; + diff += `+${j + 1}: ${remoteLine}\n`; + i++; + j++; + } + } + } + + return diff; + } } // Singleton instance diff --git a/src/types/script.ts b/src/types/script.ts index 9824ae9..063134b 100644 --- a/src/types/script.ts +++ b/src/types/script.ts @@ -49,6 +49,9 @@ export interface ScriptCard { type: string; updateable: boolean; website: string | null; + source?: 'github' | 'local'; + isDownloaded?: 
boolean; + localPath?: string; } export interface GitHubFile {