Compare commits
feat/419 ... dependabot (35 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | e0c42a9ea8 |  |
|  | 351ba09f4e |  |
|  | 580986abfa |  |
|  | e1d270d52c |  |
|  | 20dbcae42a |  |
|  | 8e8c724392 |  |
|  | 201b33ec84 |  |
|  | 6d2df9929c |  |
|  | f33504baf5 |  |
|  | 4bc5f4d6ad |  |
|  | a52a897346 |  |
|  | 1d585d4d3f |  |
|  | d4b8ceb581 |  |
|  | 7079c236ab |  |
|  | 0678aba911 |  |
|  | ffdd742aa0 |  |
|  | f4de214a83 |  |
|  | 3b0da19cd1 |  |
|  | 08bc4ab37b |  |
|  | d2e7477898 |  |
|  | b5c6beafff |  |
|  | a34566651a |  |
|  | 4628e67e5c |  |
|  | 578fa28461 |  |
|  | 9e6154b0de |  |
|  | d29f71a92f |  |
|  | aea14cda7e |  |
|  | 4893ccda6e |  |
|  | a56c625b4f |  |
|  | 54b2187f98 |  |
|  | c06b8e6731 |  |
|  | 14e01513e3 |  |
|  | 2e4634ca25 |  |
|  | a82bc02b15 |  |
|  | 2ea44e6b24 |  |
@@ -18,7 +18,12 @@ ALLOWED_SCRIPT_PATHS="scripts/"
 WEBSOCKET_PORT="3001"

 # User settings
+# Optional tokens for private repos: GITHUB_TOKEN (GitHub), GITLAB_TOKEN (GitLab),
+# BITBUCKET_APP_PASSWORD or BITBUCKET_TOKEN (Bitbucket). REPO_URL and added repos
+# can be GitHub, GitLab, Bitbucket, or custom Git servers.
 GITHUB_TOKEN=
+GITLAB_TOKEN=
+BITBUCKET_APP_PASSWORD=
 SAVE_FILTER=false
 FILTERS=
 AUTH_USERNAME=
.github/workflows/publish_release.yml (vendored, 18 lines changed)
@@ -31,20 +31,24 @@ jobs:
           echo "Found draft version: ${{ steps.draft.outputs.tag_name }}"


-      - name: Create branch and commit VERSION
+      - name: Create branch and commit VERSION and package.json
         run: |
           branch="update-version-${{ steps.draft.outputs.tag_name }}"
           # Delete remote branch if exists
           git push origin --delete "$branch" || echo "No remote branch to delete"
           git fetch origin main
           git checkout -b "$branch" origin/main
-          # Write VERSION file and timestamp to ensure a diff
+          # Version without 'v' prefix (e.g. v1.2.3 -> 1.2.3)
           version="${{ steps.draft.outputs.tag_name }}"
-          echo "$version" | sed 's/^v//' > VERSION
-          git add VERSION
+          version_plain=$(echo "$version" | sed 's/^v//')
+          # Write VERSION file
+          echo "$version_plain" > VERSION
+          # Update package.json version
+          jq --arg v "$version_plain" '.version = $v' package.json > package.json.tmp && mv package.json.tmp package.json
+          git add VERSION package.json
           git config user.name "github-actions[bot]"
           git config user.email "github-actions[bot]@users.noreply.github.com"
-          git commit -m "chore: add VERSION $version" --allow-empty
+          git commit -m "chore: bump version to $version_plain (VERSION + package.json)" --allow-empty

       - name: Push changes
         run: |
@@ -57,8 +61,8 @@ jobs:
           pr_url=$(gh pr create \
             --base main \
             --head update-version-${{ steps.draft.outputs.tag_name }} \
-            --title "chore: add VERSION ${{ steps.draft.outputs.tag_name }}" \
-            --body "Adds VERSION file for release ${{ steps.draft.outputs.tag_name }}" \
+            --title "chore: bump version to ${{ steps.draft.outputs.tag_name }} (VERSION + package.json)" \
+            --body "Updates VERSION file and package.json version for release ${{ steps.draft.outputs.tag_name }}" \
             --label automated)

           pr_number=$(echo "$pr_url" | awk -F/ '{print $NF}')
package-lock.json (generated, 686 lines changed): file diff suppressed because it is too large
package.json (24 lines changed)
@@ -1,6 +1,6 @@
 {
   "name": "pve-scripts-local",
-  "version": "0.1.0",
+  "version": "0.5.6",
   "private": true,
   "type": "module",
   "scripts": {
@@ -25,13 +25,13 @@
     "typecheck": "tsc --noEmit"
   },
   "dependencies": {
-    "@prisma/adapter-better-sqlite3": "^7.2.0",
-    "@prisma/client": "^7.2.0",
+    "@prisma/adapter-better-sqlite3": "^7.3.0",
+    "@prisma/client": "^7.3.0",
     "@radix-ui/react-dropdown-menu": "^2.1.16",
     "@radix-ui/react-slot": "^1.2.4",
     "@t3-oss/env-nextjs": "^0.13.10",
     "@tailwindcss/typography": "^0.5.19",
-    "@tanstack/react-query": "^5.90.18",
+    "@tanstack/react-query": "^5.90.20",
     "@trpc/client": "^11.8.1",
     "@trpc/react-query": "^11.8.1",
     "@trpc/server": "^11.8.1",
@@ -42,14 +42,14 @@
     "@xterm/xterm": "^6.0.0",
     "axios": "^1.13.2",
     "bcryptjs": "^3.0.3",
-    "better-sqlite3": "^12.6.0",
+    "better-sqlite3": "^12.6.2",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "cron-validator": "^1.4.0",
     "dotenv": "^17.2.3",
     "jsonwebtoken": "^9.0.3",
-    "lucide-react": "^0.562.0",
-    "next": "^16.1.3",
+    "lucide-react": "^0.563.0",
+    "next": ">=16.1.5",
     "node-cron": "^4.2.1",
     "node-pty": "^1.1.0",
     "react": "^19.2.3",
@@ -66,9 +66,10 @@
     "zod": "^4.3.5"
   },
   "devDependencies": {
+    "next": ">=16.1.5",
     "@tailwindcss/postcss": "^4.1.18",
     "@testing-library/jest-dom": "^6.9.1",
-    "@testing-library/react": "^16.3.1",
+    "@testing-library/react": "^16.3.2",
     "@testing-library/user-event": "^14.6.1",
     "@types/bcryptjs": "^3.0.0",
     "@types/better-sqlite3": "^7.6.13",
@@ -87,11 +88,11 @@
     "postcss": "^8.5.6",
     "prettier": "^3.8.0",
     "prettier-plugin-tailwindcss": "^0.7.2",
-    "prisma": "^7.2.0",
+    "prisma": "^7.3.0",
     "tailwindcss": "^4.1.18",
     "tsx": "^4.21.0",
     "typescript": "^5.9.3",
-    "typescript-eslint": "^8.53.0",
+    "typescript-eslint": "^8.54.0",
     "vitest": "^4.0.17"
   },
   "ct3aMetadata": {
@@ -102,6 +103,7 @@
     "node": ">=24.0.0"
   },
   "overrides": {
-    "prismjs": "^1.30.0"
+    "prismjs": "^1.30.0",
+    "hono": ">=4.11.7"
   }
 }
server.js (66 lines changed)
@@ -3,6 +3,7 @@ import { parse } from 'url';
 import next from 'next';
 import { WebSocketServer } from 'ws';
 import { spawn } from 'child_process';
+import { existsSync } from 'fs';
 import { join, resolve } from 'path';
 import stripAnsi from 'strip-ansi';
 import { spawn as ptySpawn } from 'node-pty';
@@ -56,6 +57,8 @@ const handle = app.getRequestHandler();
  * @property {string} user
  * @property {string} password
  * @property {number} [id]
+ * @property {string} [auth_type]
+ * @property {string} [ssh_key_path]
  */

 /**
@@ -295,6 +298,20 @@ class ScriptExecutionHandler {
     });
   }

+  /**
+   * Resolve full server from DB when client sends server with id but no ssh_key_path (e.g. for Shell/Update over SSH).
+   * @param {ServerInfo|null} server - Server from WebSocket message
+   * @returns {Promise<ServerInfo|null>} Same server or full server from DB
+   */
+  async resolveServerForSSH(server) {
+    if (!server?.id) return server;
+    if (server.auth_type === 'key' && (!server.ssh_key_path || !existsSync(server.ssh_key_path))) {
+      const full = await this.db.getServerById(server.id);
+      return /** @type {ServerInfo|null} */ (full ?? server);
+    }
+    return server;
+  }
+
   /**
    * @param {ExtendedWebSocket} ws
    * @param {WebSocketMessage} message
@@ -305,16 +322,21 @@ class ScriptExecutionHandler {
     switch (action) {
       case 'start':
         if (scriptPath && executionId) {
+          let serverToUse = server;
+          if (serverToUse?.id) {
+            serverToUse = await this.resolveServerForSSH(serverToUse) ?? serverToUse;
+          }
+          const resolved = serverToUse ?? server;
           if (isClone && containerId && storage && server && cloneCount && hostnames && containerType) {
-            await this.startSSHCloneExecution(ws, containerId, executionId, storage, server, containerType, cloneCount, hostnames);
+            await this.startSSHCloneExecution(ws, containerId, executionId, storage, /** @type {ServerInfo} */ (resolved), containerType, cloneCount, hostnames);
           } else if (isBackup && containerId && storage) {
-            await this.startBackupExecution(ws, containerId, executionId, storage, mode, server);
+            await this.startBackupExecution(ws, containerId, executionId, storage, mode, resolved);
           } else if (isUpdate && containerId) {
-            await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage);
+            await this.startUpdateExecution(ws, containerId, executionId, mode, resolved, backupStorage);
           } else if (isShell && containerId) {
-            await this.startShellExecution(ws, containerId, executionId, mode, server);
+            await this.startShellExecution(ws, containerId, executionId, mode, resolved, containerType);
           } else {
-            await this.startScriptExecution(ws, scriptPath, executionId, mode, server, envVars);
+            await this.startScriptExecution(ws, scriptPath, executionId, mode, resolved, envVars);
           }
         } else {
           this.sendMessage(ws, {
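For orientation, here is a condensed TypeScript sketch of the fallback the new `resolveServerForSSH` step performs before any SSH-backed action. The `getServerById` and `keyFileExists` callbacks are illustrative stand-ins for the real database and filesystem calls, not code from this PR.

```ts
type ServerInfo = { id?: number; auth_type?: string; ssh_key_path?: string };

// If the client only sent a server id with key auth but no usable key path,
// re-read the full record before connecting; otherwise use the server as-is.
async function resolveServerForSSH(
  server: ServerInfo | null,
  getServerById: (id: number) => Promise<ServerInfo | null>,
  keyFileExists: (path: string) => boolean,
): Promise<ServerInfo | null> {
  if (!server?.id) return server;
  if (server.auth_type === 'key' && (!server.ssh_key_path || !keyFileExists(server.ssh_key_path))) {
    return (await getServerById(server.id)) ?? server;
  }
  return server;
}
```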
@@ -1153,10 +1175,11 @@ class ScriptExecutionHandler {
       const hostname = hostnames[i];

       try {
-        // Read config file to get hostname/name
+        // Read config file to get hostname/name (node-specific path)
+        const nodeName = server.name;
         const configPath = containerType === 'lxc'
-          ? `/etc/pve/lxc/${nextId}.conf`
-          : `/etc/pve/qemu-server/${nextId}.conf`;
+          ? `/etc/pve/nodes/${nodeName}/lxc/${nextId}.conf`
+          : `/etc/pve/nodes/${nodeName}/qemu-server/${nextId}.conf`;

         let configContent = '';
         await new Promise(/** @type {(resolve: (value?: void) => void) => void} */ ((resolve) => {
@@ -1474,21 +1497,21 @@ class ScriptExecutionHandler {
    * @param {string} executionId
    * @param {string} mode
    * @param {ServerInfo|null} server
+   * @param {'lxc'|'vm'} [containerType='lxc']
    */
-  async startShellExecution(ws, containerId, executionId, mode = 'local', server = null) {
+  async startShellExecution(ws, containerId, executionId, mode = 'local', server = null, containerType = 'lxc') {
     try {
-      // Send start message
+      const typeLabel = containerType === 'vm' ? 'VM' : 'container';
+
       this.sendMessage(ws, {
         type: 'start',
-        data: `Starting shell session for container ${containerId}...`,
+        data: `Starting shell session for ${typeLabel} ${containerId}...`,
         timestamp: Date.now()
       });

       if (mode === 'ssh' && server) {
-        await this.startSSHShellExecution(ws, containerId, executionId, server);
+        await this.startSSHShellExecution(ws, containerId, executionId, server, containerType);
       } else {
-        await this.startLocalShellExecution(ws, containerId, executionId);
+        await this.startLocalShellExecution(ws, containerId, executionId, containerType);
       }

     } catch (error) {
@@ -1505,12 +1528,12 @@ class ScriptExecutionHandler {
    * @param {ExtendedWebSocket} ws
    * @param {string} containerId
    * @param {string} executionId
+   * @param {'lxc'|'vm'} [containerType='lxc']
    */
-  async startLocalShellExecution(ws, containerId, executionId) {
+  async startLocalShellExecution(ws, containerId, executionId, containerType = 'lxc') {
     const { spawn } = await import('node-pty');
-    // Create a shell process that will run pct enter
-    const childProcess = spawn('bash', ['-c', `pct enter ${containerId}`], {
+    const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
+    const childProcess = spawn('bash', ['-c', shellCommand], {
       name: 'xterm-color',
       cols: 80,
       rows: 24,
@@ -1553,14 +1576,15 @@ class ScriptExecutionHandler {
    * @param {string} containerId
    * @param {string} executionId
    * @param {ServerInfo} server
+   * @param {'lxc'|'vm'} [containerType='lxc']
    */
-  async startSSHShellExecution(ws, containerId, executionId, server) {
+  async startSSHShellExecution(ws, containerId, executionId, server, containerType = 'lxc') {
     const sshService = getSSHExecutionService();
+    const shellCommand = containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
     try {
       const execution = await sshService.executeCommand(
         server,
-        `pct enter ${containerId}`,
+        shellCommand,
         /** @param {string} data */
         (data) => {
           this.sendMessage(ws, {
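The shell-related changes above all hinge on one small decision: which command to run for the guest type. A standalone sketch (the helper name `shellCommandFor` is made up for illustration; the two commands mirror the diff):

```ts
function shellCommandFor(containerType: 'lxc' | 'vm', containerId: string): string {
  // VMs attach via the Proxmox serial console (the VM needs a serial port; detach with Ctrl+O);
  // LXC containers attach directly with pct enter.
  return containerType === 'vm' ? `qm terminal ${containerId}` : `pct enter ${containerId}`;
}

console.log(shellCommandFor('vm', '105'));  // "qm terminal 105"
console.log(shellCommandFor('lxc', '105')); // "pct enter 105"
```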
@@ -199,6 +199,17 @@ export function ConfigurationModal({
     return !isNaN(num) && num > 0;
   };

+  const validateHostname = (hostname: string): boolean => {
+    if (!hostname || hostname.length > 253) return false;
+    const label = /^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/;
+    const labels = hostname.split('.');
+    return labels.length >= 1 && labels.every(l => l.length >= 1 && l.length <= 63 && label.test(l));
+  };
+
+  const validateAptCacherAddress = (value: string): boolean => {
+    return validateIPv4(value) || validateHostname(value);
+  };
+
   const validateForm = (): boolean => {
     const newErrors: Record<string, string> = {};

@@ -216,8 +227,8 @@ export function ConfigurationModal({
     if (advancedVars.var_ns && !validateIPv4(advancedVars.var_ns as string)) {
       newErrors.var_ns = 'Invalid IPv4 address';
     }
-    if (advancedVars.var_apt_cacher_ip && !validateIPv4(advancedVars.var_apt_cacher_ip as string)) {
-      newErrors.var_apt_cacher_ip = 'Invalid IPv4 address';
+    if (advancedVars.var_apt_cacher_ip && !validateAptCacherAddress(advancedVars.var_apt_cacher_ip as string)) {
+      newErrors.var_apt_cacher_ip = 'Invalid IPv4 address or hostname';
     }
     // Validate IPv4 CIDR if network mode is static
     const netValue = advancedVars.var_net;
@@ -904,13 +915,13 @@ export function ConfigurationModal({
       </div>
       <div>
         <label className="block text-sm font-medium text-foreground mb-2">
-          APT Cacher IP
+          APT Cacher host or IP
         </label>
         <Input
           type="text"
           value={typeof advancedVars.var_apt_cacher_ip === 'boolean' ? '' : String(advancedVars.var_apt_cacher_ip ?? '')}
           onChange={(e) => updateAdvancedVar('var_apt_cacher_ip', e.target.value)}
-          placeholder="192.168.1.10"
+          placeholder="192.168.1.10 or apt-cacher.internal"
          className={errors.var_apt_cacher_ip ? 'border-destructive' : ''}
         />
         {errors.var_apt_cacher_ip && (
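A quick sanity check of the new APT cacher validation (standalone sketch; the logic condenses `validateHostname` from the hunk above and the sample inputs are illustrative):

```ts
const label = /^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/;

const validateHostname = (hostname: string): boolean => {
  if (!hostname || hostname.length > 253) return false;
  const labels = hostname.split('.');
  return labels.every((l) => l.length >= 1 && l.length <= 63 && label.test(l));
};

console.log(validateHostname('apt-cacher.internal')); // true  -> now accepted for var_apt_cacher_ip
console.log(validateHostname('192.168.1.10'));        // true  (plain IPv4 still passes via validateIPv4)
console.log(validateHostname('-bad.example'));        // false (a label cannot start with '-')
console.log(validateHostname('a'.repeat(64)));        // false (labels are limited to 63 characters)
```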
@@ -1617,7 +1617,7 @@ export function GeneralSettingsModal({
             <Input
               id="new-repo-url"
               type="url"
-              placeholder="https://github.com/owner/repo"
+              placeholder="https://github.com/owner/repo or https://git.example.com/owner/repo"
               value={newRepoUrl}
               onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
                 setNewRepoUrl(e.target.value)
@@ -1626,8 +1626,9 @@ export function GeneralSettingsModal({
               className="w-full"
             />
             <p className="text-muted-foreground mt-1 text-xs">
-              Enter a GitHub repository URL (e.g.,
-              https://github.com/owner/repo)
+              Supported: GitHub, GitLab, Bitbucket, or custom Git
+              servers (e.g. https://github.com/owner/repo,
+              https://gitlab.com/owner/repo)
             </p>
           </div>
           <div className="border-border flex items-center justify-between gap-3 rounded-lg border p-3">
@@ -80,6 +80,7 @@ export function InstalledScriptsTab() {
     id: number;
     containerId: string;
     server?: any;
+    containerType?: 'lxc' | 'vm';
   } | null>(null);
   const [showBackupPrompt, setShowBackupPrompt] = useState(false);
   const [showStorageSelection, setShowStorageSelection] = useState(false);
@@ -1167,6 +1168,7 @@ export function InstalledScriptsTab() {
       id: script.id,
       containerId: script.container_id,
       server: server,
+      containerType: script.is_vm ? 'vm' : 'lxc',
     });
   };

@@ -1452,6 +1454,13 @@ export function InstalledScriptsTab() {
       {/* Shell Terminal */}
       {openingShell && (
         <div className="mb-8" data-terminal="shell">
+          {openingShell.containerType === 'vm' && (
+            <p className="text-muted-foreground mb-2 text-sm">
+              VM shell uses the Proxmox serial console. The VM must have a
+              serial port configured (e.g. <code className="bg-muted rounded px-1">qm set {openingShell.containerId} -serial0 socket</code>).
+              Detach with <kbd className="bg-muted rounded px-1">Ctrl+O</kbd>.
+            </p>
+          )}
           <Terminal
             scriptPath={`shell-${openingShell.containerId}`}
             onClose={handleCloseShellTerminal}
@@ -1459,6 +1468,7 @@ export function InstalledScriptsTab() {
             server={openingShell.server}
             isShell={true}
             containerId={openingShell.containerId}
+            containerType={openingShell.containerType}
           />
         </div>
       )}
@@ -1538,7 +1548,7 @@ export function InstalledScriptsTab() {
           >
             {showAutoDetectForm
               ? "Cancel Auto-Detect"
-              : '🔍 Auto-Detect LXC Containers (Must contain a tag with "community-script")'}
+              : '🔍 Auto-Detect Containers & VMs (tag: community-script)'}
           </Button>
           <Button
             onClick={() => {
@@ -1764,12 +1774,11 @@ export function InstalledScriptsTab() {
         </div>
       )}

-      {/* Auto-Detect LXC Containers Form */}
+      {/* Auto-Detect Containers & VMs Form */}
       {showAutoDetectForm && (
         <div className="bg-card border-border mb-6 rounded-lg border p-4 shadow-sm sm:p-6">
           <h3 className="text-foreground mb-4 text-lg font-semibold sm:mb-6">
-            Auto-Detect LXC Containers (Must contain a tag with
-            "community-script")
+            Auto-Detect Containers & VMs (tag: community-script)
           </h3>
           <div className="space-y-4 sm:space-y-6">
             <div className="bg-muted/30 border-muted rounded-lg border p-4">
@@ -1795,12 +1804,12 @@ export function InstalledScriptsTab() {
               <p>This feature will:</p>
               <ul className="mt-1 list-inside list-disc space-y-1">
                 <li>Connect to the selected server via SSH</li>
-                <li>Scan all LXC config files in /etc/pve/lxc/</li>
+                <li>Scan LXC configs in /etc/pve/lxc/ and VM configs in /etc/pve/qemu-server/</li>
                 <li>
-                  Find containers with "community-script" in
+                  Find containers and VMs with "community-script" in
                   their tags
                 </li>
-                <li>Extract the container ID and hostname</li>
+                <li>Extract the container/VM ID and hostname or name</li>
                 <li>Add them as installed script entries</li>
               </ul>
             </div>
@@ -2302,6 +2311,11 @@ export function InstalledScriptsTab() {
                     "stopped"
                   }
                   className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20"
+                  title={
+                    script.is_vm
+                      ? "VM serial console (requires serial port; detach with Ctrl+O)"
+                      : undefined
+                  }
                 >
                   Shell
                 </DropdownMenuItem>
@@ -270,22 +270,21 @@ export function PBSCredentialsModal({
             htmlFor="pbs-fingerprint"
             className="text-foreground mb-1 block text-sm font-medium"
           >
-            Fingerprint <span className="text-error">*</span>
+            Fingerprint
           </label>
           <input
             type="text"
             id="pbs-fingerprint"
             value={pbsFingerprint}
             onChange={(e) => setPbsFingerprint(e.target.value)}
-            required
             disabled={isLoading}
             className="bg-card text-foreground placeholder-muted-foreground focus:ring-ring focus:border-ring border-border w-full rounded-md border px-3 py-2 shadow-sm focus:ring-2 focus:outline-none"
             placeholder="e.g., 7b:e5:87:38:5e:16:05:d1:12:22:7f:73:d2:e2:d0:cf:8c:cb:28:e2:74:0c:78:91:1a:71:74:2e:79:20:5a:02"
           />
           <p className="text-muted-foreground mt-1 text-xs">
-            Server fingerprint for auto-acceptance. You can find this on
-            your PBS dashboard by clicking the "Show Fingerprint"
-            button.
+            Leave empty if PBS uses a trusted CA (e.g. Let's Encrypt).
+            For self-signed certificates, enter the server fingerprint from
+            the PBS dashboard ("Show Fingerprint").
           </p>
         </div>

@@ -438,6 +438,11 @@ export function ServerForm({
           {errors.password && (
             <p className="text-destructive mt-1 text-sm">{errors.password}</p>
           )}
+          <p className="text-muted-foreground mt-1 text-xs">
+            SSH key is recommended when possible. Special characters (e.g.{" "}
+            <code className="rounded bg-muted px-0.5">{"{ } $ \" '"}</code>) are
+            supported.
+          </p>
         </div>
       )}

@@ -23,8 +23,11 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: z.string().default("scripts/"),
     // WebSocket Configuration
     WEBSOCKET_PORT: z.string().default("3001"),
-    // GitHub Configuration
+    // Git provider tokens (optional, for private repos)
     GITHUB_TOKEN: z.string().optional(),
+    GITLAB_TOKEN: z.string().optional(),
+    BITBUCKET_APP_PASSWORD: z.string().optional(),
+    BITBUCKET_TOKEN: z.string().optional(),
     // Authentication Configuration
     AUTH_USERNAME: z.string().optional(),
     AUTH_PASSWORD_HASH: z.string().optional(),
@@ -62,8 +65,10 @@ export const env = createEnv({
     ALLOWED_SCRIPT_PATHS: process.env.ALLOWED_SCRIPT_PATHS,
     // WebSocket Configuration
     WEBSOCKET_PORT: process.env.WEBSOCKET_PORT,
-    // GitHub Configuration
     GITHUB_TOKEN: process.env.GITHUB_TOKEN,
+    GITLAB_TOKEN: process.env.GITLAB_TOKEN,
+    BITBUCKET_APP_PASSWORD: process.env.BITBUCKET_APP_PASSWORD,
+    BITBUCKET_TOKEN: process.env.BITBUCKET_TOKEN,
     // Authentication Configuration
     AUTH_USERNAME: process.env.AUTH_USERNAME,
     AUTH_PASSWORD_HASH: process.env.AUTH_PASSWORD_HASH,
@@ -418,44 +418,46 @@ async function isVM(scriptId: number, containerId: string, serverId: number | nu
     return false; // Default to LXC if SSH fails
   }

-  // Check both config file paths
-  const vmConfigPath = `/etc/pve/qemu-server/${containerId}.conf`;
-  const lxcConfigPath = `/etc/pve/lxc/${containerId}.conf`;
+  // Node-specific paths (multi-node Proxmox: /etc/pve/nodes/NODENAME/...)
+  const nodeName = (server as Server).name;
+  const vmConfigPathNode = `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`;
+  const lxcConfigPathNode = `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`;
+  // Fallback for single-node or when server.name is not the Proxmox node name
+  const vmConfigPathFallback = `/etc/pve/qemu-server/${containerId}.conf`;
+  const lxcConfigPathFallback = `/etc/pve/lxc/${containerId}.conf`;

-  // Check VM config file
-  let vmConfigExists = false;
-  await new Promise<void>((resolve) => {
+  const checkPathExists = (path: string): Promise<boolean> =>
+    new Promise<boolean>((resolve) => {
+      let exists = false;
       void sshExecutionService.executeCommand(
         server as Server,
-        `test -f "${vmConfigPath}" && echo "exists" || echo "not_exists"`,
+        `test -f "${path}" && echo "exists" || echo "not_exists"`,
         (data: string) => {
-          if (data.includes('exists')) {
-            vmConfigExists = true;
-          }
+          if (data.includes('exists')) exists = true;
         },
-        () => resolve(),
-        () => resolve()
+        () => resolve(exists),
+        () => resolve(exists)
       );
     });

-  if (vmConfigExists) {
-    return true; // VM config file exists
+  // Prefer node-specific paths first
+  const vmConfigExistsNode = await checkPathExists(vmConfigPathNode);
+  if (vmConfigExistsNode) {
+    return true; // VM config file exists on node
   }

-  // Check LXC config file (not needed for return value, but check for completeness)
-  await new Promise<void>((resolve) => {
-    void sshExecutionService.executeCommand(
-      server as Server,
-      `test -f "${lxcConfigPath}" && echo "exists" || echo "not_exists"`,
-      (_data: string) => {
-        // Data handler not needed - just checking if file exists
-      },
-      () => resolve(),
-      () => resolve()
-    );
-  });
+  const lxcConfigExistsNode = await checkPathExists(lxcConfigPathNode);
+  if (lxcConfigExistsNode) {
+    return false; // LXC config file exists on node
+  }

-  return false; // Always LXC since VM config doesn't exist
+  // Fallback: single-node or server.name not matching Proxmox node name
+  const vmConfigExistsFallback = await checkPathExists(vmConfigPathFallback);
+  if (vmConfigExistsFallback) {
+    return true;
+  }
+
+  return false; // LXC (or neither path exists)
 } catch (error) {
   console.error('Error determining container type:', error);
   return false; // Default to LXC on error
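The recurring theme in this and the following hunks is the switch to node-specific Proxmox config paths, with the old single-node paths kept as a fallback. A minimal sketch of the path convention (the helper name is hypothetical; the path patterns are the ones used in the PR):

```ts
function pveConfigPaths(nodeName: string, guestId: string, type: 'lxc' | 'vm') {
  const dir = type === 'vm' ? 'qemu-server' : 'lxc';
  return {
    nodeSpecific: `/etc/pve/nodes/${nodeName}/${dir}/${guestId}.conf`, // multi-node cluster path
    fallback: `/etc/pve/${dir}/${guestId}.conf`,                       // single-node / legacy path
  };
}

console.log(pveConfigPaths('pve1', '105', 'lxc').nodeSpecific);
// -> "/etc/pve/nodes/pve1/lxc/105.conf"
```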
@@ -971,10 +973,11 @@ export const installedScriptsRouter = createTRPCRouter({
       };

       // Helper function to check config file for community-script tag and extract hostname/name
+      const nodeName = (server as Server).name;
       const checkConfigAndExtractInfo = async (id: string, isVM: boolean): Promise<any> => {
         const configPath = isVM
-          ? `/etc/pve/qemu-server/${id}.conf`
-          : `/etc/pve/lxc/${id}.conf`;
+          ? `/etc/pve/nodes/${nodeName}/qemu-server/${id}.conf`
+          : `/etc/pve/nodes/${nodeName}/lxc/${id}.conf`;

         const readCommand = `cat "${configPath}" 2>/dev/null`;

@@ -1060,7 +1063,7 @@ export const installedScriptsRouter = createTRPCRouter({
             reject(new Error(`pct list failed: ${error}`));
           },
           (_exitCode: number) => {
-            resolve();
+            setImmediate(() => resolve());
           }
         );
       });
@@ -1079,7 +1082,7 @@ export const installedScriptsRouter = createTRPCRouter({
             reject(new Error(`qm list failed: ${error}`));
           },
           (_exitCode: number) => {
-            resolve();
+            setImmediate(() => resolve());
           }
         );
       });
@@ -1318,10 +1321,10 @@ export const installedScriptsRouter = createTRPCRouter({

       // Check if ID exists in either pct list (containers) or qm list (VMs)
       if (!existingIds.has(containerId)) {
-        // Also verify config file doesn't exist as a double-check
-        // Check both container and VM config paths
-        const checkContainerCommand = `test -f "/etc/pve/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
-        const checkVMCommand = `test -f "/etc/pve/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;
+        // Also verify config file doesn't exist as a double-check (node-specific paths)
+        const nodeName = (server as Server).name;
+        const checkContainerCommand = `test -f "/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
+        const checkVMCommand = `test -f "/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;

         const configExists = await new Promise<boolean>((resolve) => {
           let combinedOutput = '';
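One subtle change above: the exit-code callbacks for `pct list` and `qm list` now resolve via `setImmediate`. A hedged sketch of the presumed intent (the `runRemoteCommand` helper is a stand-in, not the project's API):

```ts
declare function runRemoteCommand(
  command: string,
  onData: (data: string) => void,
  onError: (error: string) => void,
  onExit: (exitCode: number) => void,
): void;

const lines: string[] = [];

// Deferring resolution by one event-loop turn gives output callbacks that are
// already queued a chance to run, so `lines` is complete when the promise settles.
await new Promise<void>((resolve, reject) => {
  runRemoteCommand(
    'pct list',
    (data: string) => lines.push(data),                   // output callback
    (error: string) => reject(new Error(error)),          // error callback
    (_exitCode: number) => setImmediate(() => resolve()), // exit callback, deferred one tick
  );
});
```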
@@ -2068,32 +2071,72 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Get the script's interface_port from metadata (prioritize metadata over existing database values)
+      // Resolve app slug from /usr/bin/update (community-scripts) when available; else from hostname/suffix.
+      let slugFromUpdate: string | null = null;
+      try {
+        const updateCommand = `pct exec ${scriptData.container_id} -- cat /usr/bin/update 2>/dev/null`;
+        let updateOutput = '';
+        await new Promise<void>((resolve) => {
+          void sshExecutionService.executeCommand(
+            server as Server,
+            updateCommand,
+            (data: string) => { updateOutput += data; },
+            () => {},
+            () => resolve()
+          );
+        });
+        const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
+        if (ctSlugMatch?.[1]) {
+          slugFromUpdate = ctSlugMatch[1].trim().toLowerCase();
+          console.log('🔍 Slug from /usr/bin/update:', slugFromUpdate);
+        }
+      } catch {
+        // Container may not be from community-scripts; use hostname fallback
+      }
+
+      // Get the script's interface_port from metadata. Primary: slug from /usr/bin/update; fallback: hostname/suffix.
       let detectedPort = 80; // Default fallback

       try {
-        // Import localScriptsService to get script metadata
         const { localScriptsService } = await import('~/server/services/localScripts');

-        // Get all scripts and find the one matching our script name
         const allScripts = await localScriptsService.getAllScripts();

-        // Extract script slug from script_name (remove .sh extension)
-        const scriptSlug = scriptData.script_name.replace(/\.sh$/, '');
-        console.log('🔍 Looking for script with slug:', scriptSlug);
+        const nameFromHostname = scriptData.script_name.replace(/\.sh$/, '').toLowerCase();

-        const scriptMetadata = allScripts.find(script => script.slug === scriptSlug);
+        // Primary: slug from /usr/bin/update (community-scripts)
+        let scriptMetadata =
+          slugFromUpdate != null
+            ? allScripts.find((s) => s.slug === slugFromUpdate)
+            : undefined;
+        if (scriptMetadata) {
+          console.log('🔍 Using slug from /usr/bin/update for metadata:', scriptMetadata.slug);
+        }
+
+        // Fallback: exact hostname then hostname ends with slug (longest wins)
+        if (!scriptMetadata) {
+          scriptMetadata = allScripts.find((script) => script.slug === nameFromHostname);
+          if (!scriptMetadata) {
+            const suffixMatches = allScripts.filter((script) => nameFromHostname.endsWith(script.slug));
+            scriptMetadata =
+              suffixMatches.length > 0
+                ? suffixMatches.reduce((a, b) => (a.slug.length >= b.slug.length ? a : b))
+                : undefined;
+            if (scriptMetadata) {
+              console.log('🔍 Matched metadata by slug suffix in hostname:', scriptMetadata.slug);
+            }
+          }
+        }

         if (scriptMetadata?.interface_port) {
           detectedPort = scriptMetadata.interface_port;
           console.log('📋 Found interface_port in metadata:', detectedPort);
         } else {
           console.log('📋 No interface_port found in metadata, using default port 80');
-          detectedPort = 80; // Default to port 80 if no metadata port found
+          detectedPort = 80;
         }
       } catch (error) {
         console.log('⚠️ Error getting script metadata, using default port 80:', error);
-        detectedPort = 80; // Default to port 80 if metadata lookup fails
+        detectedPort = 80;
       }

       console.log('🎯 Final detected port:', detectedPort);
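The port-detection rework keys off an app slug recovered from `/usr/bin/update` inside the container; the PR assumes community-scripts guests ship an update wrapper that references `ct/<slug>.sh`. A small standalone example of the extraction (the sample `updateOutput` string is illustrative, not taken from a real container):

```ts
const updateOutput =
  'bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/paperless-ngx.sh)"';

const ctSlugMatch = /ct\/([a-zA-Z0-9_.-]+)\.sh/.exec(updateOutput);
const slugFromUpdate = ctSlugMatch?.[1]?.trim().toLowerCase() ?? null;

console.log(slugFromUpdate); // "paperless-ngx"
```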
@@ -2197,8 +2240,9 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Read config file
-      const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
+      // Read config file (node-specific path)
+      const nodeName = (server as Server).name;
+      const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
       const readCommand = `cat "${configPath}" 2>/dev/null`;
       let rawConfig = '';

@@ -2328,8 +2372,9 @@ export const installedScriptsRouter = createTRPCRouter({
         };
       }

-      // Write config file using heredoc for safe escaping
-      const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
+      // Write config file using heredoc for safe escaping (node-specific path)
+      const nodeName = (server as Server).name;
+      const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
       const writeCommand = `cat > "${configPath}" << 'EOFCONFIG'
 ${rawConfig}
 EOFCONFIG`;
@@ -2737,9 +2782,10 @@ EOFCONFIG`;
       const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
       const sshExecutionService = getSSHExecutionService();

+      const nodeName = (server as Server).name;
       const configPath = input.containerType === 'lxc'
-        ? `/etc/pve/lxc/${input.containerId}.conf`
-        : `/etc/pve/qemu-server/${input.containerId}.conf`;
+        ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
+        : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;

       let configContent = '';
       await new Promise<void>((resolve) => {
@@ -3131,10 +3177,11 @@ EOFCONFIG`;
       const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
       const sshExecutionService = getSSHExecutionService();

-      // Read config file to get hostname/name
+      // Read config file to get hostname/name (node-specific path)
+      const nodeName = (server as Server).name;
       const configPath = input.containerType === 'lxc'
-        ? `/etc/pve/lxc/${input.containerId}.conf`
-        : `/etc/pve/qemu-server/${input.containerId}.conf`;
+        ? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
+        : `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;

       let configContent = '';
       await new Promise<void>((resolve) => {
55
src/server/lib/gitProvider/bitbucket.ts
Normal file
55
src/server/lib/gitProvider/bitbucket.ts
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import type { DirEntry, GitProvider } from './types';
|
||||||
|
import { parseRepoUrl } from '../repositoryUrlValidation';
|
||||||
|
|
||||||
|
export class BitbucketProvider implements GitProvider {
|
||||||
|
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
|
||||||
|
const { owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const listUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${path}`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
|
||||||
|
if (token) {
|
||||||
|
const auth = Buffer.from(`:${token}`).toString('base64');
|
||||||
|
headers.Authorization = `Basic ${auth}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(listUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Bitbucket API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = (await response.json()) as { values?: { path: string; type: string }[] };
|
||||||
|
const data = body.values ?? (Array.isArray(body) ? body : []);
|
||||||
|
if (!Array.isArray(data)) {
|
||||||
|
throw new Error('Bitbucket API returned unexpected response');
|
||||||
|
}
|
||||||
|
return data.map((item: { path: string; type: string }) => {
|
||||||
|
const name = item.path.split('/').pop() ?? item.path;
|
||||||
|
return {
|
||||||
|
name,
|
||||||
|
path: item.path,
|
||||||
|
type: item.type === 'commit_directory' ? ('dir' as const) : ('file' as const),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
|
||||||
|
const { owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const rawUrl = `https://api.bitbucket.org/2.0/repositories/${owner}/${repo}/src/${encodeURIComponent(branch)}/${filePath}`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.BITBUCKET_APP_PASSWORD ?? process.env.BITBUCKET_TOKEN;
|
||||||
|
if (token) {
|
||||||
|
const auth = Buffer.from(`:${token}`).toString('base64');
|
||||||
|
headers.Authorization = `Basic ${auth}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(rawUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
44
src/server/lib/gitProvider/custom.ts
Normal file
44
src/server/lib/gitProvider/custom.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import type { DirEntry, GitProvider } from "./types";
|
||||||
|
import { parseRepoUrl } from "../repositoryUrlValidation";
|
||||||
|
|
||||||
|
export class CustomProvider implements GitProvider {
|
||||||
|
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
|
||||||
|
const { origin, owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const apiUrl = `${origin}/api/v1/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
|
||||||
|
const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
|
||||||
|
const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
|
||||||
|
if (token) headers.Authorization = `token ${token}`;
|
||||||
|
|
||||||
|
const response = await fetch(apiUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Custom Git server: list directory failed (${response.status}).`);
|
||||||
|
}
|
||||||
|
const data = (await response.json()) as { type: string; name: string; path: string }[];
|
||||||
|
if (!Array.isArray(data)) {
|
||||||
|
const single = data as unknown as { type?: string; name?: string; path?: string };
|
||||||
|
if (single?.name) {
|
||||||
|
return [{ name: single.name, path: single.path ?? path, type: single.type === "dir" ? "dir" : "file" }];
|
||||||
|
}
|
||||||
|
throw new Error("Custom Git server returned unexpected response");
|
||||||
|
}
|
||||||
|
return data.map((item) => ({
|
||||||
|
name: item.name,
|
||||||
|
path: item.path,
|
||||||
|
type: item.type === "dir" ? ("dir" as const) : ("file" as const),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
|
||||||
|
const { origin, owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const rawUrl = `${origin}/${owner}/${repo}/raw/${encodeURIComponent(branch)}/${filePath}`;
|
||||||
|
const headers: Record<string, string> = { "User-Agent": "PVEScripts-Local/1.0" };
|
||||||
|
const token = process.env.GITEA_TOKEN ?? process.env.GIT_TOKEN;
|
||||||
|
if (token) headers.Authorization = `token ${token}`;
|
||||||
|
|
||||||
|
const response = await fetch(rawUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to download ${filePath} from custom Git server (${response.status}).`);
|
||||||
|
}
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
60
src/server/lib/gitProvider/github.ts
Normal file
60
src/server/lib/gitProvider/github.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import type { DirEntry, GitProvider } from './types';
|
||||||
|
import { parseRepoUrl } from '../repositoryUrlValidation';
|
||||||
|
|
||||||
|
export class GitHubProvider implements GitProvider {
|
||||||
|
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
|
||||||
|
const { owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${encodeURIComponent(branch)}`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
Accept: 'application/vnd.github.v3+json',
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.GITHUB_TOKEN;
|
||||||
|
if (token) headers.Authorization = `token ${token}`;
|
||||||
|
|
||||||
|
const response = await fetch(apiUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 403) {
|
||||||
|
const err = new Error(
|
||||||
|
`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN. Status: ${response.status} ${response.statusText}`
|
||||||
|
);
|
||||||
|
(err as Error & { name: string }).name = 'RateLimitError';
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as { type: string; name: string; path: string }[];
|
||||||
|
if (!Array.isArray(data)) {
|
||||||
|
throw new Error('GitHub API returned unexpected response');
|
||||||
|
}
|
||||||
|
return data.map((item) => ({
|
||||||
|
name: item.name,
|
||||||
|
path: item.path,
|
||||||
|
type: item.type === 'dir' ? ('dir' as const) : ('file' as const),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
|
||||||
|
const { owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${encodeURIComponent(branch)}/${filePath}`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.GITHUB_TOKEN;
|
||||||
|
if (token) headers.Authorization = `token ${token}`;
|
||||||
|
|
||||||
|
const response = await fetch(rawUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 403) {
|
||||||
|
const err = new Error(
|
||||||
|
`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN.`
|
||||||
|
);
|
||||||
|
(err as Error & { name: string }).name = 'RateLimitError';
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
58
src/server/lib/gitProvider/gitlab.ts
Normal file
58
src/server/lib/gitProvider/gitlab.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import type { DirEntry, GitProvider } from './types';
|
||||||
|
import { parseRepoUrl } from '../repositoryUrlValidation';
|
||||||
|
|
||||||
|
export class GitLabProvider implements GitProvider {
|
||||||
|
private getBaseUrl(repoUrl: string): string {
|
||||||
|
const { origin } = parseRepoUrl(repoUrl);
|
||||||
|
return origin;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getProjectId(repoUrl: string): string {
|
||||||
|
const { owner, repo } = parseRepoUrl(repoUrl);
|
||||||
|
return encodeURIComponent(`${owner}/${repo}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
|
||||||
|
const baseUrl = this.getBaseUrl(repoUrl);
|
||||||
|
const projectId = this.getProjectId(repoUrl);
|
||||||
|
const apiUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/tree?path=${encodeURIComponent(path)}&ref=${encodeURIComponent(branch)}&per_page=100`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.GITLAB_TOKEN;
|
||||||
|
if (token) headers['PRIVATE-TOKEN'] = token;
|
||||||
|
|
||||||
|
const response = await fetch(apiUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as { type: string; name: string; path: string }[];
|
||||||
|
if (!Array.isArray(data)) {
|
||||||
|
throw new Error('GitLab API returned unexpected response');
|
||||||
|
}
|
||||||
|
return data.map((item) => ({
|
||||||
|
name: item.name,
|
||||||
|
path: item.path,
|
||||||
|
type: item.type === 'tree' ? ('dir' as const) : ('file' as const),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
|
||||||
|
const baseUrl = this.getBaseUrl(repoUrl);
|
||||||
|
const projectId = this.getProjectId(repoUrl);
|
||||||
|
const encodedPath = encodeURIComponent(filePath);
|
||||||
|
const rawUrl = `${baseUrl}/api/v4/projects/${projectId}/repository/files/${encodedPath}/raw?ref=${encodeURIComponent(branch)}`;
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
};
|
||||||
|
const token = process.env.GITLAB_TOKEN;
|
||||||
|
if (token) headers['PRIVATE-TOKEN'] = token;
|
||||||
|
|
||||||
|
const response = await fetch(rawUrl, { headers });
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
}
|
||||||
1
src/server/lib/gitProvider/index.js
Normal file
1
src/server/lib/gitProvider/index.js
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export { listDirectory, downloadRawFile, getRepoProvider } from "./index.ts";
|
||||||
28 src/server/lib/gitProvider/index.ts Normal file
@@ -0,0 +1,28 @@
import type { DirEntry, GitProvider } from "./types";
import { getRepoProvider } from "../repositoryUrlValidation";
import { GitHubProvider } from "./github";
import { GitLabProvider } from "./gitlab";
import { BitbucketProvider } from "./bitbucket";
import { CustomProvider } from "./custom";

const providers: Record<string, GitProvider> = {
  github: new GitHubProvider(),
  gitlab: new GitLabProvider(),
  bitbucket: new BitbucketProvider(),
  custom: new CustomProvider(),
};

export type { DirEntry, GitProvider };
export { getRepoProvider };

export function getGitProvider(repoUrl: string): GitProvider {
  return providers[getRepoProvider(repoUrl)]!;
}

export async function listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]> {
  return getGitProvider(repoUrl).listDirectory(repoUrl, path, branch);
}

export async function downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string> {
  return getGitProvider(repoUrl).downloadRawFile(repoUrl, filePath, branch);
}
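The facade picks a provider purely from the URL's host, so callers never branch on the hosting service themselves. A small, hypothetical sketch of the dispatch behaviour (URLs are examples; the `~/server/lib/gitProvider` alias is the import path used by the TypeScript services in this change):

```ts
// Hypothetical usage sketch; the URLs below are examples only.
import { getRepoProvider, listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

async function demo(): Promise<void> {
  // Returns 'github' | 'gitlab' | 'bitbucket' | 'custom', derived from the host name
  console.log(getRepoProvider('https://gitlab.com/acme/scripts'));      // gitlab
  console.log(getRepoProvider('https://git.example.org/acme/scripts')); // custom

  // Same call shape regardless of the backing provider
  const files = await listDirectory('https://gitlab.com/acme/scripts', 'json', 'main');
  if (files[0]) {
    const body = await downloadRawFile('https://gitlab.com/acme/scripts', files[0].path, 'main');
    console.log(body.slice(0, 80));
  }
}

demo().catch(console.error);
```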
14 src/server/lib/gitProvider/types.ts Normal file
@@ -0,0 +1,14 @@
/**
 * Git provider interface for listing and downloading repository files.
 */

export type DirEntry = {
  name: string;
  path: string;
  type: 'file' | 'dir';
};

export interface GitProvider {
  listDirectory(repoUrl: string, path: string, branch: string): Promise<DirEntry[]>;
  downloadRawFile(repoUrl: string, filePath: string, branch: string): Promise<string>;
}
37 src/server/lib/repositoryUrlValidation.js Normal file
@@ -0,0 +1,37 @@
/**
 * Repository URL validation (JS mirror for server.js).
 */
const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export function isValidRepositoryUrl(url) {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

export function getRepoProvider(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

export function parseRepoUrl(url) {
  if (!isValidRepositoryUrl(url)) throw new Error(REPO_URL_ERROR_MESSAGE);
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    return {
      origin: u.origin,
      owner: pathParts[0] ?? '',
      repo: pathParts[1] ?? '',
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
57 src/server/lib/repositoryUrlValidation.ts Normal file
@@ -0,0 +1,57 @@
/**
 * Repository URL validation and provider detection.
 * Supports GitHub, GitLab, Bitbucket, and custom Git servers.
 */

const VALID_REPO_URL =
  /^(https?:\/\/)(github\.com|gitlab\.com|bitbucket\.org|[^/]+)\/[^/]+\/[^/]+$/;

export const REPO_URL_ERROR_MESSAGE =
  'Invalid repository URL. Supported: GitHub, GitLab, Bitbucket, and custom Git servers (e.g. https://host/owner/repo).';

export type RepoProvider = 'github' | 'gitlab' | 'bitbucket' | 'custom';

/**
 * Check if a string is a valid repository URL (format only).
 */
export function isValidRepositoryUrl(url: string): boolean {
  if (typeof url !== 'string' || !url.trim()) return false;
  return VALID_REPO_URL.test(url.trim());
}

/**
 * Detect the Git provider from a repository URL.
 */
export function getRepoProvider(url: string): RepoProvider {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  const normalized = url.trim().toLowerCase();
  if (normalized.includes('github.com')) return 'github';
  if (normalized.includes('gitlab.com')) return 'gitlab';
  if (normalized.includes('bitbucket.org')) return 'bitbucket';
  return 'custom';
}

/**
 * Parse owner and repo from a repository URL (path segments).
 * Works for GitHub, GitLab, Bitbucket, and custom (host/owner/repo).
 */
export function parseRepoUrl(url: string): { origin: string; owner: string; repo: string } {
  if (!isValidRepositoryUrl(url)) {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
  try {
    const u = new URL(url.trim());
    const pathParts = u.pathname.replace(/^\/+/, '').replace(/\.git\/?$/, '').split('/');
    const owner = pathParts[0] ?? '';
    const repo = pathParts[1] ?? '';
    return {
      origin: u.origin,
      owner,
      repo,
    };
  } catch {
    throw new Error(REPO_URL_ERROR_MESSAGE);
  }
}
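To make the accepted URL shapes concrete, here is a small hypothetical sketch of what the helpers return for a few representative inputs (the URLs are illustrative; any https://host/owner/repo URL passes the format check):

```ts
// Hypothetical examples of the validation and parsing helpers above.
import { isValidRepositoryUrl, getRepoProvider, parseRepoUrl } from './repositoryUrlValidation';

console.log(isValidRepositoryUrl('https://github.com/community-scripts/ProxmoxVE')); // true
console.log(isValidRepositoryUrl('https://github.com/owner'));                        // false (missing repo segment)

console.log(getRepoProvider('https://bitbucket.org/team/tools'));   // 'bitbucket'
console.log(getRepoProvider('https://git.internal.lan/ops/infra')); // 'custom'

// parseRepoUrl strips a trailing ".git" and splits the path into owner/repo
console.log(parseRepoUrl('https://gitlab.com/acme/scripts.git'));
// -> { origin: 'https://gitlab.com', owner: 'acme', repo: 'scripts' }
```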
@@ -327,13 +327,16 @@ class BackupService {
       // PBS supports PBS_PASSWORD and PBS_REPOSITORY environment variables for non-interactive login
       const repository = `root@pam@${pbsIp}:${pbsDatastore}`;

-      // Escape password for shell safety (single quotes)
+      // Escape password and fingerprint for shell safety (single quotes)
       const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-      // Use PBS_PASSWORD environment variable for non-interactive authentication
-      // Auto-accept fingerprint by piping "y" to stdin
-      // PBS will use PBS_PASSWORD env var if available, avoiding interactive prompt
-      const fullCommand = `echo "y" | PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;
+      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+      const envParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
+      if (escapedFingerprint) {
+        envParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
+      }
+      const envStr = envParts.join(' ');
+      const fullCommand = `${envStr} timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`;

       console.log(`[BackupService] Logging into PBS: ${repository}`);

@@ -419,9 +422,12 @@ class BackupService {

       // Build full repository string: root@pam@<IP>:<DATASTORE>
       const repository = `root@pam@${pbsIp}:${pbsDatastore}`;
+      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+      const snapshotEnvParts = escapedFingerprint ? [`PBS_FINGERPRINT='${escapedFingerprint}'`] : [];
+      const snapshotEnvStr = snapshotEnvParts.length ? snapshotEnvParts.join(' ') + ' ' : '';
       // Use correct command: snapshot list ct/<CT_ID> --repository <full_repo_string>
-      const command = `timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
+      const command = `${snapshotEnvStr}timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`;
       let output = '';

       console.log(`[BackupService] Discovering PBS backups for CT ${ctId} on repository ${repository}`);
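The net effect of the two hunks above is that the PBS login and snapshot-list commands receive their credentials as environment assignments prefixed to the shell command, and PBS_FINGERPRINT is only included when a fingerprint is stored. A standalone, hypothetical sketch of the same assembly (names and values are placeholders, not code from this change):

```ts
// Hypothetical sketch of the env-prefix assembly; all values are placeholders.
const escapeSingleQuotes = (value: string): string => value.replace(/'/g, "'\\''");

function buildPbsLoginCommand(opts: { password: string; repository: string; fingerprint?: string }): string {
  const envParts = [
    `PBS_PASSWORD='${escapeSingleQuotes(opts.password)}'`,
    `PBS_REPOSITORY='${opts.repository}'`,
  ];
  const fingerprint = opts.fingerprint?.trim() ?? '';
  if (fingerprint) {
    // Only pass PBS_FINGERPRINT when a fingerprint is stored for the credential
    envParts.push(`PBS_FINGERPRINT='${escapeSingleQuotes(fingerprint)}'`);
  }
  return `${envParts.join(' ')} timeout 10 proxmox-backup-client login --repository ${opts.repository} 2>&1`;
}

console.log(buildPbsLoginCommand({
  password: "s3cr3t'pw",
  repository: 'root@pam@192.0.2.10:datastore1',
  fingerprint: 'AA:BB:CC:DD',
}));
```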
@@ -1,7 +1,8 @@
 // JavaScript wrapper for githubJsonService (for use with node server.js)
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { repositoryService } from './repositoryService.js';
+import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';

 // Get environment variables
 const getEnv = () => ({
@@ -28,76 +29,9 @@ class GitHubJsonService {
     }
   }

-  getBaseUrl(repoUrl) {
-    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!urlMatch) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-
-    const [, owner, repo] = urlMatch;
-    return `https://api.github.com/repos/${owner}/${repo}`;
-  }
-
-  extractRepoPath(repoUrl) {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error('Invalid GitHub repository URL');
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  async fetchFromGitHub(repoUrl, endpoint) {
-    const baseUrl = this.getBaseUrl(repoUrl);
-    const env = getEnv();
-
-    const headers = {
-      'Accept': 'application/vnd.github.v3+json',
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
-
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
-    }
-
-    return response.json();
-  }
-
   async downloadJsonFile(repoUrl, filePath) {
     this.initializeConfig();
-    const repoPath = this.extractRepoPath(repoUrl);
-    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch}/${filePath}`;
-    const env = getEnv();
-
-    const headers = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits.`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-
-    const content = await response.text();
+    const content = await downloadRawFile(repoUrl, filePath, this.branch);
     const script = JSON.parse(content);
     script.repository_url = repoUrl;
     return script;
@@ -105,16 +39,13 @@ class GitHubJsonService {

   async getJsonFiles(repoUrl) {
     this.initializeConfig();

     try {
-      const files = await this.fetchFromGitHub(
-        repoUrl,
-        `/contents/${this.jsonFolder}?ref=${this.branch}`
-      );
-
-      return files.filter(file => file.name.endsWith('.json'));
+      const entries = await listDirectory(repoUrl, this.jsonFolder, this.branch);
+      return entries
+        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
+        .map((e) => ({ name: e.name, path: e.path }));
     } catch (error) {
-      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }
@@ -232,25 +163,42 @@ class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -258,7 +206,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -274,13 +223,15 @@ class GitHubJsonService {
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles = [];
+      const allDeletedFiles = [];
       const processedSlugs = new Set();
       let totalSynced = 0;

@@ -291,6 +242,7 @@ class GitHubJsonService {
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
+          allDeletedFiles.push(...(result.deletedFiles ?? []));
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');
             if (processedSlugs.has(slug)) {
@@ -312,11 +264,16 @@ class GitHubJsonService {

       await this.updateExistingFilesWithRepositoryUrl();

+      const msg =
+        allDeletedFiles.length > 0
+          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+        message: msg,
         count: totalSynced,
-        syncedFiles: allSyncedFiles
+        syncedFiles: allSyncedFiles,
+        deletedFiles: allDeletedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);
@@ -324,7 +281,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -366,6 +324,32 @@ class GitHubJsonService {
     }
   }

+  async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content);
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
     const filesToSync = [];

@@ -1,8 +1,9 @@
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { env } from '../../env.js';
 import type { Script, ScriptCard, GitHubFile } from '../../types/script';
 import { repositoryService } from './repositoryService';
+import { listDirectory, downloadRawFile } from '~/server/lib/gitProvider';

 export class GitHubJsonService {
   private branch: string | null = null;
@@ -22,96 +23,24 @@ export class GitHubJsonService {
     }
   }

-  private getBaseUrl(repoUrl: string): string {
-    const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!urlMatch) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-
-    const [, owner, repo] = urlMatch;
-    return `https://api.github.com/repos/${owner}/${repo}`;
-  }
-
-  private extractRepoPath(repoUrl: string): string {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error('Invalid GitHub repository URL');
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  private async fetchFromGitHub<T>(repoUrl: string, endpoint: string): Promise<T> {
-    const baseUrl = this.getBaseUrl(repoUrl);
-
-    const headers: HeadersInit = {
-      'Accept': 'application/vnd.github.v3+json',
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(`${baseUrl}${endpoint}`, { headers });
-
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
-    }
-
-    const data = await response.json();
-    return data as T;
-  }
-
   private async downloadJsonFile(repoUrl: string, filePath: string): Promise<Script> {
     this.initializeConfig();
-    const repoPath = this.extractRepoPath(repoUrl);
-    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch!}/${filePath}`;
-
-    const headers: HeadersInit = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
-    }
-
-    const response = await fetch(rawUrl, { headers });
-    if (!response.ok) {
-      if (response.status === 403) {
-        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
-        error.name = 'RateLimitError';
-        throw error;
-      }
-      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
-    }
-
-    const content = await response.text();
+    const content = await downloadRawFile(repoUrl, filePath, this.branch!);
     const script = JSON.parse(content) as Script;
-    // Add repository_url to script
     script.repository_url = repoUrl;
     return script;
   }

   async getJsonFiles(repoUrl: string): Promise<GitHubFile[]> {
     this.initializeConfig();

     try {
-      const files = await this.fetchFromGitHub<GitHubFile[]>(
-        repoUrl,
-        `/contents/${this.jsonFolder!}?ref=${this.branch!}`
-      );
-
-      // Filter for JSON files only
-      return files.filter(file => file.name.endsWith('.json'));
+      const entries = await listDirectory(repoUrl, this.jsonFolder!, this.branch!);
+      const files: GitHubFile[] = entries
+        .filter((e) => e.type === 'file' && e.name.endsWith('.json'))
+        .map((e) => ({ name: e.name, path: e.path } as GitHubFile));
+      return files;
     } catch (error) {
-      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
+      console.error(`Error fetching JSON files from repository (${repoUrl}):`, error);
       throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
     }
   }
@@ -229,12 +158,11 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from a specific repository
    */
-  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log(`Starting JSON sync from repository: ${repoUrl}`);

-      // Get file list from GitHub
-      console.log(`Fetching file list from GitHub (${repoUrl})...`);
+      console.log(`Fetching file list from repository (${repoUrl})...`);
       const githubFiles = await this.getJsonFiles(repoUrl);
       console.log(`Found ${githubFiles.length} JSON files in repository ${repoUrl}`);

@@ -242,28 +170,45 @@ export class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       // Compare and find files that need syncing
       // For multi-repo support, we need to check if file exists AND if it's from this repo
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       // Download and save only the files that need syncing
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -271,7 +216,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -279,7 +225,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from all enabled repositories (main repo has priority)
    */
-  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log('Starting multi-repository JSON sync...');

@@ -290,13 +236,15 @@ export class GitHubJsonService {
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles: string[] = [];
+      const allDeletedFiles: string[] = [];
       const processedSlugs = new Set<string>(); // Track slugs we've already processed
       let totalSynced = 0;

@@ -308,6 +256,7 @@ export class GitHubJsonService {
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
+          allDeletedFiles.push(...(result.deletedFiles ?? []));
           // Only count files that weren't already processed from a higher priority repo
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');
@@ -331,11 +280,16 @@ export class GitHubJsonService {
       // Also update existing files that don't have repository_url set (backward compatibility)
       await this.updateExistingFilesWithRepositoryUrl();

+      const msg =
+        allDeletedFiles.length > 0
+          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+        message: msg,
         count: totalSynced,
-        syncedFiles: allSyncedFiles
+        syncedFiles: allSyncedFiles,
+        deletedFiles: allDeletedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);
@@ -343,7 +297,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -388,6 +343,36 @@ export class GitHubJsonService {
     }
   }

+  /**
+   * Delete local JSON files that belong to this repo but are no longer in the remote list.
+   * Returns the list of deleted filenames.
+   */
+  private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles: string[] = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory!, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content) as Script;
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   /**
    * Find files that need syncing for a specific repository
    * This checks if file exists locally AND if it's from the same repository
@@ -1,5 +1,6 @@
 // JavaScript wrapper for repositoryService (for use with node server.js)
 import { prisma } from '../db.js';
+import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation.js';

 class RepositoryService {
   /**
@@ -89,9 +90,8 @@ class RepositoryService {
    * Create a new repository
    */
   async createRepository(data) {
-    // Validate GitHub URL
-    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+    if (!isValidRepositoryUrl(data.url)) {
+      throw new Error(REPO_URL_ERROR_MESSAGE);
     }

     // Check for duplicates
@@ -122,10 +122,9 @@ class RepositoryService {
    * Update repository
    */
   async updateRepository(id, data) {
-    // If updating URL, validate it
     if (data.url) {
-      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+      if (!isValidRepositoryUrl(data.url)) {
+        throw new Error(REPO_URL_ERROR_MESSAGE);
       }

       // Check for duplicates (excluding current repo)
@@ -1,5 +1,5 @@
-/* eslint-disable @typescript-eslint/prefer-regexp-exec */
 import { prisma } from '../db';
+import { isValidRepositoryUrl, REPO_URL_ERROR_MESSAGE } from '../lib/repositoryUrlValidation';

 export class RepositoryService {
   /**
@@ -93,9 +93,8 @@ export class RepositoryService {
     enabled?: boolean;
     priority?: number;
   }) {
-    // Validate GitHub URL
-    if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+    if (!isValidRepositoryUrl(data.url)) {
+      throw new Error(REPO_URL_ERROR_MESSAGE);
     }

     // Check for duplicates
@@ -130,10 +129,9 @@ export class RepositoryService {
     url?: string;
     priority?: number;
   }) {
-    // If updating URL, validate it
     if (data.url) {
-      if (!data.url.match(/^https:\/\/github\.com\/[^\/]+\/[^\/]+$/)) {
-        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
+      if (!isValidRepositoryUrl(data.url)) {
+        throw new Error(REPO_URL_ERROR_MESSAGE);
       }

       // Check for duplicates (excluding current repo)
@@ -250,9 +250,16 @@ class RestoreService {
       const targetFolder = `/var/lib/vz/dump/vzdump-lxc-${ctId}-${snapshotNameForPath}`;
       const targetTar = `${targetFolder}.tar`;

-      // Use PBS_PASSWORD env var and add timeout for long downloads
+      // Use PBS_PASSWORD env var and add timeout for long downloads; PBS_FINGERPRINT when set for cert validation
       const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''");
-      const restoreCommand = `PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;
+      const fingerprint = credential.pbs_fingerprint?.trim() ?? '';
+      const escapedFingerprint = fingerprint ? fingerprint.replace(/'/g, "'\\''") : '';
+      const restoreEnvParts = [`PBS_PASSWORD='${escapedPassword}'`, `PBS_REPOSITORY='${repository}'`];
+      if (escapedFingerprint) {
+        restoreEnvParts.push(`PBS_FINGERPRINT='${escapedFingerprint}'`);
+      }
+      const restoreEnvStr = restoreEnvParts.join(' ');
+      const restoreCommand = `${restoreEnvStr} timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`;

       let output = '';
       let exitCode = 0;
@@ -1,6 +1,7 @@
 // Real JavaScript implementation for script downloading
 import { join } from 'path';
 import { writeFile, mkdir, access, readFile, unlink } from 'fs/promises';
+import { downloadRawFile } from '../lib/gitProvider/index.js';

 export class ScriptDownloaderService {
   constructor() {
@@ -82,51 +83,18 @@ export class ScriptDownloaderService {
   }

   /**
-   * Extract repository path from GitHub URL
-   * @param {string} repoUrl - The GitHub repository URL
-   * @returns {string}
-   */
-  extractRepoPath(repoUrl) {
-    const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
-    if (!match) {
-      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
-    }
-    return `${match[1]}/${match[2]}`;
-  }
-
-  /**
-   * Download a file from GitHub
-   * @param {string} repoUrl - The GitHub repository URL
+   * Download a file from the repository (GitHub, GitLab, Bitbucket, or custom)
+   * @param {string} repoUrl - The repository URL
    * @param {string} filePath - The file path within the repository
    * @param {string} [branch] - The branch to download from
    * @returns {Promise<string>}
    */
-  async downloadFileFromGitHub(repoUrl, filePath, branch = 'main') {
-    this.initializeConfig();
+  async downloadFileFromRepo(repoUrl, filePath, branch = 'main') {
     if (!repoUrl) {
       throw new Error('Repository URL is not set');
     }
-    const repoPath = this.extractRepoPath(repoUrl);
-    const url = `https://raw.githubusercontent.com/${repoPath}/${branch}/${filePath}`;
-
-    /** @type {Record<string, string>} */
-    const headers = {
-      'User-Agent': 'PVEScripts-Local/1.0',
-    };
-
-    // Add GitHub token authentication if available
-    if (process.env.GITHUB_TOKEN) {
-      headers.Authorization = `token ${process.env.GITHUB_TOKEN}`;
-    }
-
-    console.log(`Downloading from GitHub: ${url}`);
-    const response = await fetch(url, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to download ${filePath} from ${repoUrl}: ${response.status} ${response.statusText}`);
-    }
-
-    return response.text();
+    console.log(`Downloading from repository: ${repoUrl} (${filePath})`);
+    return downloadRawFile(repoUrl, filePath, branch);
   }

   /**
@@ -184,9 +152,8 @@ export class ScriptDownloaderService {
       const fileName = scriptPath.split('/').pop();

       if (fileName) {
-        // Download from GitHub using the script's repository URL
         console.log(`Downloading script file: ${scriptPath} from ${repoUrl}`);
-        const content = await this.downloadFileFromGitHub(repoUrl, scriptPath, branch);
+        const content = await this.downloadFileFromRepo(repoUrl, scriptPath, branch);

         // Determine target directory based on script path
         let targetDir;
@@ -250,7 +217,7 @@ export class ScriptDownloaderService {
     const installScriptName = `${script.slug}-install.sh`;
     try {
       console.log(`Downloading install script: install/${installScriptName} from ${repoUrl}`);
-      const installContent = await this.downloadFileFromGitHub(repoUrl, `install/${installScriptName}`, branch);
+      const installContent = await this.downloadFileFromRepo(repoUrl, `install/${installScriptName}`, branch);
       const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName);
       await writeFile(localInstallPath, installContent, 'utf-8');
       files.push(`install/${installScriptName}`);
@@ -274,7 +241,7 @@ export class ScriptDownloaderService {
     const alpineInstallScriptName = `alpine-${script.slug}-install.sh`;
     try {
       console.log(`[${script.slug}] Downloading alpine install script: install/${alpineInstallScriptName} from ${repoUrl}`);
-      const alpineInstallContent = await this.downloadFileFromGitHub(repoUrl, `install/${alpineInstallScriptName}`, branch);
+      const alpineInstallContent = await this.downloadFileFromRepo(repoUrl, `install/${alpineInstallScriptName}`, branch);
       const localAlpineInstallPath = join(this.scriptsDirectory, 'install', alpineInstallScriptName);
       await writeFile(localAlpineInstallPath, alpineInstallContent, 'utf-8');
       files.push(`install/${alpineInstallScriptName}`);
@@ -681,7 +648,7 @@ export class ScriptDownloaderService {
       console.log(`[Comparison] Local file size: ${localContent.length} bytes`);

       // Download remote content from the script's repository
-      const remoteContent = await this.downloadFileFromGitHub(repoUrl, remotePath, branch);
+      const remoteContent = await this.downloadFileFromRepo(repoUrl, remotePath, branch);
       console.log(`[Comparison] Remote file size: ${remoteContent.length} bytes`);

       // Apply modification only for CT scripts, not for other script types
@@ -739,7 +706,7 @@ export class ScriptDownloaderService {
         // Find the corresponding script path in install_methods
         const method = script.install_methods?.find(m => m.script === filePath);
         if (method?.script) {
-          const downloadedContent = await this.downloadFileFromGitHub(repoUrl, method.script, branch);
+          const downloadedContent = await this.downloadFileFromRepo(repoUrl, method.script, branch);
           remoteContent = this.modifyScriptContent(downloadedContent);
         }
       } catch {
@@ -756,7 +723,7 @@ export class ScriptDownloaderService {
       }

       try {
-        remoteContent = await this.downloadFileFromGitHub(repoUrl, filePath, branch);
+        remoteContent = await this.downloadFileFromRepo(repoUrl, filePath, branch);
       } catch {
         // Error downloading remote install script
       }
@@ -1,6 +1,8 @@
 import { spawn } from 'child_process';
 import { spawn as ptySpawn } from 'node-pty';
-import { existsSync } from 'fs';
+import { existsSync, writeFileSync, chmodSync, unlinkSync } from 'fs';
+import { join } from 'path';
+import { tmpdir } from 'os';


 /**
@@ -195,9 +197,22 @@ class SSHExecutionService {
   async transferScriptsFolder(server, onData, onError) {
     const { ip, user, password, auth_type = 'password', ssh_key_passphrase, ssh_key_path, ssh_port = 22 } = server;

-    return new Promise((resolve, reject) => {
-      try {
-        // Build rsync command based on authentication type
+    const cleanupTempFile = (/** @type {string | null} */ tempPath) => {
+      if (tempPath) {
+        try {
+          unlinkSync(tempPath);
+        } catch (_) {
+          // ignore
+        }
+      }
+    };
+
+    return new Promise((resolve, reject) => {
+      /** @type {string | null} */
+      let tempPath = null;
+      try {
+        // Build rsync command based on authentication type.
+        // Use sshpass -f with a temp file so password/passphrase never go through the shell (safe for special chars like {, $, ").
         let rshCommand;
         if (auth_type === 'key') {
           if (!ssh_key_path || !existsSync(ssh_key_path)) {
@@ -205,13 +220,19 @@ class SSHExecutionService {
           }

           if (ssh_key_passphrase) {
-            rshCommand = `sshpass -P passphrase -p ${ssh_key_passphrase} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+            tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+            writeFileSync(tempPath, ssh_key_passphrase);
+            chmodSync(tempPath, 0o600);
+            rshCommand = `sshpass -P passphrase -f ${tempPath} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           } else {
             rshCommand = `ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           }
         } else {
           // Password authentication
-          rshCommand = `sshpass -p ${password} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+          tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+          writeFileSync(tempPath, password ?? '');
+          chmodSync(tempPath, 0o600);
+          rshCommand = `sshpass -f ${tempPath} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
         }

         const rsyncCommand = spawn('rsync', [
@@ -227,18 +248,17 @@ class SSHExecutionService {
         });

         rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onData(output);
         });

         rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onError(output);
         });

         rsyncCommand.on('close', (code) => {
+          cleanupTempFile(tempPath);
           if (code === 0) {
             resolve();
           } else {
@@ -247,10 +267,11 @@ class SSHExecutionService {
         });

         rsyncCommand.on('error', (error) => {
+          cleanupTempFile(tempPath);
           reject(error);
         });

       } catch (error) {
+        cleanupTempFile(tempPath);
         reject(error);
       }
     });
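The rsync change above keeps the password or key passphrase off the sshpass command line (where it would be visible in process listings) and out of shell interpolation. A condensed, hypothetical sketch of the same pattern, with placeholder host and secret (not code from this change):

```ts
// Hypothetical condensed sketch of the sshpass temp-file pattern; host/secret are placeholders.
import { spawn } from 'child_process';
import { writeFileSync, chmodSync, unlinkSync } from 'fs';
import { join } from 'path';
import { tmpdir } from 'os';

// Write the secret to a mode-0600 temp file, hand it to sshpass -f, and
// remove it once the child process has finished (or failed to start).
function runWithSshpass(secret: string, sshArgs: string[]): Promise<number> {
  const tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
  writeFileSync(tempPath, secret);
  chmodSync(tempPath, 0o600);

  const cleanup = () => { try { unlinkSync(tempPath); } catch { /* already removed */ } };

  return new Promise((resolve, reject) => {
    const child = spawn('sshpass', ['-f', tempPath, 'ssh', ...sshArgs]);
    child.on('close', (code) => { cleanup(); resolve(code ?? 1); });
    child.on('error', (err) => { cleanup(); reject(err); });
  });
}

// Example: run a single command over SSH with a password (hypothetical host).
runWithSshpass("p@ss'word", ['-p', '22', 'root@192.0.2.20', 'uptime'])
  .then((code) => console.log(`ssh exited with ${code}`))
  .catch(console.error);
```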
@@ -169,16 +169,17 @@ class SSHService {
       const timeout = 10000;
       let resolved = false;

+      // Pass password via env so it is not embedded in the script (safe for special chars like {, $, ").
       const expectScript = `#!/usr/bin/expect -f
 set timeout 10
 spawn ssh -p ${ssh_port} -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o PasswordAuthentication=yes -o PubkeyAuthentication=no ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
 expect {
   "password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "Password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "SSH_LOGIN_SUCCESS" {
@@ -193,7 +194,8 @@
 }`;

       const expectCommand = spawn('expect', ['-c', expectScript], {
-        stdio: ['pipe', 'pipe', 'pipe']
+        stdio: ['pipe', 'pipe', 'pipe'],
+        env: { ...process.env, SSH_PASSWORD: password ?? '' }
       });

       const timer = setTimeout(() => {
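In isolation, the pattern in the last two hunks is: export the password to the expect child through its environment and reference it as $env(SSH_PASSWORD) inside the script, so the raw characters are never interpolated into Tcl source. A minimal, hypothetical sketch (host and secret are placeholders):

```ts
// Hypothetical minimal sketch of passing a secret to expect via the environment.
import { spawn } from 'child_process';

const expectScript = `#!/usr/bin/expect -f
set timeout 10
spawn ssh -o StrictHostKeyChecking=no user@192.0.2.30 "echo OK"
expect {
  -nocase "password:" { send "$env(SSH_PASSWORD)\\r"; exp_continue }
  "OK" { exit 0 }
  timeout { exit 1 }
}`;

const child = spawn('expect', ['-c', expectScript], {
  stdio: ['pipe', 'pipe', 'pipe'],
  env: { ...process.env, SSH_PASSWORD: 'placeholder-secret' },
});

child.on('close', (code) => console.log(`expect exited with ${code}`));
```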