Compare commits

...

4 Commits

Author SHA1 Message Date
Michel Rögl-Brunner
8e8c724392 fix: delete local JSON files when removed from remote repo (fixes #405)
- Add deleteLocalFilesRemovedFromRepo() to remove local script JSON files
  that belong to the synced repo but are no longer in the remote list
- Call it in syncJsonFilesForRepo() before find/sync so stale scripts
  no longer appear and download attempts don't 404
- Extend sync return types with deletedFiles; aggregate in syncJsonFiles()
  and include removed count in success message
2026-01-29 15:44:45 +01:00
Michel Roegl-Brunner
201b33ec84 Merge pull request #479 from community-scripts/fix/464
fix: use node-specific Proxmox config paths for VM vs LXC (fixes #464)
2026-01-29 15:31:57 +01:00
Michel Roegl-Brunner
f33504baf5 Merge pull request #478 from community-scripts/fix/312
fix: handle special characters in SSH password/passphrase (Fixes #312)
2026-01-29 15:20:44 +01:00
Michel Rögl-Brunner
4bc5f4d6ad fix: handle special characters in SSH password/passphrase (Fixes #312)
- Use sshpass -f with temp file in transferScriptsFolder so password/passphrase
  never go through shell; safe for {, $, ", etc.
- Pass password via SSH_PASSWORD env in testWithExpect instead of embedding
  in script
- Add ServerForm hint: SSH key recommended; special chars supported
2026-01-29 15:18:41 +01:00
5 changed files with 191 additions and 53 deletions
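
The cleanup in the first commit above boils down to a set-difference over filenames, guarded by each file's repository_url so scripts owned by other repositories are left untouched. A simplified, standalone sketch of that idea (names are illustrative; the actual implementation is in the githubJsonService diffs below):

import { readdir, readFile, unlink } from 'fs/promises';
import { join } from 'path';

// Simplified sketch of the stale-file cleanup described above (illustrative only).
// Deletes local script JSON files that claim repoUrl as their source but are no
// longer present in the remote file listing.
async function removeStaleScriptJson(localDir, repoUrl, remoteFilenames) {
  const deleted = [];
  for (const file of await readdir(localDir)) {
    if (!file.endsWith('.json')) continue;
    try {
      const script = JSON.parse(await readFile(join(localDir, file), 'utf-8'));
      if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
        await unlink(join(localDir, file));
        deleted.push(file);
      }
    } catch {
      // Unreadable or unparseable files are skipped rather than deleted.
    }
  }
  return deleted;
}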

View File

@@ -438,6 +438,11 @@ export function ServerForm({
           {errors.password && (
             <p className="text-destructive mt-1 text-sm">{errors.password}</p>
           )}
+          <p className="text-muted-foreground mt-1 text-xs">
+            SSH key is recommended when possible. Special characters (e.g.{" "}
+            <code className="rounded bg-muted px-0.5">{"{ } $ \" '"}</code>) are
+            supported.
+          </p>
         </div>
       )}

View File

@@ -1,5 +1,5 @@
 // JavaScript wrapper for githubJsonService (for use with node server.js)
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { repositoryService } from './repositoryService.js';
 import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';
@@ -163,25 +163,42 @@ class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -189,7 +206,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -205,13 +223,15 @@
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles = [];
+      const allDeletedFiles = [];
       const processedSlugs = new Set();
       let totalSynced = 0;
@@ -222,6 +242,7 @@
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
+          allDeletedFiles.push(...(result.deletedFiles ?? []));
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');
             if (processedSlugs.has(slug)) {
@@ -243,11 +264,16 @@
       await this.updateExistingFilesWithRepositoryUrl();

+      const msg =
+        allDeletedFiles.length > 0
+          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+        message: msg,
         count: totalSynced,
-        syncedFiles: allSyncedFiles
+        syncedFiles: allSyncedFiles,
+        deletedFiles: allDeletedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);
@@ -255,7 +281,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -297,6 +324,32 @@ class GitHubJsonService {
     }
   }

+  async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content);
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
     const filesToSync = [];

View File

@@ -1,4 +1,4 @@
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { env } from '../../env.js';
 import type { Script, ScriptCard, GitHubFile } from '../../types/script';
@@ -158,7 +158,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from a specific repository
    */
-  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log(`Starting JSON sync from repository: ${repoUrl}`);
@@ -170,28 +170,45 @@ export class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);

+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       // Compare and find files that need syncing
       // For multi-repo support, we need to check if file exists AND if it's from this repo
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }

       // Download and save only the files that need syncing
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -199,7 +216,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -207,7 +225,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from all enabled repositories (main repo has priority)
    */
-  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log('Starting multi-repository JSON sync...');
@@ -218,13 +236,15 @@ export class GitHubJsonService {
           success: false,
           message: 'No enabled repositories found',
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles: []
         };
       }

       console.log(`Found ${enabledRepos.length} enabled repositories`);

       const allSyncedFiles: string[] = [];
+      const allDeletedFiles: string[] = [];
       const processedSlugs = new Set<string>(); // Track slugs we've already processed
       let totalSynced = 0;
@@ -236,6 +256,7 @@ export class GitHubJsonService {
         const result = await this.syncJsonFilesForRepo(repo.url);

         if (result.success) {
+          allDeletedFiles.push(...(result.deletedFiles ?? []));
           // Only count files that weren't already processed from a higher priority repo
           const newFiles = result.syncedFiles.filter(file => {
             const slug = file.replace('.json', '');
@@ -259,11 +280,16 @@ export class GitHubJsonService {
       // Also update existing files that don't have repository_url set (backward compatibility)
       await this.updateExistingFilesWithRepositoryUrl();

+      const msg =
+        allDeletedFiles.length > 0
+          ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
       return {
         success: true,
-        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+        message: msg,
         count: totalSynced,
-        syncedFiles: allSyncedFiles
+        syncedFiles: allSyncedFiles,
+        deletedFiles: allDeletedFiles
       };
     } catch (error) {
       console.error('Multi-repository JSON sync failed:', error);
@@ -271,7 +297,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -316,6 +343,36 @@ export class GitHubJsonService {
     }
   }

+  /**
+   * Delete local JSON files that belong to this repo but are no longer in the remote list.
+   * Returns the list of deleted filenames.
+   */
+  private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles: string[] = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory!, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content) as Script;
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   /**
    * Find files that need syncing for a specific repository
    * This checks if file exists locally AND if it's from the same repository
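
With deletedFiles added to the sync return types, a caller can report removals alongside the synced count. A hypothetical consumer (the `service` variable and surrounding code are assumptions, not part of this diff):

// Hypothetical caller; assumes `service` is an instance of GitHubJsonService.
const result = await service.syncJsonFiles();
if (result.success && result.deletedFiles.length > 0) {
  console.log(`Synced ${result.count} file(s); removed ${result.deletedFiles.length} obsolete file(s).`);
}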

View File

@@ -1,6 +1,8 @@
 import { spawn } from 'child_process';
 import { spawn as ptySpawn } from 'node-pty';
-import { existsSync } from 'fs';
+import { existsSync, writeFileSync, chmodSync, unlinkSync } from 'fs';
+import { join } from 'path';
+import { tmpdir } from 'os';

 /**
@@ -194,26 +196,45 @@ class SSHExecutionService {
    */
   async transferScriptsFolder(server, onData, onError) {
     const { ip, user, password, auth_type = 'password', ssh_key_passphrase, ssh_key_path, ssh_port = 22 } = server;

+    const cleanupTempFile = (/** @type {string | null} */ tempPath) => {
+      if (tempPath) {
+        try {
+          unlinkSync(tempPath);
+        } catch (_) {
+          // ignore
+        }
+      }
+    };

     return new Promise((resolve, reject) => {
+      /** @type {string | null} */
+      let tempPath = null;
       try {
-        // Build rsync command based on authentication type
+        // Build rsync command based on authentication type.
+        // Use sshpass -f with a temp file so password/passphrase never go through the shell (safe for special chars like {, $, ").
         let rshCommand;
         if (auth_type === 'key') {
           if (!ssh_key_path || !existsSync(ssh_key_path)) {
             throw new Error('SSH key file not found');
           }
           if (ssh_key_passphrase) {
-            rshCommand = `sshpass -P passphrase -p ${ssh_key_passphrase} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+            tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+            writeFileSync(tempPath, ssh_key_passphrase);
+            chmodSync(tempPath, 0o600);
+            rshCommand = `sshpass -P passphrase -f ${tempPath} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           } else {
             rshCommand = `ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
           }
         } else {
           // Password authentication
-          rshCommand = `sshpass -p ${password} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+          tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+          writeFileSync(tempPath, password ?? '');
+          chmodSync(tempPath, 0o600);
+          rshCommand = `sshpass -f ${tempPath} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
         }

         const rsyncCommand = spawn('rsync', [
           '-avz',
           '--delete',
@@ -226,31 +247,31 @@ class SSHExecutionService {
           stdio: ['pipe', 'pipe', 'pipe']
         });

         rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onData(output);
         });

         rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
           const output = data.toString('utf8');
           onError(output);
         });

         rsyncCommand.on('close', (code) => {
+          cleanupTempFile(tempPath);
           if (code === 0) {
             resolve();
           } else {
             reject(new Error(`rsync failed with code ${code}`));
           }
         });

         rsyncCommand.on('error', (error) => {
+          cleanupTempFile(tempPath);
           reject(error);
         });
       } catch (error) {
+        cleanupTempFile(tempPath);
         reject(error);
       }
     });
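
The change above follows one pattern in both branches: write the secret to a mode-0600 temp file, point sshpass at it with -f, and unlink the file once rsync finishes, so the password/passphrase never appears on a command line or passes through the shell. A condensed sketch of that pattern (illustrative helper, not the service's API):

import { writeFileSync, chmodSync, unlinkSync } from 'fs';
import { join } from 'path';
import { tmpdir } from 'os';

// Illustrative helper: run a callback with a short-lived secret file, then remove it.
function withSecretFile(secret, fn) {
  const path = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
  writeFileSync(path, secret);
  chmodSync(path, 0o600); // readable only by the owner
  try {
    // e.g. fn(path) might build: `sshpass -f ${path} ssh -p 22 -o StrictHostKeyChecking=no ...`
    return fn(path);
  } finally {
    try { unlinkSync(path); } catch { /* already removed */ }
  }
}

In the service itself the secret file has to outlive this synchronous setup, so cleanup happens in rsync's close/error handlers (and the catch block) rather than in a finally.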

View File

@@ -169,16 +169,17 @@ class SSHService {
     const timeout = 10000;
     let resolved = false;

+    // Pass password via env so it is not embedded in the script (safe for special chars like {, $, ").
     const expectScript = `#!/usr/bin/expect -f
 set timeout 10
 spawn ssh -p ${ssh_port} -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o PasswordAuthentication=yes -o PubkeyAuthentication=no ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
 expect {
   "password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "Password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "SSH_LOGIN_SUCCESS" {
@@ -193,7 +194,8 @@ expect {
 }`;

     const expectCommand = spawn('expect', ['-c', expectScript], {
-      stdio: ['pipe', 'pipe', 'pipe']
+      stdio: ['pipe', 'pipe', 'pipe'],
+      env: { ...process.env, SSH_PASSWORD: password ?? '' }
     });

     const timer = setTimeout(() => {