Compare commits

6 Commits

| Author | SHA1 | Date |
|---|---|---|
| | e1d270d52c | |
| | 20dbcae42a | |
| | 8e8c724392 | |
| | 201b33ec84 | |
| | f33504baf5 | |
| | 4bc5f4d6ad | |
server.js (32 changed lines)
```diff
@@ -3,6 +3,7 @@ import { parse } from 'url';
 import next from 'next';
 import { WebSocketServer } from 'ws';
 import { spawn } from 'child_process';
+import { existsSync } from 'fs';
 import { join, resolve } from 'path';
 import stripAnsi from 'strip-ansi';
 import { spawn as ptySpawn } from 'node-pty';
@@ -56,6 +57,8 @@ const handle = app.getRequestHandler();
  * @property {string} user
  * @property {string} password
  * @property {number} [id]
+ * @property {string} [auth_type]
+ * @property {string} [ssh_key_path]
  */
 
 /**
@@ -295,6 +298,20 @@ class ScriptExecutionHandler {
     });
   }
 
+  /**
+   * Resolve full server from DB when client sends server with id but no ssh_key_path (e.g. for Shell/Update over SSH).
+   * @param {ServerInfo|null} server - Server from WebSocket message
+   * @returns {Promise<ServerInfo|null>} Same server or full server from DB
+   */
+  async resolveServerForSSH(server) {
+    if (!server?.id) return server;
+    if (server.auth_type === 'key' && (!server.ssh_key_path || !existsSync(server.ssh_key_path))) {
+      const full = await this.db.getServerById(server.id);
+      return /** @type {ServerInfo|null} */ (full ?? server);
+    }
+    return server;
+  }
+
   /**
    * @param {ExtendedWebSocket} ws
    * @param {WebSocketMessage} message
@@ -305,16 +322,21 @@ class ScriptExecutionHandler {
     switch (action) {
       case 'start':
         if (scriptPath && executionId) {
+          let serverToUse = server;
+          if (serverToUse?.id) {
+            serverToUse = await this.resolveServerForSSH(serverToUse) ?? serverToUse;
+          }
+          const resolved = serverToUse ?? server;
           if (isClone && containerId && storage && server && cloneCount && hostnames && containerType) {
-            await this.startSSHCloneExecution(ws, containerId, executionId, storage, server, containerType, cloneCount, hostnames);
+            await this.startSSHCloneExecution(ws, containerId, executionId, storage, /** @type {ServerInfo} */ (resolved), containerType, cloneCount, hostnames);
           } else if (isBackup && containerId && storage) {
-            await this.startBackupExecution(ws, containerId, executionId, storage, mode, server);
+            await this.startBackupExecution(ws, containerId, executionId, storage, mode, resolved);
           } else if (isUpdate && containerId) {
-            await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage);
+            await this.startUpdateExecution(ws, containerId, executionId, mode, resolved, backupStorage);
           } else if (isShell && containerId) {
-            await this.startShellExecution(ws, containerId, executionId, mode, server, containerType);
+            await this.startShellExecution(ws, containerId, executionId, mode, resolved, containerType);
           } else {
-            await this.startScriptExecution(ws, scriptPath, executionId, mode, server, envVars);
+            await this.startScriptExecution(ws, scriptPath, executionId, mode, resolved, envVars);
           }
         } else {
           this.sendMessage(ws, {
```
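For orientation, a hedged sketch (not part of the diff) of how the new `resolveServerForSSH()` treats different inputs. `handler` and all values are illustrative; `db.getServerById()` is the lookup the method itself calls.

```js
// Illustrative only: `handler` stands in for a ScriptExecutionHandler instance,
// and these calls are assumed to run inside an async context.

// No id: returned unchanged, nothing to look up.
await handler.resolveServerForSSH({ ip: '192.0.2.10', user: 'root', password: 'secret' });

// id + key auth, but no usable key path: the full record is loaded via db.getServerById(id),
// so auth_type/ssh_key_path from the database are used for the SSH session.
await handler.resolveServerForSSH({ id: 3, ip: '192.0.2.10', user: 'root', password: '', auth_type: 'key' });

// id + key path that exists on disk: returned unchanged, no DB round trip.
await handler.resolveServerForSSH({
  id: 3, ip: '192.0.2.10', user: 'root', password: '',
  auth_type: 'key', ssh_key_path: '/root/.ssh/id_ed25519'
});
```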
ServerForm component:

```diff
@@ -438,6 +438,11 @@ export function ServerForm({
           {errors.password && (
             <p className="text-destructive mt-1 text-sm">{errors.password}</p>
           )}
+          <p className="text-muted-foreground mt-1 text-xs">
+            SSH key is recommended when possible. Special characters (e.g.{" "}
+            <code className="rounded bg-muted px-0.5">{"{ } $ \" '"}</code>) are
+            supported.
+          </p>
         </div>
       )}
```
githubJsonService (JavaScript wrapper):

```diff
@@ -1,5 +1,5 @@
 // JavaScript wrapper for githubJsonService (for use with node server.js)
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { repositoryService } from './repositoryService.js';
 import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';
@@ -163,25 +163,42 @@ class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);
 
+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);
 
       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }
 
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);
 
+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -189,7 +206,8 @@ class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -205,13 +223,15 @@ class GitHubJsonService {
         success: false,
         message: 'No enabled repositories found',
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
 
     console.log(`Found ${enabledRepos.length} enabled repositories`);
 
     const allSyncedFiles = [];
+    const allDeletedFiles = [];
     const processedSlugs = new Set();
     let totalSynced = 0;
 
@@ -222,6 +242,7 @@ class GitHubJsonService {
       const result = await this.syncJsonFilesForRepo(repo.url);
 
       if (result.success) {
+        allDeletedFiles.push(...(result.deletedFiles ?? []));
         const newFiles = result.syncedFiles.filter(file => {
           const slug = file.replace('.json', '');
           if (processedSlugs.has(slug)) {
@@ -243,11 +264,16 @@ class GitHubJsonService {
 
     await this.updateExistingFilesWithRepositoryUrl();
 
+    const msg =
+      allDeletedFiles.length > 0
+        ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+        : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
     return {
       success: true,
-      message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+      message: msg,
       count: totalSynced,
-      syncedFiles: allSyncedFiles
+      syncedFiles: allSyncedFiles,
+      deletedFiles: allDeletedFiles
     };
   } catch (error) {
     console.error('Multi-repository JSON sync failed:', error);
@@ -255,7 +281,8 @@ class GitHubJsonService {
       success: false,
       message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
       count: 0,
-      syncedFiles: []
+      syncedFiles: [],
+      deletedFiles: []
     };
   }
 }
@@ -297,6 +324,32 @@ class GitHubJsonService {
     }
   }
 
+  async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content);
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
     const filesToSync = [];
```
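A hedged usage sketch of the extended result shape (the repository URL and file names are placeholders, and `githubJsonService` is assumed to be an instance of the class above): callers can now distinguish files that were synced from files that were pruned because they disappeared upstream.

```js
// Illustrative only: URL and any logged file names are placeholders.
const result = await githubJsonService.syncJsonFilesForRepo('https://example.com/org/scripts-repo');
// result: { success, message, count, syncedFiles, deletedFiles }
if (result.success && result.deletedFiles.length > 0) {
  console.log(`Pruned ${result.deletedFiles.length} local JSON file(s) removed upstream`);
}
```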
GitHubJsonService (TypeScript service):

```diff
@@ -1,4 +1,4 @@
-import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
+import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
 import { join } from 'path';
 import { env } from '../../env.js';
 import type { Script, ScriptCard, GitHubFile } from '../../types/script';
@@ -158,7 +158,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from a specific repository
    */
-  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log(`Starting JSON sync from repository: ${repoUrl}`);
 
@@ -170,28 +170,45 @@ export class GitHubJsonService {
       const localFiles = await this.getLocalJsonFiles();
       console.log(`Found ${localFiles.length} local JSON files`);
 
+      // Delete local JSON files that belong to this repo but are no longer in the remote
+      const remoteFilenames = new Set(githubFiles.map((f) => f.name));
+      const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
+      if (deletedFiles.length > 0) {
+        console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
+      }
+
       // Compare and find files that need syncing
       // For multi-repo support, we need to check if file exists AND if it's from this repo
       const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
       console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);
 
       if (filesToSync.length === 0) {
+        const msg =
+          deletedFiles.length > 0
+            ? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
+            : `All JSON files are up to date for repository: ${repoUrl}`;
         return {
           success: true,
-          message: `All JSON files are up to date for repository: ${repoUrl}`,
+          message: msg,
           count: 0,
-          syncedFiles: []
+          syncedFiles: [],
+          deletedFiles
         };
       }
 
       // Download and save only the files that need syncing
       const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);
 
+      const msg =
+        deletedFiles.length > 0
+          ? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
+          : `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
       return {
         success: true,
-        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
+        message: msg,
         count: syncedFiles.length,
-        syncedFiles
+        syncedFiles,
+        deletedFiles
       };
     } catch (error) {
       console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -199,7 +216,8 @@ export class GitHubJsonService {
         success: false,
         message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
   }
@@ -207,7 +225,7 @@ export class GitHubJsonService {
   /**
    * Sync JSON files from all enabled repositories (main repo has priority)
    */
-  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
+  async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
     try {
       console.log('Starting multi-repository JSON sync...');
 
@@ -218,13 +236,15 @@ export class GitHubJsonService {
         success: false,
         message: 'No enabled repositories found',
         count: 0,
-        syncedFiles: []
+        syncedFiles: [],
+        deletedFiles: []
       };
     }
 
     console.log(`Found ${enabledRepos.length} enabled repositories`);
 
     const allSyncedFiles: string[] = [];
+    const allDeletedFiles: string[] = [];
     const processedSlugs = new Set<string>(); // Track slugs we've already processed
     let totalSynced = 0;
 
@@ -236,6 +256,7 @@ export class GitHubJsonService {
       const result = await this.syncJsonFilesForRepo(repo.url);
 
       if (result.success) {
+        allDeletedFiles.push(...(result.deletedFiles ?? []));
         // Only count files that weren't already processed from a higher priority repo
         const newFiles = result.syncedFiles.filter(file => {
           const slug = file.replace('.json', '');
@@ -259,11 +280,16 @@ export class GitHubJsonService {
     // Also update existing files that don't have repository_url set (backward compatibility)
     await this.updateExistingFilesWithRepositoryUrl();
 
+    const msg =
+      allDeletedFiles.length > 0
+        ? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
+        : `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
     return {
       success: true,
-      message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
+      message: msg,
       count: totalSynced,
-      syncedFiles: allSyncedFiles
+      syncedFiles: allSyncedFiles,
+      deletedFiles: allDeletedFiles
     };
   } catch (error) {
     console.error('Multi-repository JSON sync failed:', error);
@@ -271,7 +297,8 @@ export class GitHubJsonService {
       success: false,
       message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
       count: 0,
-      syncedFiles: []
+      syncedFiles: [],
+      deletedFiles: []
     };
   }
 }
@@ -316,6 +343,36 @@ export class GitHubJsonService {
     }
   }
 
+  /**
+   * Delete local JSON files that belong to this repo but are no longer in the remote list.
+   * Returns the list of deleted filenames.
+   */
+  private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
+    this.initializeConfig();
+    const localFiles = await this.getLocalJsonFiles();
+    const deletedFiles: string[] = [];
+
+    for (const file of localFiles) {
+      try {
+        const filePath = join(this.localJsonDirectory!, file);
+        const content = await readFile(filePath, 'utf-8');
+        const script = JSON.parse(content) as Script;
+
+        if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
+          await unlink(filePath);
+          const slug = file.replace(/\.json$/, '');
+          this.scriptCache.delete(slug);
+          deletedFiles.push(file);
+          console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
+        }
+      } catch {
+        // If we can't read or parse the file, skip (do not delete)
+      }
+    }
+
+    return deletedFiles;
+  }
+
   /**
    * Find files that need syncing for a specific repository
    * This checks if file exists locally AND if it's from the same repository
```
SSHExecutionService:

```diff
@@ -1,6 +1,8 @@
 import { spawn } from 'child_process';
 import { spawn as ptySpawn } from 'node-pty';
-import { existsSync } from 'fs';
+import { existsSync, writeFileSync, chmodSync, unlinkSync } from 'fs';
+import { join } from 'path';
+import { tmpdir } from 'os';
 
 
 /**
@@ -194,26 +196,45 @@ class SSHExecutionService {
   */
  async transferScriptsFolder(server, onData, onError) {
    const { ip, user, password, auth_type = 'password', ssh_key_passphrase, ssh_key_path, ssh_port = 22 } = server;
 
+    const cleanupTempFile = (/** @type {string | null} */ tempPath) => {
+      if (tempPath) {
+        try {
+          unlinkSync(tempPath);
+        } catch (_) {
+          // ignore
+        }
+      }
+    };
+
    return new Promise((resolve, reject) => {
+      /** @type {string | null} */
+      let tempPath = null;
      try {
-        // Build rsync command based on authentication type
+        // Build rsync command based on authentication type.
+        // Use sshpass -f with a temp file so password/passphrase never go through the shell (safe for special chars like {, $, ").
        let rshCommand;
        if (auth_type === 'key') {
          if (!ssh_key_path || !existsSync(ssh_key_path)) {
            throw new Error('SSH key file not found');
          }
 
          if (ssh_key_passphrase) {
-            rshCommand = `sshpass -P passphrase -p ${ssh_key_passphrase} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+            tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+            writeFileSync(tempPath, ssh_key_passphrase);
+            chmodSync(tempPath, 0o600);
+            rshCommand = `sshpass -P passphrase -f ${tempPath} ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
          } else {
            rshCommand = `ssh -i ${ssh_key_path} -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
          }
        } else {
          // Password authentication
-          rshCommand = `sshpass -p ${password} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
+          tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
+          writeFileSync(tempPath, password ?? '');
+          chmodSync(tempPath, 0o600);
+          rshCommand = `sshpass -f ${tempPath} ssh -p ${ssh_port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null`;
        }
 
        const rsyncCommand = spawn('rsync', [
          '-avz',
          '--delete',
@@ -226,31 +247,31 @@ class SSHExecutionService {
          stdio: ['pipe', 'pipe', 'pipe']
        });
 
-        rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
-          const output = data.toString('utf8');
-          onData(output);
-        });
+        rsyncCommand.stdout.on('data', (/** @type {Buffer} */ data) => {
+          const output = data.toString('utf8');
+          onData(output);
+        });
 
-        rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
-          // Ensure proper UTF-8 encoding for ANSI colors
-          const output = data.toString('utf8');
-          onError(output);
-        });
+        rsyncCommand.stderr.on('data', (/** @type {Buffer} */ data) => {
+          const output = data.toString('utf8');
+          onError(output);
+        });
 
-        rsyncCommand.on('close', (code) => {
-          if (code === 0) {
-            resolve();
-          } else {
-            reject(new Error(`rsync failed with code ${code}`));
-          }
-        });
+        rsyncCommand.on('close', (code) => {
+          cleanupTempFile(tempPath);
+          if (code === 0) {
+            resolve();
+          } else {
+            reject(new Error(`rsync failed with code ${code}`));
+          }
+        });
 
-        rsyncCommand.on('error', (error) => {
-          reject(error);
-        });
+        rsyncCommand.on('error', (error) => {
+          cleanupTempFile(tempPath);
+          reject(error);
+        });
      } catch (error) {
+        cleanupTempFile(tempPath);
        reject(error);
      }
    });
```
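The comment in the hunk above states the intent: the secret should never pass through a shell or show up in a process listing. As a standalone, hedged illustration of that pattern (the function name and argument layout are assumptions, not the project's API; only `sshpass -f` and `rsync -e` are standard tool flags), it looks roughly like this:

```js
// Minimal sketch of the temp-file pattern the diff applies (illustrative names).
import { spawn } from 'child_process';
import { writeFileSync, chmodSync, unlinkSync } from 'fs';
import { join } from 'path';
import { tmpdir } from 'os';

function rsyncWithPassword(password, src, dest, port = 22) {
  // Write the secret to a file only the current user can read...
  const tempPath = join(tmpdir(), `sshpass-${process.pid}-${Date.now()}.tmp`);
  writeFileSync(tempPath, password);
  chmodSync(tempPath, 0o600);

  // ...and let sshpass read it with -f, so it never appears on a command line.
  const rsh = `sshpass -f ${tempPath} ssh -p ${port} -o StrictHostKeyChecking=no`;
  const child = spawn('rsync', ['-avz', '-e', rsh, src, dest]);

  // Remove the temp file however rsync ends.
  const cleanup = () => { try { unlinkSync(tempPath); } catch { /* ignore */ } };
  child.on('close', cleanup);
  child.on('error', cleanup);
  return child;
}
```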
SSHService:

```diff
@@ -169,16 +169,17 @@ class SSHService {
      const timeout = 10000;
      let resolved = false;
 
+      // Pass password via env so it is not embedded in the script (safe for special chars like {, $, ").
      const expectScript = `#!/usr/bin/expect -f
 set timeout 10
 spawn ssh -p ${ssh_port} -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o PasswordAuthentication=yes -o PubkeyAuthentication=no ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
 expect {
   "password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "Password:" {
-    send "${password}\r"
+    send "$env(SSH_PASSWORD)\\r"
     exp_continue
   }
   "SSH_LOGIN_SUCCESS" {
@@ -193,7 +194,8 @@
 }`;
 
      const expectCommand = spawn('expect', ['-c', expectScript], {
-        stdio: ['pipe', 'pipe', 'pipe']
+        stdio: ['pipe', 'pipe', 'pipe'],
+        env: { ...process.env, SSH_PASSWORD: password ?? '' }
      });
 
      const timer = setTimeout(() => {
```
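The same idea isolated into a hedged sketch (the function name, options, and timeout handling are illustrative, not the project's code): the password reaches `expect` through the child's environment, so the generated script contains no secret and needs no quoting of special characters.

```js
import { spawn } from 'child_process';

// Illustrative only: a minimal password-login probe in the style of the diff above.
function probeSshLogin(user, ip, port, password) {
  const expectScript = `
set timeout 10
spawn ssh -p ${port} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${user}@${ip} "echo SSH_LOGIN_SUCCESS"
expect {
  "password:" { send "$env(SSH_PASSWORD)\\r"; exp_continue }
  "Password:" { send "$env(SSH_PASSWORD)\\r"; exp_continue }
  "SSH_LOGIN_SUCCESS" { exit 0 }
  timeout { exit 1 }
}`;
  // The secret travels via the environment, never inside expectScript itself.
  return spawn('expect', ['-c', expectScript], {
    env: { ...process.env, SSH_PASSWORD: password ?? '' }
  });
}
```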