Compare commits


8 Commits

Author SHA1 Message Date
github-actions[bot]
351ba09f4e chore: bump version to 0.5.6 (VERSION + package.json) (#482)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-01-29 15:08:41 +00:00
Michel Roegl-Brunner
580986abfa Merge pull request #481 from community-scripts/fix/466
fix: resolve server from DB for SSH when client sends no ssh_key_path (fixes #466)
2026-01-29 16:03:08 +01:00
Michel Rögl-Brunner
e1d270d52c fix: resolve server from DB for SSH when client sends no ssh_key_path (fixes #466)
- Add resolveServerForSSH() to load full server (including ssh_key_path) from DB
  when WebSocket server has id but key auth without valid ssh_key_path
- Call resolver in handleMessage for all start flows (clone, backup, update,
  shell, script) so Shell and Update over SSH work with key auth
- Extend ServerInfo typedef with auth_type, ssh_key_path for TypeScript
2026-01-29 15:59:58 +01:00
Michel Roegl-Brunner
20dbcae42a Merge pull request #480 from community-scripts/fix/405
fix: delete local JSON files when removed from remote repo (fixes #405)
2026-01-29 15:47:31 +01:00
Michel Rögl-Brunner
8e8c724392 fix: delete local JSON files when removed from remote repo (fixes #405)
- Add deleteLocalFilesRemovedFromRepo() to remove local script JSON files
  that belong to the synced repo but are no longer in the remote list
- Call it in syncJsonFilesForRepo() before find/sync so stale scripts
  no longer appear and download attempts don't 404
- Extend sync return types with deletedFiles; aggregate in syncJsonFiles()
  and include removed count in success message
2026-01-29 15:44:45 +01:00
Michel Roegl-Brunner
201b33ec84 Merge pull request #479 from community-scripts/fix/464
fix: use node-specific Proxmox config paths for VM vs LXC (fixes #464)
2026-01-29 15:31:57 +01:00
Michel Rögl-Brunner
6d2df9929c fix: use node-specific Proxmox config paths for VM vs LXC detection
- isVM(): check /etc/pve/nodes/<server.name>/qemu-server and lxc first, then fall back to /etc/pve/qemu-server and lxc for single-node setups
- checkConfigAndExtractInfo, config-existence checks, getContainerHostname, addClonedContainerToDatabase: use node-specific paths
- syncLXCConfig/updateLXCConfig: use node-specific LXC config path
- server.js clone flow: use node-specific config path

Fixes #464
2026-01-29 15:29:35 +01:00
Michel Roegl-Brunner
f33504baf5 Merge pull request #478 from community-scripts/fix/312
fix: handle special characters in SSH password/passphrase (Fixes #312)
2026-01-29 15:20:44 +01:00
6 changed files with 225 additions and 85 deletions

View File

@@ -1 +1 @@
0.5.5
0.5.6

View File

@@ -1,6 +1,6 @@
{
"name": "pve-scripts-local",
"version": "0.1.0",
"version": "0.5.6",
"private": true,
"type": "module",
"scripts": {
@@ -106,4 +106,4 @@
"prismjs": "^1.30.0",
"hono": ">=4.11.7"
}
}
}

View File

@@ -3,6 +3,7 @@ import { parse } from 'url';
import next from 'next';
import { WebSocketServer } from 'ws';
import { spawn } from 'child_process';
import { existsSync } from 'fs';
import { join, resolve } from 'path';
import stripAnsi from 'strip-ansi';
import { spawn as ptySpawn } from 'node-pty';
@@ -56,6 +57,8 @@ const handle = app.getRequestHandler();
* @property {string} user
* @property {string} password
* @property {number} [id]
* @property {string} [auth_type]
* @property {string} [ssh_key_path]
*/
/**
@@ -295,6 +298,20 @@ class ScriptExecutionHandler {
});
}
/**
* Resolve full server from DB when client sends server with id but no ssh_key_path (e.g. for Shell/Update over SSH).
* @param {ServerInfo|null} server - Server from WebSocket message
* @returns {Promise<ServerInfo|null>} Same server or full server from DB
*/
async resolveServerForSSH(server) {
if (!server?.id) return server;
if (server.auth_type === 'key' && (!server.ssh_key_path || !existsSync(server.ssh_key_path))) {
const full = await this.db.getServerById(server.id);
return /** @type {ServerInfo|null} */ (full ?? server);
}
return server;
}
/**
* @param {ExtendedWebSocket} ws
* @param {WebSocketMessage} message
@@ -305,16 +322,21 @@ class ScriptExecutionHandler {
switch (action) {
case 'start':
if (scriptPath && executionId) {
let serverToUse = server;
if (serverToUse?.id) {
serverToUse = await this.resolveServerForSSH(serverToUse) ?? serverToUse;
}
const resolved = serverToUse ?? server;
if (isClone && containerId && storage && server && cloneCount && hostnames && containerType) {
await this.startSSHCloneExecution(ws, containerId, executionId, storage, server, containerType, cloneCount, hostnames);
await this.startSSHCloneExecution(ws, containerId, executionId, storage, /** @type {ServerInfo} */ (resolved), containerType, cloneCount, hostnames);
} else if (isBackup && containerId && storage) {
await this.startBackupExecution(ws, containerId, executionId, storage, mode, server);
await this.startBackupExecution(ws, containerId, executionId, storage, mode, resolved);
} else if (isUpdate && containerId) {
await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage);
await this.startUpdateExecution(ws, containerId, executionId, mode, resolved, backupStorage);
} else if (isShell && containerId) {
await this.startShellExecution(ws, containerId, executionId, mode, server, containerType);
await this.startShellExecution(ws, containerId, executionId, mode, resolved, containerType);
} else {
await this.startScriptExecution(ws, scriptPath, executionId, mode, server, envVars);
await this.startScriptExecution(ws, scriptPath, executionId, mode, resolved, envVars);
}
} else {
this.sendMessage(ws, {
@@ -1153,10 +1175,11 @@ class ScriptExecutionHandler {
const hostname = hostnames[i];
try {
// Read config file to get hostname/name
// Read config file to get hostname/name (node-specific path)
const nodeName = server.name;
const configPath = containerType === 'lxc'
? `/etc/pve/lxc/${nextId}.conf`
: `/etc/pve/qemu-server/${nextId}.conf`;
? `/etc/pve/nodes/${nodeName}/lxc/${nextId}.conf`
: `/etc/pve/nodes/${nodeName}/qemu-server/${nextId}.conf`;
let configContent = '';
await new Promise(/** @type {(resolve: (value?: void) => void) => void} */ ((resolve) => {
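
For orientation, a condensed, self-contained sketch of the resolution step shown in the server.js hunk above. The DB accessor is reduced to a minimal interface; only getServerById (which the diff does call) is assumed, and the surrounding handler context is omitted:

// Sketch (TypeScript): re-load a partial server record before an SSH-backed flow starts.
interface ServerInfo {
  name: string;
  ip: string;
  user: string;
  password: string;
  id?: number;
  auth_type?: string;
  ssh_key_path?: string;
}

interface ServerDb {
  getServerById(id: number): Promise<ServerInfo | null>;
}

// existsSync stands in for the fs.existsSync import added in the diff.
async function resolveServerForSSH(
  db: ServerDb,
  existsSync: (path: string) => boolean,
  server: ServerInfo | null
): Promise<ServerInfo | null> {
  if (!server?.id) return server; // nothing to look up without an id
  const keyAuthWithoutUsableKey =
    server.auth_type === 'key' &&
    (!server.ssh_key_path || !existsSync(server.ssh_key_path));
  if (keyAuthWithoutUsableKey) {
    // Client sent only a partial record; fetch the full row (including ssh_key_path) from the DB.
    return (await db.getServerById(server.id)) ?? server;
  }
  return server;
}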

View File

@@ -418,44 +418,46 @@ async function isVM(scriptId: number, containerId: string, serverId: number | nu
return false; // Default to LXC if SSH fails
}
// Check both config file paths
const vmConfigPath = `/etc/pve/qemu-server/${containerId}.conf`;
const lxcConfigPath = `/etc/pve/lxc/${containerId}.conf`;
// Check VM config file
let vmConfigExists = false;
await new Promise<void>((resolve) => {
void sshExecutionService.executeCommand(
server as Server,
`test -f "${vmConfigPath}" && echo "exists" || echo "not_exists"`,
(data: string) => {
if (data.includes('exists')) {
vmConfigExists = true;
}
},
() => resolve(),
() => resolve()
);
});
if (vmConfigExists) {
return true; // VM config file exists
}
// Check LXC config file (not needed for return value, but check for completeness)
await new Promise<void>((resolve) => {
void sshExecutionService.executeCommand(
server as Server,
`test -f "${lxcConfigPath}" && echo "exists" || echo "not_exists"`,
(_data: string) => {
// Data handler not needed - just checking if file exists
},
() => resolve(),
() => resolve()
);
});
// Node-specific paths (multi-node Proxmox: /etc/pve/nodes/NODENAME/...)
const nodeName = (server as Server).name;
const vmConfigPathNode = `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`;
const lxcConfigPathNode = `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`;
// Fallback for single-node or when server.name is not the Proxmox node name
const vmConfigPathFallback = `/etc/pve/qemu-server/${containerId}.conf`;
const lxcConfigPathFallback = `/etc/pve/lxc/${containerId}.conf`;
return false; // Always LXC since VM config doesn't exist
const checkPathExists = (path: string): Promise<boolean> =>
new Promise<boolean>((resolve) => {
let exists = false;
void sshExecutionService.executeCommand(
server as Server,
`test -f "${path}" && echo "exists" || echo "not_exists"`,
(data: string) => {
if (data.includes('exists')) exists = true;
},
() => resolve(exists),
() => resolve(exists)
);
});
// Prefer node-specific paths first
const vmConfigExistsNode = await checkPathExists(vmConfigPathNode);
if (vmConfigExistsNode) {
return true; // VM config file exists on node
}
const lxcConfigExistsNode = await checkPathExists(lxcConfigPathNode);
if (lxcConfigExistsNode) {
return false; // LXC config file exists on node
}
// Fallback: single-node or server.name not matching Proxmox node name
const vmConfigExistsFallback = await checkPathExists(vmConfigPathFallback);
if (vmConfigExistsFallback) {
return true;
}
return false; // LXC (or neither path exists)
} catch (error) {
console.error('Error determining container type:', error);
return false; // Default to LXC on error
@@ -971,10 +973,11 @@ export const installedScriptsRouter = createTRPCRouter({
};
// Helper function to check config file for community-script tag and extract hostname/name
const nodeName = (server as Server).name;
const checkConfigAndExtractInfo = async (id: string, isVM: boolean): Promise<any> => {
const configPath = isVM
? `/etc/pve/qemu-server/${id}.conf`
: `/etc/pve/lxc/${id}.conf`;
? `/etc/pve/nodes/${nodeName}/qemu-server/${id}.conf`
: `/etc/pve/nodes/${nodeName}/lxc/${id}.conf`;
const readCommand = `cat "${configPath}" 2>/dev/null`;
@@ -1318,10 +1321,10 @@ export const installedScriptsRouter = createTRPCRouter({
// Check if ID exists in either pct list (containers) or qm list (VMs)
if (!existingIds.has(containerId)) {
// Also verify config file doesn't exist as a double-check
// Check both container and VM config paths
const checkContainerCommand = `test -f "/etc/pve/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
const checkVMCommand = `test -f "/etc/pve/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;
// Also verify config file doesn't exist as a double-check (node-specific paths)
const nodeName = (server as Server).name;
const checkContainerCommand = `test -f "/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf" && echo "exists" || echo "not_found"`;
const checkVMCommand = `test -f "/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf" && echo "exists" || echo "not_found"`;
const configExists = await new Promise<boolean>((resolve) => {
let combinedOutput = '';
@@ -2237,8 +2240,9 @@ export const installedScriptsRouter = createTRPCRouter({
};
}
// Read config file
const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
// Read config file (node-specific path)
const nodeName = (server as Server).name;
const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
const readCommand = `cat "${configPath}" 2>/dev/null`;
let rawConfig = '';
@@ -2368,8 +2372,9 @@ export const installedScriptsRouter = createTRPCRouter({
};
}
// Write config file using heredoc for safe escaping
const configPath = `/etc/pve/lxc/${script.container_id}.conf`;
// Write config file using heredoc for safe escaping (node-specific path)
const nodeName = (server as Server).name;
const configPath = `/etc/pve/nodes/${nodeName}/lxc/${script.container_id}.conf`;
const writeCommand = `cat > "${configPath}" << 'EOFCONFIG'
${rawConfig}
EOFCONFIG`;
@@ -2777,9 +2782,10 @@ EOFCONFIG`;
const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
const sshExecutionService = getSSHExecutionService();
const nodeName = (server as Server).name;
const configPath = input.containerType === 'lxc'
? `/etc/pve/lxc/${input.containerId}.conf`
: `/etc/pve/qemu-server/${input.containerId}.conf`;
? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
: `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;
let configContent = '';
await new Promise<void>((resolve) => {
@@ -3171,10 +3177,11 @@ EOFCONFIG`;
const { getSSHExecutionService } = await import('~/server/ssh-execution-service');
const sshExecutionService = getSSHExecutionService();
// Read config file to get hostname/name
// Read config file to get hostname/name (node-specific path)
const nodeName = (server as Server).name;
const configPath = input.containerType === 'lxc'
? `/etc/pve/lxc/${input.containerId}.conf`
: `/etc/pve/qemu-server/${input.containerId}.conf`;
? `/etc/pve/nodes/${nodeName}/lxc/${input.containerId}.conf`
: `/etc/pve/nodes/${nodeName}/qemu-server/${input.containerId}.conf`;
let configContent = '';
await new Promise<void>((resolve) => {
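
The lookup order introduced in this file can be summarized as: node-specific qemu-server path, then node-specific lxc path, then the single-node fallbacks, defaulting to LXC when nothing matches. A minimal TypeScript sketch of that ordering — nodeName and containerId are placeholders, and the existence check is left abstract:

// Sketch: config paths probed in order; the first one that exists decides VM vs LXC.
function candidateConfigPaths(nodeName: string, containerId: string): Array<{ path: string; isVM: boolean }> {
  return [
    // Multi-node Proxmox: /etc/pve/nodes/<node>/...
    { path: `/etc/pve/nodes/${nodeName}/qemu-server/${containerId}.conf`, isVM: true },
    { path: `/etc/pve/nodes/${nodeName}/lxc/${containerId}.conf`, isVM: false },
    // Fallback for single-node setups or when server.name is not the Proxmox node name
    // (the diff only probes the qemu-server fallback and otherwise defaults to LXC).
    { path: `/etc/pve/qemu-server/${containerId}.conf`, isVM: true },
  ];
}

async function detectIsVM(
  nodeName: string,
  containerId: string,
  pathExists: (path: string) => Promise<boolean>
): Promise<boolean> {
  for (const candidate of candidateConfigPaths(nodeName, containerId)) {
    if (await pathExists(candidate.path)) return candidate.isVM;
  }
  return false; // neither path exists: treat as LXC, matching the diff's default
}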

View File

@@ -1,5 +1,5 @@
// JavaScript wrapper for githubJsonService (for use with node server.js)
import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
import { join } from 'path';
import { repositoryService } from './repositoryService.js';
import { listDirectory, downloadRawFile } from '../lib/gitProvider/index.js';
@@ -163,25 +163,42 @@ class GitHubJsonService {
const localFiles = await this.getLocalJsonFiles();
console.log(`Found ${localFiles.length} local JSON files`);
// Delete local JSON files that belong to this repo but are no longer in the remote
const remoteFilenames = new Set(githubFiles.map((f) => f.name));
const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
if (deletedFiles.length > 0) {
console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
}
const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);
if (filesToSync.length === 0) {
const msg =
deletedFiles.length > 0
? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
: `All JSON files are up to date for repository: ${repoUrl}`;
return {
success: true,
message: `All JSON files are up to date for repository: ${repoUrl}`,
message: msg,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles
};
}
const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);
const msg =
deletedFiles.length > 0
? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
return {
success: true,
message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
message: msg,
count: syncedFiles.length,
syncedFiles
syncedFiles,
deletedFiles
};
} catch (error) {
console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -189,7 +206,8 @@ class GitHubJsonService {
success: false,
message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
}
@@ -205,13 +223,15 @@ class GitHubJsonService {
success: false,
message: 'No enabled repositories found',
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
console.log(`Found ${enabledRepos.length} enabled repositories`);
const allSyncedFiles = [];
const allDeletedFiles = [];
const processedSlugs = new Set();
let totalSynced = 0;
@@ -222,6 +242,7 @@ class GitHubJsonService {
const result = await this.syncJsonFilesForRepo(repo.url);
if (result.success) {
allDeletedFiles.push(...(result.deletedFiles ?? []));
const newFiles = result.syncedFiles.filter(file => {
const slug = file.replace('.json', '');
if (processedSlugs.has(slug)) {
@@ -243,11 +264,16 @@ class GitHubJsonService {
await this.updateExistingFilesWithRepositoryUrl();
const msg =
allDeletedFiles.length > 0
? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
return {
success: true,
message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
message: msg,
count: totalSynced,
syncedFiles: allSyncedFiles
syncedFiles: allSyncedFiles,
deletedFiles: allDeletedFiles
};
} catch (error) {
console.error('Multi-repository JSON sync failed:', error);
@@ -255,7 +281,8 @@ class GitHubJsonService {
success: false,
message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
}
@@ -297,6 +324,32 @@ class GitHubJsonService {
}
}
async deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames) {
this.initializeConfig();
const localFiles = await this.getLocalJsonFiles();
const deletedFiles = [];
for (const file of localFiles) {
try {
const filePath = join(this.localJsonDirectory, file);
const content = await readFile(filePath, 'utf-8');
const script = JSON.parse(content);
if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
await unlink(filePath);
const slug = file.replace(/\.json$/, '');
this.scriptCache.delete(slug);
deletedFiles.push(file);
console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
}
} catch {
// If we can't read or parse the file, skip (do not delete)
}
}
return deletedFiles;
}
async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
const filesToSync = [];
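
The deletion rule shown above is deliberately narrow: a local JSON file is removed only when its repository_url matches the repo being synced and its filename is missing from the remote listing; files that cannot be read or parsed are skipped rather than deleted. The core predicate, as a small TypeScript sketch (names hypothetical):

// Sketch: decide whether a local script JSON should be deleted during a sync of repoUrl.
function isObsoleteForRepo(
  script: { repository_url?: string },
  filename: string,
  repoUrl: string,
  remoteFilenames: Set<string>
): boolean {
  // Only files owned by this repo are candidates, and only when the remote no longer lists them.
  return script.repository_url === repoUrl && !remoteFilenames.has(filename);
}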

View File

@@ -1,4 +1,4 @@
import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
import { writeFile, mkdir, readdir, readFile, unlink } from 'fs/promises';
import { join } from 'path';
import { env } from '../../env.js';
import type { Script, ScriptCard, GitHubFile } from '../../types/script';
@@ -158,7 +158,7 @@ export class GitHubJsonService {
/**
* Sync JSON files from a specific repository
*/
async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
async syncJsonFilesForRepo(repoUrl: string): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
try {
console.log(`Starting JSON sync from repository: ${repoUrl}`);
@@ -170,28 +170,45 @@ export class GitHubJsonService {
const localFiles = await this.getLocalJsonFiles();
console.log(`Found ${localFiles.length} local JSON files`);
// Delete local JSON files that belong to this repo but are no longer in the remote
const remoteFilenames = new Set(githubFiles.map((f) => f.name));
const deletedFiles = await this.deleteLocalFilesRemovedFromRepo(repoUrl, remoteFilenames);
if (deletedFiles.length > 0) {
console.log(`Removed ${deletedFiles.length} obsolete JSON file(s) no longer in ${repoUrl}`);
}
// Compare and find files that need syncing
// For multi-repo support, we need to check if file exists AND if it's from this repo
const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);
if (filesToSync.length === 0) {
const msg =
deletedFiles.length > 0
? `All JSON files are up to date for repository: ${repoUrl}. Removed ${deletedFiles.length} obsolete file(s).`
: `All JSON files are up to date for repository: ${repoUrl}`;
return {
success: true,
message: `All JSON files are up to date for repository: ${repoUrl}`,
message: msg,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles
};
}
// Download and save only the files that need syncing
const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);
const msg =
deletedFiles.length > 0
? `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}, removed ${deletedFiles.length} obsolete file(s).`
: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`;
return {
success: true,
message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
message: msg,
count: syncedFiles.length,
syncedFiles
syncedFiles,
deletedFiles
};
} catch (error) {
console.error(`JSON sync failed for ${repoUrl}:`, error);
@@ -199,7 +216,8 @@ export class GitHubJsonService {
success: false,
message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
}
@@ -207,7 +225,7 @@ export class GitHubJsonService {
/**
* Sync JSON files from all enabled repositories (main repo has priority)
*/
async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }> {
try {
console.log('Starting multi-repository JSON sync...');
@@ -218,13 +236,15 @@ export class GitHubJsonService {
success: false,
message: 'No enabled repositories found',
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
console.log(`Found ${enabledRepos.length} enabled repositories`);
const allSyncedFiles: string[] = [];
const allDeletedFiles: string[] = [];
const processedSlugs = new Set<string>(); // Track slugs we've already processed
let totalSynced = 0;
@@ -236,6 +256,7 @@ export class GitHubJsonService {
const result = await this.syncJsonFilesForRepo(repo.url);
if (result.success) {
allDeletedFiles.push(...(result.deletedFiles ?? []));
// Only count files that weren't already processed from a higher priority repo
const newFiles = result.syncedFiles.filter(file => {
const slug = file.replace('.json', '');
@@ -259,11 +280,16 @@ export class GitHubJsonService {
// Also update existing files that don't have repository_url set (backward compatibility)
await this.updateExistingFilesWithRepositoryUrl();
const msg =
allDeletedFiles.length > 0
? `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories, removed ${allDeletedFiles.length} obsolete file(s).`
: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`;
return {
success: true,
message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
message: msg,
count: totalSynced,
syncedFiles: allSyncedFiles
syncedFiles: allSyncedFiles,
deletedFiles: allDeletedFiles
};
} catch (error) {
console.error('Multi-repository JSON sync failed:', error);
@@ -271,7 +297,8 @@ export class GitHubJsonService {
success: false,
message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
count: 0,
syncedFiles: []
syncedFiles: [],
deletedFiles: []
};
}
}
@@ -316,6 +343,36 @@ export class GitHubJsonService {
}
}
/**
* Delete local JSON files that belong to this repo but are no longer in the remote list.
* Returns the list of deleted filenames.
*/
private async deleteLocalFilesRemovedFromRepo(repoUrl: string, remoteFilenames: Set<string>): Promise<string[]> {
this.initializeConfig();
const localFiles = await this.getLocalJsonFiles();
const deletedFiles: string[] = [];
for (const file of localFiles) {
try {
const filePath = join(this.localJsonDirectory!, file);
const content = await readFile(filePath, 'utf-8');
const script = JSON.parse(content) as Script;
if (script.repository_url === repoUrl && !remoteFilenames.has(file)) {
await unlink(filePath);
const slug = file.replace(/\.json$/, '');
this.scriptCache.delete(slug);
deletedFiles.push(file);
console.log(`Removed obsolete script JSON: ${file} (no longer in ${repoUrl})`);
}
} catch {
// If we can't read or parse the file, skip (do not delete)
}
}
return deletedFiles;
}
/**
* Find files that need syncing for a specific repository
* This checks if file exists locally AND if it's from the same repository
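
As a usage note, the widened return shape lets callers report removals alongside syncs without extra bookkeeping. A hedged sketch of consuming it — the service instance is declared rather than imported because its export name is not shown here, but message, syncedFiles and deletedFiles are the fields added in this diff:

// Sketch: reading the deletedFiles field from a multi-repo sync result.
declare const githubJsonService: {
  syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[]; deletedFiles: string[] }>;
};

async function reportSync(): Promise<void> {
  const result = await githubJsonService.syncJsonFiles();
  if (result.success) {
    // The message already mentions removed files when deletedFiles is non-empty.
    console.log(result.message);
    console.log(`synced: ${result.syncedFiles.length}, deleted: ${result.deletedFiles.length}`);
  } else {
    console.error(result.message);
  }
}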