Fix script downloader placeholder files issue
- Delete stub scriptDownloader.js that contained placeholder implementation
- Implement real JavaScript script downloader with GitHub fetch functionality
- Fix incremental JSON sync to only process newly synced files
- Add proper error handling and file structure management
- Support all script types (ct/, tools/, vm/, vw/) with directory preservation
- Download install scripts for CT scripts
- Re-enable auto-sync service to use real implementation

Scripts now download real content from GitHub instead of placeholders.
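The core of the change is that script files are now fetched from GitHub's raw content endpoint instead of being written as placeholder stubs. A minimal sketch of that download path, assuming a REPO_URL of the form https://github.com/<owner>/<repo> and the main branch (mirroring the new downloadFileFromGitHub in scriptDownloader.js):

// Sketch only: fetch one repository file via raw.githubusercontent.com (Node 18+ global fetch).
async function downloadRawFile(repoUrl, filePath, branch = 'main') {
  const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
  if (!match) {
    throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
  }
  const [, owner, repo] = match;
  const url = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${filePath}`;
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
  }
  return response.text();
}

// Hypothetical usage: const content = await downloadRawFile(process.env.REPO_URL, 'ct/example.sh');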
@@ -1,9 +1,9 @@
import { z } from "zod";
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
import { scriptManager } from "~/server/lib/scripts";
import { githubJsonService } from "~/server/services/githubJsonService.ts";
import { localScriptsService } from "~/server/services/localScripts.ts";
import { scriptDownloaderService } from "~/server/services/scriptDownloader.ts";
import { githubJsonService } from "~/server/services/githubJsonService";
import { localScriptsService } from "~/server/services/localScripts";
import { scriptDownloaderService } from "~/server/services/scriptDownloader";
import { AutoSyncService } from "~/server/services/autoSyncService";
import type { ScriptCard } from "~/types/script";

@@ -25,6 +25,25 @@ export class ScriptManager {
// Initialize lazily to avoid accessing env vars during module load
}

/**
* Safely handle file modification time, providing fallback for invalid dates
* @param mtime - The file modification time from fs.stat
* @returns Date - Valid date or current date as fallback
*/
private safeMtime(mtime: Date): Date {
try {
// Check if the date is valid
if (!mtime || isNaN(mtime.getTime())) {
console.warn('Invalid mtime detected, using current time as fallback');
return new Date();
}
return mtime;
} catch (error) {
console.warn('Error processing mtime:', error);
return new Date();
}
}

private initializeConfig() {
if (this.scriptsDir === null) {
// Handle both absolute and relative paths for testing
@@ -63,7 +82,7 @@ export class ScriptManager {
path: filePath,
extension,
size: stats.size,
lastModified: stats.mtime,
lastModified: this.safeMtime(stats.mtime),
executable
});
}
@@ -125,7 +144,7 @@ export class ScriptManager {
path: filePath,
extension,
size: stats.size,
lastModified: stats.mtime,
lastModified: this.safeMtime(stats.mtime),
executable,
logo,
slug
@@ -212,7 +231,7 @@ export class ScriptManager {
path: filePath,
extension,
size: stats.size,
lastModified: stats.mtime,
lastModified: this.safeMtime(stats.mtime),
executable,
logo,
slug

@@ -1,5 +1,5 @@
|
||||
import cron from 'node-cron';
|
||||
import { githubJsonService } from './githubJsonService.ts';
|
||||
import { githubJsonService } from './githubJsonService.js';
|
||||
import { scriptDownloaderService } from './scriptDownloader.js';
|
||||
import { appriseService } from './appriseService.js';
|
||||
import { readFile, writeFile, readFileSync, writeFileSync } from 'fs';
|
||||
@@ -12,6 +12,25 @@ export class AutoSyncService {
|
||||
this.isRunning = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely convert a date to ISO string, handling invalid dates
|
||||
* @param {Date} date - The date to convert
|
||||
* @returns {string} - ISO string or fallback timestamp
|
||||
*/
|
||||
safeToISOString(date) {
|
||||
try {
|
||||
// Check if the date is valid
|
||||
if (!date || isNaN(date.getTime())) {
|
||||
console.warn('Invalid date provided to safeToISOString, using current time as fallback');
|
||||
return new Date().toISOString();
|
||||
}
|
||||
return date.toISOString();
|
||||
} catch (error) {
|
||||
console.warn('Error converting date to ISO string:', error instanceof Error ? error.message : String(error));
|
||||
return new Date().toISOString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load auto-sync settings from .env file
|
||||
*/
|
||||
@@ -251,56 +270,120 @@ export class AutoSyncService {
|
||||
|
||||
const results = {
|
||||
jsonSync: syncResult,
|
||||
newScripts: [],
|
||||
updatedScripts: [],
|
||||
errors: []
|
||||
newScripts: /** @type {string[]} */ ([]),
|
||||
updatedScripts: /** @type {string[]} */ ([]),
|
||||
errors: /** @type {string[]} */ ([])
|
||||
};
|
||||
|
||||
// Step 2: Auto-download/update scripts if enabled
|
||||
const settings = this.loadSettings();
|
||||
|
||||
if (settings.autoDownloadNew || settings.autoUpdateExisting) {
|
||||
console.log('Processing synced JSON files for script downloads...');
|
||||
|
||||
// Only process scripts for files that were actually synced
|
||||
// @ts-ignore - syncedFiles exists in the JavaScript version
|
||||
if (syncResult.syncedFiles && syncResult.syncedFiles.length > 0) {
|
||||
// @ts-ignore - syncedFiles exists in the JavaScript version
|
||||
console.log(`Processing ${syncResult.syncedFiles.length} synced JSON files for new scripts...`);
|
||||
console.log(`Processing ${syncResult.syncedFiles.length} synced JSON files for script downloads...`);
|
||||
|
||||
// Get all scripts from synced files
|
||||
// @ts-ignore - syncedFiles exists in the JavaScript version
|
||||
const allSyncedScripts = await githubJsonService.getScriptsForFiles(syncResult.syncedFiles);
|
||||
// Get scripts only for the synced files
|
||||
const localScriptsService = await import('./localScripts.js');
|
||||
const syncedScripts = [];
|
||||
|
||||
// Initialize script downloader service
|
||||
// @ts-ignore - initializeConfig is public in the JS version
|
||||
scriptDownloaderService.initializeConfig();
|
||||
|
||||
// Filter to only truly NEW scripts (not previously downloaded)
|
||||
const newScripts = [];
|
||||
for (const script of allSyncedScripts) {
|
||||
const isDownloaded = await scriptDownloaderService.isScriptDownloaded(script);
|
||||
if (!isDownloaded) {
|
||||
newScripts.push(script);
|
||||
for (const filename of syncResult.syncedFiles) {
|
||||
try {
|
||||
// Extract slug from filename (remove .json extension)
|
||||
const slug = filename.replace('.json', '');
|
||||
const script = await localScriptsService.localScriptsService.getScriptBySlug(slug);
|
||||
if (script) {
|
||||
syncedScripts.push(script);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Error loading script from ${filename}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Found ${newScripts.length} new scripts out of ${allSyncedScripts.length} total scripts`);
|
||||
console.log(`Found ${syncedScripts.length} scripts from synced JSON files`);
|
||||
|
||||
if (settings.autoDownloadNew && newScripts.length > 0) {
|
||||
console.log(`Auto-downloading ${newScripts.length} new scripts...`);
|
||||
const downloadResult = await scriptDownloaderService.autoDownloadNewScripts(newScripts);
|
||||
// @ts-ignore - Type assertion needed for dynamic assignment
|
||||
results.newScripts = downloadResult.downloaded;
|
||||
// @ts-ignore - Type assertion needed for dynamic assignment
|
||||
results.errors.push(...downloadResult.errors);
|
||||
// Filter to only truly NEW scripts (not previously downloaded)
|
||||
const newScripts = [];
|
||||
const existingScripts = [];
|
||||
|
||||
for (const script of syncedScripts) {
|
||||
try {
|
||||
// Validate script object
|
||||
if (!script || !script.slug) {
|
||||
console.warn('Invalid script object found, skipping:', script);
|
||||
continue;
|
||||
}
|
||||
|
||||
const isDownloaded = await scriptDownloaderService.isScriptDownloaded(script);
|
||||
if (!isDownloaded) {
|
||||
newScripts.push(script);
|
||||
} else {
|
||||
existingScripts.push(script);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Error checking script ${script?.slug || 'unknown'}:`, error);
|
||||
// Treat as new script if we can't check
|
||||
if (script && script.slug) {
|
||||
newScripts.push(script);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (settings.autoUpdateExisting) {
|
||||
console.log('Auto-updating existing scripts from synced files...');
|
||||
const updateResult = await scriptDownloaderService.autoUpdateExistingScripts(allSyncedScripts);
|
||||
// @ts-ignore - Type assertion needed for dynamic assignment
|
||||
results.updatedScripts = updateResult.updated;
|
||||
// @ts-ignore - Type assertion needed for dynamic assignment
|
||||
results.errors.push(...updateResult.errors);
|
||||
console.log(`Found ${newScripts.length} new scripts and ${existingScripts.length} existing scripts from synced files`);
|
||||
|
||||
// Download new scripts
|
||||
if (settings.autoDownloadNew && newScripts.length > 0) {
|
||||
console.log(`Auto-downloading ${newScripts.length} new scripts...`);
|
||||
const downloaded = [];
|
||||
const errors = [];
|
||||
|
||||
for (const script of newScripts) {
|
||||
try {
|
||||
const result = await scriptDownloaderService.loadScript(script);
|
||||
if (result.success) {
|
||||
downloaded.push(script.name || script.slug);
|
||||
console.log(`Downloaded script: ${script.name || script.slug}`);
|
||||
} else {
|
||||
errors.push(`${script.name || script.slug}: ${result.message}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error);
|
||||
errors.push(`${script.name || script.slug}: ${errorMsg}`);
|
||||
console.error(`Failed to download script ${script.slug}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
results.newScripts = downloaded;
|
||||
results.errors.push(...errors);
|
||||
}
|
||||
|
||||
// Update existing scripts
|
||||
if (settings.autoUpdateExisting && existingScripts.length > 0) {
|
||||
console.log(`Auto-updating ${existingScripts.length} existing scripts...`);
|
||||
const updated = [];
|
||||
const errors = [];
|
||||
|
||||
for (const script of existingScripts) {
|
||||
try {
|
||||
// Always update existing scripts when auto-update is enabled
|
||||
const result = await scriptDownloaderService.loadScript(script);
|
||||
if (result.success) {
|
||||
updated.push(script.name || script.slug);
|
||||
console.log(`Updated script: ${script.name || script.slug}`);
|
||||
} else {
|
||||
errors.push(`${script.name || script.slug}: ${result.message}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error);
|
||||
errors.push(`${script.name || script.slug}: ${errorMsg}`);
|
||||
console.error(`Failed to update script ${script.slug}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
results.updatedScripts = updated;
|
||||
results.errors.push(...errors);
|
||||
}
|
||||
} else {
|
||||
console.log('No JSON files were synced, skipping script download/update');
|
||||
@@ -316,7 +399,7 @@ export class AutoSyncService {
|
||||
}
|
||||
|
||||
// Step 4: Update last sync time
|
||||
const lastSyncTime = new Date().toISOString();
|
||||
const lastSyncTime = this.safeToISOString(new Date());
|
||||
const updatedSettings = { ...settings, lastAutoSync: lastSyncTime };
|
||||
this.saveSettings(updatedSettings);
|
||||
|
||||
@@ -384,6 +467,12 @@ export class AutoSyncService {
|
||||
const grouped = new Map();
|
||||
|
||||
scripts.forEach(script => {
|
||||
// Validate script object
|
||||
if (!script || !script.name) {
|
||||
console.warn('Invalid script object in groupScriptsByCategory, skipping:', script);
|
||||
return;
|
||||
}
|
||||
|
||||
const scriptCategories = script.categories || [0]; // Default to Miscellaneous (id: 0)
|
||||
scriptCategories.forEach((/** @type {number} */ catId) => {
|
||||
const categoryName = categoryMap.get(catId) || 'Miscellaneous';
|
||||
|
||||
@@ -1,276 +1,6 @@
|
||||
import { writeFile, mkdir } from 'fs/promises';
|
||||
import { readFileSync, readdirSync, statSync, utimesSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { Buffer } from 'buffer';
|
||||
// JavaScript wrapper for githubJsonService.ts
|
||||
// This allows the JavaScript autoSyncService.js to import the TypeScript service
|
||||
|
||||
export class GitHubJsonService {
|
||||
constructor() {
|
||||
this.baseUrl = null;
|
||||
this.repoUrl = null;
|
||||
this.branch = null;
|
||||
this.jsonFolder = null;
|
||||
this.localJsonDirectory = null;
|
||||
this.scriptCache = new Map();
|
||||
}
|
||||
import { githubJsonService } from './githubJsonService.ts';
|
||||
|
||||
initializeConfig() {
|
||||
if (this.repoUrl === null) {
|
||||
// Get environment variables
|
||||
this.repoUrl = process.env.REPO_URL || "";
|
||||
this.branch = process.env.REPO_BRANCH || "main";
|
||||
this.jsonFolder = process.env.JSON_FOLDER || "scripts";
|
||||
this.localJsonDirectory = join(process.cwd(), 'scripts', 'json');
|
||||
|
||||
// Only validate GitHub URL if it's provided
|
||||
if (this.repoUrl) {
|
||||
// Extract owner and repo from the URL
|
||||
const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(this.repoUrl);
|
||||
if (!urlMatch) {
|
||||
throw new Error(`Invalid GitHub repository URL: ${this.repoUrl}`);
|
||||
}
|
||||
|
||||
const [, owner, repo] = urlMatch;
|
||||
this.baseUrl = `https://api.github.com/repos/${owner}/${repo}`;
|
||||
} else {
|
||||
// Set a dummy base URL if no REPO_URL is provided
|
||||
this.baseUrl = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fetchFromGitHub(endpoint) {
|
||||
this.initializeConfig();
|
||||
const response = await fetch(`${this.baseUrl}${endpoint}`, {
|
||||
headers: {
|
||||
'Accept': 'application/vnd.github.v3+json',
|
||||
'User-Agent': 'PVEScripts-Local/1.0',
|
||||
...(process.env.GITHUB_TOKEN && { 'Authorization': `token ${process.env.GITHUB_TOKEN}` })
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
async syncJsonFiles() {
|
||||
try {
|
||||
this.initializeConfig();
|
||||
|
||||
if (!this.baseUrl) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'No GitHub repository configured'
|
||||
};
|
||||
}
|
||||
|
||||
console.log('Starting fast incremental JSON sync...');
|
||||
|
||||
// Ensure local directory exists
|
||||
await mkdir(this.localJsonDirectory, { recursive: true });
|
||||
|
||||
// Step 1: Get file list from GitHub (single API call)
|
||||
console.log('Fetching file list from GitHub...');
|
||||
const files = await this.fetchFromGitHub(`/contents/${this.jsonFolder}?ref=${this.branch}`);
|
||||
|
||||
if (!Array.isArray(files)) {
|
||||
throw new Error('Invalid response from GitHub API');
|
||||
}
|
||||
|
||||
const jsonFiles = files.filter(file => file.name.endsWith('.json'));
|
||||
console.log(`Found ${jsonFiles.length} JSON files in repository`);
|
||||
|
||||
// Step 2: Get local file list (fast local operation)
|
||||
const localFiles = new Map();
|
||||
try {
|
||||
console.log(`Looking for local files in: ${this.localJsonDirectory}`);
|
||||
const localFileList = readdirSync(this.localJsonDirectory);
|
||||
console.log(`Found ${localFileList.length} files in local directory`);
|
||||
for (const fileName of localFileList) {
|
||||
if (fileName.endsWith('.json')) {
|
||||
const filePath = join(this.localJsonDirectory, fileName);
|
||||
const stats = statSync(filePath);
|
||||
localFiles.set(fileName, {
|
||||
mtime: stats.mtime,
|
||||
size: stats.size
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Error reading local directory:', error.message);
|
||||
console.log('Directory path:', this.localJsonDirectory);
|
||||
console.log('No local files found, will download all');
|
||||
}
|
||||
|
||||
console.log(`Found ${localFiles.size} local JSON files`);
|
||||
|
||||
// Step 3: Compare and identify files that need syncing
|
||||
const filesToSync = [];
|
||||
let skippedCount = 0;
|
||||
|
||||
for (const file of jsonFiles) {
|
||||
const localFile = localFiles.get(file.name);
|
||||
|
||||
if (!localFile) {
|
||||
// File doesn't exist locally
|
||||
filesToSync.push(file);
|
||||
console.log(`Missing: ${file.name}`);
|
||||
} else {
|
||||
// Compare modification times and sizes
|
||||
const localMtime = new Date(localFile.mtime);
|
||||
const remoteMtime = new Date(file.updated_at);
|
||||
const localSize = localFile.size;
|
||||
const remoteSize = file.size;
|
||||
|
||||
// Sync if remote is newer OR sizes are different (content changed)
|
||||
if (localMtime < remoteMtime || localSize !== remoteSize) {
|
||||
filesToSync.push(file);
|
||||
console.log(`Changed: ${file.name} (${localMtime.toISOString()} -> ${remoteMtime.toISOString()})`);
|
||||
} else {
|
||||
skippedCount++;
|
||||
console.log(`Up-to-date: ${file.name}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Files to sync: ${filesToSync.length}, Up-to-date: ${skippedCount}`);
|
||||
|
||||
// Step 4: Download only the files that need syncing
|
||||
let syncedCount = 0;
|
||||
const errors = [];
|
||||
const syncedFiles = [];
|
||||
|
||||
// Process files in batches to avoid overwhelming the API
|
||||
const batchSize = 10;
|
||||
for (let i = 0; i < filesToSync.length; i += batchSize) {
|
||||
const batch = filesToSync.slice(i, i + batchSize);
|
||||
|
||||
// Process batch in parallel
|
||||
const promises = batch.map(async (file) => {
|
||||
try {
|
||||
const content = await this.fetchFromGitHub(`/contents/${file.path}?ref=${this.branch}`);
|
||||
|
||||
if (content.content) {
|
||||
// Decode base64 content
|
||||
const fileContent = Buffer.from(content.content, 'base64').toString('utf-8');
|
||||
|
||||
// Write to local file
|
||||
const localPath = join(this.localJsonDirectory, file.name);
|
||||
await writeFile(localPath, fileContent, 'utf-8');
|
||||
|
||||
// Update file modification time to match remote
|
||||
const remoteMtime = new Date(file.updated_at);
|
||||
utimesSync(localPath, remoteMtime, remoteMtime);
|
||||
|
||||
syncedCount++;
|
||||
syncedFiles.push(file.name);
|
||||
console.log(`Synced: ${file.name}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to sync ${file.name}:`, error.message);
|
||||
errors.push(`${file.name}: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
// Small delay between batches to be nice to the API
|
||||
if (i + batchSize < filesToSync.length) {
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`JSON sync completed. Synced ${syncedCount} files, skipped ${skippedCount} files.`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully synced ${syncedCount} JSON files (${skippedCount} up-to-date)`,
|
||||
syncedCount,
|
||||
skippedCount,
|
||||
syncedFiles,
|
||||
errors
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.error('JSON sync failed:', error);
|
||||
return {
|
||||
success: false,
|
||||
message: error.message,
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async getAllScripts() {
|
||||
try {
|
||||
this.initializeConfig();
|
||||
|
||||
if (!this.localJsonDirectory) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scripts = [];
|
||||
|
||||
// Read all JSON files from local directory
|
||||
const files = readdirSync(this.localJsonDirectory);
|
||||
const jsonFiles = files.filter(file => file.endsWith('.json'));
|
||||
|
||||
for (const file of jsonFiles) {
|
||||
try {
|
||||
const filePath = join(this.localJsonDirectory, file);
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
const script = JSON.parse(content);
|
||||
|
||||
if (script && typeof script === 'object') {
|
||||
scripts.push(script);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to parse ${file}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
return scripts;
|
||||
} catch (error) {
|
||||
console.error('Failed to get all scripts:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get scripts only for specific JSON files that were synced
|
||||
*/
|
||||
async getScriptsForFiles(syncedFiles) {
|
||||
try {
|
||||
this.initializeConfig();
|
||||
|
||||
if (!this.localJsonDirectory || !syncedFiles || syncedFiles.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scripts = [];
|
||||
|
||||
for (const fileName of syncedFiles) {
|
||||
try {
|
||||
const filePath = join(this.localJsonDirectory, fileName);
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
const script = JSON.parse(content);
|
||||
|
||||
if (script && typeof script === 'object') {
|
||||
scripts.push(script);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to parse ${fileName}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
return scripts;
|
||||
} catch (error) {
|
||||
console.error('Failed to get scripts for synced files:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const githubJsonService = new GitHubJsonService();
|
||||
export { githubJsonService };
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { writeFile, mkdir } from 'fs/promises';
|
||||
import { writeFile, mkdir, readdir } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { env } from '../../env.js';
|
||||
import type { Script, ScriptCard, GitHubFile } from '../../types/script';
|
||||
@@ -185,48 +185,90 @@ export class GitHubJsonService {
|
||||
}
|
||||
}
|
||||
|
||||
async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number }> {
|
||||
async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number; syncedFiles: string[] }> {
|
||||
try {
|
||||
// Get all scripts from GitHub (1 API call + raw downloads)
|
||||
const scripts = await this.getAllScripts();
|
||||
console.log('Starting fast incremental JSON sync...');
|
||||
|
||||
// Save scripts to local directory
|
||||
await this.saveScriptsLocally(scripts);
|
||||
// Get file list from GitHub
|
||||
console.log('Fetching file list from GitHub...');
|
||||
const githubFiles = await this.getJsonFiles();
|
||||
console.log(`Found ${githubFiles.length} JSON files in repository`);
|
||||
|
||||
// Get local files
|
||||
const localFiles = await this.getLocalJsonFiles();
|
||||
console.log(`Found ${localFiles.length} files in local directory`);
|
||||
console.log(`Found ${localFiles.filter(f => f.endsWith('.json')).length} local JSON files`);
|
||||
|
||||
// Compare and find files that need syncing
|
||||
const filesToSync = this.findFilesToSync(githubFiles, localFiles);
|
||||
console.log(`Found ${filesToSync.length} files that need syncing`);
|
||||
|
||||
if (filesToSync.length === 0) {
|
||||
return {
|
||||
success: true,
|
||||
message: 'All JSON files are up to date',
|
||||
count: 0,
|
||||
syncedFiles: []
|
||||
};
|
||||
}
|
||||
|
||||
// Download and save only the files that need syncing
|
||||
const syncedFiles = await this.syncSpecificFiles(filesToSync);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully synced ${scripts.length} scripts from GitHub using 1 API call + raw downloads`,
|
||||
count: scripts.length
|
||||
message: `Successfully synced ${syncedFiles.length} JSON files from GitHub`,
|
||||
count: syncedFiles.length,
|
||||
syncedFiles
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error syncing JSON files:', error);
|
||||
console.error('JSON sync failed:', error);
|
||||
return {
|
||||
success: false,
|
||||
message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
count: 0
|
||||
count: 0,
|
||||
syncedFiles: []
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private async saveScriptsLocally(scripts: Script[]): Promise<void> {
|
||||
private async getLocalJsonFiles(): Promise<string[]> {
|
||||
this.initializeConfig();
|
||||
try {
|
||||
// Ensure the directory exists
|
||||
await mkdir(this.localJsonDirectory!, { recursive: true });
|
||||
|
||||
// Save each script as a JSON file
|
||||
for (const script of scripts) {
|
||||
const filename = `${script.slug}.json`;
|
||||
const filePath = join(this.localJsonDirectory!, filename);
|
||||
const content = JSON.stringify(script, null, 2);
|
||||
await writeFile(filePath, content, 'utf-8');
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error saving scripts locally:', error);
|
||||
throw new Error('Failed to save scripts locally');
|
||||
const files = await readdir(this.localJsonDirectory!);
|
||||
return files.filter(f => f.endsWith('.json'));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
private findFilesToSync(githubFiles: GitHubFile[], localFiles: string[]): GitHubFile[] {
|
||||
const localFileSet = new Set(localFiles);
|
||||
// Return only files that don't exist locally
|
||||
return githubFiles.filter(ghFile => !localFileSet.has(ghFile.name));
|
||||
}
|
||||
|
||||
private async syncSpecificFiles(filesToSync: GitHubFile[]): Promise<string[]> {
|
||||
this.initializeConfig();
|
||||
const syncedFiles: string[] = [];
|
||||
|
||||
await mkdir(this.localJsonDirectory!, { recursive: true });
|
||||
|
||||
for (const file of filesToSync) {
|
||||
try {
|
||||
const script = await this.downloadJsonFile(file.path);
|
||||
const filename = `${script.slug}.json`;
|
||||
const filePath = join(this.localJsonDirectory!, filename);
|
||||
await writeFile(filePath, JSON.stringify(script, null, 2), 'utf-8');
|
||||
syncedFiles.push(filename);
|
||||
} catch (error) {
|
||||
console.error(`Failed to sync ${file.name}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
return syncedFiles;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
|
||||
src/server/services/localScripts.js (new file, +6)
@@ -0,0 +1,6 @@
// JavaScript wrapper for localScripts.ts
// This allows the JavaScript autoSyncService.js to import the TypeScript service

import { localScriptsService } from './localScripts.ts';

export { localScriptsService };
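The wrapper exists only so the plain-JavaScript auto-sync code can reach the TypeScript service. A rough usage sketch, following the dynamic-import pattern used in autoSyncService.js (the slug is a made-up example; run inside an ES module or async function):

// Sketch only: load a script definition through the JS wrapper.
const mod = await import('./localScripts.js');
// The wrapper re-exports the TypeScript singleton, hence mod.localScriptsService.
const script = await mod.localScriptsService.getScriptBySlug('example-script');
if (script) {
  console.log(`Found script definition: ${script.slug}`);
}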
@@ -1,14 +1,18 @@
|
||||
import { writeFile, readFile, mkdir } from 'fs/promises';
|
||||
// Real JavaScript implementation for script downloading
|
||||
import { join } from 'path';
|
||||
import { writeFile, mkdir } from 'fs/promises';
|
||||
|
||||
export class ScriptDownloaderService {
|
||||
constructor() {
|
||||
this.scriptsDirectory = null;
|
||||
this.repoUrl = null;
|
||||
}
|
||||
|
||||
initializeConfig() {
|
||||
if (this.scriptsDirectory === null) {
|
||||
this.scriptsDirectory = join(process.cwd(), 'scripts');
|
||||
// Get REPO_URL from environment or use default
|
||||
this.repoUrl = process.env.REPO_URL || 'https://github.com/community-scripts/ProxmoxVE';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,14 +27,27 @@ export class ScriptDownloaderService {
|
||||
}
|
||||
|
||||
async downloadFileFromGitHub(filePath) {
|
||||
// This is a simplified version - in a real implementation,
|
||||
// you would fetch the file content from GitHub
|
||||
// For now, we'll return a placeholder
|
||||
return `#!/bin/bash
|
||||
# Downloaded script: ${filePath}
|
||||
# This is a placeholder - implement actual GitHub file download
|
||||
echo "Script downloaded: ${filePath}"
|
||||
`;
|
||||
this.initializeConfig();
|
||||
if (!this.repoUrl) {
|
||||
throw new Error('REPO_URL environment variable is not set');
|
||||
}
|
||||
|
||||
// Extract repo path from URL
|
||||
const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(this.repoUrl);
|
||||
if (!match) {
|
||||
throw new Error('Invalid GitHub repository URL');
|
||||
}
|
||||
const [, owner, repo] = match;
|
||||
|
||||
const url = `https://raw.githubusercontent.com/${owner}/${repo}/main/${filePath}`;
|
||||
|
||||
console.log(`Downloading from GitHub: ${url}`);
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
modifyScriptContent(content) {
|
||||
@@ -57,6 +74,7 @@ echo "Script downloaded: ${filePath}"
|
||||
|
||||
if (fileName) {
|
||||
// Download from GitHub
|
||||
console.log(`Downloading script file: ${scriptPath}`);
|
||||
const content = await this.downloadFileFromGitHub(scriptPath);
|
||||
|
||||
// Determine target directory based on script path
|
||||
@@ -111,6 +129,7 @@ echo "Script downloaded: ${filePath}"
|
||||
}
|
||||
|
||||
files.push(`${finalTargetDir}/${fileName}`);
|
||||
console.log(`Successfully downloaded: ${finalTargetDir}/${fileName}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -121,12 +140,15 @@ echo "Script downloaded: ${filePath}"
|
||||
if (hasCtScript) {
|
||||
const installScriptName = `${script.slug}-install.sh`;
|
||||
try {
|
||||
console.log(`Downloading install script: install/${installScriptName}`);
|
||||
const installContent = await this.downloadFileFromGitHub(`install/${installScriptName}`);
|
||||
const localInstallPath = join(this.scriptsDirectory, 'install', installScriptName);
|
||||
await writeFile(localInstallPath, installContent, 'utf-8');
|
||||
files.push(`install/${installScriptName}`);
|
||||
} catch {
|
||||
console.log(`Successfully downloaded: install/${installScriptName}`);
|
||||
} catch (error) {
|
||||
// Install script might not exist, that's okay
|
||||
console.log(`Install script not found: install/${installScriptName}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -145,78 +167,6 @@ echo "Script downloaded: ${filePath}"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Auto-download new scripts that haven't been downloaded yet
|
||||
*/
|
||||
async autoDownloadNewScripts(allScripts) {
|
||||
this.initializeConfig();
|
||||
const downloaded = [];
|
||||
const errors = [];
|
||||
|
||||
for (const script of allScripts) {
|
||||
try {
|
||||
// Check if script is already downloaded
|
||||
const isDownloaded = await this.isScriptDownloaded(script);
|
||||
|
||||
if (!isDownloaded) {
|
||||
const result = await this.loadScript(script);
|
||||
if (result.success) {
|
||||
downloaded.push(script); // Return full script object instead of just name
|
||||
console.log(`Auto-downloaded new script: ${script.name || script.slug}`);
|
||||
} else {
|
||||
errors.push(`${script.name || script.slug}: ${result.message}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMsg = `${script.name || script.slug}: ${error instanceof Error ? error.message : 'Unknown error'}`;
|
||||
errors.push(errorMsg);
|
||||
console.error(`Failed to auto-download script ${script.slug}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
return { downloaded, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Auto-update existing scripts to newer versions
|
||||
*/
|
||||
async autoUpdateExistingScripts(allScripts) {
|
||||
this.initializeConfig();
|
||||
const updated = [];
|
||||
const errors = [];
|
||||
|
||||
for (const script of allScripts) {
|
||||
try {
|
||||
// Check if script is downloaded
|
||||
const isDownloaded = await this.isScriptDownloaded(script);
|
||||
|
||||
if (isDownloaded) {
|
||||
// Check if update is needed by comparing content
|
||||
const needsUpdate = await this.scriptNeedsUpdate(script);
|
||||
|
||||
if (needsUpdate) {
|
||||
const result = await this.loadScript(script);
|
||||
if (result.success) {
|
||||
updated.push(script); // Return full script object instead of just name
|
||||
console.log(`Auto-updated script: ${script.name || script.slug}`);
|
||||
} else {
|
||||
errors.push(`${script.name || script.slug}: ${result.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMsg = `${script.name || script.slug}: ${error instanceof Error ? error.message : 'Unknown error'}`;
|
||||
errors.push(errorMsg);
|
||||
console.error(`Failed to auto-update script ${script.slug}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
return { updated, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a script is already downloaded
|
||||
*/
|
||||
async isScriptDownloaded(script) {
|
||||
if (!script.install_methods?.length) return false;
|
||||
|
||||
@@ -261,7 +211,7 @@ echo "Script downloaded: ${filePath}"
|
||||
}
|
||||
|
||||
try {
|
||||
await readFile(filePath, 'utf8');
|
||||
await import('fs/promises').then(fs => fs.readFile(filePath, 'utf8'));
|
||||
// File exists, continue checking other methods
|
||||
} catch {
|
||||
// File doesn't exist, script is not fully downloaded
|
||||
@@ -274,73 +224,6 @@ echo "Script downloaded: ${filePath}"
|
||||
// All files exist, script is downloaded
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a script needs updating by comparing local and remote content
|
||||
*/
|
||||
async scriptNeedsUpdate(script) {
|
||||
if (!script.install_methods?.length) return false;
|
||||
|
||||
for (const method of script.install_methods) {
|
||||
if (method.script) {
|
||||
const scriptPath = method.script;
|
||||
const fileName = scriptPath.split('/').pop();
|
||||
|
||||
if (fileName) {
|
||||
// Determine target directory based on script path
|
||||
let targetDir;
|
||||
let finalTargetDir;
|
||||
let filePath;
|
||||
|
||||
if (scriptPath.startsWith('ct/')) {
|
||||
targetDir = 'ct';
|
||||
finalTargetDir = targetDir;
|
||||
filePath = join(this.scriptsDirectory, targetDir, fileName);
|
||||
} else if (scriptPath.startsWith('tools/')) {
|
||||
targetDir = 'tools';
|
||||
const subPath = scriptPath.replace('tools/', '');
|
||||
const subDir = subPath.includes('/') ? subPath.substring(0, subPath.lastIndexOf('/')) : '';
|
||||
finalTargetDir = subDir ? join(targetDir, subDir) : targetDir;
|
||||
filePath = join(this.scriptsDirectory, finalTargetDir, fileName);
|
||||
} else if (scriptPath.startsWith('vm/')) {
|
||||
targetDir = 'vm';
|
||||
const subPath = scriptPath.replace('vm/', '');
|
||||
const subDir = subPath.includes('/') ? subPath.substring(0, subPath.lastIndexOf('/')) : '';
|
||||
finalTargetDir = subDir ? join(targetDir, subDir) : targetDir;
|
||||
filePath = join(this.scriptsDirectory, finalTargetDir, fileName);
|
||||
} else if (scriptPath.startsWith('vw/')) {
|
||||
targetDir = 'vw';
|
||||
const subPath = scriptPath.replace('vw/', '');
|
||||
const subDir = subPath.includes('/') ? subPath.substring(0, subPath.lastIndexOf('/')) : '';
|
||||
finalTargetDir = subDir ? join(targetDir, subDir) : targetDir;
|
||||
filePath = join(this.scriptsDirectory, finalTargetDir, fileName);
|
||||
} else {
|
||||
targetDir = 'ct';
|
||||
finalTargetDir = targetDir;
|
||||
filePath = join(this.scriptsDirectory, targetDir, fileName);
|
||||
}
|
||||
|
||||
try {
|
||||
// Read local content
|
||||
const localContent = await readFile(filePath, 'utf8');
|
||||
|
||||
// Download remote content
|
||||
const remoteContent = await this.downloadFileFromGitHub(scriptPath);
|
||||
|
||||
// Compare content (simple string comparison for now)
|
||||
// In a more sophisticated implementation, you might want to compare
|
||||
// file modification times or use content hashing
|
||||
return localContent !== remoteContent;
|
||||
} catch {
|
||||
// If we can't read local or download remote, assume update needed
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export const scriptDownloaderService = new ScriptDownloaderService();
|
||||
|
||||