Add JS wrappers for ESM compatibility (repositoryService, githubJsonService) and fix .js import extensions
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
import { AutoSyncService } from '../services/autoSyncService';
|
||||
import { repositoryService } from '../services/repositoryService';
|
||||
import { AutoSyncService } from '../services/autoSyncService.js';
|
||||
import { repositoryService } from '../services/repositoryService.js';
|
||||
|
||||
/** @type {AutoSyncService | null} */
|
||||
let autoSyncService = null;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import cron from 'node-cron';
|
||||
import { githubJsonService } from './githubJsonService';
|
||||
import { scriptDownloaderService } from './scriptDownloader';
|
||||
import { appriseService } from './appriseService';
|
||||
import { githubJsonService } from './githubJsonService.js';
|
||||
import { scriptDownloaderService } from './scriptDownloader.js';
|
||||
import { appriseService } from './appriseService.js';
|
||||
import { readFile, writeFile, readFileSync, writeFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import cronValidator from 'cron-validator';
|
||||
|
||||
428
src/server/services/githubJsonService.js
Normal file
428
src/server/services/githubJsonService.js
Normal file
@@ -0,0 +1,428 @@
|
||||
// JavaScript wrapper for githubJsonService (for use with node server.js)
|
||||
import { writeFile, mkdir, readdir, readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { repositoryService } from './repositoryService.js';
|
||||
|
||||
// Snapshot of the repository-related environment configuration, with
// fallbacks applied for everything except the optional GitHub token.
const getEnv = () => {
  const { REPO_BRANCH, JSON_FOLDER, REPO_URL, GITHUB_TOKEN } = process.env;
  return {
    REPO_BRANCH: REPO_BRANCH || 'main',
    JSON_FOLDER: JSON_FOLDER || 'json',
    REPO_URL: REPO_URL || 'https://github.com/community-scripts/ProxmoxVE',
    GITHUB_TOKEN,
  };
};
|
||||
|
||||
/**
 * Fetches, caches, and syncs helper-script JSON metadata from one or more
 * GitHub repositories into a local `scripts/json` directory.
 *
 * Branch, JSON folder, and token are resolved lazily from the environment on
 * first use (see initializeConfig), so the service can be constructed before
 * the environment is fully set up.
 */
class GitHubJsonService {
  constructor() {
    // Lazily initialized by initializeConfig(); null means "not configured yet".
    this.branch = null;
    this.jsonFolder = null;
    this.localJsonDirectory = null;
    // In-memory cache of scripts loaded from disk, keyed by slug.
    this.scriptCache = new Map();
  }

  /** Resolve branch / JSON folder / local directory from the environment exactly once. */
  initializeConfig() {
    if (this.branch === null) {
      const env = getEnv();
      this.branch = env.REPO_BRANCH;
      this.jsonFolder = env.JSON_FOLDER;
      this.localJsonDirectory = join(process.cwd(), 'scripts', 'json');
    }
  }

  /**
   * Match `github.com/<owner>/<repo>` anywhere in a URL.
   * Shared by getBaseUrl() and extractRepoPath() so the two stay consistent.
   * @param {string} repoUrl
   * @returns {RegExpExecArray | null} match with owner in [1] and repo in [2]
   */
  #execRepoUrl(repoUrl) {
    return /github\.com\/([^\/]+)\/([^\/]+)/.exec(repoUrl);
  }

  /**
   * Whether a GitHub response indicates rate limiting. GitHub signals primary
   * rate limits with 403 and secondary limits with 403 or 429.
   * @param {Response} response
   * @returns {boolean}
   */
  #isRateLimited(response) {
    return response.status === 403 || response.status === 429;
  }

  /**
   * Build the REST API base URL for a repository.
   * @param {string} repoUrl e.g. https://github.com/owner/repo
   * @returns {string} e.g. https://api.github.com/repos/owner/repo
   * @throws {Error} when the URL is not a GitHub repository URL
   */
  getBaseUrl(repoUrl) {
    const urlMatch = this.#execRepoUrl(repoUrl);
    if (!urlMatch) {
      throw new Error(`Invalid GitHub repository URL: ${repoUrl}`);
    }

    const [, owner, repo] = urlMatch;
    return `https://api.github.com/repos/${owner}/${repo}`;
  }

  /**
   * Extract the `owner/repo` path from a GitHub repository URL.
   * @param {string} repoUrl
   * @returns {string} `owner/repo`
   * @throws {Error} when the URL is not a GitHub repository URL
   */
  extractRepoPath(repoUrl) {
    const match = this.#execRepoUrl(repoUrl);
    if (!match) {
      throw new Error('Invalid GitHub repository URL');
    }
    return `${match[1]}/${match[2]}`;
  }

  /**
   * GET an endpoint of the GitHub REST API for the given repository, sending
   * the optional GITHUB_TOKEN for higher rate limits.
   * @param {string} repoUrl repository the endpoint is relative to
   * @param {string} endpoint path starting with '/', appended to the API base URL
   * @returns {Promise<any>} parsed JSON body
   * @throws {Error} name 'RateLimitError' on 403/429; generic Error for other failures
   */
  async fetchFromGitHub(repoUrl, endpoint) {
    const baseUrl = this.getBaseUrl(repoUrl);
    const env = getEnv();

    const headers = {
      'Accept': 'application/vnd.github.v3+json',
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(`${baseUrl}${endpoint}`, { headers });

    if (!response.ok) {
      // 403 can also mean a permission problem, but without parsing the body
      // it is treated as a rate limit here; 429 (secondary limit) is unambiguous.
      if (this.#isRateLimited(response)) {
        const error = new Error(`GitHub API rate limit exceeded. Consider setting GITHUB_TOKEN for higher limits. Status: ${response.status} ${response.statusText}`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
    }

    return response.json();
  }

  /**
   * Download a single script JSON file via raw.githubusercontent.com and tag
   * it with the repository it came from.
   * @param {string} repoUrl
   * @param {string} filePath path within the repository
   * @returns {Promise<any>} parsed script with `repository_url` set to repoUrl
   * @throws {Error} name 'RateLimitError' on 403/429; generic Error for other failures
   */
  async downloadJsonFile(repoUrl, filePath) {
    this.initializeConfig();
    const repoPath = this.extractRepoPath(repoUrl);
    const rawUrl = `https://raw.githubusercontent.com/${repoPath}/${this.branch}/${filePath}`;
    const env = getEnv();

    const headers = {
      'User-Agent': 'PVEScripts-Local/1.0',
    };

    if (env.GITHUB_TOKEN) {
      headers.Authorization = `token ${env.GITHUB_TOKEN}`;
    }

    const response = await fetch(rawUrl, { headers });
    if (!response.ok) {
      // Same rate-limit convention as fetchFromGitHub: treat 403/429 as limits.
      if (this.#isRateLimited(response)) {
        const error = new Error(`GitHub rate limit exceeded while downloading ${filePath}. Consider setting GITHUB_TOKEN for higher limits.`);
        error.name = 'RateLimitError';
        throw error;
      }
      throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
    }

    const content = await response.text();
    const script = JSON.parse(content);
    // Record provenance so later syncs can tell which repo owns the file.
    script.repository_url = repoUrl;
    return script;
  }

  /**
   * List the .json files in the configured folder of a repository (one API call).
   * @param {string} repoUrl
   * @returns {Promise<Array<{name: string, path: string}>>} GitHub contents entries
   * @throws {Error} wrapped error when the listing fails
   */
  async getJsonFiles(repoUrl) {
    this.initializeConfig();

    try {
      const files = await this.fetchFromGitHub(
        repoUrl,
        `/contents/${this.jsonFolder}?ref=${this.branch}`
      );

      return files.filter(file => file.name.endsWith('.json'));
    } catch (error) {
      console.error(`Error fetching JSON files from GitHub (${repoUrl}):`, error);
      throw new Error(`Failed to fetch script files from repository: ${repoUrl}`);
    }
  }

  /**
   * Download every script JSON from a repository. Downloads run sequentially
   * (gentler on rate limits); individual failures are logged and skipped.
   * @param {string} repoUrl
   * @returns {Promise<any[]>} successfully downloaded scripts
   */
  async getAllScripts(repoUrl) {
    try {
      const jsonFiles = await this.getJsonFiles(repoUrl);
      const scripts = [];

      for (const file of jsonFiles) {
        try {
          const script = await this.downloadJsonFile(repoUrl, file.path);
          scripts.push(script);
        } catch (error) {
          // Best-effort: one bad file must not abort the whole fetch.
          console.error(`Failed to download script ${file.name} from ${repoUrl}:`, error);
        }
      }

      return scripts;
    } catch (error) {
      console.error(`Error fetching all scripts from ${repoUrl}:`, error);
      throw new Error(`Failed to fetch scripts from repository: ${repoUrl}`);
    }
  }

  /**
   * Fetch all scripts from a repository reduced to card-display fields.
   * @param {string} repoUrl
   * @returns {Promise<object[]>}
   */
  async getScriptCards(repoUrl) {
    try {
      const scripts = await this.getAllScripts(repoUrl);

      return scripts.map(script => ({
        name: script.name,
        slug: script.slug,
        description: script.description,
        logo: script.logo,
        type: script.type,
        updateable: script.updateable,
        website: script.website,
        repository_url: script.repository_url,
      }));
    } catch (error) {
      console.error(`Error creating script cards from ${repoUrl}:`, error);
      throw new Error(`Failed to create script cards from repository: ${repoUrl}`);
    }
  }

  /**
   * Resolve a script by slug: local cache/disk first, then the given repo,
   * then every enabled repository in priority order.
   * @param {string} slug
   * @param {string} [repoUrl] restrict the lookup to this repository
   * @returns {Promise<any | null>} the script, or null when not found
   */
  async getScriptBySlug(slug, repoUrl) {
    try {
      const localScript = await this.getScriptFromLocal(slug);
      if (localScript) {
        // A local hit from a different repository does not satisfy a
        // repo-scoped lookup.
        if (repoUrl && localScript.repository_url !== repoUrl) {
          return null;
        }
        return localScript;
      }

      if (repoUrl) {
        try {
          this.initializeConfig();
          const script = await this.downloadJsonFile(repoUrl, `${this.jsonFolder}/${slug}.json`);
          return script;
        } catch {
          // Not found (or not reachable) in the requested repo.
          return null;
        }
      }

      // No repo specified: probe enabled repositories in priority order.
      const enabledRepos = await repositoryService.getEnabledRepositories();
      for (const repo of enabledRepos) {
        try {
          this.initializeConfig();
          const script = await this.downloadJsonFile(repo.url, `${this.jsonFolder}/${slug}.json`);
          return script;
        } catch {
          // Continue to next repo
        }
      }

      return null;
    } catch (error) {
      console.error('Error fetching script by slug:', error);
      throw new Error(`Failed to fetch script: ${slug}`);
    }
  }

  /**
   * Load a script from the local scripts/json directory, memoized in
   * scriptCache. Returns null when the file is missing or unparseable.
   * @param {string} slug
   * @returns {Promise<any | null>}
   */
  async getScriptFromLocal(slug) {
    try {
      if (this.scriptCache.has(slug)) {
        return this.scriptCache.get(slug);
      }

      this.initializeConfig();
      const filePath = join(this.localJsonDirectory, `${slug}.json`);
      const content = await readFile(filePath, 'utf-8');
      const script = JSON.parse(content);

      // Older files predate provenance tracking; default to the main repo.
      if (!script.repository_url) {
        const env = getEnv();
        script.repository_url = env.REPO_URL;
      }

      this.scriptCache.set(slug, script);

      return script;
    } catch {
      return null;
    }
  }

  /**
   * Sync the local JSON directory against a single repository.
   * @param {string} repoUrl
   * @returns {Promise<{success: boolean, message: string, count: number, syncedFiles: string[]}>}
   */
  async syncJsonFilesForRepo(repoUrl) {
    try {
      console.log(`Starting JSON sync from repository: ${repoUrl}`);

      const githubFiles = await this.getJsonFiles(repoUrl);
      console.log(`Found ${githubFiles.length} JSON files in repository ${repoUrl}`);

      const localFiles = await this.getLocalJsonFiles();
      console.log(`Found ${localFiles.length} local JSON files`);

      const filesToSync = await this.findFilesToSyncForRepo(repoUrl, githubFiles, localFiles);
      console.log(`Found ${filesToSync.length} files that need syncing from ${repoUrl}`);

      if (filesToSync.length === 0) {
        return {
          success: true,
          message: `All JSON files are up to date for repository: ${repoUrl}`,
          count: 0,
          syncedFiles: []
        };
      }

      const syncedFiles = await this.syncSpecificFiles(repoUrl, filesToSync);

      return {
        success: true,
        message: `Successfully synced ${syncedFiles.length} JSON files from ${repoUrl}`,
        count: syncedFiles.length,
        syncedFiles
      };
    } catch (error) {
      console.error(`JSON sync failed for ${repoUrl}:`, error);
      return {
        success: false,
        message: `Failed to sync JSON files from ${repoUrl}: ${error instanceof Error ? error.message : 'Unknown error'}`,
        count: 0,
        syncedFiles: []
      };
    }
  }

  /**
   * Sync the local JSON directory against every enabled repository, in
   * priority order. Per-repo failures are logged and skipped.
   *
   * NOTE(review): findFilesToSyncForRepo re-syncs any local file whose
   * repository_url differs from the repo currently being processed, so a
   * lower-priority repo can overwrite a file written earlier in the same pass
   * by a higher-priority repo; processedSlugs only dedupes the reported list,
   * not the writes — confirm this is intended.
   * @returns {Promise<{success: boolean, message: string, count: number, syncedFiles: string[]}>}
   */
  async syncJsonFiles() {
    try {
      console.log('Starting multi-repository JSON sync...');

      const enabledRepos = await repositoryService.getEnabledRepositories();

      if (enabledRepos.length === 0) {
        return {
          success: false,
          message: 'No enabled repositories found',
          count: 0,
          syncedFiles: []
        };
      }

      console.log(`Found ${enabledRepos.length} enabled repositories`);

      const allSyncedFiles = [];
      const processedSlugs = new Set();
      let totalSynced = 0;

      for (const repo of enabledRepos) {
        try {
          console.log(`Syncing from repository: ${repo.url} (priority: ${repo.priority})`);

          const result = await this.syncJsonFilesForRepo(repo.url);

          if (result.success) {
            // Count each slug only once across repositories.
            const newFiles = result.syncedFiles.filter(file => {
              const slug = file.replace('.json', '');
              if (processedSlugs.has(slug)) {
                return false;
              }
              processedSlugs.add(slug);
              return true;
            });

            allSyncedFiles.push(...newFiles);
            totalSynced += newFiles.length;
          } else {
            console.error(`Failed to sync from ${repo.url}: ${result.message}`);
          }
        } catch (error) {
          console.error(`Error syncing from ${repo.url}:`, error);
        }
      }

      // Backfill provenance on files that predate repository_url tracking.
      await this.updateExistingFilesWithRepositoryUrl();

      return {
        success: true,
        message: `Successfully synced ${totalSynced} JSON files from ${enabledRepos.length} repositories`,
        count: totalSynced,
        syncedFiles: allSyncedFiles
      };
    } catch (error) {
      console.error('Multi-repository JSON sync failed:', error);
      return {
        success: false,
        message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
        count: 0,
        syncedFiles: []
      };
    }
  }

  /**
   * Rewrite any local JSON file lacking repository_url so it points at the
   * main repository (REPO_URL). Errors are logged, never thrown.
   */
  async updateExistingFilesWithRepositoryUrl() {
    try {
      this.initializeConfig();
      const files = await this.getLocalJsonFiles();
      const env = getEnv();
      const mainRepoUrl = env.REPO_URL;

      for (const file of files) {
        try {
          const filePath = join(this.localJsonDirectory, file);
          const content = await readFile(filePath, 'utf-8');
          const script = JSON.parse(content);

          if (!script.repository_url) {
            script.repository_url = mainRepoUrl;
            await writeFile(filePath, JSON.stringify(script, null, 2), 'utf-8');
            console.log(`Updated ${file} with repository_url: ${mainRepoUrl}`);
          }
        } catch (error) {
          console.error(`Error updating ${file}:`, error);
        }
      }
    } catch (error) {
      console.error('Error updating existing files with repository_url:', error);
    }
  }

  /**
   * List local .json filenames; empty array when the directory is missing.
   * @returns {Promise<string[]>}
   */
  async getLocalJsonFiles() {
    this.initializeConfig();
    try {
      const files = await readdir(this.localJsonDirectory);
      return files.filter(f => f.endsWith('.json'));
    } catch {
      return [];
    }
  }

  /**
   * Decide which GitHub files need (re-)downloading: missing locally,
   * unreadable locally, or locally tagged with a different repository_url.
   * @param {string} repoUrl
   * @param {Array<{name: string, path: string}>} githubFiles
   * @param {string[]} localFiles local .json filenames
   * @returns {Promise<Array<{name: string, path: string}>>}
   */
  async findFilesToSyncForRepo(repoUrl, githubFiles, localFiles) {
    const filesToSync = [];

    for (const ghFile of githubFiles) {
      const localFilePath = join(this.localJsonDirectory, ghFile.name);

      let needsSync = false;

      if (!localFiles.includes(ghFile.name)) {
        needsSync = true;
      } else {
        try {
          const content = await readFile(localFilePath, 'utf-8');
          const script = JSON.parse(content);

          if (!script.repository_url || script.repository_url !== repoUrl) {
            needsSync = true;
          }
        } catch {
          // Unreadable/corrupt local file: re-download it.
          needsSync = true;
        }
      }

      if (needsSync) {
        filesToSync.push(ghFile);
      }
    }

    return filesToSync;
  }

  /**
   * Download the given files and write them to the local directory, tagging
   * each with repository_url and invalidating its cache entry. Per-file
   * failures are logged and skipped.
   * @param {string} repoUrl
   * @param {Array<{name: string, path: string}>} filesToSync
   * @returns {Promise<string[]>} filenames written
   */
  async syncSpecificFiles(repoUrl, filesToSync) {
    this.initializeConfig();
    const syncedFiles = [];

    await mkdir(this.localJsonDirectory, { recursive: true });

    for (const file of filesToSync) {
      try {
        const script = await this.downloadJsonFile(repoUrl, file.path);
        // Name the local file after the script's slug, not the remote filename.
        const filename = `${script.slug}.json`;
        const filePath = join(this.localJsonDirectory, filename);

        script.repository_url = repoUrl;

        await writeFile(filePath, JSON.stringify(script, null, 2), 'utf-8');
        syncedFiles.push(filename);

        // Drop any stale cached copy so the next read sees the fresh file.
        this.scriptCache.delete(script.slug);
      } catch (error) {
        console.error(`Failed to sync ${file.name} from ${repoUrl}:`, error);
      }
    }

    return syncedFiles;
  }
}
|
||||
|
||||
// Singleton instance: every importer shares one config snapshot and one
// in-memory script cache.
export const githubJsonService = new GitHubJsonService();
|
||||
216
src/server/services/repositoryService.js
Normal file
216
src/server/services/repositoryService.js
Normal file
@@ -0,0 +1,216 @@
|
||||
// JavaScript wrapper for repositoryService (for use with node server.js)
|
||||
import { prisma } from '../db.js';
|
||||
|
||||
/**
 * CRUD and bookkeeping for script repositories stored via Prisma, plus
 * cleanup of the JSON files a repository contributed under scripts/json.
 */
class RepositoryService {
  /**
   * True when `url` has the exact shape https://github.com/owner/repo
   * (no trailing path segments). Shared by createRepository() and
   * updateRepository() so both enforce the same rule.
   * @param {string} url
   * @returns {boolean}
   */
  #isValidGitHubUrl(url) {
    return /^https:\/\/github\.com\/[^\/]+\/[^\/]+$/.test(url);
  }

  /**
   * Initialize default repositories if they don't exist.
   * Seeds the main (enabled) and dev (disabled) community-scripts repos;
   * both are marked non-removable defaults.
   */
  async initializeDefaultRepositories() {
    const mainRepoUrl = 'https://github.com/community-scripts/ProxmoxVE';
    const devRepoUrl = 'https://github.com/community-scripts/ProxmoxVED';

    // Check if repositories already exist
    const existingRepos = await prisma.repository.findMany({
      where: {
        url: {
          in: [mainRepoUrl, devRepoUrl]
        }
      }
    });

    const existingUrls = new Set(existingRepos.map((r) => r.url));

    // Create main repo if it doesn't exist
    if (!existingUrls.has(mainRepoUrl)) {
      await prisma.repository.create({
        data: {
          url: mainRepoUrl,
          enabled: true,
          is_default: true,
          is_removable: false,
          priority: 1
        }
      });
      console.log('Initialized main repository:', mainRepoUrl);
    }

    // Create dev repo if it doesn't exist
    if (!existingUrls.has(devRepoUrl)) {
      await prisma.repository.create({
        data: {
          url: devRepoUrl,
          enabled: false,
          is_default: true,
          is_removable: false,
          priority: 2
        }
      });
      console.log('Initialized dev repository:', devRepoUrl);
    }
  }

  /**
   * Get all repositories, sorted by priority (then creation time).
   */
  async getAllRepositories() {
    return await prisma.repository.findMany({
      orderBy: [
        { priority: 'asc' },
        { created_at: 'asc' }
      ]
    });
  }

  /**
   * Get enabled repositories, sorted by priority (then creation time).
   */
  async getEnabledRepositories() {
    return await prisma.repository.findMany({
      where: {
        enabled: true
      },
      orderBy: [
        { priority: 'asc' },
        { created_at: 'asc' }
      ]
    });
  }

  /**
   * Get repository by URL, or null when none exists.
   * @param {string} url
   */
  async getRepositoryByUrl(url) {
    return await prisma.repository.findUnique({
      where: { url }
    });
  }

  /**
   * Create a new repository.
   * New repos are user-added (removable, non-default) and are appended after
   * the current highest priority unless one is given.
   * @param {{url: string, enabled?: boolean, priority?: number}} data
   * @throws {Error} on malformed URL or duplicate repository
   */
  async createRepository(data) {
    // Validate GitHub URL
    if (!this.#isValidGitHubUrl(data.url)) {
      throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
    }

    // Check for duplicates
    // NOTE(review): check-then-create is not transactional; concurrent calls
    // could race — confirm acceptable for this usage.
    const existing = await this.getRepositoryByUrl(data.url);
    if (existing) {
      throw new Error('Repository already exists');
    }

    // Get max priority for user-added repos
    const maxPriority = await prisma.repository.aggregate({
      _max: {
        priority: true
      }
    });

    return await prisma.repository.create({
      data: {
        url: data.url,
        enabled: data.enabled ?? true,
        is_default: false,
        is_removable: true,
        priority: data.priority ?? (maxPriority._max.priority ?? 0) + 1
      }
    });
  }

  /**
   * Update repository. When the URL changes it is re-validated and checked
   * for collisions with other rows.
   * @param {number|string} id
   * @param {object} data fields to update (passed through to Prisma)
   * @throws {Error} on malformed URL or duplicate URL
   */
  async updateRepository(id, data) {
    // If updating URL, validate it
    if (data.url) {
      if (!this.#isValidGitHubUrl(data.url)) {
        throw new Error('Invalid GitHub repository URL. Format: https://github.com/owner/repo');
      }

      // Check for duplicates (excluding current repo)
      const existing = await prisma.repository.findFirst({
        where: {
          url: data.url,
          id: { not: id }
        }
      });
      if (existing) {
        throw new Error('Repository URL already exists');
      }
    }

    return await prisma.repository.update({
      where: { id },
      data
    });
  }

  /**
   * Delete repository and associated JSON files.
   * Default repositories (is_removable = false) cannot be deleted.
   * @param {number|string} id
   * @returns {Promise<{success: true}>}
   * @throws {Error} when the repo is missing or non-removable
   */
  async deleteRepository(id) {
    const repo = await prisma.repository.findUnique({
      where: { id }
    });

    if (!repo) {
      throw new Error('Repository not found');
    }

    if (!repo.is_removable) {
      throw new Error('Cannot delete default repository');
    }

    // Delete associated JSON files
    await this.deleteRepositoryJsonFiles(repo.url);

    // Delete repository
    await prisma.repository.delete({
      where: { id }
    });

    return { success: true };
  }

  /**
   * Delete all JSON files associated with a repository.
   * Scans scripts/json and removes every file whose repository_url matches.
   * A missing directory and unreadable files are tolerated silently.
   * @param {string} repoUrl
   */
  async deleteRepositoryJsonFiles(repoUrl) {
    const { readdir, unlink, readFile } = await import('fs/promises');
    const { join } = await import('path');

    const jsonDirectory = join(process.cwd(), 'scripts', 'json');

    try {
      const files = await readdir(jsonDirectory);

      for (const file of files) {
        if (!file.endsWith('.json')) continue;

        try {
          const filePath = join(jsonDirectory, file);
          const content = await readFile(filePath, 'utf-8');
          const script = JSON.parse(content);

          // If script has repository_url matching the repo, delete it
          if (script.repository_url === repoUrl) {
            await unlink(filePath);
            console.log(`Deleted JSON file: ${file} (from repository: ${repoUrl})`);
          }
        } catch (error) {
          // Skip files that can't be read or parsed
          console.error(`Error processing file ${file}:`, error);
        }
      }
    } catch (error) {
      // Directory might not exist, which is fine
      if (error.code !== 'ENOENT') {
        console.error('Error deleting repository JSON files:', error);
      }
    }
  }
}
|
||||
|
||||
// Singleton instance: shared by all importers (the service keeps no state
// beyond the Prisma client it references).
export const repositoryService = new RepositoryService();
|
||||
Reference in New Issue
Block a user