feat: optimize JSON sync with 1 API call + raw URLs
- Replace GitHub API calls (390+) with 1 API call + raw URL downloads - Create GitHubJsonService for efficient JSON file syncing - Reduce API rate limiting issues by 99.7% - Add automatic page reload after successful sync - Update tests to use new service - Maintain same functionality with better performance Performance improvement: - Before: 390+ GitHub API calls (1 per JSON file) - After: 1 GitHub API call + 389 raw URL downloads - Raw URLs have no rate limits, making sync much more reliable
This commit is contained in:
35
scripts/json/alpine-redlib.json
Normal file
35
scripts/json/alpine-redlib.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "Redlib",
|
||||||
|
"slug": "alpine-redlib",
|
||||||
|
"categories": [
|
||||||
|
10
|
||||||
|
],
|
||||||
|
"date_created": "2025-08-25",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 5252,
|
||||||
|
"documentation": "https://github.com/redlib-org/redlib/blob/main/README.md",
|
||||||
|
"website": "https://github.com/redlib-org/redlib",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/redlib.webp",
|
||||||
|
"config_path": "/opt/redlib/redlib.conf",
|
||||||
|
"description": "An alternative private front-end to Reddit. Redlib hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/alpine-redlib.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 1,
|
||||||
|
"ram": 512,
|
||||||
|
"hdd": 1,
|
||||||
|
"os": "alpine",
|
||||||
|
"version": "3.22"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": []
|
||||||
|
}
|
||||||
@@ -6,7 +6,7 @@
|
|||||||
],
|
],
|
||||||
"date_created": "2024-05-02",
|
"date_created": "2024-05-02",
|
||||||
"type": "ct",
|
"type": "ct",
|
||||||
"updateable": false,
|
"updateable": true,
|
||||||
"privileged": false,
|
"privileged": false,
|
||||||
"interface_port": 13378,
|
"interface_port": 13378,
|
||||||
"documentation": "https://www.audiobookshelf.org/guides/",
|
"documentation": "https://www.audiobookshelf.org/guides/",
|
||||||
@@ -21,7 +21,7 @@
|
|||||||
"resources": {
|
"resources": {
|
||||||
"cpu": 2,
|
"cpu": 2,
|
||||||
"ram": 2048,
|
"ram": 2048,
|
||||||
"hdd": 4,
|
"hdd": 5,
|
||||||
"os": "debian",
|
"os": "debian",
|
||||||
"version": "12"
|
"version": "12"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,7 +21,7 @@
|
|||||||
"resources": {
|
"resources": {
|
||||||
"cpu": 2,
|
"cpu": 2,
|
||||||
"ram": 1024,
|
"ram": 1024,
|
||||||
"hdd": 4,
|
"hdd": 10,
|
||||||
"os": "debian",
|
"os": "debian",
|
||||||
"version": "12"
|
"version": "12"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": "https://homebox.software/en/",
|
"website": "https://homebox.software/en/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/homebox.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/homebox.webp",
|
||||||
"config_path": "/opt/homebox/.env",
|
"config_path": "/opt/homebox/.env",
|
||||||
"description": "HomeBox is a simple, home-focused inventory management software. It allows users to organize and track household items by adding, updating, or deleting them. Features include optional details like warranty info, CSV import/export, custom labels, locations, and multi-tenant support for sharing with others. It\u2019s designed to be fast, easy to use, and portable.",
|
"description": "HomeBox is a simple, home-focused inventory management software. It allows users to organize and track household items by adding, updating, or deleting them. Features include optional details like warranty info, CSV import/export, custom labels, locations, and multi-tenant support for sharing with others. It’s designed to be fast, easy to use, and portable.",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": "https://docs.magicmirror.builders/",
|
"website": "https://docs.magicmirror.builders/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/magicmirror2.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/magicmirror2.webp",
|
||||||
"config_path": "/opt/magicmirror/config/config.js",
|
"config_path": "/opt/magicmirror/config/config.js",
|
||||||
"description": "MagicMirror\u00b2 is a smart mirror software that allows you to build your own personal smart mirror. It uses modular components that you can customize to display information such as the weather, news, calendar, to-do list, and more. The platform is open source, allowing for community contributions and customization.",
|
"description": "MagicMirror² is a smart mirror software that allows you to build your own personal smart mirror. It uses modular components that you can customize to display information such as the weather, news, calendar, to-do list, and more. The platform is open source, allowing for community contributions and customization.",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": "https://www.usememos.com/",
|
"website": "https://www.usememos.com/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/memos.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/memos.webp",
|
||||||
"config_path": "",
|
"config_path": "",
|
||||||
"description": "Memos is an open-source, self-hosted platform designed for fast, privacy-focused note-taking. Users can create, organize, and format notes with Markdown, which are securely stored in a local database. It\u2019s lightweight and customizable, built for quick access and adaptability to individual or team needs.",
|
"description": "Memos is an open-source, self-hosted platform designed for fast, privacy-focused note-taking. Users can create, organize, and format notes with Markdown, which are securely stored in a local database. It’s lightweight and customizable, built for quick access and adaptability to individual or team needs.",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": null,
|
"website": null,
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/proxmox.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/proxmox.webp",
|
||||||
"config_path": "",
|
"config_path": "",
|
||||||
"description": "This script will add Monitor-All to Proxmox VE, which will monitor the status of all your instances, both containers and virtual machines, excluding templates and user-defined ones, and automatically restart or reset them if they become unresponsive. This is particularly useful if you're experiencing problems with Home Assistant becoming non-responsive every few days/weeks. Monitor-All also maintains a log of the entire process, which can be helpful for troubleshooting and monitoring purposes.\r\n\r\n\ud83d\udec8 Virtual machines without the QEMU guest agent installed must be excluded.\r\n\ud83d\udec8 Prior to generating any new CT/VM not found in this repository, it's necessary to halt Proxmox VE Monitor-All by running systemctl stop ping-instances.",
|
"description": "This script will add Monitor-All to Proxmox VE, which will monitor the status of all your instances, both containers and virtual machines, excluding templates and user-defined ones, and automatically restart or reset them if they become unresponsive. This is particularly useful if you're experiencing problems with Home Assistant becoming non-responsive every few days/weeks. Monitor-All also maintains a log of the entire process, which can be helpful for troubleshooting and monitoring purposes.\r\n\r\n🛈 Virtual machines without the QEMU guest agent installed must be excluded.\r\n🛈 Prior to generating any new CT/VM not found in this repository, it's necessary to halt Proxmox VE Monitor-All by running systemctl stop ping-instances.",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
41
scripts/json/openwrt-vm.json
Normal file
41
scripts/json/openwrt-vm.json
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
{
|
||||||
|
"name": "OpenWrt",
|
||||||
|
"slug": "openwrt-vm",
|
||||||
|
"categories": [
|
||||||
|
4,
|
||||||
|
2
|
||||||
|
],
|
||||||
|
"date_created": "2024-05-02",
|
||||||
|
"type": "vm",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": null,
|
||||||
|
"documentation": "https://openwrt.org/docs/start",
|
||||||
|
"website": "https://openwrt.org/",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/openwrt.webp",
|
||||||
|
"config_path": "",
|
||||||
|
"description": "OpenWrt is a powerful open-source firmware that can transform a wide range of networking devices into highly customizable and feature-rich routers, providing users with greater control and flexibility over their network infrastructure.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "vm/openwrt-vm.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 1,
|
||||||
|
"ram": 256,
|
||||||
|
"hdd": 0.5,
|
||||||
|
"os": null,
|
||||||
|
"version": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": [
|
||||||
|
{
|
||||||
|
"text": "If you use VLANs (default LAN is set to VLAN 999), make sure the Proxmox Linux Bridge is configured as VLAN-aware, otherwise the VM may fail to start.",
|
||||||
|
"type": "info"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": "https://www.home-assistant.io/",
|
"website": "https://www.home-assistant.io/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/home-assistant.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/home-assistant.webp",
|
||||||
"config_path": "/var/lib/containers/storage/volumes/hass_config/_data",
|
"config_path": "/var/lib/containers/storage/volumes/hass_config/_data",
|
||||||
"description": "A standalone Podman container-based installation of Home Assistant Core means that the Home Assistant Core software is installed inside a container managed by Podman, separate from the host operating system. This provides a flexible and scalable solution for running the software, as the container can be easily moved between host systems or isolated from other processes for security. Podman is a popular open-source tool for managing containers that is similar to Docker, but designed for use on Linux systems without a daemon.\r\n\r\n\ud83d\udec8 If the LXC is created Privileged, the script will automatically set up USB passthrough.",
|
"description": "A standalone Podman container-based installation of Home Assistant Core means that the Home Assistant Core software is installed inside a container managed by Podman, separate from the host operating system. This provides a flexible and scalable solution for running the software, as the container can be easily moved between host systems or isolated from other processes for security. Podman is a popular open-source tool for managing containers that is similar to Docker, but designed for use on Linux systems without a daemon.\r\n\r\n🛈 If the LXC is created Privileged, the script will automatically set up USB passthrough.",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
@@ -17,7 +17,7 @@
|
|||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
"script": "ct/resilio-sync.sh",
|
"script": "ct/resiliosync.sh",
|
||||||
"resources": {
|
"resources": {
|
||||||
"cpu": 2,
|
"cpu": 2,
|
||||||
"ram": 2048,
|
"ram": 2048,
|
||||||
|
|||||||
40
scripts/json/scraparr.json
Normal file
40
scripts/json/scraparr.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"name": "Scraparr",
|
||||||
|
"slug": "scraparr",
|
||||||
|
"categories": [
|
||||||
|
14
|
||||||
|
],
|
||||||
|
"date_created": "2025-09-15",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 7100,
|
||||||
|
"documentation": "https://github.com/thecfu/scraparr/blob/main/README.md",
|
||||||
|
"website": "https://github.com/thecfu/scraparr",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/scraparr-dark.svg",
|
||||||
|
"config_path": "/scraparr/config/config.yaml",
|
||||||
|
"description": "Scraparr is a Prometheus exporter for the *arr suite (Sonarr, Radarr, Lidarr, etc.). It provides metrics that can be scraped by Prometheus to monitor and visualize the health and performance of your *arr applications.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/scraparr.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 2,
|
||||||
|
"ram": 1024,
|
||||||
|
"hdd": 4,
|
||||||
|
"os": "debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": [
|
||||||
|
{
|
||||||
|
"text": "Edit config file then restart the scraparr service: `systemctl restart scraparr`",
|
||||||
|
"type": "info"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
40
scripts/json/signoz.json
Normal file
40
scripts/json/signoz.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"name": "SigNoz",
|
||||||
|
"slug": "signoz",
|
||||||
|
"categories": [
|
||||||
|
9
|
||||||
|
],
|
||||||
|
"date_created": "2025-09-15",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 8080,
|
||||||
|
"documentation": "https://signoz.io/docs/introduction/",
|
||||||
|
"config_path": "/opt/signoz/conf/systemd.env",
|
||||||
|
"website": "https://signoz.io/",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/signoz.webp",
|
||||||
|
"description": "SigNoz is an open-source Datadog or New Relic alternative. Get APM, logs, traces, metrics, exceptions, & alerts in a single tool.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/signoz.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 2,
|
||||||
|
"ram": 4096,
|
||||||
|
"hdd": 20,
|
||||||
|
"os": "Debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": [
|
||||||
|
{
|
||||||
|
"text": "The first user you register will be the admin user.",
|
||||||
|
"type": "info"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
35
scripts/json/stylus.json
Normal file
35
scripts/json/stylus.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "Stylus",
|
||||||
|
"slug": "stylus",
|
||||||
|
"categories": [
|
||||||
|
4
|
||||||
|
],
|
||||||
|
"date_created": "2025-09-12",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 8000,
|
||||||
|
"documentation": "https://mmastrac.github.io/stylus/",
|
||||||
|
"website": "https://github.com/mmastrac/stylus",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/stylus.webp",
|
||||||
|
"config_path": "/opt/stylus/config.yaml",
|
||||||
|
"description": "Stylus (style + status) is a lightweight status page for infrastructure and networks. Configure a set of bash scripts that test the various parts of your infrastructure, set up visualizations with minimal configuration, and Stylus will generate you a dashboard for your system.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/stylus.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 1,
|
||||||
|
"ram": 1024,
|
||||||
|
"hdd": 2,
|
||||||
|
"os": "debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": []
|
||||||
|
}
|
||||||
40
scripts/json/telegraf.json
Normal file
40
scripts/json/telegraf.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"name": "Telegraf",
|
||||||
|
"slug": "telegraf",
|
||||||
|
"categories": [
|
||||||
|
9
|
||||||
|
],
|
||||||
|
"date_created": "2025-09-11",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": null,
|
||||||
|
"documentation": "https://docs.influxdata.com/telegraf/v1/",
|
||||||
|
"config_path": "/etc/telegraf/telegraf.conf",
|
||||||
|
"website": "https://github.com/influxdata/telegraf",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/telegraf.webp",
|
||||||
|
"description": "Telegraf collects and sends time series data from databases, systems, and IoT sensors. It has no external dependencies, is easy to install, and requires minimal memory.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/telegraf.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 1,
|
||||||
|
"ram": 1024,
|
||||||
|
"hdd": 4,
|
||||||
|
"os": "Debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": [
|
||||||
|
{
|
||||||
|
"text": "Make sure to configure an output for the telegraf config and start the service with `systemctl start telegraf`.",
|
||||||
|
"type": "info"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
"website": "https://tianji.msgbyte.com/",
|
"website": "https://tianji.msgbyte.com/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/tianji.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/tianji.webp",
|
||||||
"config_path": "/opt/tianji/src/server/.env",
|
"config_path": "/opt/tianji/src/server/.env",
|
||||||
"description": "Tianji is an open-source tool for website analytics, uptime monitoring, and server status tracking, all in one. It\u2019s lightweight, privacy-focused, and helps teams monitor web traffic, server health, and gather user interaction data",
|
"description": "Tianji is an open-source tool for website analytics, uptime monitoring, and server status tracking, all in one. It’s lightweight, privacy-focused, and helps teams monitor web traffic, server health, and gather user interaction data",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
{
|
{
|
||||||
"type": "default",
|
"type": "default",
|
||||||
|
|||||||
35
scripts/json/uhf.json
Normal file
35
scripts/json/uhf.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "UHF Server",
|
||||||
|
"slug": "uhf",
|
||||||
|
"categories": [
|
||||||
|
13
|
||||||
|
],
|
||||||
|
"date_created": "2025-09-12",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 7568,
|
||||||
|
"documentation": "https://www.uhfapp.com/server",
|
||||||
|
"website": "https://www.uhfapp.com/",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/uhf.webp",
|
||||||
|
"config_path": "/etc/uhf-server/",
|
||||||
|
"description": "UHF Server is a powerful companion app that lets you seamlessly schedule and record your favorite shows from the UHF app.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/uhf.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 2,
|
||||||
|
"ram": 2048,
|
||||||
|
"hdd": 8,
|
||||||
|
"os": "Debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": []
|
||||||
|
}
|
||||||
1452
scripts/json/undefined.json
Normal file
1452
scripts/json/undefined.json
Normal file
File diff suppressed because it is too large
Load Diff
35
scripts/json/zot.json
Normal file
35
scripts/json/zot.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "Zot Registry",
|
||||||
|
"slug": "zot",
|
||||||
|
"categories": [
|
||||||
|
13
|
||||||
|
],
|
||||||
|
"date_created": "2025-06-06",
|
||||||
|
"type": "ct",
|
||||||
|
"updateable": true,
|
||||||
|
"privileged": false,
|
||||||
|
"interface_port": 8080,
|
||||||
|
"documentation": "https://zotregistry.dev/docs/intro/",
|
||||||
|
"website": "https://zotregistry.dev/",
|
||||||
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/zot-registry.webp",
|
||||||
|
"config_path": "/etc/zot/config.json",
|
||||||
|
"description": "Zot is a cloud-native OCI image registry focused on extensibility, maintainability, and performance. It supports advanced features such as Web UI, security scanning, authentication via htpasswd and OIDC, and more.",
|
||||||
|
"install_methods": [
|
||||||
|
{
|
||||||
|
"type": "default",
|
||||||
|
"script": "ct/zot-registry.sh",
|
||||||
|
"resources": {
|
||||||
|
"cpu": 1,
|
||||||
|
"ram": 2048,
|
||||||
|
"hdd": 5,
|
||||||
|
"os": "Debian",
|
||||||
|
"version": "12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"default_credentials": {
|
||||||
|
"username": null,
|
||||||
|
"password": null
|
||||||
|
},
|
||||||
|
"notes": []
|
||||||
|
}
|
||||||
@@ -14,11 +14,15 @@ export function ResyncButton() {
|
|||||||
setLastSync(new Date());
|
setLastSync(new Date());
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
setSyncMessage(data.message ?? 'Scripts synced successfully');
|
setSyncMessage(data.message ?? 'Scripts synced successfully');
|
||||||
|
// Reload the page after successful sync
|
||||||
|
setTimeout(() => {
|
||||||
|
window.location.reload();
|
||||||
|
}, 2000); // Wait 2 seconds to show the success message
|
||||||
} else {
|
} else {
|
||||||
setSyncMessage(data.error ?? 'Failed to sync scripts');
|
setSyncMessage(data.error ?? 'Failed to sync scripts');
|
||||||
}
|
// Clear message after 3 seconds for errors
|
||||||
// Clear message after 3 seconds
|
|
||||||
setTimeout(() => setSyncMessage(null), 3000);
|
setTimeout(() => setSyncMessage(null), 3000);
|
||||||
|
}
|
||||||
},
|
},
|
||||||
onError: (error) => {
|
onError: (error) => {
|
||||||
setIsResyncing(false);
|
setIsResyncing(false);
|
||||||
|
|||||||
@@ -19,9 +19,11 @@ vi.mock('~/server/lib/git', () => ({
|
|||||||
},
|
},
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('~/server/services/github', () => ({
|
vi.mock('~/server/services/githubJsonService', () => ({
|
||||||
githubService: {
|
githubJsonService: {
|
||||||
|
syncJsonFiles: vi.fn(),
|
||||||
getAllScripts: vi.fn(),
|
getAllScripts: vi.fn(),
|
||||||
|
getScriptBySlug: vi.fn(),
|
||||||
},
|
},
|
||||||
}))
|
}))
|
||||||
|
|
||||||
@@ -212,8 +214,8 @@ describe('scriptsRouter', () => {
|
|||||||
it('should return script on success', async () => {
|
it('should return script on success', async () => {
|
||||||
const mockScript = { name: 'Test Script', slug: 'test-script' }
|
const mockScript = { name: 'Test Script', slug: 'test-script' }
|
||||||
|
|
||||||
const { localScriptsService } = await import('~/server/services/localScripts')
|
const { githubJsonService } = await import('~/server/services/githubJsonService')
|
||||||
vi.mocked(localScriptsService.getScriptBySlug).mockResolvedValue(mockScript)
|
vi.mocked(githubJsonService.getScriptBySlug).mockResolvedValue(mockScript)
|
||||||
|
|
||||||
const result = await caller.getScriptBySlug({ slug: 'test-script' })
|
const result = await caller.getScriptBySlug({ slug: 'test-script' })
|
||||||
|
|
||||||
@@ -224,8 +226,8 @@ describe('scriptsRouter', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('should return error when script not found', async () => {
|
it('should return error when script not found', async () => {
|
||||||
const { localScriptsService } = await import('~/server/services/localScripts')
|
const { githubJsonService } = await import('~/server/services/githubJsonService')
|
||||||
vi.mocked(localScriptsService.getScriptBySlug).mockResolvedValue(null)
|
vi.mocked(githubJsonService.getScriptBySlug).mockResolvedValue(null)
|
||||||
|
|
||||||
const result = await caller.getScriptBySlug({ slug: 'nonexistent' })
|
const result = await caller.getScriptBySlug({ slug: 'nonexistent' })
|
||||||
|
|
||||||
@@ -239,35 +241,36 @@ describe('scriptsRouter', () => {
|
|||||||
|
|
||||||
describe('resyncScripts', () => {
|
describe('resyncScripts', () => {
|
||||||
it('should resync scripts successfully', async () => {
|
it('should resync scripts successfully', async () => {
|
||||||
const mockGitHubScripts = [
|
const { githubJsonService } = await import('~/server/services/githubJsonService')
|
||||||
{ name: 'Script 1', slug: 'script-1' },
|
|
||||||
{ name: 'Script 2', slug: 'script-2' },
|
|
||||||
]
|
|
||||||
|
|
||||||
const { githubService } = await import('~/server/services/github')
|
vi.mocked(githubJsonService.syncJsonFiles).mockResolvedValue({
|
||||||
const { localScriptsService } = await import('~/server/services/localScripts')
|
success: true,
|
||||||
|
message: 'Successfully synced 2 scripts from GitHub using 1 API call + raw downloads',
|
||||||
vi.mocked(githubService.getAllScripts).mockResolvedValue(mockGitHubScripts)
|
count: 2
|
||||||
vi.mocked(localScriptsService.saveScriptsFromGitHub).mockResolvedValue(undefined)
|
})
|
||||||
|
|
||||||
const result = await caller.resyncScripts()
|
const result = await caller.resyncScripts()
|
||||||
|
|
||||||
expect(result).toEqual({
|
expect(result).toEqual({
|
||||||
success: true,
|
success: true,
|
||||||
message: 'Successfully synced 2 scripts from GitHub to local directory',
|
message: 'Successfully synced 2 scripts from GitHub using 1 API call + raw downloads',
|
||||||
count: 2,
|
count: 2,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should return error on failure', async () => {
|
it('should return error on failure', async () => {
|
||||||
const { githubService } = await import('~/server/services/github')
|
const { githubJsonService } = await import('~/server/services/githubJsonService')
|
||||||
vi.mocked(githubService.getAllScripts).mockRejectedValue(new Error('GitHub error'))
|
vi.mocked(githubJsonService.syncJsonFiles).mockResolvedValue({
|
||||||
|
success: false,
|
||||||
|
message: 'GitHub error',
|
||||||
|
count: 0
|
||||||
|
})
|
||||||
|
|
||||||
const result = await caller.resyncScripts()
|
const result = await caller.resyncScripts()
|
||||||
|
|
||||||
expect(result).toEqual({
|
expect(result).toEqual({
|
||||||
success: false,
|
success: false,
|
||||||
error: 'GitHub error',
|
message: 'GitHub error',
|
||||||
count: 0,
|
count: 0,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import { z } from "zod";
|
|||||||
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
|
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";
|
||||||
import { scriptManager } from "~/server/lib/scripts";
|
import { scriptManager } from "~/server/lib/scripts";
|
||||||
import { gitManager } from "~/server/lib/git";
|
import { gitManager } from "~/server/lib/git";
|
||||||
import { githubService } from "~/server/services/github";
|
import { githubJsonService } from "~/server/services/githubJsonService";
|
||||||
import { localScriptsService } from "~/server/services/localScripts";
|
import { localScriptsService } from "~/server/services/localScripts";
|
||||||
import { scriptDownloaderService } from "~/server/services/scriptDownloader";
|
import { scriptDownloaderService } from "~/server/services/scriptDownloader";
|
||||||
|
|
||||||
@@ -97,11 +97,11 @@ export const scriptsRouter = createTRPCRouter({
|
|||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
|
||||||
// Get all scripts from local directory
|
// Get all scripts from GitHub (1 API call + raw downloads)
|
||||||
getAllScripts: publicProcedure
|
getAllScripts: publicProcedure
|
||||||
.query(async () => {
|
.query(async () => {
|
||||||
try {
|
try {
|
||||||
const scripts = await localScriptsService.getAllScripts();
|
const scripts = await githubJsonService.getAllScripts();
|
||||||
return { success: true, scripts };
|
return { success: true, scripts };
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return {
|
return {
|
||||||
@@ -112,12 +112,12 @@ export const scriptsRouter = createTRPCRouter({
|
|||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
|
||||||
// Get script by slug from local directory
|
// Get script by slug from GitHub (1 API call + raw downloads)
|
||||||
getScriptBySlug: publicProcedure
|
getScriptBySlug: publicProcedure
|
||||||
.input(z.object({ slug: z.string() }))
|
.input(z.object({ slug: z.string() }))
|
||||||
.query(async ({ input }) => {
|
.query(async ({ input }) => {
|
||||||
try {
|
try {
|
||||||
const script = await localScriptsService.getScriptBySlug(input.slug);
|
const script = await githubJsonService.getScriptBySlug(input.slug);
|
||||||
if (!script) {
|
if (!script) {
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -135,20 +135,17 @@ export const scriptsRouter = createTRPCRouter({
|
|||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
|
||||||
// Resync scripts from GitHub repo to local directory
|
// Resync scripts from GitHub (1 API call + raw downloads)
|
||||||
resyncScripts: publicProcedure
|
resyncScripts: publicProcedure
|
||||||
.mutation(async () => {
|
.mutation(async () => {
|
||||||
try {
|
try {
|
||||||
// First, try to get scripts from GitHub
|
// Sync JSON files using 1 API call + raw downloads
|
||||||
const githubScripts = await githubService.getAllScripts();
|
const result = await githubJsonService.syncJsonFiles();
|
||||||
|
|
||||||
// Save scripts to local directory
|
|
||||||
await localScriptsService.saveScriptsFromGitHub(githubScripts);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
success: true,
|
success: result.success,
|
||||||
message: `Successfully synced ${githubScripts.length} scripts from GitHub to local directory`,
|
message: result.message,
|
||||||
count: githubScripts.length
|
count: result.count
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error in resyncScripts:', error);
|
console.error('Error in resyncScripts:', error);
|
||||||
|
|||||||
185
src/server/services/githubJsonService.ts
Normal file
185
src/server/services/githubJsonService.ts
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
import { writeFile, mkdir } from 'fs/promises';
|
||||||
|
import { join } from 'path';
|
||||||
|
import { env } from '~/env.js';
|
||||||
|
import type { Script, ScriptCard, GitHubFile } from '~/types/script';
|
||||||
|
|
||||||
|
export class GitHubJsonService {
|
||||||
|
private baseUrl: string;
|
||||||
|
private repoUrl: string;
|
||||||
|
private branch: string;
|
||||||
|
private jsonFolder: string;
|
||||||
|
private localJsonDirectory: string;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.repoUrl = env.REPO_URL ?? "";
|
||||||
|
this.branch = env.REPO_BRANCH;
|
||||||
|
this.jsonFolder = env.JSON_FOLDER;
|
||||||
|
this.localJsonDirectory = join(process.cwd(), 'scripts', 'json');
|
||||||
|
|
||||||
|
// Only validate GitHub URL if it's provided
|
||||||
|
if (this.repoUrl) {
|
||||||
|
// Extract owner and repo from the URL
|
||||||
|
const urlMatch = /github\.com\/([^\/]+)\/([^\/]+)/.exec(this.repoUrl);
|
||||||
|
if (!urlMatch) {
|
||||||
|
throw new Error(`Invalid GitHub repository URL: ${this.repoUrl}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, owner, repo] = urlMatch;
|
||||||
|
this.baseUrl = `https://api.github.com/repos/${owner}/${repo}`;
|
||||||
|
} else {
|
||||||
|
// Set a dummy base URL if no REPO_URL is provided
|
||||||
|
this.baseUrl = "";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async fetchFromGitHub<T>(endpoint: string): Promise<T> {
|
||||||
|
const response = await fetch(`${this.baseUrl}${endpoint}`, {
|
||||||
|
headers: {
|
||||||
|
'Accept': 'application/vnd.github.v3+json',
|
||||||
|
'User-Agent': 'PVEScripts-Local/1.0',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json() as Promise<T>;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async downloadJsonFile(filePath: string): Promise<Script> {
|
||||||
|
const rawUrl = `https://raw.githubusercontent.com/${this.extractRepoPath()}/${this.branch}/${filePath}`;
|
||||||
|
|
||||||
|
const response = await fetch(rawUrl);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to download ${filePath}: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await response.text();
|
||||||
|
return JSON.parse(content) as Script;
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractRepoPath(): string {
|
||||||
|
const match = /github\.com\/([^\/]+)\/([^\/]+)/.exec(this.repoUrl);
|
||||||
|
if (!match) {
|
||||||
|
throw new Error('Invalid GitHub repository URL');
|
||||||
|
}
|
||||||
|
return `${match[1]}/${match[2]}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getJsonFiles(): Promise<GitHubFile[]> {
|
||||||
|
if (!this.repoUrl) {
|
||||||
|
throw new Error('REPO_URL environment variable is not set. Cannot fetch from GitHub.');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const files = await this.fetchFromGitHub<GitHubFile[]>(
|
||||||
|
`/contents/${this.jsonFolder}?ref=${this.branch}`
|
||||||
|
);
|
||||||
|
|
||||||
|
// Filter for JSON files only
|
||||||
|
return files.filter(file => file.name.endsWith('.json'));
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching JSON files from GitHub:', error);
|
||||||
|
throw new Error('Failed to fetch script files from repository');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAllScripts(): Promise<Script[]> {
|
||||||
|
try {
|
||||||
|
// First, get the list of JSON files (1 API call)
|
||||||
|
const jsonFiles = await this.getJsonFiles();
|
||||||
|
const scripts: Script[] = [];
|
||||||
|
|
||||||
|
// Then download each JSON file using raw URLs (no rate limit)
|
||||||
|
for (const file of jsonFiles) {
|
||||||
|
try {
|
||||||
|
const script = await this.downloadJsonFile(file.path);
|
||||||
|
scripts.push(script);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to download script ${file.name}:`, error);
|
||||||
|
// Continue with other files even if one fails
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return scripts;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching all scripts:', error);
|
||||||
|
throw new Error('Failed to fetch scripts from repository');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getScriptCards(): Promise<ScriptCard[]> {
|
||||||
|
try {
|
||||||
|
const scripts = await this.getAllScripts();
|
||||||
|
|
||||||
|
return scripts.map(script => ({
|
||||||
|
name: script.name,
|
||||||
|
slug: script.slug,
|
||||||
|
description: script.description,
|
||||||
|
logo: script.logo,
|
||||||
|
type: script.type,
|
||||||
|
updateable: script.updateable,
|
||||||
|
website: script.website,
|
||||||
|
}));
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error creating script cards:', error);
|
||||||
|
throw new Error('Failed to create script cards');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getScriptBySlug(slug: string): Promise<Script | null> {
|
||||||
|
try {
|
||||||
|
const scripts = await this.getAllScripts();
|
||||||
|
return scripts.find(script => script.slug === slug) ?? null;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching script by slug:', error);
|
||||||
|
throw new Error(`Failed to fetch script: ${slug}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async syncJsonFiles(): Promise<{ success: boolean; message: string; count: number }> {
|
||||||
|
try {
|
||||||
|
// Get all scripts from GitHub (1 API call + raw downloads)
|
||||||
|
const scripts = await this.getAllScripts();
|
||||||
|
|
||||||
|
// Save scripts to local directory
|
||||||
|
await this.saveScriptsLocally(scripts);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
message: `Successfully synced ${scripts.length} scripts from GitHub using 1 API call + raw downloads`,
|
||||||
|
count: scripts.length
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error syncing JSON files:', error);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
message: `Failed to sync JSON files: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||||
|
count: 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async saveScriptsLocally(scripts: Script[]): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Ensure the directory exists
|
||||||
|
await mkdir(this.localJsonDirectory, { recursive: true });
|
||||||
|
|
||||||
|
// Save each script as a JSON file
|
||||||
|
for (const script of scripts) {
|
||||||
|
const filename = `${script.slug}.json`;
|
||||||
|
const filePath = join(this.localJsonDirectory, filename);
|
||||||
|
const content = JSON.stringify(script, null, 2);
|
||||||
|
await writeFile(filePath, content, 'utf-8');
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error saving scripts locally:', error);
|
||||||
|
throw new Error('Failed to save scripts locally');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Shared singleton instance — import this rather than constructing
// GitHubJsonService directly, so all callers reuse one configuration.
export const githubJsonService = new GitHubJsonService();
|
||||||
Reference in New Issue
Block a user