diff --git a/.github/autolabeler-config.json b/.github/autolabeler-config.json
index 5406bdaa4..342a1e38f 100644
--- a/.github/autolabeler-config.json
+++ b/.github/autolabeler-config.json
@@ -1,76 +1,131 @@
-
{
"new script": [
{
"fileStatus": "added",
- "includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
+ "includeGlobs": [
+ "ct/**",
+ "tools/**",
+ "install/**",
+ "misc/**",
+ "turnkey/**",
+ "vm/**"
+ ],
"excludeGlobs": []
}
],
"update script": [
{
"fileStatus": "modified",
- "includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
- "excludeGlobs": ["misc/build.func", "misc/install.func", "misc/api.func"]
+ "includeGlobs": [
+ "ct/**",
+ "tools/**",
+ "install/**",
+ "misc/**",
+ "turnkey/**",
+ "vm/**"
+ ],
+ "excludeGlobs": []
}
],
"delete script": [
{
- "fileStatus": "removed",
- "includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
+ "fileStatus": "removed",
+ "includeGlobs": [
+ "ct/**",
+ "tools/**",
+ "install/**",
+ "misc/**",
+ "turnkey/**",
+ "vm/**"
+ ],
"excludeGlobs": []
}
],
"maintenance": [
{
"fileStatus": null,
- "includeGlobs": ["*.md", ".github/**", "misc/*.func", "ct/create_lxc.sh", "api/**"],
+ "includeGlobs": [
+ "*.md"
+ ],
"excludeGlobs": []
}
],
"core": [
{
"fileStatus": null,
- "includeGlobs": ["misc/*.func", "ct/create_lxc.sh"],
- "excludeGlobs": []
+ "includeGlobs": [
+ "misc/*.func",
+ "misc/create_lxc.sh"
+ ],
+ "excludeGlobs": [
+ "misc/api.func"
+ ]
}
],
"website": [
{
"fileStatus": null,
- "includeGlobs": ["frontend/**", "json/**"],
- "excludeGlobs": []
+ "includeGlobs": [
+ "frontend/**"
+ ],
+ "excludeGlobs": [
+ "frontend/public/json/**"
+ ]
}
],
"api": [
{
"fileStatus": null,
- "includeGlobs": ["api/**", "misc/api.func"],
+ "includeGlobs": [
+ "api/**",
+ "misc/api.func"
+ ],
"excludeGlobs": []
}
],
"github": [
{
"fileStatus": null,
- "includeGlobs": [".github/**"],
+ "includeGlobs": [
+ ".github/**"
+ ],
"excludeGlobs": []
}
],
"json": [
{
"fileStatus": "modified",
- "includeGlobs": ["json/**"],
+ "includeGlobs": [
+ "frontend/public/json/**"
+ ],
"excludeGlobs": []
}
],
-
- "high risk": [
+ "addon": [
{
"fileStatus": null,
- "includeGlobs": ["misc/build.func", "misc/install.func", "ct/create_lxc.sh"],
+ "includeGlobs": [
+ "tools/addon/**"
+ ],
+ "excludeGlobs": []
+ }
+ ],
+ "pve-tool": [
+ {
+ "fileStatus": null,
+ "includeGlobs": [
+ "tools/pve/**"
+ ],
+ "excludeGlobs": []
+ }
+ ],
+ "vm": [
+ {
+ "fileStatus": null,
+ "includeGlobs": [
+ "vm/**"
+ ],
"excludeGlobs": []
}
]
-
-
-}
\ No newline at end of file
+}
diff --git a/.github/changelog-pr-config.json b/.github/changelog-pr-config.json
index 2c62aa3f0..e556703d0 100644
--- a/.github/changelog-pr-config.json
+++ b/.github/changelog-pr-config.json
@@ -1,97 +1,148 @@
[
{
    "title": "🆕 New Scripts",
- "labels": ["new script"]
+ "labels": [
+ "new script"
+ ]
},
{
    "title": "🚀 Updated Scripts",
- "labels": ["update script"],
+ "labels": [
+ "update script"
+ ],
"subCategories": [
{
        "title": "🐞 Bug Fixes",
- "labels": ["bugfix"],
- "notes" : []
+ "labels": [
+ "bugfix"
+ ],
+ "notes": []
},
{
        "title": "✨ New Features",
- "labels": ["feature"],
- "notes" : []
+ "labels": [
+ "feature"
+ ],
+ "notes": []
},
{
        "title": "💥 Breaking Changes",
- "labels": ["breaking change"],
- "notes" : []
+ "labels": [
+ "breaking change"
+ ],
+ "notes": []
+ },
+ {
+        "title": "🔧 Refactor",
+ "labels": [
+ "refactor"
+ ],
+ "notes": []
}
]
},
{
    "title": "🧰 Maintenance",
- "labels": ["maintenance"],
+ "labels": [
+ "maintenance"
+ ],
"subCategories": [
{
        "title": "🐞 Bug Fixes",
- "labels": ["bugfix"],
- "notes" : []
+ "labels": [
+ "bugfix"
+ ],
+ "notes": []
},
{
        "title": "✨ New Features",
- "labels": ["feature"],
- "notes" : []
+ "labels": [
+ "feature"
+ ],
+ "notes": []
},
{
        "title": "💥 Breaking Changes",
- "labels": ["breaking change"],
- "notes" : []
+ "labels": [
+ "breaking change"
+ ],
+ "notes": []
},
{
        "title": "📡 API",
- "labels": ["api"],
- "notes" : []
+ "labels": [
+ "api"
+ ],
+ "notes": []
},
{
        "title": "💾 Core",
- "labels": ["core"],
- "notes" : []
+ "labels": [
+ "core"
+ ],
+ "notes": []
},
{
        "title": "🐙 Github",
- "labels": ["github"],
- "notes" : []
+ "labels": [
+ "github"
+ ],
+ "notes": []
+ },
+ {
+        "title": "📚 Documentation",
+ "labels": [
+ "maintenance"
+ ],
+ "notes": []
+ },
+ {
+        "title": "🔧 Refactor",
+ "labels": [
+ "refactor"
+ ],
+ "notes": []
}
]
},
{
    "title": "🌐 Website",
- "labels": ["website"],
+ "labels": [
+ "website"
+ ],
"subCategories": [
{
        "title": "🐞 Bug Fixes",
- "labels": ["bugfix"],
- "notes" : []
+ "labels": [
+ "bugfix"
+ ],
+ "notes": []
},
{
        "title": "✨ New Features",
- "labels": ["feature"],
- "notes" : []
+ "labels": [
+ "feature"
+ ],
+ "notes": []
},
{
        "title": "💥 Breaking Changes",
- "labels": ["breaking change"],
- "notes" : []
+ "labels": [
+ "breaking change"
+ ],
+ "notes": []
},
{
        "title": "📝 Script Information",
- "labels": ["json"],
- "notes" : []
+ "labels": [
+ "json"
+ ],
+ "notes": []
}
]
},
{
    "title": "❔ Unlabelled",
"labels": []
- },
- {
-    "title": "💥 Breaking Changes",
- "labels": ["breaking change"]
}
]
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9adf87904..8519808a3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,12 +10,230 @@
> [!CAUTION]
Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit the project's popularity for potentially malicious purposes.
-> [!NOTE]
-All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
+## 2025-07-12
+## 2025-07-11
+
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - immich: hotfix #5921 [@vhsdream](https://github.com/vhsdream) ([#5938](https://github.com/community-scripts/ProxmoxVE/pull/5938))
+ - bookstack: add setup_composer in update [@MickLesk](https://github.com/MickLesk) ([#5935](https://github.com/community-scripts/ProxmoxVE/pull/5935))
+ - Quickfix: Immich: revert install sequence [@vhsdream](https://github.com/vhsdream) ([#5932](https://github.com/community-scripts/ProxmoxVE/pull/5932))
+
+ - #### ✨ New Features
+
+ - Refactor & Function Bump: Docker [@MickLesk](https://github.com/MickLesk) ([#5889](https://github.com/community-scripts/ProxmoxVE/pull/5889))
+
+ - #### 🔧 Refactor
+
+ - Immich: handle custom library dependency updates; other fixes [@vhsdream](https://github.com/vhsdream) ([#5896](https://github.com/community-scripts/ProxmoxVE/pull/5896))
+
+## 2025-07-10
+
+### 🚀 Updated Scripts
+
+ - Refactor: Habitica [@MickLesk](https://github.com/MickLesk) ([#5911](https://github.com/community-scripts/ProxmoxVE/pull/5911))
+
+ - #### 🐞 Bug Fixes
+
+ - core: fix breaking re-download of lxc containers [@MickLesk](https://github.com/MickLesk) ([#5906](https://github.com/community-scripts/ProxmoxVE/pull/5906))
+ - PLANKA: Fix paths to application directory [@tremor021](https://github.com/tremor021) ([#5900](https://github.com/community-scripts/ProxmoxVE/pull/5900))
+
+ - #### 🔧 Refactor
+
+ - Refactor: EMQX + Update-Function + Improved NodeJS Crawling [@MickLesk](https://github.com/MickLesk) ([#5907](https://github.com/community-scripts/ProxmoxVE/pull/5907))
+
+## 2025-07-09
+
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - Omada Update: add missing exit [@MickLesk](https://github.com/MickLesk) ([#5894](https://github.com/community-scripts/ProxmoxVE/pull/5894))
+ - FreshRSS: fix needed php modules [@MickLesk](https://github.com/MickLesk) ([#5886](https://github.com/community-scripts/ProxmoxVE/pull/5886))
+ - core: Fix VAAPI passthrough for unprivileged LXC containers via devX [@MickLesk](https://github.com/MickLesk) ([#5875](https://github.com/community-scripts/ProxmoxVE/pull/5875))
+ - tools.func: fix an bug while php libapache2-mod breaks [@MickLesk](https://github.com/MickLesk) ([#5857](https://github.com/community-scripts/ProxmoxVE/pull/5857))
+ - BabyBuddy: fix path issues for update [@MickLesk](https://github.com/MickLesk) ([#5856](https://github.com/community-scripts/ProxmoxVE/pull/5856))
+
+ - #### ✨ New Features
+
+ - tools.func: strip leading folders for prebuild assets [@MickLesk](https://github.com/MickLesk) ([#5865](https://github.com/community-scripts/ProxmoxVE/pull/5865))
+
+ - #### 💥 Breaking Changes
+
+ - Refactor: Stirling-PDF [@MickLesk](https://github.com/MickLesk) ([#5872](https://github.com/community-scripts/ProxmoxVE/pull/5872))
+
+ - #### 🔧 Refactor
+
+ - Refactor: EMQX [@tremor021](https://github.com/tremor021) ([#5840](https://github.com/community-scripts/ProxmoxVE/pull/5840))
+ - Refactor: Excalidraw [@tremor021](https://github.com/tremor021) ([#5841](https://github.com/community-scripts/ProxmoxVE/pull/5841))
+ - Refactor: Firefly [@tremor021](https://github.com/tremor021) ([#5844](https://github.com/community-scripts/ProxmoxVE/pull/5844))
+ - Refactor: gatus [@tremor021](https://github.com/tremor021) ([#5849](https://github.com/community-scripts/ProxmoxVE/pull/5849))
+ - Refactor: FreshRSS [@tremor021](https://github.com/tremor021) ([#5847](https://github.com/community-scripts/ProxmoxVE/pull/5847))
+ - Refactor: Fluid-Calendar [@tremor021](https://github.com/tremor021) ([#5846](https://github.com/community-scripts/ProxmoxVE/pull/5846))
+ - Refactor: Commafeed [@tremor021](https://github.com/tremor021) ([#5802](https://github.com/community-scripts/ProxmoxVE/pull/5802))
+ - Refactor: FlareSolverr [@tremor021](https://github.com/tremor021) ([#5845](https://github.com/community-scripts/ProxmoxVE/pull/5845))
+ - Refactor: Glance [@tremor021](https://github.com/tremor021) ([#5874](https://github.com/community-scripts/ProxmoxVE/pull/5874))
+ - Refactor: Gitea [@tremor021](https://github.com/tremor021) ([#5876](https://github.com/community-scripts/ProxmoxVE/pull/5876))
+ - Refactor: Ghost (use now MySQL) [@MickLesk](https://github.com/MickLesk) ([#5871](https://github.com/community-scripts/ProxmoxVE/pull/5871))
+
+### 🧰 Maintenance
+
+ - #### 🐙 Github
+
+ - Github: AutoLabler | ChangeLog (Refactor) [@MickLesk](https://github.com/MickLesk) ([#5868](https://github.com/community-scripts/ProxmoxVE/pull/5868))
+
+## 2025-07-08
+
+### 🚀 Updated Scripts
+
+ - Refactor: Emby [@tremor021](https://github.com/tremor021) ([#5839](https://github.com/community-scripts/ProxmoxVE/pull/5839))
+
+ - #### 🐞 Bug Fixes
+
+ - Ollama: fix update script [@lucacome](https://github.com/lucacome) ([#5819](https://github.com/community-scripts/ProxmoxVE/pull/5819))
+
+ - #### ✨ New Features
+
+ - tools.func: add ffmpeg + minor improvement [@MickLesk](https://github.com/MickLesk) ([#5834](https://github.com/community-scripts/ProxmoxVE/pull/5834))
+
+ - #### 🔧 Refactor
+
+ - Refactor: ErsatzTV [@MickLesk](https://github.com/MickLesk) ([#5835](https://github.com/community-scripts/ProxmoxVE/pull/5835))
+
+## 2025-07-07
+
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - Fix/stirling pdf script [@JcMinarro](https://github.com/JcMinarro) ([#5803](https://github.com/community-scripts/ProxmoxVE/pull/5803))
+ - gitea-mirror: update repo-url [@CrazyWolf13](https://github.com/CrazyWolf13) ([#5794](https://github.com/community-scripts/ProxmoxVE/pull/5794))
+ - Fix unbound var in pulse.sh [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#5807](https://github.com/community-scripts/ProxmoxVE/pull/5807))
+ - Bookstack: Fix PHP Issue & Bump to PHP 8.3 [@MickLesk](https://github.com/MickLesk) ([#5779](https://github.com/community-scripts/ProxmoxVE/pull/5779))
+
+ - #### ✨ New Features
+
+ - Refactor: Threadfin (+ updatable) [@MickLesk](https://github.com/MickLesk) ([#5783](https://github.com/community-scripts/ProxmoxVE/pull/5783))
+ - tools.func: better handling when unpacking tarfiles in prebuild mode [@MickLesk](https://github.com/MickLesk) ([#5781](https://github.com/community-scripts/ProxmoxVE/pull/5781))
+ - tools.func: add AVX check for MongoDB [@MickLesk](https://github.com/MickLesk) ([#5780](https://github.com/community-scripts/ProxmoxVE/pull/5780))
+
+ - #### 🔧 Refactor
+
+ - Refactor: Docmost [@tremor021](https://github.com/tremor021) ([#5806](https://github.com/community-scripts/ProxmoxVE/pull/5806))
+ - Refactor: Baby Buddy [@tremor021](https://github.com/tremor021) ([#5769](https://github.com/community-scripts/ProxmoxVE/pull/5769))
+ - Refactor: Changed the way we install BunkerWeb by leveraging the brand new install-bunkerweb.sh [@TheophileDiot](https://github.com/TheophileDiot) ([#5707](https://github.com/community-scripts/ProxmoxVE/pull/5707))
+
+### 🌐 Website
+
+ - #### 📝 Script Information
+
+ - PBS: add hint for advanced installs [@MickLesk](https://github.com/MickLesk) ([#5788](https://github.com/community-scripts/ProxmoxVE/pull/5788))
+ - EMQX: Add warning to website [@tremor021](https://github.com/tremor021) ([#5770](https://github.com/community-scripts/ProxmoxVE/pull/5770))
+
+## 2025-07-06
+
+### 🚀 Updated Scripts
+
+ - Refactor: Barcodebuddy [@tremor021](https://github.com/tremor021) ([#5735](https://github.com/community-scripts/ProxmoxVE/pull/5735))
+
+ - #### 🐞 Bug Fixes
+
+ - Fix update script for Mafl: ensure directory is removed recursively [@jonalbr](https://github.com/jonalbr) ([#5759](https://github.com/community-scripts/ProxmoxVE/pull/5759))
+ - BookStack: Typo fix [@tremor021](https://github.com/tremor021) ([#5746](https://github.com/community-scripts/ProxmoxVE/pull/5746))
+ - Resolves incorrect URL at end of Pocket ID script [@johnsturgeon](https://github.com/johnsturgeon) ([#5743](https://github.com/community-scripts/ProxmoxVE/pull/5743))
+
+ - #### ✨ New Features
+
+ - [Feature] Add option to expose Docker via TCP port (alpine docker) [@oformaniuk](https://github.com/oformaniuk) ([#5716](https://github.com/community-scripts/ProxmoxVE/pull/5716))
+
+ - #### 🔧 Refactor
+
+ - Refactor: Bitmagnet [@tremor021](https://github.com/tremor021) ([#5733](https://github.com/community-scripts/ProxmoxVE/pull/5733))
+ - Refactor: Baikal [@tremor021](https://github.com/tremor021) ([#5736](https://github.com/community-scripts/ProxmoxVE/pull/5736))
+
+## 2025-07-05
+
+### 🚀 Updated Scripts
+
+ - #### 🔧 Refactor
+
+ - Refactor: BookStack [@tremor021](https://github.com/tremor021) ([#5732](https://github.com/community-scripts/ProxmoxVE/pull/5732))
+ - Refactor: Authelia [@tremor021](https://github.com/tremor021) ([#5722](https://github.com/community-scripts/ProxmoxVE/pull/5722))
+ - Refactor: Dashy [@tremor021](https://github.com/tremor021) ([#5723](https://github.com/community-scripts/ProxmoxVE/pull/5723))
+ - Refactor: CryptPad [@tremor021](https://github.com/tremor021) ([#5724](https://github.com/community-scripts/ProxmoxVE/pull/5724))
+ - Refactor: ByteStash [@tremor021](https://github.com/tremor021) ([#5725](https://github.com/community-scripts/ProxmoxVE/pull/5725))
+ - Refactor: AgentDVR [@tremor021](https://github.com/tremor021) ([#5726](https://github.com/community-scripts/ProxmoxVE/pull/5726))
+
+## 2025-07-04
+
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - Refactor: Mafl [@tremor021](https://github.com/tremor021) ([#5702](https://github.com/community-scripts/ProxmoxVE/pull/5702))
+ - Outline: Fix sed command for v0.85.0 [@tremor021](https://github.com/tremor021) ([#5688](https://github.com/community-scripts/ProxmoxVE/pull/5688))
+ - Komodo: Update Script to use FerretDB / remove psql & sqlite options [@MickLesk](https://github.com/MickLesk) ([#5690](https://github.com/community-scripts/ProxmoxVE/pull/5690))
+ - ESPHome: Fix Linking issue to prevent version mismatch [@MickLesk](https://github.com/MickLesk) ([#5685](https://github.com/community-scripts/ProxmoxVE/pull/5685))
+ - Cloudflare-DDNS: fix unvisible read command at install [@MickLesk](https://github.com/MickLesk) ([#5682](https://github.com/community-scripts/ProxmoxVE/pull/5682))
+
+ - #### ✨ New Features
+
+ - Core layer refactor: centralized error traps and msg_* consistency [@MickLesk](https://github.com/MickLesk) ([#5705](https://github.com/community-scripts/ProxmoxVE/pull/5705))
+
+ - #### 💥 Breaking Changes
+
+ - Update Iptag [@DesertGamer](https://github.com/DesertGamer) ([#5677](https://github.com/community-scripts/ProxmoxVE/pull/5677))
+
+### 🌐 Website
+
+ - #### 📝 Script Information
+
+ - MySQL phpMyAdmin Access Information [@austinpilz](https://github.com/austinpilz) ([#5679](https://github.com/community-scripts/ProxmoxVE/pull/5679))
+
+## 2025-07-03
+
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - Zipline: Fix typo in uploads directory path [@tremor021](https://github.com/tremor021) ([#5662](https://github.com/community-scripts/ProxmoxVE/pull/5662))
+
+ - #### ✨ New Features
+
+ - Improve asset matching in fetch_and_deploy_gh_release for prebuild and singlefile modes [@MickLesk](https://github.com/MickLesk) ([#5669](https://github.com/community-scripts/ProxmoxVE/pull/5669))
+
+ - #### 🔧 Refactor
+
+ - Refactor: Trilium [@MickLesk](https://github.com/MickLesk) ([#5665](https://github.com/community-scripts/ProxmoxVE/pull/5665))
+
+### 🌐 Website
+
+ - #### 📝 Script Information
+
+ - Bump Icons to selfhst repo | switch svg to webp [@MickLesk](https://github.com/MickLesk) ([#5659](https://github.com/community-scripts/ProxmoxVE/pull/5659))
## 2025-07-02
+### 🚀 Updated Scripts
+
+ - #### 🐞 Bug Fixes
+
+ - Changedetection: Base64 encode the launch options [@tremor021](https://github.com/tremor021) ([#5640](https://github.com/community-scripts/ProxmoxVE/pull/5640))
+
+ - #### 🔧 Refactor
+
+ - Refactor & Bump to Node24: Zigbee2MQTT [@MickLesk](https://github.com/MickLesk) ([#5638](https://github.com/community-scripts/ProxmoxVE/pull/5638))
+
+### 🌐 Website
+
+ - #### 💥 Breaking Changes
+
+ - Remove: Pingvin-Share [@MickLesk](https://github.com/MickLesk) ([#5635](https://github.com/community-scripts/ProxmoxVE/pull/5635))
+ - Remove: Readarr [@MickLesk](https://github.com/MickLesk) ([#5636](https://github.com/community-scripts/ProxmoxVE/pull/5636))
+
## 2025-07-01
### π New Scripts
diff --git a/README.md b/README.md
index 01be61f47..6f4d839c0 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,9 @@
Proxmox arm64 Install Scripts
a port of the Promox VE Helper-Scripts to arm64!
diff --git a/ct/alpine-komodo.sh b/ct/alpine-komodo.sh
index 738359f25..7de718831 100644
--- a/ct/alpine-komodo.sh
+++ b/ct/alpine-komodo.sh
@@ -32,6 +32,15 @@ function update_script() {
exit 1
fi
COMPOSE_BASENAME=$(basename "$COMPOSE_FILE")
+
+ if [[ "$COMPOSE_BASENAME" == "sqlite.compose.yaml" || "$COMPOSE_BASENAME" == "postgres.compose.yaml" ]]; then
+    msg_error "❌ Detected outdated Komodo setup using SQLite or PostgreSQL (FerretDB v1)."
+ echo -e "${YW}This configuration is no longer supported since Komodo v1.18.0.${CL}"
+ echo -e "${YW}Please follow the migration guide:${CL}"
+ echo -e "${BGN}https://github.com/community-scripts/ProxmoxVE/discussions/5689${CL}\n"
+ exit 1
+ fi
+
BACKUP_FILE="/opt/komodo/${COMPOSE_BASENAME}.bak_$(date +%Y%m%d_%H%M%S)"
cp "$COMPOSE_FILE" "$BACKUP_FILE" || {
msg_error "Failed to create backup of ${COMPOSE_BASENAME}!"
diff --git a/ct/authelia.sh b/ct/authelia.sh
index d196b75aa..226e640b7 100644
--- a/ct/authelia.sh
+++ b/ct/authelia.sh
@@ -22,30 +22,30 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -d "/etc/authelia/" ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
- if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
- msg_info "Updating $APP to ${RELEASE}"
- $STD apt-get update
- $STD apt-get -y upgrade
- curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb" -o $(basename "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb")
- $STD dpkg -i "authelia_${RELEASE}_amd64.deb"
- msg_info "Cleaning Up"
- rm -f "authelia_${RELEASE}_amd64.deb"
- $STD apt-get -y autoremove
- $STD apt-get -y autoclean
- msg_ok "Cleanup Completed"
- msg_ok "Updated $APP to ${RELEASE}"
- else
- msg_ok "No update required. ${APP} is already at ${RELEASE}"
- fi
+ header_info
+ check_container_storage
+ check_container_resources
+ if [[ ! -d "/etc/authelia/" ]]; then
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+ if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
+ $STD apt-get update
+ $STD apt-get -y upgrade
+
+ fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"
+
+ msg_info "Cleaning Up"
+ $STD apt-get -y autoremove
+ $STD apt-get -y autoclean
+ msg_ok "Cleanup Completed"
+
+ msg_ok "Updated $APP to ${RELEASE}"
+ else
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
+ fi
+ exit
}
start
diff --git a/ct/babybuddy.sh b/ct/babybuddy.sh
index 2cb37d6d3..7c0652a97 100644
--- a/ct/babybuddy.sh
+++ b/ct/babybuddy.sh
@@ -29,7 +29,7 @@ function update_script() {
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/babybuddy_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.babybuddy 2>/dev/null)" ]] || [[ ! -f ~/.babybuddy ]]; then
setup_uv
msg_info "Stopping Services"
@@ -38,21 +38,18 @@ function update_script() {
msg_ok "Services Stopped"
msg_info "Cleaning old files"
- cp babybuddy/settings/production.py /tmp/production.py.bak
+ cp /opt/babybuddy/babybuddy/settings/production.py /tmp/production.py.bak
find . -mindepth 1 -maxdepth 1 ! -name '.venv' -exec rm -rf {} +
msg_ok "Cleaned old files"
+ fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"
+
msg_info "Updating ${APP} to v${RELEASE}"
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- cd /opt/babybuddy
- tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
- mv /tmp/production.py.bak babybuddy/settings/production.py
cd /opt/babybuddy
+ mv /tmp/production.py.bak /opt/babybuddy/babybuddy/settings/production.py
source .venv/bin/activate
$STD uv pip install -r requirements.txt
$STD python manage.py migrate
- echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Fixing permissions"
@@ -66,9 +63,6 @@ function update_script() {
systemctl start nginx
msg_ok "Services Started"
- msg_info "Cleaning up"
- rm -f "$temp_file"
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
diff --git a/ct/baikal.sh b/ct/baikal.sh
index 88dcf81a7..e42234fad 100644
--- a/ct/baikal.sh
+++ b/ct/baikal.sh
@@ -23,34 +23,37 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -d /opt/baikal ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.baikal 2>/dev/null)" ]] || [[ ! -f ~/.baikal ]]; then
msg_info "Stopping Service"
systemctl stop apache2
msg_ok "Stopped Service"
- msg_info "Updating ${APP} to v${RELEASE}"
- cd /opt
- curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o $(basename "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip")
+ msg_info "Backing up data"
mv /opt/baikal /opt/baikal-backup
- $STD unzip -o "baikal-${RELEASE}.zip"
+ msg_ok "Backed up data"
+
+ fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"
+
+ msg_info "Configuring Baikal"
cp -r /opt/baikal-backup/config/baikal.yaml /opt/baikal/config/
cp -r /opt/baikal-backup/Specific/ /opt/baikal/
chown -R www-data:www-data /opt/baikal/
chmod -R 755 /opt/baikal/
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated $APP to v${RELEASE}"
+ cd /opt/baikal
+ $STD composer install
+ msg_ok "Configured Baikal"
msg_info "Starting Service"
systemctl start apache2
msg_ok "Started Service"
msg_info "Cleaning up"
- rm -rf "/opt/baikal-${RELEASE}.zip"
rm -rf /opt/baikal-backup
msg_ok "Cleaned"
msg_ok "Updated Successfully"
diff --git a/ct/barcode-buddy.sh b/ct/barcode-buddy.sh
index 51b2761ee..2b6105040 100644
--- a/ct/barcode-buddy.sh
+++ b/ct/barcode-buddy.sh
@@ -23,27 +23,28 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -d /opt/barcodebuddy ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.barcodebuddy 2>/dev/null)" ]] || [[ ! -f ~/.barcodebuddy ]]; then
msg_info "Stopping Service"
systemctl stop apache2
systemctl stop barcodebuddy
msg_ok "Stopped Service"
- msg_info "Updating ${APP} to v${RELEASE}"
- cd /opt
+ msg_info "Backing up data"
mv /opt/barcodebuddy/ /opt/barcodebuddy-backup
- curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o $(basename "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip")
- $STD unzip "v${RELEASE}.zip"
- mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
+ msg_ok "Backed up data"
+
+ fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
+
+ msg_info "Configuring ${APP}"
cp -r /opt/barcodebuddy-backup/data/. /opt/barcodebuddy/data
chown -R www-data:www-data /opt/barcodebuddy/data
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated $APP to v${RELEASE}"
+ msg_ok "Configured ${APP}"
msg_info "Starting Service"
systemctl start apache2
@@ -51,7 +52,6 @@ function update_script() {
msg_ok "Started Service"
msg_info "Cleaning up"
- rm -r "/opt/v${RELEASE}.zip"
rm -r /opt/barcodebuddy-backup
msg_ok "Cleaned"
msg_ok "Updated Successfully"
diff --git a/ct/bitmagnet.sh b/ct/bitmagnet.sh
index 8a0304bf1..19fc72eaa 100644
--- a/ct/bitmagnet.sh
+++ b/ct/bitmagnet.sh
@@ -28,12 +28,12 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.bitmagnet 2>/dev/null)" ]] || [[ ! -f ~/.bitmagnet ]]; then
msg_info "Stopping Service"
systemctl stop bitmagnet-web
msg_ok "Stopped Service"
- msg_info "Backing up database"
+ msg_info "Backing up data"
rm -f /tmp/backup.sql
$STD sudo -u postgres pg_dump \
--column-inserts \
@@ -56,31 +56,26 @@ function update_script() {
bitmagnet \
>/tmp/backup.sql
mv /tmp/backup.sql /opt/
- msg_ok "Database backed up"
-
- msg_info "Updating ${APP} to v${RELEASE}"
[ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
[ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
- rm -rf /opt/bitmagnet/*
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
+ msg_ok "Data backed up"
+
+ rm -rf /opt/bitmagnet
+ fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
+
+ msg_info "Updating ${APP} to v${RELEASE}"
cd /opt/bitmagnet
VREL=v$RELEASE
$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
chmod +x bitmagnet
[ -f "/opt/.env" ] && cp "/opt/.env" /opt/bitmagnet/
[ -f "/opt/config.yml" ] && cp "/opt/config.yml" /opt/bitmagnet/
- echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting Service"
systemctl start bitmagnet-web
msg_ok "Started Service"
- msg_info "Cleaning up"
- rm -f "$temp_file"
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
diff --git a/ct/bookstack.sh b/ct/bookstack.sh
index f408b8b39..072396ea3 100644
--- a/ct/bookstack.sh
+++ b/ct/bookstack.sh
@@ -23,25 +23,33 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -d /opt/bookstack ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.bookstack 2>/dev/null)" ]] || [[ ! -f ~/.bookstack ]]; then
msg_info "Stopping Apache2"
systemctl stop apache2
msg_ok "Services Stopped"
- msg_info "Updating ${APP} to v${RELEASE}"
+ msg_info "Backing up data"
mv /opt/bookstack /opt/bookstack-backup
- curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "/opt/BookStack-${RELEASE}.zip"
- $STD unzip "/opt/BookStack-${RELEASE}.zip" -d /opt
- mv "/opt/BookStack-${RELEASE}" /opt/bookstack
+ msg_ok "Backup finished"
+
+ fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
+ PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
+ setup_composer
+
+ msg_info "Restoring backup"
cp /opt/bookstack-backup/.env /opt/bookstack/.env
[[ -d /opt/bookstack-backup/public/uploads ]] && cp -a /opt/bookstack-backup/public/uploads/. /opt/bookstack/public/uploads/
[[ -d /opt/bookstack-backup/storage/uploads ]] && cp -a /opt/bookstack-backup/storage/uploads/. /opt/bookstack/storage/uploads/
[[ -d /opt/bookstack-backup/themes ]] && cp -a /opt/bookstack-backup/themes/. /opt/bookstack/themes/
+ msg_ok "Backup restored"
+
+ msg_info "Configuring BookStack"
cd /opt/bookstack
export COMPOSER_ALLOW_SUPERUSER=1
$STD composer install --no-dev
@@ -51,7 +59,7 @@ function update_script() {
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
chmod -R 640 /opt/bookstack/.env
echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated ${APP} to v${RELEASE}"
+ msg_ok "Configured BookStack"
msg_info "Starting Apache2"
systemctl start apache2
@@ -59,7 +67,6 @@ function update_script() {
msg_info "Cleaning Up"
rm -rf /opt/bookstack-backup
- rm -rf "/opt/BookStack-${RELEASE}.zip"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
diff --git a/ct/bunkerweb.sh b/ct/bunkerweb.sh
index c69f3d749..a6affaad8 100644
--- a/ct/bunkerweb.sh
+++ b/ct/bunkerweb.sh
@@ -37,8 +37,8 @@ Pin: version ${RELEASE}
Pin-Priority: 1001
EOF
apt-get update
- apt-get install -y nginx=1.26.3*
- apt-get install -y bunkerweb=${RELEASE}
+ apt-mark unhold bunkerweb nginx
+ apt-get install -y --allow-downgrades bunkerweb=${RELEASE}
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to ${RELEASE}"
diff --git a/ct/bytestash.sh b/ct/bytestash.sh
index abf2dc9cf..ece9abc4a 100644
--- a/ct/bytestash.sh
+++ b/ct/bytestash.sh
@@ -20,47 +20,47 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -d /opt/bytestash ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
- msg_info "Stopping Services"
- systemctl stop bytestash-backend
- systemctl stop bytestash-frontend
- msg_ok "Services Stopped"
+ header_info
+ check_container_storage
+ check_container_resources
- msg_info "Updating ${APP} to ${RELEASE}"
- temp_file=$(mktemp)
-curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar zxf $temp_file
- rm -rf /opt/bytestash/server/node_modules
- rm -rf /opt/bytestash/client/node_modules
- cp -rf ByteStash-${RELEASE}/* /opt/bytestash
- cd /opt/bytestash/server
- $STD npm install
- cd /opt/bytestash/client
- $STD npm install
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated ${APP}"
-
- msg_info "Starting Services"
- systemctl start bytestash-backend
- systemctl start bytestash-frontend
- msg_ok "Started Services"
-
- msg_info "Cleaning Up"
- rm -f $temp_file
- msg_ok "Cleaned"
- msg_ok "Updated Successfully"
- else
- msg_ok "No update required. ${APP} is already at ${RELEASE}"
- fi
+ if [[ ! -d /opt/bytestash ]]; then
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+ if [[ "${RELEASE}" != "$(cat ~/.bytestash 2>/dev/null)" ]] || [[ ! -f ~/.bytestash ]]; then
+
+ read -rp "${TAB3}Did you make a backup via application WebUI? (y/n): " backuped
+ if [[ "$backuped" =~ ^[Yy]$ ]]; then
+ msg_info "Stopping Services"
+ systemctl stop bytestash-backend
+ systemctl stop bytestash-frontend
+ msg_ok "Services Stopped"
+
+ rm -rf /opt/bytestash
+ fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"
+
+ msg_info "Configuring ByteStash"
+ cd /opt/bytestash/server
+ $STD npm install
+ cd /opt/bytestash/client
+ $STD npm install
+ msg_ok "Updated ${APP}"
+
+ msg_info "Starting Services"
+ systemctl start bytestash-backend
+ systemctl start bytestash-frontend
+ msg_ok "Started Services"
+ else
+ msg_error "PLEASE MAKE A BACKUP FIRST!"
+ exit
+ fi
+ msg_ok "Updated Successfully"
+ else
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
+ fi
+ exit
}
start
@@ -70,4 +70,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
\ No newline at end of file
+echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
diff --git a/ct/commafeed.sh b/ct/commafeed.sh
index cacda60a5..dba851c06 100644
--- a/ct/commafeed.sh
+++ b/ct/commafeed.sh
@@ -23,12 +23,13 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -d /opt/commafeed ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Athou/commafeed/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.commafeed 2>/dev/null)" ]] || [[ ! -f ~/.commafeed ]]; then
msg_info "Stopping ${APP}"
systemctl stop commafeed
msg_ok "Stopped ${APP}"
@@ -39,13 +40,15 @@ function update_script() {
$STD apt-get install -y rsync
msg_ok "Installed Dependencies"
fi
-
+ if [ -d /opt/commafeed/data ] && [ "$(ls -A /opt/commafeed/data)" ]; then
+ mv /opt/commafeed/data /opt/data.bak
+ fi
+ fetch_and_deploy_gh_release "commafeed" "Athou/commafeed" "prebuild" "latest" "/opt/commafeed" "commafeed-*-h2-jvm.zip"
+
msg_info "Updating ${APP} to ${RELEASE}"
- curl -fsSL "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip" -o $(basename "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip")
- $STD unzip commafeed-"${RELEASE}"-h2-jvm.zip
- rsync -a --exclude 'data/' commafeed-"${RELEASE}"-h2/ /opt/commafeed/
- rm -rf commafeed-"${RELEASE}"-h2 commafeed-"${RELEASE}"-h2-jvm.zip
- echo "${RELEASE}" >/opt/${APP}_version.txt
+    if [ -d /opt/data.bak ] && [ "$(ls -A /opt/data.bak)" ]; then
+      mv /opt/data.bak /opt/commafeed/data
+ fi
msg_ok "Updated ${APP} to ${RELEASE}"
msg_info "Starting ${APP}"
diff --git a/ct/cryptpad.sh b/ct/cryptpad.sh
index c8e3cded3..d258efe66 100644
--- a/ct/cryptpad.sh
+++ b/ct/cryptpad.sh
@@ -20,48 +20,46 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
+ header_info
+ check_container_storage
+ check_container_resources
- if [[ ! -d "/opt/cryptpad" ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
- msg_info "Stopping $APP"
- systemctl stop cryptpad
- msg_ok "Stopped $APP"
-
- msg_info "Updating $APP to ${RELEASE}"
- temp_dir=$(mktemp -d)
- cp -f /opt/cryptpad/config/config.js /opt/config.js
- curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_dir/cryptpad-${RELEASE}.tar.gz"
- cd "$temp_dir"
- tar zxf "cryptpad-${RELEASE}.tar.gz"
- cp -rf "cryptpad-${RELEASE}"/* /opt/cryptpad
- cd /opt/cryptpad
- $STD npm ci
- $STD npm run install:components
- $STD npm run build
- cp -f /opt/config.js /opt/cryptpad/config/config.js
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated $APP to ${RELEASE}"
-
- msg_info "Cleaning Up"
- rm -rf $temp_dir
- msg_ok "Cleanup Completed"
-
- msg_info "Starting $APP"
- systemctl start cryptpad
- msg_ok "Started $APP"
-
- msg_ok "Update Successful"
- else
- msg_ok "No update required. ${APP} is already at ${RELEASE}"
- fi
+ if [[ ! -d "/opt/cryptpad" ]]; then
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+ if [[ "${RELEASE}" != "$(cat ~/.cryptpad 2>/dev/null)" ]] || [[ ! -f ~/.cryptpad ]]; then
+ msg_info "Stopping $APP"
+ systemctl stop cryptpad
+ msg_ok "Stopped $APP"
+
+ msg_info "Backing up configuration"
+ [ -f /opt/cryptpad/config/config.js ] && mv /opt/cryptpad/config/config.js /opt/
+ msg_ok "Backed up configuration"
+
+ fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"
+
+ msg_info "Updating $APP to ${RELEASE}"
+ cd /opt/cryptpad
+ $STD npm ci
+ $STD npm run install:components
+ $STD npm run build
+ msg_ok "Updated $APP to ${RELEASE}"
+
+ msg_info "Restoring configuration"
+    [ -f /opt/config.js ] && mv /opt/config.js /opt/cryptpad/config/
+ msg_ok "Configuration restored"
+
+ msg_info "Starting $APP"
+ systemctl start cryptpad
+ msg_ok "Started $APP"
+
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
+ fi
+ exit
}
start
diff --git a/ct/dashy.sh b/ct/dashy.sh
index 6ca0df037..d652055ce 100644
--- a/ct/dashy.sh
+++ b/ct/dashy.sh
@@ -29,7 +29,7 @@ function update_script() {
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.dashy 2>/dev/null)" ]] || [[ ! -f ~/.dashy ]]; then
msg_info "Stopping ${APP}"
systemctl stop dashy
msg_ok "Stopped ${APP}"
@@ -43,14 +43,13 @@ function update_script() {
fi
msg_ok "Backed up conf.yml"
- msg_info "Updating ${APP} to ${RELEASE}"
rm -rf /opt/dashy
- mkdir -p /opt/dashy
- curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
+ fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"
+
+ msg_info "Updating ${APP} to ${RELEASE}"
cd /opt/dashy
npm install
npm run build
- echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to ${RELEASE}"
msg_info "Restoring conf.yml"
@@ -65,6 +64,7 @@ function update_script() {
msg_info "Starting Dashy"
systemctl start dashy
msg_ok "Started Dashy"
+
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
diff --git a/ct/docker.sh b/ct/docker.sh
index 169f379ce..553c4cef7 100644
--- a/ct/docker.sh
+++ b/ct/docker.sh
@@ -20,18 +20,68 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -d /var ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- msg_info "Updating ${APP} LXC"
- $STD apt-get update
- $STD apt-get -y upgrade
- msg_ok "Updated ${APP} LXC"
- exit
+ header_info
+ check_container_storage
+ check_container_resources
+
+ get_latest_release() {
+ curl -fsSL https://api.github.com/repos/"$1"/releases/latest | grep '"tag_name":' | cut -d'"' -f4
+ }
+
+ msg_info "Updating base system"
+ $STD apt-get update
+ $STD apt-get -y upgrade
+ msg_ok "Base system updated"
+
+ msg_info "Updating Docker Engine"
+ $STD apt-get install --only-upgrade -y docker-ce docker-ce-cli containerd.io
+ msg_ok "Docker Engine updated"
+
+ if [[ -f /usr/local/lib/docker/cli-plugins/docker-compose ]]; then
+ COMPOSE_BIN="/usr/local/lib/docker/cli-plugins/docker-compose"
+ COMPOSE_NEW_VERSION=$(get_latest_release "docker/compose")
+ msg_info "Updating Docker Compose to $COMPOSE_NEW_VERSION"
+ curl -fsSL "https://github.com/docker/compose/releases/download/${COMPOSE_NEW_VERSION}/docker-compose-$(uname -s)-$(uname -m)" \
+ -o "$COMPOSE_BIN"
+ chmod +x "$COMPOSE_BIN"
+ msg_ok "Docker Compose updated"
+ fi
+
+ if docker ps -a --format '{{.Names}}' | grep -q '^portainer$'; then
+ msg_info "Updating Portainer"
+ $STD docker pull portainer/portainer-ce:latest
+ $STD docker stop portainer && docker rm portainer
+ $STD docker volume create portainer_data >/dev/null 2>&1
+ $STD docker run -d \
+ -p 8000:8000 \
+ -p 9443:9443 \
+ --name=portainer \
+ --restart=always \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v portainer_data:/data \
+ portainer/portainer-ce:latest
+ msg_ok "Updated Portainer"
+ fi
+
+ if docker ps -a --format '{{.Names}}' | grep -q '^portainer_agent$'; then
+ msg_info "Updating Portainer Agent"
+ $STD docker pull portainer/agent:latest
+ $STD docker stop portainer_agent && docker rm portainer_agent
+ $STD docker run -d \
+ -p 9001:9001 \
+ --name=portainer_agent \
+ --restart=always \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /var/lib/docker/volumes:/var/lib/docker/volumes \
+ portainer/agent
+ msg_ok "Updated Portainer Agent"
+ fi
+
+ msg_info "Cleaning up"
+ $STD apt-get -y autoremove
+ $STD apt-get -y autoclean
+ msg_ok "Cleanup complete"
+ exit
}
start
diff --git a/ct/docmost.sh b/ct/docmost.sh
index 735750c37..8d071c022 100644
--- a/ct/docmost.sh
+++ b/ct/docmost.sh
@@ -27,48 +27,35 @@ function update_script() {
exit
fi
if ! command -v node >/dev/null || [[ "$(/usr/bin/env node -v | grep -oP '^v\K[0-9]+')" != "22" ]]; then
- msg_info "Installing Node.js 22"
- $STD apt-get purge -y nodejs
- rm -f /etc/apt/sources.list.d/nodesource.list
- rm -f /etc/apt/keyrings/nodesource.gpg
- mkdir -p /etc/apt/keyrings
- curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
- echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_22.x nodistro main" >/etc/apt/sources.list.d/nodesource.list
- $STD apt-get update
- $STD apt-get install -y nodejs
- $STD npm install -g pnpm@10.4.0
- msg_ok "Node.js 22 installed"
+ NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
fi
export NODE_OPTIONS="--max_old_space_size=4096"
RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.docmost 2>/dev/null)" ]] || [[ ! -f ~/.docmost ]]; then
msg_info "Stopping ${APP}"
systemctl stop docmost
msg_ok "${APP} Stopped"
- msg_info "Updating ${APP} to v${RELEASE}"
+ msg_info "Backing up data"
cp /opt/docmost/.env /opt/
cp -r /opt/docmost/data /opt/
rm -rf /opt/docmost
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar -xzf "$temp_file"
- mv docmost-${RELEASE} /opt/docmost
+ msg_ok "Data backed up"
+
+ fetch_and_deploy_gh_release "docmost" "docmost/docmost"
+
+ msg_info "Updating ${APP} to v${RELEASE}"
cd /opt/docmost
mv /opt/.env /opt/docmost/.env
mv /opt/data /opt/docmost/data
$STD pnpm install --force
$STD pnpm build
- echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP}"
msg_info "Starting ${APP}"
systemctl start docmost
msg_ok "Started ${APP}"
- msg_info "Cleaning Up"
- rm -f ${temp_file}
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
diff --git a/ct/emby.sh b/ct/emby.sh
index 24a57c257..3fb0cd9f1 100644
--- a/ct/emby.sh
+++ b/ct/emby.sh
@@ -23,26 +23,26 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -d /opt/emby-server ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
- LATEST=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
- msg_info "Stopping ${APP}"
- systemctl stop emby-server
- msg_ok "Stopped ${APP}"
+ RELEASE=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
+ if [[ "${RELEASE}" != "$(cat ~/.emby 2>/dev/null)" ]] || [[ ! -f ~/.emby ]]; then
+ msg_info "Stopping ${APP}"
+ systemctl stop emby-server
+ msg_ok "Stopped ${APP}"
- msg_info "Updating ${APP}"
- $STD curl -fsSL "https://github.com/MediaBrowser/Emby.Releases/releases/download/${LATEST}/emby-server-deb_${LATEST}_amd64.deb" -o "emby-server-deb_${LATEST}_amd64.deb"
- $STD dpkg -i "emby-server-deb_${LATEST}_amd64.deb"
- rm "emby-server-deb_${LATEST}_amd64.deb"
- msg_ok "Updated ${APP}"
+ fetch_and_deploy_gh_release "emby" "MediaBrowser/Emby.Releases" "binary"
- msg_info "Starting ${APP}"
- systemctl start emby-server
- msg_ok "Started ${APP}"
- msg_ok "Updated Successfully"
- exit
+ msg_info "Starting ${APP}"
+ systemctl start emby-server
+ msg_ok "Started ${APP}"
+
+ msg_ok "Updated Successfully"
+ exit
+ fi
}
start
diff --git a/ct/emqx.sh b/ct/emqx.sh
index b37534308..dfaaf5ee0 100644
--- a/ct/emqx.sh
+++ b/ct/emqx.sh
@@ -20,18 +20,39 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -d /var ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- msg_info "Updating $APP LXC"
- $STD apt-get update
- $STD apt-get -y upgrade
- msg_ok "Updated $APP LXC"
- exit
+ header_info
+ check_container_storage
+ check_container_resources
+
+ RELEASE=$(curl -fsSL https://www.emqx.com/en/downloads/enterprise | grep -oP '/en/downloads/enterprise/v\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n1)
+ if [[ "$RELEASE" != "$(cat ~/.emqx 2>/dev/null)" ]] || [[ ! -f ~/.emqx ]]; then
+ msg_info "Stopping EMQX"
+ systemctl stop emqx
+ msg_ok "Stopped EMQX"
+
+ msg_info "Downloading EMQX v${RELEASE}"
+ DEB_FILE="/tmp/emqx-enterprise-${RELEASE}-debian12-amd64.deb"
+ curl -fsSL -o "$DEB_FILE" "https://www.emqx.com/en/downloads/enterprise/v${RELEASE}/emqx-enterprise-${RELEASE}-debian12-amd64.deb"
+ msg_ok "Downloaded EMQX"
+
+ msg_info "Installing EMQX"
+ $STD apt-get install -y "$DEB_FILE"
+ msg_ok "Installed EMQX v${RELEASE}"
+
+ msg_info "Starting EMQX"
+ systemctl start emqx
+ echo "$RELEASE" >~/.emqx
+ msg_ok "Started EMQX"
+
+ msg_info "Cleaning Up"
+ rm -f "$DEB_FILE"
+ msg_ok "Cleanup Completed"
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. EMQX is already at v${RELEASE}"
+ fi
+
+ exit
}
start
diff --git a/ct/ersatztv.sh b/ct/ersatztv.sh
index 242ba805a..47ecfa7dc 100644
--- a/ct/ersatztv.sh
+++ b/ct/ersatztv.sh
@@ -7,7 +7,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/asylumexp/Proxmox/main/mis
APP="ErsatzTV"
var_tags="${var_tags:-iptv}"
-var_cpu="${var_cpu:-1}"
+var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-5}"
var_os="${var_os:-debian}"
@@ -27,31 +27,18 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/ErsatzTV/ErsatzTV/releases | grep -oP '"tag_name": "\K[^"]+' | head -n 1)
- if [[ ! -f /opt/${APP}_version.txt && $(echo "x.x.x" >/opt/${APP}_version.txt) || "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.ersatztv 2>/dev/null)" ]] || [[ ! -f ~/.ersatztv ]]; then
msg_info "Stopping ErsatzTV"
systemctl stop ersatzTV
msg_ok "Stopped ErsatzTV"
- msg_info "Updating ErsatzTV"
- cp -R /opt/ErsatzTV/ ErsatzTV-backup
- rm ErsatzTV-backup/ErsatzTV
- rm -rf /opt/ErsatzTV
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/ErsatzTV/ErsatzTV/releases/download/${RELEASE}/ErsatzTV-${RELEASE}-linux-x64.tar.gz" -o "$temp_file"
- tar -xzf "$temp_file"
- mv ErsatzTV-${RELEASE}-linux-x64 /opt/ErsatzTV
- cp -R ErsatzTV-backup/* /opt/ErsatzTV/
- rm -rf ErsatzTV-backup
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated ErsatzTV"
+ FFMPEG_VERSION="latest" FFMPEG_TYPE="medium" setup_ffmpeg
+ fetch_and_deploy_gh_release "ersatztv" "ErsatzTV/ErsatzTV" "prebuild" "latest" "/opt/ErsatzTV" "*linux-x64.tar.gz"
msg_info "Starting ErsatzTV"
systemctl start ersatzTV
msg_ok "Started ErsatzTV"
- msg_info "Cleaning Up"
- rm -f ${temp_file}
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
diff --git a/ct/esphome.sh b/ct/esphome.sh
index 8383612fa..f0f6551f2 100644
--- a/ct/esphome.sh
+++ b/ct/esphome.sh
@@ -73,6 +73,11 @@ EOF
msg_ok "Updated systemd service"
fi
+ msg_info "Linking esphome to /usr/local/bin"
+ rm -f /usr/local/bin/esphome
+ ln -s /opt/esphome/.venv/bin/esphome /usr/local/bin/esphome
+ msg_ok "Linked esphome binary"
+
msg_info "Starting ${APP}"
systemctl start esphomeDashboard
msg_ok "Started ${APP}"
diff --git a/ct/excalidraw.sh b/ct/excalidraw.sh
index bc8554043..073fdf82e 100644
--- a/ct/excalidraw.sh
+++ b/ct/excalidraw.sh
@@ -28,20 +28,16 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
-
RELEASE=$(curl -fsSL https://api.github.com/repos/excalidraw/excalidraw/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ "${RELEASE}" != "$(cat /opt/excalidraw_version.txt)" ]] || [[ ! -f /opt/excalidraw_version.txt ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.excalidraw 2>/dev/null)" ]] || [[ ! -f ~/.excalidraw ]]; then
msg_info "Stopping $APP"
systemctl stop excalidraw
msg_ok "Stopped $APP"
- msg_info "Updating $APP to v${RELEASE}"
- cd /tmp
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/excalidraw/excalidraw/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar xzf $temp_file
rm -rf /opt/excalidraw
- mv excalidraw-${RELEASE} /opt/excalidraw
+ fetch_and_deploy_gh_release "excalidraw" "excalidraw/excalidraw"
+
+ msg_info "Updating $APP to v${RELEASE}"
cd /opt/excalidraw
$STD yarn
msg_ok "Updated $APP to v${RELEASE}"
@@ -50,11 +46,6 @@ function update_script() {
systemctl start excalidraw
msg_ok "Started $APP"
- msg_info "Cleaning Up"
- rm -rf $temp_file
- msg_ok "Cleanup Completed"
-
- echo "${RELEASE}" >/opt/excalidraw_version.txt
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
diff --git a/ct/firefly.sh b/ct/firefly.sh
index c606751ba..1bb322718 100644
--- a/ct/firefly.sh
+++ b/ct/firefly.sh
@@ -29,17 +29,20 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/firefly-iii/firefly-iii/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.firefly 2>/dev/null)" ]] || [[ ! -f ~/.firefly ]]; then
msg_info "Stopping Apache2"
systemctl stop apache2
msg_ok "Stopped Apache2"
- msg_info "Updating ${APP} to v${RELEASE}"
+ msg_info "Backing up data"
cp /opt/firefly/.env /opt/.env
cp -r /opt/firefly/storage /opt/storage
- cd /opt
- curl -fsSL "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz" -o $(basename "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz")
- tar -xzf FireflyIII-v${RELEASE}.tar.gz -C /opt/firefly --exclude='storage'
+ msg_ok "Backed up data"
+
+ fetch_and_deploy_gh_release "firefly" "firefly-iii/firefly-iii" "prebuild" "latest" "/opt/firefly" "FireflyIII-*.zip"
+
+ msg_info "Updating ${APP} to v${RELEASE}"
+ rm -rf /opt/firefly/storage
cp /opt/.env /opt/firefly/.env
cp -r /opt/storage /opt/firefly/storage
cd /opt/firefly
@@ -50,16 +53,12 @@ function update_script() {
$STD php artisan view:clear
$STD php artisan firefly-iii:upgrade-database
$STD php artisan firefly-iii:laravel-passport-keys
- echo "${RELEASE}" >"/opt/${APP}_version.txt"
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting Apache2"
systemctl start apache2
msg_ok "Started Apache2"
- msg_info "Cleaning up"
- rm -rf /opt/FireflyIII-v${RELEASE}.tar.gz
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}."
diff --git a/ct/flaresolverr.sh b/ct/flaresolverr.sh
index f2476d918..997626e19 100644
--- a/ct/flaresolverr.sh
+++ b/ct/flaresolverr.sh
@@ -23,20 +23,23 @@ function update_script() {
header_info
check_container_storage
check_container_resources
+
if [[ ! -f /etc/systemd/system/flaresolverr.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
- RELEASE=$(curl -fsSL https://github.com/FlareSolverr/FlareSolverr/releases/latest | grep "title>Release" | cut -d " " -f 4)
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
- msg_info "Updating $APP LXC"
+ RELEASE=$(curl -fsSL https://api.github.com/repos/FlareSolverr/FlareSolverr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
+ if [[ "${RELEASE}" != "$(cat ~/.flaresolverr 2>/dev/null)" ]] || [[ ! -f ~/.flaresolverr ]]; then
+ msg_info "Stopping service"
systemctl stop flaresolverr
- curl -fsSL "https://github.com/FlareSolverr/FlareSolverr/releases/download/$RELEASE/flaresolverr_linux_x64.tar.gz" -o $(basename "https://github.com/FlareSolverr/FlareSolverr/releases/download/$RELEASE/flaresolverr_linux_x64.tar.gz")
- tar -xzf flaresolverr_linux_x64.tar.gz -C /opt
- rm flaresolverr_linux_x64.tar.gz
+ msg_ok "Stopped service"
+
+ rm -rf /opt/flaresolverr
+ fetch_and_deploy_gh_release "flaresolverr" "FlareSolverr/FlareSolverr" "prebuild" "latest" "/opt/flaresolverr" "flaresolverr_linux_x64.tar.gz"
+
+ msg_info "Starting service"
systemctl start flaresolverr
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated $APP LXC"
+ msg_ok "Started service"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
diff --git a/ct/fluid-calendar.sh b/ct/fluid-calendar.sh
index 4c27cbfcc..482eab482 100644
--- a/ct/fluid-calendar.sh
+++ b/ct/fluid-calendar.sh
@@ -20,51 +20,43 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
+ header_info
+ check_container_storage
+ check_container_resources
- if [[ ! -d /opt/fluid-calendar ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
-
- RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
- msg_info "Stopping $APP"
- systemctl stop fluid-calendar.service
- msg_ok "Stopped $APP"
-
- msg_info "Updating $APP to v${RELEASE}"
- cp /opt/fluid-calendar/.env /opt/fluid.env
- rm -rf /opt/fluid-calendar
- tmp_file=$(mktemp)
- curl -fsSL "https://github.com/dotnetfactory/fluid-calendar/archive/refs/tags/v${RELEASE}.zip" -o "$tmp_file"
- $STD unzip $tmp_file
- mv ${APP}-${RELEASE}/ /opt/fluid-calendar
- mv /opt/fluid.env /opt/fluid-calendar/.env
- cd /opt/fluid-calendar
- export NEXT_TELEMETRY_DISABLED=1
- $STD npm install --legacy-peer-deps
- $STD npm run prisma:generate
- $STD npx prisma migrate deploy
- $STD npm run build:os
- msg_ok "Updated $APP to v${RELEASE}"
-
- msg_info "Starting $APP"
- systemctl start fluid-calendar.service
- msg_ok "Started $APP"
-
- msg_info "Cleaning Up"
- rm -rf $tmp_file
- msg_ok "Cleanup Completed"
-
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Update Successful"
- else
- msg_ok "No update required. ${APP} is already at v${RELEASE}"
- fi
+ if [[ ! -d /opt/fluid-calendar ]]; then
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+ if [[ "${RELEASE}" != "$(cat ~/.fluid-calendar 2>/dev/null)" ]] || [[ ! -f ~/.fluid-calendar ]]; then
+ msg_info "Stopping $APP"
+ systemctl stop fluid-calendar
+ msg_ok "Stopped $APP"
+
+ cp /opt/fluid-calendar/.env /opt/fluid.env
+ rm -rf /opt/fluid-calendar
+ fetch_and_deploy_gh_release "fluid-calendar" "dotnetfactory/fluid-calendar"
+
+ msg_info "Updating $APP to v${RELEASE}"
+ mv /opt/fluid.env /opt/fluid-calendar/.env
+ cd /opt/fluid-calendar
+ export NEXT_TELEMETRY_DISABLED=1
+ $STD npm install --legacy-peer-deps
+ $STD npm run prisma:generate
+ $STD npx prisma migrate deploy
+ $STD npm run build:os
+ msg_ok "Updated $APP to v${RELEASE}"
+
+ msg_info "Starting $APP"
+ systemctl start fluid-calendar
+ msg_ok "Started $APP"
+
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. ${APP} is already at v${RELEASE}"
+ fi
+ exit
}
start
diff --git a/ct/gatus.sh b/ct/gatus.sh
index d10872421..0705634d4 100644
--- a/ct/gatus.sh
+++ b/ct/gatus.sh
@@ -29,35 +29,29 @@ function update_script() {
exit
fi
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.gatus 2>/dev/null)" ]] || [[ ! -f ~/.gatus ]]; then
msg_info "Updating $APP"
msg_info "Stopping $APP"
systemctl stop gatus
msg_ok "Stopped $APP"
- msg_info "Updating $APP to v${RELEASE}"
mv /opt/gatus/config/config.yaml /opt
- rm -rf /opt/gatus/*
- temp_file=$(mktemp)
- curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
+ rm -rf /opt/gatus
+ fetch_and_deploy_gh_release "gatus" "TwiN/gatus"
+
+ msg_info "Updating $APP to v${RELEASE}"
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv /opt/config.yaml config
- echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start gatus
msg_ok "Started $APP"
- msg_info "Cleaning Up"
- rm -f "$temp_file"
- msg_ok "Cleanup Completed"
-
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
diff --git a/ct/ghost.sh b/ct/ghost.sh
index f92fa0e58..4b9ba294d 100644
--- a/ct/ghost.sh
+++ b/ct/ghost.sh
@@ -20,26 +20,31 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- msg_info "Updating ${APP} LXC"
+ header_info
+ check_container_storage
+ check_container_resources
- if command -v ghost &>/dev/null; then
- current_version=$(ghost version | grep 'Ghost-CLI version' | awk '{print $3}')
- latest_version=$(npm show ghost-cli version)
- if [ "$current_version" != "$latest_version" ]; then
- msg_info "Updating ${APP} from version v${current_version} to v${latest_version}"
- $STD npm install -g ghost-cli@latest
- msg_ok "Updated Successfully"
- else
- msg_ok "${APP} is already at v${current_version}"
- fi
+ if ! dpkg-query -W -f='${Status}' mariadb-server 2>/dev/null | grep -q "install ok installed"; then
+ setup_mysql
+ fi
+ NODE_VERSION="22" setup_nodejs
+
+ msg_info "Updating ${APP} LXC"
+ if command -v ghost &>/dev/null; then
+ current_version=$(ghost version | grep 'Ghost-CLI version' | awk '{print $3}')
+ latest_version=$(npm show ghost-cli version)
+ if [ "$current_version" != "$latest_version" ]; then
+ msg_info "Updating ${APP} from version v${current_version} to v${latest_version}"
+ $STD npm install -g ghost-cli@latest
+ msg_ok "Updated Successfully"
else
- msg_error "No ${APP} Installation Found!"
- exit
+ msg_ok "${APP} is already at v${current_version}"
fi
+ else
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ exit
}
start
diff --git a/ct/gitea-mirror.sh b/ct/gitea-mirror.sh
index 55bce4012..fadf6d3ef 100644
--- a/ct/gitea-mirror.sh
+++ b/ct/gitea-mirror.sh
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/asylumexp/Proxmox/main/mis
# Copyright (c) 2021-2025 community-scripts ORG
# Author: CrazyWolf13
# License: MIT | https://github.com/asylumexp/Proxmox/raw/main/LICENSE
-# Source: https://github.com/arunavo4/gitea-mirror
+# Source: https://github.com/RayLabsHQ/gitea-mirror
APP="gitea-mirror"
var_tags="${var_tags:-mirror;gitea}"
@@ -28,7 +28,7 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+ RELEASE=$(curl -fsSL https://api.github.com/repos/RayLabsHQ/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.${APP} 2>/dev/null || cat /opt/${APP}_version.txt 2>/dev/null)" ]]; then
msg_info "Stopping Services"
@@ -48,7 +48,7 @@ function update_script() {
msg_ok "Installed Bun"
rm -rf /opt/gitea-mirror
- fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
+ fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"
msg_info "Updating and rebuilding ${APP} to v${RELEASE}"
cd /opt/gitea-mirror
diff --git a/ct/gitea.sh b/ct/gitea.sh
index 671c8fdbb..56ab91c62 100644
--- a/ct/gitea.sh
+++ b/ct/gitea.sh
@@ -20,24 +20,33 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -f /usr/local/bin/gitea ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
- msg_info "Updating $APP to ${RELEASE}"
- FILENAME="gitea-$RELEASE-linux-amd64"
- curl -fsSL "https://github.com/go-gitea/gitea/releases/download/v$RELEASE/gitea-$RELEASE-linux-amd64" -o $FILENAME
- systemctl stop gitea
- rm -rf /usr/local/bin/gitea
- mv $FILENAME /usr/local/bin/gitea
- chmod +x /usr/local/bin/gitea
- systemctl start gitea
- msg_ok "Updated $APP Successfully"
- exit
+ header_info
+ check_container_storage
+ check_container_resources
+
+ if [[ ! -f /usr/local/bin/gitea ]]; then
+ msg_error "No ${APP} Installation Found!"
+ exit
+ fi
+ RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
+ if [[ "${RELEASE}" != "$(cat ~/.gitea 2>/dev/null)" ]] || [[ ! -f ~/.gitea ]]; then
+ msg_info "Stopping service"
+ systemctl stop gitea
+ msg_ok "Service stopped"
+
+ rm -rf /usr/local/bin/gitea
+ fetch_and_deploy_gh_release "gitea" "go-gitea/gitea" "singlefile" "latest" "/usr/local/bin" "gitea-*-linux-amd64"
+ chmod +x /usr/local/bin/gitea
+
+ msg_info "Starting service"
+ systemctl start gitea
+ msg_ok "Started service"
+
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
+ fi
+ exit
}
start
diff --git a/ct/glance.sh b/ct/glance.sh
index 34782accc..4ee653001 100644
--- a/ct/glance.sh
+++ b/ct/glance.sh
@@ -28,28 +28,19 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
-
RELEASE=$(curl -fsSL https://api.github.com/repos/glanceapp/glance/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "${RELEASE}" != "$(cat ~/.glance 2>/dev/null)" ]] || [[ ! -f ~/.glance ]]; then
msg_info "Stopping Service"
systemctl stop glance
msg_ok "Stopped Service"
- msg_info "Updating ${APP} to v${RELEASE}"
- cd /opt
- curl -fsSL "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-amd64.tar.gz" -o $(basename "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-amd64.tar.gz")
- rm -rf /opt/glance/glance
- tar -xzf glance-linux-amd64.tar.gz -C /opt/glance
- echo "${RELEASE}" >"/opt/${APP}_version.txt"
- msg_ok "Updated ${APP} to v${RELEASE}"
+ rm -f /opt/glance/glance
+ fetch_and_deploy_gh_release "glance" "glanceapp/glance" "prebuild" "latest" "/opt/glance" "glance-linux-amd64.tar.gz"
msg_info "Starting Service"
systemctl start glance
msg_ok "Started Service"
- msg_info "Cleaning up"
- rm -rf /opt/glance-linux-amd64.tar.gz
- msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}."
diff --git a/ct/habitica.sh b/ct/habitica.sh
index dce950a0f..6deee0338 100644
--- a/ct/habitica.sh
+++ b/ct/habitica.sh
@@ -20,48 +20,61 @@ color
catch_errors
function update_script() {
- header_info
- check_container_storage
- check_container_resources
+ header_info
+ check_container_storage
+ check_container_resources
- if [[ ! -d "/opt/habitica" ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
- msg_info "Stopping $APP"
- systemctl stop habitica-mongodb
- systemctl stop habitica
- systemctl stop habitica-client
- msg_ok "Stopped $APP"
-
- msg_info "Updating $APP to ${RELEASE}"
- temp_file=$(mktemp)
-curl -fsSL "https://github.com/HabitRPG/habitica/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
- tar zxf $temp_file
- cp -rf habitica-${RELEASE}/* /opt/habitica
- cd /opt/habitica
- $STD npm i
- echo "${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated $APP to ${RELEASE}"
-
- msg_info "Starting $APP"
- systemctl start habitica-mongodb
- systemctl start habitica
- systemctl start habitica-client
- msg_ok "Started $APP"
-
- msg_info "Cleaning Up"
- rm -f $temp_file
- rm -rf ~/habitica-${RELEASE}
- msg_ok "Cleanup Completed"
-
- msg_ok "Update Successful"
- else
- msg_ok "No update required. ${APP} is already at ${RELEASE}"
- fi
+ if [[ ! -d "/opt/habitica" ]]; then
+ msg_error "No ${APP} Installation Found!"
exit
+ fi
+ NODE_VERSION="20" NODE_MODULE="gulp-cli,mocha" setup_nodejs
+ RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+ if [[ "${RELEASE}" != "$(cat ~/.habitica 2>/dev/null)" ]] || [[ ! -f ~/.habitica ]]; then
+
+ msg_info "Stopping $APP"
+ systemctl stop habitica-mongodb
+ systemctl stop habitica
+ systemctl stop habitica-client
+ msg_ok "Stopped $APP"
+
+ msg_info "Save configuration"
+ if [[ -f /opt/habitica/config.json ]]; then
+ cp /opt/habitica/config.json ~/config.json
+ msg_ok "Saved configuration"
+ else
+ msg_warn "No configuration file found, skipping save"
+ fi
+
+ fetch_and_deploy_gh_release "habitica" "HabitRPG/habitica" "tarball" "latest" "/opt/habitica"
+
+ msg_info "Updating $APP to ${RELEASE}"
+ cd /opt/habitica
+ $STD npm i
+ $STD npm run postinstall
+ $STD npm run client:build
+ $STD gulp build:prod
+ msg_ok "Updated $APP to ${RELEASE}"
+
+ msg_info "Restoring configuration"
+ if [[ -f ~/config.json ]]; then
+ cp ~/config.json /opt/habitica/config.json
+ msg_ok "Restored configuration"
+ else
+ msg_warn "No configuration file found to restore"
+ fi
+
+ msg_info "Starting $APP"
+ systemctl start habitica-mongodb
+ systemctl start habitica
+ systemctl start habitica-client
+ msg_ok "Started $APP"
+
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
+ fi
+ exit
}
start
@@ -71,4 +84,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
\ No newline at end of file
+echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
diff --git a/ct/immich.sh b/ct/immich.sh
index b6bdb6866..6323b5a8e 100644
--- a/ct/immich.sh
+++ b/ct/immich.sh
@@ -51,134 +51,12 @@ function update_script() {
fi
if [[ -f ~/.immich_library_revisions ]]; then
libraries=("libjxl" "libheif" "libraw" "imagemagick" "libvips")
- readarray -d '' NEW_REVISIONS < <(for library in "${libraries[@]}"; do
- echo "$library: $(curl -fsSL https://raw.githubusercontent.com/immich-app/base-images/refs/heads/main/server/sources/"$library".json | jq -cr '.revision' -)"
- done)
- UPDATED_REVISIONS="$(comm -13 <(sort ~/.immich_library_revisions) <(echo -n "${NEW_REVISIONS[@]}" | sort))"
- if [[ "$UPDATED_REVISIONS" ]]; then
- readarray -t NAMES < <(echo "$UPDATED_REVISIONS" | awk -F ':' '{print $1}')
- rm -rf "$SOURCE_DIR"
- mkdir -p "$SOURCE_DIR"
- cd "$BASE_DIR"
- $STD git pull
- cd "$STAGING_DIR"
- for name in "${NAMES[@]}"; do
- if [[ "$name" == "libjxl" ]]; then
- msg_info "Recompiling libjxl"
- SOURCE=${SOURCE_DIR}/libjxl
- JPEGLI_LIBJPEG_LIBRARY_SOVERSION="62"
- JPEGLI_LIBJPEG_LIBRARY_VERSION="62.3.0"
- : "${LIBJXL_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libjxl.json)}"
- $STD git clone https://github.com/libjxl/libjxl.git "$SOURCE"
- cd "$SOURCE"
- $STD git reset --hard "$LIBJXL_REVISION"
- $STD git submodule update --init --recursive --depth 1 --recommend-shallow
- $STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-empty-dht-marker.patch
- $STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-icc-warning.patch
- mkdir build
- cd build
- $STD cmake \
- -DCMAKE_BUILD_TYPE=Release \
- -DBUILD_TESTING=OFF \
- -DJPEGXL_ENABLE_DOXYGEN=OFF \
- -DJPEGXL_ENABLE_MANPAGES=OFF \
- -DJPEGXL_ENABLE_PLUGIN_GIMP210=OFF \
- -DJPEGXL_ENABLE_BENCHMARK=OFF \
- -DJPEGXL_ENABLE_EXAMPLES=OFF \
- -DJPEGXL_FORCE_SYSTEM_BROTLI=ON \
- -DJPEGXL_FORCE_SYSTEM_HWY=ON \
- -DJPEGXL_ENABLE_JPEGLI=ON \
- -DJPEGXL_ENABLE_JPEGLI_LIBJPEG=ON \
- -DJPEGXL_INSTALL_JPEGLI_LIBJPEG=ON \
- -DJPEGXL_ENABLE_PLUGINS=ON \
- -DJPEGLI_LIBJPEG_LIBRARY_SOVERSION="$JPEGLI_LIBJPEG_LIBRARY_SOVERSION" \
- -DJPEGLI_LIBJPEG_LIBRARY_VERSION="$JPEGLI_LIBJPEG_LIBRARY_VERSION" \
- -DLIBJPEG_TURBO_VERSION_NUMBER=2001005 \
- ..
- $STD cmake --build . -- -j"$(nproc)"
- $STD cmake --install .
- ldconfig /usr/local/lib
- $STD make clean
- cd "$STAGING_DIR"
- rm -rf "$SOURCE"/{build,third_party}
- msg_ok "Recompiled libjxl"
- fi
- if [[ "$name" == "libheif" ]]; then
- msg_info "Recompiling libheif"
- SOURCE=${SOURCE_DIR}/libheif
- : "${LIBHEIF_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libheif.json)}"
- $STD git clone https://github.com/strukturag/libheif.git "$SOURCE"
- cd "$SOURCE"
- $STD git reset --hard "$LIBHEIF_REVISION"
- mkdir build
- cd build
- $STD cmake --preset=release-noplugins \
- -DWITH_DAV1D=ON \
- -DENABLE_PARALLEL_TILE_DECODING=ON \
- -DWITH_LIBSHARPYUV=ON \
- -DWITH_LIBDE265=ON \
- -DWITH_AOM_DECODER=OFF \
- -DWITH_AOM_ENCODER=OFF \
- -DWITH_X265=OFF \
- -DWITH_EXAMPLES=OFF \
- ..
- $STD make install -j "$(nproc)"
- ldconfig /usr/local/lib
- $STD make clean
- cd "$STAGING_DIR"
- rm -rf "$SOURCE"/build
- msg_ok "Recompiled libheif"
- fi
- if [[ "$name" == "libraw" ]]; then
- msg_info "Recompiling libraw"
- SOURCE=${SOURCE_DIR}/libraw
- : "${LIBRAW_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libraw.json)}"
- $STD git clone https://github.com/libraw/libraw.git "$SOURCE"
- cd "$SOURCE"
- $STD git reset --hard "$LIBRAW_REVISION"
- $STD autoreconf --install
- $STD ./configure
- $STD make -j"$(nproc)"
- $STD make install
- ldconfig /usr/local/lib
- $STD make clean
- cd "$STAGING_DIR"
- msg_ok "Recompiled libraw"
- fi
- if [[ "$name" == "imagemagick" ]]; then
- msg_info "Recompiling ImageMagick"
- SOURCE=$SOURCE_DIR/imagemagick
- : "${IMAGEMAGICK_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/imagemagick.json)}"
- $STD git clone https://github.com/ImageMagick/ImageMagick.git "$SOURCE"
- cd "$SOURCE"
- $STD git reset --hard "$IMAGEMAGICK_REVISION"
- $STD ./configure --with-modules
- $STD make -j"$(nproc)"
- $STD make install
- ldconfig /usr/local/lib
- $STD make clean
- cd "$STAGING_DIR"
- msg_ok "Recompiled ImageMagick"
- fi
- if [[ "$name" == "libvips" ]]; then
- msg_info "Recompiling libvips"
- SOURCE=$SOURCE_DIR/libvips
- : "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
- $STD git clone https://github.com/libvips/libvips.git "$SOURCE"
- cd "$SOURCE"
- $STD git reset --hard "$LIBVIPS_REVISION"
- $STD meson setup build --buildtype=release --libdir=lib -Dintrospection=disabled -Dtiff=disabled
- cd build
- $STD ninja install
- ldconfig /usr/local/lib
- cd "$STAGING_DIR"
- rm -rf "$SOURCE"/build
- msg_ok "Recompiled libvips"
- fi
- done
- echo -n "${NEW_REVISIONS[@]}" >~/.immich_library_revisions
- msg_ok "Image-processing libraries compiled"
- fi
+ cd "$BASE_DIR"
+ $STD git pull
+ for library in "${libraries[@]}"; do
+ compile_"$library"
+ done
+ msg_ok "Image-processing libraries updated"
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/immich-app/immich/releases?per_page=1 | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ -f ~/.immich && "$RELEASE" == "$(cat ~/.immich)" ]]; then
@@ -245,6 +123,10 @@ function update_script() {
cp -a server/{node_modules,dist,bin,resources,package.json,package-lock.json,start*.sh} "$APP_DIR"/
cp -a web/build "$APP_DIR"/www
cp LICENSE "$APP_DIR"
+ cd "$APP_DIR"
+ export SHARP_FORCE_GLOBAL_LIBVIPS=true
+ $STD npm install sharp
+ rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
msg_ok "Updated ${APP} web and microservices"
cd "$SRC_DIR"/machine-learning
@@ -276,8 +158,6 @@ function update_script() {
ln -s "$GEO_DIR" "$APP_DIR"
msg_info "Updating Immich CLI"
- $STD npm install --build-from-source sharp
- rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
$STD npm i -g @immich/cli
msg_ok "Updated Immich CLI"
@@ -293,6 +173,144 @@ function update_script() {
exit
}
+function compile_libjxl() {
+ SOURCE=${SOURCE_DIR}/libjxl
+ JPEGLI_LIBJPEG_LIBRARY_SOVERSION="62"
+ JPEGLI_LIBJPEG_LIBRARY_VERSION="62.3.0"
+ : "${LIBJXL_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libjxl.json)}"
+ if [[ "${update:-}" ]] || [[ "$LIBJXL_REVISION" != "$(grep 'libjxl' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
+ msg_info "Recompiling libjxl"
+ if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
+ $STD git clone https://github.com/libjxl/libjxl.git "$SOURCE"
+ cd "$SOURCE"
+ $STD git reset --hard "$LIBJXL_REVISION"
+ $STD git submodule update --init --recursive --depth 1 --recommend-shallow
+ $STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-empty-dht-marker.patch
+ $STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-icc-warning.patch
+ mkdir build
+ cd build
+ $STD cmake \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DBUILD_TESTING=OFF \
+ -DJPEGXL_ENABLE_DOXYGEN=OFF \
+ -DJPEGXL_ENABLE_MANPAGES=OFF \
+ -DJPEGXL_ENABLE_PLUGIN_GIMP210=OFF \
+ -DJPEGXL_ENABLE_BENCHMARK=OFF \
+ -DJPEGXL_ENABLE_EXAMPLES=OFF \
+ -DJPEGXL_FORCE_SYSTEM_BROTLI=ON \
+ -DJPEGXL_FORCE_SYSTEM_HWY=ON \
+ -DJPEGXL_ENABLE_JPEGLI=ON \
+ -DJPEGXL_ENABLE_JPEGLI_LIBJPEG=ON \
+ -DJPEGXL_INSTALL_JPEGLI_LIBJPEG=ON \
+ -DJPEGXL_ENABLE_PLUGINS=ON \
+ -DJPEGLI_LIBJPEG_LIBRARY_SOVERSION="$JPEGLI_LIBJPEG_LIBRARY_SOVERSION" \
+ -DJPEGLI_LIBJPEG_LIBRARY_VERSION="$JPEGLI_LIBJPEG_LIBRARY_VERSION" \
+ -DLIBJPEG_TURBO_VERSION_NUMBER=2001005 \
+ ..
+ $STD cmake --build . -- -j"$(nproc)"
+ $STD cmake --install .
+ ldconfig /usr/local/lib
+ $STD make clean
+ cd "$STAGING_DIR"
+ rm -rf "$SOURCE"/{build,third_party}
+ msg_ok "Recompiled libjxl"
+ fi
+}
+
+function compile_libheif() {
+ SOURCE=${SOURCE_DIR}/libheif
+ if ! dpkg -l | grep -q libaom; then
+ $STD apt-get install -y libaom-dev
+ local update="required"
+ fi
+ : "${LIBHEIF_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libheif.json)}"
+ if [[ "${update:-}" ]] || [[ "$LIBHEIF_REVISION" != "$(grep 'libheif' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
+ msg_info "Recompiling libheif"
+ if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
+ $STD git clone https://github.com/strukturag/libheif.git "$SOURCE"
+ cd "$SOURCE"
+ $STD git reset --hard "$LIBHEIF_REVISION"
+ mkdir build
+ cd build
+ $STD cmake --preset=release-noplugins \
+ -DWITH_DAV1D=ON \
+ -DENABLE_PARALLEL_TILE_DECODING=ON \
+ -DWITH_LIBSHARPYUV=ON \
+ -DWITH_LIBDE265=ON \
+ -DWITH_AOM_DECODER=OFF \
+ -DWITH_AOM_ENCODER=ON \
+ -DWITH_X265=OFF \
+ -DWITH_EXAMPLES=OFF \
+ ..
+ $STD make install -j "$(nproc)"
+ ldconfig /usr/local/lib
+ $STD make clean
+ cd "$STAGING_DIR"
+ rm -rf "$SOURCE"/build
+ msg_ok "Recompiled libheif"
+ fi
+}
+
+function compile_libraw() {
+ SOURCE=${SOURCE_DIR}/libraw
+ local update
+ : "${LIBRAW_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libraw.json)}"
+ if [[ "${update:-}" ]] || [[ "$LIBRAW_REVISION" != "$(grep 'libraw' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
+ msg_info "Recompiling libraw"
+ if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
+ $STD git clone https://github.com/libraw/libraw.git "$SOURCE"
+ cd "$SOURCE"
+ $STD git reset --hard "$LIBRAW_REVISION"
+ $STD autoreconf --install
+ $STD ./configure
+ $STD make -j"$(nproc)"
+ $STD make install
+ ldconfig /usr/local/lib
+ $STD make clean
+ cd "$STAGING_DIR"
+ msg_ok "Recompiled libraw"
+ fi
+}
+
+function compile_imagemagick() {
+ SOURCE=$SOURCE_DIR/imagemagick
+ : "${IMAGEMAGICK_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/imagemagick.json)}"
+ if [[ "${update:-}" ]] || [[ "$IMAGEMAGICK_REVISION" != "$(grep 'imagemagick' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
+ msg_info "Recompiling ImageMagick"
+ if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
+ $STD git clone https://github.com/ImageMagick/ImageMagick.git "$SOURCE"
+ cd "$SOURCE"
+ $STD git reset --hard "$IMAGEMAGICK_REVISION"
+ $STD ./configure --with-modules
+ $STD make -j"$(nproc)"
+ $STD make install
+ ldconfig /usr/local/lib
+ $STD make clean
+ cd "$STAGING_DIR"
+ msg_ok "Recompiled ImageMagick"
+ fi
+}
+
+function compile_libvips() {
+ SOURCE=$SOURCE_DIR/libvips
+ # : "${LIBVIPS_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libvips.json)}"
+ : "${LIBVIPS_REVISION:=8fa37a64547e392d3808eed8d72adab7e02b3d00}"
+ if [[ "${update:-}" ]] || [[ "$LIBVIPS_REVISION" != "$(grep 'libvips' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
+ msg_info "Recompiling libvips"
+ if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
+ $STD git clone https://github.com/libvips/libvips.git "$SOURCE"
+ cd "$SOURCE"
+ $STD git reset --hard "$LIBVIPS_REVISION"
+ $STD meson setup build --buildtype=release --libdir=lib -Dintrospection=disabled -Dtiff=disabled
+ cd build
+ $STD ninja install
+ ldconfig /usr/local/lib
+ cd "$STAGING_DIR"
+ rm -rf "$SOURCE"/build
+ msg_ok "Recompiled libvips"
+ fi
+}
+
start
build_container
description
diff --git a/ct/komodo.sh b/ct/komodo.sh
index 7a8eb371d..b277ab064 100644
--- a/ct/komodo.sh
+++ b/ct/komodo.sh
@@ -36,6 +36,15 @@ function update_script() {
exit 1
fi
COMPOSE_BASENAME=$(basename "$COMPOSE_FILE")
+
+ if [[ "$COMPOSE_BASENAME" == "sqlite.compose.yaml" || "$COMPOSE_BASENAME" == "postgres.compose.yaml" ]]; then
+ msg_error "❌ Detected outdated Komodo setup using SQLite or PostgreSQL (FerretDB v1)."
+ echo -e "${YW}This configuration is no longer supported since Komodo v1.18.0.${CL}"
+ echo -e "${YW}Please follow the migration guide:${CL}"
+ echo -e "${BGN}https://github.com/community-scripts/ProxmoxVE/discussions/5689${CL}\n"
+ exit 1
+ fi
+
BACKUP_FILE="/opt/komodo/${COMPOSE_BASENAME}.bak_$(date +%Y%m%d_%H%M%S)"
cp "$COMPOSE_FILE" "$BACKUP_FILE" || {
msg_error "Failed to create backup of ${COMPOSE_BASENAME}!"
diff --git a/ct/mafl.sh b/ct/mafl.sh
index 1543951c7..83db577d0 100644
--- a/ct/mafl.sh
+++ b/ct/mafl.sh
@@ -27,18 +27,31 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
+
RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- msg_info "Updating Mafl to v${RELEASE} (Patience)"
- systemctl stop mafl
- curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o $(basename "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz")
- tar -xzf v${RELEASE}.tar.gz
- cp -r mafl-${RELEASE}/* /opt/mafl/
- rm -rf mafl-${RELEASE}
- cd /opt/mafl
- yarn install
- yarn build
- systemctl start mafl
- msg_ok "Updated Mafl to v${RELEASE}"
+ if [[ "${RELEASE}" != "$(cat ~/.mafl 2>/dev/null)" ]] || [[ ! -f ~/.mafl ]]; then
+ msg_info "Stopping Mafl service"
+ systemctl stop mafl
+ msg_ok "Service stopped"
+
+ msg_info "Performing backup"
+ mkdir -p /opt/mafl-backup/data
+ mv /opt/mafl/data /opt/mafl-backup/data
+ rm -rf /opt/mafl
+ msg_ok "Backup complete"
+
+ fetch_and_deploy_gh_release "mafl" "hywax/mafl"
+
+ msg_info "Updating Mafl to v${RELEASE}"
+ cd /opt/mafl
+ yarn install
+ yarn build
+ mv /opt/mafl-backup/data /opt/mafl/data
+ systemctl start mafl
+ msg_ok "Updated Mafl to v${RELEASE}"
+ else
+ msg_ok "No update required. ${APP} is already at v${RELEASE}"
+ fi
exit
}
diff --git a/ct/ollama.sh b/ct/ollama.sh
index d17f0c1c6..09d4686e9 100644
--- a/ct/ollama.sh
+++ b/ct/ollama.sh
@@ -40,6 +40,7 @@ function update_script() {
msg_info "Updating Ollama to ${RELEASE}"
rm -rf /usr/local/lib/ollama
rm -rf /usr/local/bin/ollama
+ mkdir -p /usr/local/lib/ollama
tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama
ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama
echo "${RELEASE}" >/opt/Ollama_version.txt
diff --git a/ct/omada.sh b/ct/omada.sh
index 6a1738339..502c3963c 100644
--- a/ct/omada.sh
+++ b/ct/omada.sh
@@ -66,6 +66,7 @@ function update_script() {
$STD dpkg -i "$OMADA_PKG"
rm -f "$OMADA_PKG"
msg_ok "Updated Omada Controller"
+ exit 0
}
start
diff --git a/ct/planka.sh b/ct/planka.sh
index 1a4265987..43b72d303 100644
--- a/ct/planka.sh
+++ b/ct/planka.sh
@@ -40,20 +40,20 @@ function update_script() {
mkdir -p /opt/planka-backup/user-avatars
mkdir -p /opt/planka-backup/background-images
mkdir -p /opt/planka-backup/attachments
- mv /opt/planka/planka/.env /opt/planka-backup
- [ -n "$(ls -A /opt/planka/planka/public/favicons 2>/dev/null)" ] && mv /opt/planka/planka/public/favicons/* /opt/planka-backup/favicons/
- [ -n "$(ls -A /opt/planka/planka/public/user-avatars 2>/dev/null)" ] && mv /opt/planka/planka/public/user-avatars/* /opt/planka-backup/user-avatars/
- [ -n "$(ls -A /opt/planka/planka/public/background-images 2>/dev/null)" ] && mv /opt/planka/planka/public/background-images/* /opt/planka-backup/background-images/
- [ -n "$(ls -A /opt/planka/planka/private/attachments 2>/dev/null)" ] && mv /opt/planka/planka/private/attachments/* /opt/planka-backup/attachments/
+ mv /opt/planka/.env /opt/planka-backup
+ [ -n "$(ls -A /opt/planka/public/favicons 2>/dev/null)" ] && mv /opt/planka/public/favicons/* /opt/planka-backup/favicons/
+ [ -n "$(ls -A /opt/planka/public/user-avatars 2>/dev/null)" ] && mv /opt/planka/public/user-avatars/* /opt/planka-backup/user-avatars/
+ [ -n "$(ls -A /opt/planka/public/background-images 2>/dev/null)" ] && mv /opt/planka/public/background-images/* /opt/planka-backup/background-images/
+ [ -n "$(ls -A /opt/planka/private/attachments 2>/dev/null)" ] && mv /opt/planka/private/attachments/* /opt/planka-backup/attachments/
rm -rf /opt/planka
fetch_and_deploy_gh_release "planka" "plankanban/planka" "prebuild" "latest" "/opt/planka" "planka-prebuild.zip"
- cd /opt/planka/planka
+ cd /opt/planka
$STD npm install
- mv /opt/planka-backup/.env /opt/planka/planka/
- [ -n "$(ls -A /opt/planka-backup/favicons 2>/dev/null)" ] && mv /opt/planka-backup/favicons/* /opt/planka/planka/public/favicons/
- [ -n "$(ls -A /opt/planka-backup/user-avatars 2>/dev/null)" ] && mv /opt/planka-backup/user-avatars/* /opt/planka/planka/public/user-avatars/
- [ -n "$(ls -A /opt/planka-backup/background-images 2>/dev/null)" ] && mv /opt/planka-backup/background-images/* /opt/planka/planka/public/background-images/
- [ -n "$(ls -A /opt/planka-backup/attachments 2>/dev/null)" ] && mv /opt/planka-backup/attachments/* /opt/planka/planka/private/attachments/
+ mv /opt/planka-backup/.env /opt/planka/
+ [ -n "$(ls -A /opt/planka-backup/favicons 2>/dev/null)" ] && mv /opt/planka-backup/favicons/* /opt/planka/public/favicons/
+ [ -n "$(ls -A /opt/planka-backup/user-avatars 2>/dev/null)" ] && mv /opt/planka-backup/user-avatars/* /opt/planka/public/user-avatars/
+ [ -n "$(ls -A /opt/planka-backup/background-images 2>/dev/null)" ] && mv /opt/planka-backup/background-images/* /opt/planka/public/background-images/
+ [ -n "$(ls -A /opt/planka-backup/attachments 2>/dev/null)" ] && mv /opt/planka-backup/attachments/* /opt/planka/private/attachments/
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Starting $APP"
diff --git a/ct/pocketid.sh b/ct/pocketid.sh
index 566f7370c..a9a093683 100755
--- a/ct/pocketid.sh
+++ b/ct/pocketid.sh
@@ -86,4 +86,4 @@ msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Configure your reverse proxy to point to:${BGN} ${IP}:1411${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/login/setup${CL}"
+echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/setup${CL}"
diff --git a/ct/pulse.sh b/ct/pulse.sh
index d8001fb46..1caef65ff 100644
--- a/ct/pulse.sh
+++ b/ct/pulse.sh
@@ -58,6 +58,7 @@ function update_script() {
else
msg_ok "No update required. ${APP} is already at ${RELEASE}."
fi
+ exit
}
start
diff --git a/ct/qbittorrent.sh b/ct/qbittorrent.sh
index 0cbf8561b..d025b4827 100644
--- a/ct/qbittorrent.sh
+++ b/ct/qbittorrent.sh
@@ -31,7 +31,7 @@ function update_script() {
touch /opt/${APP}_version.txt
mkdir -p $HOME/.config/qBittorrent/
mkdir -p /opt/qbittorrent/
- mv /.config/qBittorrent $HOME/.config/
+ [ -d "/.config/qBittorrent" ] && mv /.config/qBittorrent "$HOME/.config/"
$STD apt-get remove --purge -y qbittorrent-nox
sed -i 's@ExecStart=/usr/bin/qbittorrent-nox@ExecStart=/opt/qbittorrent/qbittorrent-nox@g' /etc/systemd/system/qbittorrent-nox.service
systemctl daemon-reload
diff --git a/ct/stirling-pdf.sh b/ct/stirling-pdf.sh
index f16748883..91ea7f26c 100644
--- a/ct/stirling-pdf.sh
+++ b/ct/stirling-pdf.sh
@@ -27,29 +27,41 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
- msg_info "Updating ${APP}"
- systemctl stop stirlingpdf
- if [[ -n $(dpkg -l | grep -w ocrmypdf) ]] && [[ -z $(dpkg -l | grep -w qpdf) ]]; then
- $STD apt-get remove -y ocrmypdf
- $STD apt-get install -y qpdf
+ RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+ if [[ "${RELEASE}" != "$(cat ~/.stirling-pdf 2>/dev/null)" ]] || [[ ! -f ~/.stirling-pdf ]]; then
+ if [[ ! -f /etc/systemd/system/unoserver.service ]]; then
+ msg_custom "⚠️ " "\e[33m" "Legacy installation detected – please recreate the container using the latest install script."
+ exit 0
+ fi
+
+ PYTHON_VERSION="3.12" setup_uv
+ JAVA_VERSION="21" setup_java
+
+ msg_info "Stopping Services"
+ systemctl stop stirlingpdf libreoffice-listener unoserver
+ msg_ok "Stopped Services"
+
+ if [[ -f ~/.Stirling-PDF-login ]]; then
+ USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF-with-login.jar"
+ mv /opt/Stirling-PDF/Stirling-PDF-with-login.jar /opt/Stirling-PDF/Stirling-PDF.jar
+ else
+ USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF.jar"
+ fi
+
+ msg_info "Refreshing Font Cache"
+ $STD fc-cache -fv
+ msg_ok "Font Cache Updated"
+
+ msg_info "Starting Services"
+ systemctl start stirlingpdf libreoffice-listener unoserver
+ msg_ok "Started Services"
+
+ msg_ok "Update Successful"
+ else
+ msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- curl -fsSL "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v$RELEASE.tar.gz" -o $(basename "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v$RELEASE.tar.gz")
- tar -xzf v$RELEASE.tar.gz
- cd Stirling-PDF-$RELEASE
- chmod +x ./gradlew
- $STD ./gradlew build
- rm -rf /opt/Stirling-PDF/Stirling-PDF-*.jar
- cp -r ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
- cp -r scripts /opt/Stirling-PDF/
- cd ~
- rm -rf Stirling-PDF-$RELEASE v$RELEASE.tar.gz
- ln -sf /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
- systemctl start stirlingpdf
- msg_ok "Updated ${APP} to v$RELEASE"
exit
}
-
start
build_container
description
diff --git a/ct/threadfin.sh b/ct/threadfin.sh
index 4d941feb8..d7ce4fd68 100644
--- a/ct/threadfin.sh
+++ b/ct/threadfin.sh
@@ -27,12 +27,24 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
- msg_info "Updating $APP"
- systemctl stop threadfin.service
- curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_arm64" -o "/opt/threadfin/threadfin"
- chmod +x /opt/threadfin/threadfin
- systemctl start threadfin.service
- msg_ok "Updated $APP"
+
+ RELEASE=$(curl -fsSL https://api.github.com/repos/threadfin/threadfin/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+ if [[ "${RELEASE}" != "$(cat ~/.threadfin 2>/dev/null)" ]] || [[ ! -f ~/.threadfin ]]; then
+
+ msg_info "Stopping $APP"
+ systemctl stop threadfin
+ msg_ok "Stopped $APP"
+
+ fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_arm64"
+
+ msg_info "Starting $APP"
+ systemctl start threadfin
+ msg_ok "Started $APP"
+
+ msg_ok "Updated Successfully"
+ else
+ msg_ok "No update required. ${APP} is already at v${RELEASE}"
+ fi
exit
}
diff --git a/ct/trilium.sh b/ct/trilium.sh
index bcc3e58e3..8a6f6a104 100644
--- a/ct/trilium.sh
+++ b/ct/trilium.sh
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/asylumexp/Proxmox/main/mis
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/asylumexp/Proxmox/raw/main/LICENSE
-# Source: https://triliumnext.github.io/Docs/
+# Source: https://github.com/TriliumNext/Trilium
APP="Trilium"
var_tags="${var_tags:-notes}"
@@ -27,57 +27,52 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
- if [[ ! -f /opt/${APP}_version.txt ]]; then touch /opt/${APP}_version.txt; fi
- RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/Notes/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
- if [[ "v${RELEASE}" != "$(cat /opt/${APP}_version.txt 2>/dev/null)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
-
- if [[ -d /opt/trilium/db ]]; then
- DB_PATH="/opt/trilium/db"
- DB_RESTORE_PATH="/opt/trilium/db"
- elif [[ -d /opt/trilium/assets/db ]]; then
- DB_PATH="/opt/trilium/assets/db"
- DB_RESTORE_PATH="/opt/trilium/assets/db"
+ RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/Trilium/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+ if [[ "${RELEASE}" != "$(cat ~/.Trilium 2>/dev/null)" ]] || [[ ! -f ~/.Trilium ]]; then
+
+ if [[ -d /opt/trilium/db ]]; then
+ DB_PATH="/opt/trilium/db"
+ DB_RESTORE_PATH="/opt/trilium/db"
+ elif [[ -d /opt/trilium/assets/db ]]; then
+ DB_PATH="/opt/trilium/assets/db"
+ DB_RESTORE_PATH="/opt/trilium/assets/db"
+ else
+ msg_error "Database not found in either /opt/trilium/db or /opt/trilium/assets/db"
+ exit 1
+ fi
+
+ msg_info "Stopping ${APP}"
+ systemctl stop trilium
+ sleep 1
+ msg_ok "Stopped ${APP}"
+
+ msg_info "Backing up Database"
+ mkdir -p /opt/trilium_backup
+ cp -r "${DB_PATH}" /opt/trilium_backup/
+ rm -rf /opt/trilium
+ msg_ok "Backed up Database"
+
+ fetch_and_deploy_gh_release "Trilium" "TriliumNext/Trilium" "prebuild" "latest" "/opt/trilium" "TriliumNotes-Server-*linux-x64.tar.xz"
+
+ msg_info "Restoring Database"
+ mkdir -p "$(dirname "${DB_RESTORE_PATH}")"
+ cp -r /opt/trilium_backup/$(basename "${DB_PATH}") "${DB_RESTORE_PATH}"
+ msg_ok "Restored Database"
+
+ msg_info "Cleaning up"
+ rm -rf /opt/trilium_backup
+ msg_ok "Cleaned"
+
+ msg_info "Starting ${APP}"
+ systemctl start trilium
+ sleep 1
+ msg_ok "Started ${APP}"
+ msg_ok "Updated Successfully"
else
- msg_error "Database not found in either /opt/trilium/db or /opt/trilium/assets/db"
- exit 1
+ msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
- msg_info "Stopping ${APP}"
- systemctl stop trilium
- sleep 1
- msg_ok "Stopped ${APP}"
-
- msg_info "Updating to ${RELEASE}"
- mkdir -p /opt/trilium_backup
- cp -r "${DB_PATH}" /opt/trilium_backup/
- rm -rf /opt/trilium
- cd /tmp
- curl -fsSL "https://github.com/TriliumNext/trilium/releases/download/v${RELEASE}/TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz" -o "TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz"
- tar -xf "TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz"
- mv "TriliumNextNotes-Server-${RELEASE}-linux-x64" /opt/trilium
-
- # Restore database
- mkdir -p "$(dirname "${DB_RESTORE_PATH}")"
- cp -r /opt/trilium_backup/$(basename "${DB_PATH}") "${DB_RESTORE_PATH}"
-
- echo "v${RELEASE}" >/opt/${APP}_version.txt
- msg_ok "Updated to ${RELEASE}"
-
- msg_info "Cleaning up"
- rm -rf "/tmp/TriliumNextNotes-Server-${RELEASE}-linux-x64.tar.xz"
- rm -rf /opt/trilium_backup
- msg_ok "Cleaned"
-
- msg_info "Starting ${APP}"
- systemctl start trilium
- sleep 1
- msg_ok "Started ${APP}"
- msg_ok "Updated Successfully"
-else
- msg_ok "No update required. ${APP} is already at ${RELEASE}"
-fi
-
-exit
+ exit
}
start
diff --git a/ct/zigbee2mqtt.sh b/ct/zigbee2mqtt.sh
index 9e81f2feb..1a049b19c 100644
--- a/ct/zigbee2mqtt.sh
+++ b/ct/zigbee2mqtt.sh
@@ -9,7 +9,7 @@ APP="Zigbee2MQTT"
var_tags="${var_tags:-smarthome;zigbee;mqtt}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
-var_disk="${var_disk:-4}"
+var_disk="${var_disk:-5}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-0}"
@@ -27,16 +27,28 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
+
+ if [[ -f ~/.zigbee2mqtt ]]; then
+ CURRENT="$(cat ~/.zigbee2mqtt)"
+ elif [[ -f /opt/${APP}_version.txt ]]; then
+ CURRENT="$(cat /opt/${APP}_version.txt)"
+ rm -f /opt/${APP}_version.txt
+ else
+ CURRENT=""
+ fi
+
RELEASE=$(curl -fsSL https://api.github.com/repos/Koenkk/zigbee2mqtt/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
- if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+ if [[ "$RELEASE" != "$CURRENT" ]]; then
+ if ! command -v jq &>/dev/null; then
+ $STD apt-get update
+ $STD apt-get install -y jq
+ fi
+ NODE_VERSION=24 NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/Koenkk/zigbee2mqtt/master/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
+
msg_info "Stopping Service"
systemctl stop zigbee2mqtt
msg_ok "Stopped Service"
- msg_info "Updating pnpm"
- $STD npm install -g pnpm@10.4.1
- msg_ok "Updated pnpm"
-
msg_info "Creating Backup"
rm -rf /opt/${APP}_backup*.tar.gz
mkdir -p /opt/z2m_backup
@@ -44,12 +56,9 @@ function update_script() {
mv /opt/zigbee2mqtt/data /opt/z2m_backup
msg_ok "Backup Created"
+ fetch_and_deploy_gh_release "Zigbee2MQTT" "Koenkk/zigbee2mqtt" "tarball" "latest" "/opt/zigbee2mqtt"
+
msg_info "Updating ${APP} to v${RELEASE}"
- cd /opt
- curl -fsSL "https://github.com/Koenkk/zigbee2mqtt/archive/refs/tags/${RELEASE}.zip" -o $(basename "https://github.com/Koenkk/zigbee2mqtt/archive/refs/tags/${RELEASE}.zip")
- $STD unzip ${RELEASE}.zip
- rm -rf /opt/zigbee2mqtt
- mv zigbee2mqtt-${RELEASE} /opt/zigbee2mqtt
rm -rf /opt/zigbee2mqtt/data
mv /opt/z2m_backup/data /opt/zigbee2mqtt
cd /opt/zigbee2mqtt
@@ -63,9 +72,7 @@ function update_script() {
msg_info "Cleaning up"
rm -rf /opt/z2m_backup
- rm -rf /opt/${RELEASE}.zip
msg_ok "Cleaned up"
- echo "${RELEASE}" >/opt/${APP}_version.txt
else
msg_ok "No update required. ${APP} is already at v${RELEASE}."
fi
diff --git a/ct/zipline.sh b/ct/zipline.sh
index c62bfa9aa..cfcf4ec6b 100644
--- a/ct/zipline.sh
+++ b/ct/zipline.sh
@@ -40,9 +40,9 @@ function update_script() {
msg_info "Updating ${APP} to ${RELEASE}"
cp /opt/zipline/.env /opt/
- mkdir -p /opt/zipline-upload
- if [ -d /opt/zipline/upload ] && [ "$(ls -A /opt/zipline/upload)" ]; then
- cp -R /opt/zipline/upload/* /opt/zipline-upload/
+ mkdir -p /opt/zipline-uploads
+ if [ -d /opt/zipline/uploads ] && [ "$(ls -A /opt/zipline/uploads)" ]; then
+ cp -R /opt/zipline/uploads/* /opt/zipline-uploads/
fi
curl -fsSL "https://github.com/diced/zipline/archive/refs/tags/v${RELEASE}.zip" -o $(basename "https://github.com/diced/zipline/archive/refs/tags/v${RELEASE}.zip")
$STD unzip v"${RELEASE}".zip
diff --git a/frontend/public/json/add-iptag.json b/frontend/public/json/add-iptag.json
index 7fd9fd35a..1545e0f51 100644
--- a/frontend/public/json/add-iptag.json
+++ b/frontend/public/json/add-iptag.json
@@ -11,7 +11,7 @@
"interface_port": null,
"documentation": null,
"website": null,
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/proxmox.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/proxmox.webp",
"config_path": "/opt/iptag/iptag.conf",
"description": "This script automatically adds IP address as tags to LXC containers or VM's using a systemd service. The service also updates the tags if a LXC/VM IP address is changed.",
"install_methods": [
@@ -37,12 +37,16 @@
"type": "info"
},
{
- "text": "Configuration: `nano /opt/iptag/iptag.conf`. iptag.service must be restarted after change.",
+ "text": "Configuration: `nano /opt/iptag/iptag.conf`. The iptag service must be restarted after changes. See here for full documentation: `https://github.com/community-scripts/ProxmoxVE/discussions/5790`",
"type": "info"
},
{
"text": "The Proxmox Node must contain ipcalc and net-tools. `apt-get install -y ipcalc net-tools`",
"type": "warning"
+ },
+ {
+ "text": "You can execute the iptag tool manually with `iptag-run`",
+ "type": "info"
}
]
}
diff --git a/frontend/public/json/babybuddy.json b/frontend/public/json/babybuddy.json
index 9d35859d0..7294decf0 100644
--- a/frontend/public/json/babybuddy.json
+++ b/frontend/public/json/babybuddy.json
@@ -11,7 +11,7 @@
"interface_port": 80,
"documentation": "https://docs.baby-buddy.net/",
"website": "https://github.com/babybuddy/babybuddy",
- "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/baby-buddy.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/baby-buddy.webp",
"config_path": "/opt/babybuddy/babybuddy/settings/production.py",
"description": "Baby Buddy is an open-source web application designed to assist caregivers in tracking various aspects of a baby's daily routine, including sleep, feedings, diaper changes, tummy time, and more. By recording this data, caregivers can better understand and anticipate their baby's needs, reducing guesswork in daily care. The application offers a user-friendly dashboard for data entry and visualization, supports multiple users, and provides features like timers and reminders. Additionally, Baby Buddy can be integrated with platforms like Home Assistant and Grafana for enhanced functionality.",
"install_methods": [
diff --git a/frontend/public/json/bunkerweb.json b/frontend/public/json/bunkerweb.json
index 3cb7ca70d..9fe806f6d 100644
--- a/frontend/public/json/bunkerweb.json
+++ b/frontend/public/json/bunkerweb.json
@@ -31,5 +31,10 @@
"username": null,
"password": null
},
- "notes": []
+ "notes": [
+ {
+ "text": "WARNING: Installation sources scripts outside of Community Scripts repo. Please check the source before installing.",
+ "type": "warning"
+ }
+ ]
}
diff --git a/frontend/public/json/convertx.json b/frontend/public/json/convertx.json
index 414084aa2..ae2ee826f 100644
--- a/frontend/public/json/convertx.json
+++ b/frontend/public/json/convertx.json
@@ -12,7 +12,7 @@
"interface_port": 3000,
"documentation": "https://github.com/C4illin/ConvertX",
"website": "https://github.com/C4illin/ConvertX",
- "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/convertx.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/convertx.webp",
"description": "ConvertX is a self-hosted online file converter supporting over 1000 formats, including images, audio, video, documents, and more, powered by FFmpeg, GraphicsMagick, and other libraries.",
"install_methods": [
{
diff --git a/frontend/public/json/dockge.json b/frontend/public/json/dockge.json
index b4308ebc7..ad6e55541 100644
--- a/frontend/public/json/dockge.json
+++ b/frontend/public/json/dockge.json
@@ -6,7 +6,7 @@
],
"date_created": "2024-05-02",
"type": "ct",
- "updateable": false,
+ "updateable": true,
"privileged": false,
"interface_port": 5001,
"documentation": null,
diff --git a/frontend/public/json/docmost.json b/frontend/public/json/docmost.json
index 2d6260ed7..6b7d5bdf8 100644
--- a/frontend/public/json/docmost.json
+++ b/frontend/public/json/docmost.json
@@ -31,5 +31,10 @@
"username": null,
"password": null
},
- "notes": []
+ "notes": [
+ {
+ "text": "Use `cat ~/docmost.creds` to see database credentials.",
+ "type": "info"
+ }
+ ]
}
diff --git a/frontend/public/json/emqx.json b/frontend/public/json/emqx.json
index f53e5155d..7e57d45cc 100644
--- a/frontend/public/json/emqx.json
+++ b/frontend/public/json/emqx.json
@@ -6,7 +6,7 @@
],
"date_created": "2024-05-02",
"type": "ct",
- "updateable": false,
+ "updateable": true,
"privileged": false,
"interface_port": 18083,
"documentation": "https://docs.emqx.com/en/emqx/latest/",
diff --git a/frontend/public/json/ersatztv.json b/frontend/public/json/ersatztv.json
index 099821048..4924e6a6b 100644
--- a/frontend/public/json/ersatztv.json
+++ b/frontend/public/json/ersatztv.json
@@ -19,7 +19,7 @@
"type": "default",
"script": "ct/ersatztv.sh",
"resources": {
- "cpu": 1,
+ "cpu": 2,
"ram": 1024,
"hdd": 5,
"os": "debian",
diff --git a/frontend/public/json/evcc.json b/frontend/public/json/evcc.json
index 59025f070..e63288366 100644
--- a/frontend/public/json/evcc.json
+++ b/frontend/public/json/evcc.json
@@ -6,7 +6,7 @@
],
"date_created": "2024-10-15",
"type": "ct",
- "updateable": false,
+ "updateable": true,
"privileged": false,
"interface_port": 7070,
"documentation": "https://evcc.io/#devices",
diff --git a/frontend/public/json/gitea-mirror.json b/frontend/public/json/gitea-mirror.json
index 5be5cc905..937007748 100644
--- a/frontend/public/json/gitea-mirror.json
+++ b/frontend/public/json/gitea-mirror.json
@@ -9,9 +9,9 @@
"updateable": true,
"privileged": false,
"interface_port": 4321,
- "documentation": "https://github.com/arunavo4/gitea-mirror/",
+ "documentation": "https://github.com/RayLabsHQ/gitea-mirror/",
"config_path": "/etc/systemd/system/gitea-mirror.service",
- "website": "https://github.com/arunavo4/gitea-mirror/",
+ "website": "https://github.com/RayLabsHQ/gitea-mirror/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/gitea-mirror.webp",
"description": "Gitea Mirror auto-syncs GitHub repos to your self-hosted Gitea, with a sleek Web UI and easy Docker deployment. ",
"install_methods": [
diff --git a/frontend/public/json/habitica.json b/frontend/public/json/habitica.json
index f4305ff51..76690b9dc 100644
--- a/frontend/public/json/habitica.json
+++ b/frontend/public/json/habitica.json
@@ -8,7 +8,7 @@
"type": "ct",
"updateable": true,
"privileged": false,
- "interface_port": 8080,
+ "interface_port": 3000,
"documentation": "https://github.com/HabitRPG/habitica/wiki",
"website": "https://habitica.com/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/habitica.webp",
diff --git a/frontend/public/json/huntarr.json b/frontend/public/json/huntarr.json
index ea450a815..41a421f77 100644
--- a/frontend/public/json/huntarr.json
+++ b/frontend/public/json/huntarr.json
@@ -12,7 +12,7 @@
"documentation": "https://github.com/plexguide/Huntarr.io/wiki",
"config_path": "/opt/huntarr",
"website": "https://github.com/plexguide/Huntarr.io",
- "logo": "https://raw.githubusercontent.com/plexguide/Huntarr.io/refs/heads/main/frontend/static/logo/Huntarr.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/huntarr.webp",
"description": "Huntarr is a tool that automates the search for missing or low-quality media content in your collection. It works seamlessly with applications like Sonarr, Radarr, Lidarr, Readarr, and Whisparr, enhancing their functionality with continuous background scans to identify and update missed or outdated content. Through a user-friendly web interface accessible on port 9705, Huntarr provides real-time statistics, log views, and extensive configuration options. The software is especially useful for users who want to keep their media library up to date by automatically searching for missing episodes or higher-quality versions. Huntarr is well-suited for self-hosted environments and can easily run in LXC containers or Docker setups.",
"install_methods": [
{
diff --git a/frontend/public/json/itsm-ng.json b/frontend/public/json/itsm-ng.json
index 6ff7d184c..89a7c62d7 100644
--- a/frontend/public/json/itsm-ng.json
+++ b/frontend/public/json/itsm-ng.json
@@ -11,7 +11,7 @@
"interface_port": 80,
"documentation": "https://wiki.itsm-ng.org/en/home",
"website": "https://itsm-ng.com",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/itsm-ng.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/itsm-ng.webp",
"config_path": "/etc/itsm-ng",
"description": "ITSM-NG is a powerful, open-source IT Service Management (ITSM) solution designed for managing IT assets, software, licenses, and support processes in accordance with ITIL best practices. It offers integrated features for asset inventory, incident tracking, problem management, change requests, and service desk workflows.",
"install_methods": [
diff --git a/frontend/public/json/librespeed-rust.json b/frontend/public/json/librespeed-rust.json
index 13c2eaf5e..319cb3446 100644
--- a/frontend/public/json/librespeed-rust.json
+++ b/frontend/public/json/librespeed-rust.json
@@ -12,7 +12,7 @@
"interface_port": 8080,
"documentation": "https://github.com/librespeed/speedtest-rust",
"website": "https://github.com/librespeed/speedtest-rust",
- "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/librespeed.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/librespeed.webp",
"description": "Librespeed is a no flash, no java, no websocket speedtest server. This community script deploys the rust version for simplicity and low resource usage.",
"install_methods": [
{
diff --git a/frontend/public/json/lyrionmusicserver.json b/frontend/public/json/lyrionmusicserver.json
index 88dc0a0c9..9119a2133 100644
--- a/frontend/public/json/lyrionmusicserver.json
+++ b/frontend/public/json/lyrionmusicserver.json
@@ -12,7 +12,7 @@
"interface_port": 9000,
"documentation": "https://lyrion.org/",
"website": "https://lyrion.org/",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/lyrion-media-server.webp",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/lyrion-music-server.webp",
"description": "Lyrion Music Server is an open-source server software to stream local music collections, internet radio, and music services to Squeezebox and compatible audio players.",
"install_methods": [
{
diff --git a/frontend/public/json/mysql.json b/frontend/public/json/mysql.json
index 8b4d29a2b..cf7a2f964 100644
--- a/frontend/public/json/mysql.json
+++ b/frontend/public/json/mysql.json
@@ -39,6 +39,10 @@
{
"text": "With an option to install the MySQL 8.4 LTS release instead of MySQL 8.0",
"type": "info"
+ },
+ {
+ "text": "If installed, access phpMyAdmin at `http://<LXC_IP>/phpMyAdmin`, case sensitive.",
+ "type": "info"
}
]
}
diff --git a/frontend/public/json/nic-offloading-fix.json b/frontend/public/json/nic-offloading-fix.json
index 7d976e29e..261a67aaa 100644
--- a/frontend/public/json/nic-offloading-fix.json
+++ b/frontend/public/json/nic-offloading-fix.json
@@ -1,5 +1,5 @@
{
- "name": "NIC Offloading Fix",
+ "name": "Intel e1000e NIC Offloading Fix",
"slug": "nic-offloading-fix",
"categories": [
1
diff --git a/frontend/public/json/oauth2-proxy.json b/frontend/public/json/oauth2-proxy.json
index 4981b6889..f12399e02 100644
--- a/frontend/public/json/oauth2-proxy.json
+++ b/frontend/public/json/oauth2-proxy.json
@@ -12,7 +12,7 @@
"interface_port": null,
"documentation": "https://oauth2-proxy.github.io/oauth2-proxy/configuration/overview",
"website": "https://oauth2-proxy.github.io/oauth2-proxy/",
- "logo": "https://raw.githubusercontent.com/oauth2-proxy/oauth2-proxy/f82e90426a1881d36bf995f25de9b7b1db4c2564/docs/static/img/logos/OAuth2_Proxy_icon.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/oauth2-proxy.webp",
"config_path": "/opt/oauth2-proxy/config.toml",
"description": "A reverse proxy that provides authentication with Google, Azure, OpenID Connect and many more identity providers.",
"install_methods": [
diff --git a/frontend/public/json/pingvin.json b/frontend/public/json/pingvin.json
deleted file mode 100644
index 0e9515f5c..000000000
--- a/frontend/public/json/pingvin.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "name": "Pingvin Share",
- "slug": "pingvin",
- "categories": [
- 11
- ],
- "date_created": "2024-05-02",
- "type": "ct",
- "updateable": true,
- "privileged": false,
- "interface_port": 3000,
- "documentation": "https://stonith404.github.io/pingvin-share/introduction",
- "website": "https://github.com/stonith404/pingvin-share",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/pingvin-share.webp",
- "config_path": "/opt/pingvin-share/config.yaml",
- "description": "Pingvin Share is self-hosted file sharing platform and an alternative for WeTransfer.",
- "install_methods": [
- {
- "type": "default",
- "script": "ct/pingvin.sh",
- "resources": {
- "cpu": 2,
- "ram": 2048,
- "hdd": 8,
- "os": "debian",
- "version": "12"
- }
- }
- ],
- "default_credentials": {
- "username": null,
- "password": null
- },
- "notes": [
- {
- "text": "Run `DATA_DIRECTORY=path-to-your-data-directory pm2 restart all --update-env` to update path to your data directory or to update environment variables.",
- "type": "info"
- }
- ]
-}
diff --git a/frontend/public/json/proxmox-backup-server.json b/frontend/public/json/proxmox-backup-server.json
index 8cebbf0f2..4fd37eeed 100644
--- a/frontend/public/json/proxmox-backup-server.json
+++ b/frontend/public/json/proxmox-backup-server.json
@@ -35,6 +35,10 @@
{
"text": "Set a root password if using autologin. This will be the PBS password. `passwd root`",
"type": "warning"
+ },
+ {
+ "text": "Advanced Install is only possible without root password and root SSH access, you can configure this after installation.",
+ "type": "warning"
}
]
}
diff --git a/frontend/public/json/pulse.json b/frontend/public/json/pulse.json
index e3cae4a9d..db2da7d02 100644
--- a/frontend/public/json/pulse.json
+++ b/frontend/public/json/pulse.json
@@ -11,7 +11,7 @@
"interface_port": 7655,
"documentation": null,
"website": "https://github.com/rcourtman/Pulse",
- "logo": "https://raw.githubusercontent.com/rcourtman/Pulse/main/src/public/logos/pulse-logo-256x256.png",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/pulse.webp",
"config_path": "/opt/pulse/.env",
"description": "A lightweight monitoring application for Proxmox VE that displays real-time status for VMs and containers via a simple web interface.",
"install_methods": [
diff --git a/frontend/public/json/rclone.json b/frontend/public/json/rclone.json
index 41dd2a858..08583f805 100644
--- a/frontend/public/json/rclone.json
+++ b/frontend/public/json/rclone.json
@@ -11,7 +11,7 @@
"interface_port": 3000,
"documentation": "https://rclone.org/docs/",
"website": "https://rclone.org/",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/rclone.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/rclone.webp",
"config_path": "~/.config/rclone/rclone.conf",
"description": "Rclone is a command-line program to manage files on cloud storage. It is a feature-rich alternative to cloud vendors' web storage interfaces",
"install_methods": [
diff --git a/frontend/public/json/readarr.json b/frontend/public/json/readarr.json
deleted file mode 100644
index f1745a5c2..000000000
--- a/frontend/public/json/readarr.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "name": "Readarr",
- "slug": "readarr",
- "categories": [
- 14
- ],
- "date_created": "2024-05-02",
- "type": "ct",
- "updateable": false,
- "privileged": false,
- "interface_port": 8787,
- "documentation": null,
- "website": "https://readarr.com/",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/readarr.webp",
- "config_path": "",
- "description": "Readarr is an eBook and audiobook management tool designed for Usenet and BitTorrent users. It allows users to manage and organize their eBook and audiobook collection with ease. Readarr integrates with popular Usenet and BitTorrent clients, such as Sonarr and Lidarr, to automate the downloading and organizing of eBook and audiobook files. The software provides a web-based interface for managing and organizing eBooks and audiobooks, making it easy to search and find titles, authors, and genres. Readarr also supports metadata management, including cover art and information, making it easy for users to keep their eBook and audiobook collection organized and up-to-date. The software is designed to be easy to use and provides a simple and intuitive interface for managing and organizing eBook and audiobook collections, making it a valuable tool for book and audiobook enthusiasts who want to keep their collection organized and up-to-date. With Readarr, users can enjoy their eBook and audiobook collection from anywhere, making it a powerful tool for managing and sharing book and audiobook files.",
- "install_methods": [
- {
- "type": "default",
- "script": "ct/readarr.sh",
- "resources": {
- "cpu": 2,
- "ram": 1024,
- "hdd": 4,
- "os": "debian",
- "version": "12"
- }
- }
- ],
- "default_credentials": {
- "username": null,
- "password": null
- },
- "notes": []
-}
diff --git a/frontend/public/json/stirling-pdf.json b/frontend/public/json/stirling-pdf.json
index efe3e3d86..e590c8cab 100644
--- a/frontend/public/json/stirling-pdf.json
+++ b/frontend/public/json/stirling-pdf.json
@@ -1,35 +1,35 @@
{
- "name": "Stirling-PDF",
- "slug": "stirling-pdf",
- "categories": [
- 12
- ],
- "date_created": "2024-05-02",
- "type": "ct",
- "updateable": true,
- "privileged": false,
- "interface_port": 8080,
- "documentation": null,
- "website": "https://github.com/Stirling-Tools/Stirling-PDF",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/stirling-pdf.webp",
- "config_path": "/opt/Stirling-PDF/.env",
- "description": "Stirling-PDF is a powerful locally hosted web based PDF manipulation tool that allows you to perform various operations on PDF files, such as splitting merging, converting, reorganizing, adding images, rotating, compressing, and more.",
- "install_methods": [
- {
- "type": "default",
- "script": "ct/stirling-pdf.sh",
- "resources": {
- "cpu": 2,
- "ram": 2048,
- "hdd": 8,
- "os": "debian",
- "version": "12"
- }
- }
- ],
- "default_credentials": {
- "username": null,
- "password": null
- },
- "notes": []
+ "name": "Stirling-PDF",
+ "slug": "stirling-pdf",
+ "categories": [
+ 12
+ ],
+ "date_created": "2024-05-02",
+ "type": "ct",
+ "updateable": true,
+ "privileged": false,
+ "interface_port": 8080,
+ "documentation": null,
+ "website": "https://github.com/Stirling-Tools/Stirling-PDF",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/stirling-pdf.webp",
+ "config_path": "/opt/Stirling-PDF/.env",
+ "description": "Stirling-PDF is a powerful locally hosted web based PDF manipulation tool that allows you to perform various operations on PDF files, such as splitting merging, converting, reorganizing, adding images, rotating, compressing, and more.",
+ "install_methods": [
+ {
+ "type": "default",
+ "script": "ct/stirling-pdf.sh",
+ "resources": {
+ "cpu": 2,
+ "ram": 2048,
+ "hdd": 8,
+ "os": "debian",
+ "version": "12"
+ }
+ }
+ ],
+ "default_credentials": {
+ "username": "admin",
+ "password": "stirling"
+ },
+ "notes": []
}
diff --git a/frontend/public/json/streamlink-webui.json b/frontend/public/json/streamlink-webui.json
index db335b475..2aadd95de 100644
--- a/frontend/public/json/streamlink-webui.json
+++ b/frontend/public/json/streamlink-webui.json
@@ -12,7 +12,7 @@
"documentation": "https://github.com/CrazyWolf13/streamlink-webui",
"config_path": "/opt/streamlink-webui.env",
"website": "https://github.com/CrazyWolf13/streamlink-webui",
- "logo": "https://streamlink.github.io/_static/icon.svg",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/streamlink.webp",
"description": "a simple web-ui to the well-known streamlink cli application, which allows you to save twitch streams to your local disk.",
"install_methods": [
{
diff --git a/frontend/public/json/threadfin.json b/frontend/public/json/threadfin.json
index 04774f663..81dd3cb9d 100644
--- a/frontend/public/json/threadfin.json
+++ b/frontend/public/json/threadfin.json
@@ -6,7 +6,7 @@
],
"date_created": "2024-06-12",
"type": "ct",
- "updateable": false,
+ "updateable": true,
"privileged": false,
"interface_port": 34400,
"documentation": null,
diff --git a/frontend/public/json/trilium.json b/frontend/public/json/trilium.json
index 44ca9e740..2b42f1ae3 100644
--- a/frontend/public/json/trilium.json
+++ b/frontend/public/json/trilium.json
@@ -1,5 +1,5 @@
{
- "name": "TriliumNext",
+ "name": "Trilium Notes",
"slug": "trilium",
"categories": [
12
@@ -9,11 +9,11 @@
"updateable": true,
"privileged": false,
"interface_port": 8080,
- "documentation": "https://triliumnext.github.io/Docs/",
+ "documentation": "https://github.com/TriliumNext/trilium/wiki",
"website": "https://github.com/TriliumNext/trilium",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/triliumnext.webp",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/trilium-notes.webp",
"config_path": "/root/trilium-data/config.ini",
- "description": "TriliumNext is an newer Fork of Trilium. TriliumNext is an open-source note-taking and personal knowledge management application. It allows users to organize and manage their notes, ideas, and information in a single place, using a hierarchical tree-like structure. Trilium offers a range of features, including rich text formatting, links, images, and attachments, making it easy to create and structure notes. The software is designed to be flexible and customizable, with a range of customization options and plugins available, including themes, export options, and more. Trilium is a self-hosted solution, and can be run on a local machine or a cloud-based server, providing users with full control over their notes and information.",
+ "description": "Trilium Notes is the latest and officially maintained version of the powerful, self-hosted note-taking and personal knowledge management application. It enables users to organize information in a hierarchical tree structure and supports rich text editing, internal linking, images, attachments, and powerful scripting capabilities. This version reflects the most current development efforts under the TriliumNext organization and replaces all prior forks or legacy variants. Trilium is ideal for building personal wikis, structured documentation, and long-term knowledge archives, giving users full local control and privacy.",
"install_methods": [
{
"type": "default",
diff --git a/frontend/public/json/versions.json b/frontend/public/json/versions.json
index 3f36ef931..4be4a6d49 100644
--- a/frontend/public/json/versions.json
+++ b/frontend/public/json/versions.json
@@ -1,39 +1,469 @@
[
+ {
+ "name": "Ombi-app/Ombi",
+ "version": "v4.47.1",
+ "date": "2025-01-05T21:14:23Z"
+ },
+ {
+ "name": "eclipse-mosquitto/mosquitto",
+ "version": "v2.0.22",
+ "date": "2025-07-11T21:34:20Z"
+ },
+ {
+ "name": "steveiliop56/tinyauth",
+ "version": "v3.6.0",
+ "date": "2025-07-09T23:15:25Z"
+ },
{
"name": "mongodb/mongo",
- "version": "r8.1.2",
- "date": "2025-07-01T22:39:32Z"
+ "version": "r8.2.0-alpha0",
+ "date": "2025-07-11T21:06:26Z"
+ },
+ {
+ "name": "duplicati/duplicati",
+ "version": "v2.1.0.124-2.1.0.124_canary_2025-07-11",
+ "date": "2025-07-11T20:09:08Z"
+ },
+ {
+ "name": "TandoorRecipes/recipes",
+ "version": "1.5.35",
+ "date": "2025-06-22T08:30:10Z"
+ },
+ {
+ "name": "MediaBrowser/Emby.Releases",
+ "version": "4.9.1.2",
+ "date": "2025-06-26T22:08:00Z"
+ },
+ {
+ "name": "homarr-labs/homarr",
+ "version": "v1.28.0",
+ "date": "2025-07-11T19:16:26Z"
+ },
+ {
+ "name": "n8n-io/n8n",
+ "version": "n8n@1.101.2",
+ "date": "2025-07-11T12:03:41Z"
+ },
+ {
+ "name": "FlowiseAI/Flowise",
+ "version": "flowise@3.0.4",
+ "date": "2025-07-11T13:26:54Z"
+ },
+ {
+ "name": "zwave-js/zwave-js-ui",
+ "version": "v10.9.0",
+ "date": "2025-07-11T12:57:54Z"
+ },
+ {
+ "name": "zitadel/zitadel",
+ "version": "v3.3.1",
+ "date": "2025-07-11T11:51:48Z"
+ },
+ {
+ "name": "prometheus/prometheus",
+ "version": "v2.53.5",
+ "date": "2025-06-30T11:01:12Z"
+ },
+ {
+ "name": "Paymenter/Paymenter",
+ "version": "v1.2.2",
+ "date": "2025-07-11T10:09:47Z"
+ },
+ {
+ "name": "traefik/traefik",
+ "version": "v3.4.4",
+ "date": "2025-07-11T08:41:34Z"
+ },
+ {
+ "name": "mattermost/mattermost",
+ "version": "preview-v0.1",
+ "date": "2025-06-27T14:35:47Z"
+ },
+ {
+ "name": "Jackett/Jackett",
+ "version": "v0.22.2145",
+ "date": "2025-07-11T05:49:32Z"
+ },
+ {
+ "name": "documenso/documenso",
+ "version": "v1.12.2-rc.1",
+ "date": "2025-07-11T02:55:56Z"
+ },
+ {
+ "name": "outline/outline",
+ "version": "v0.85.1",
+ "date": "2025-07-11T01:17:53Z"
+ },
+ {
+ "name": "jenkinsci/jenkins",
+ "version": "jenkins-2.518",
+ "date": "2025-07-08T13:52:55Z"
+ },
+ {
+ "name": "rcourtman/Pulse",
+ "version": "v3.41.1",
+ "date": "2025-07-10T17:10:46Z"
+ },
+ {
+ "name": "LibreTranslate/LibreTranslate",
+ "version": "v1.7.2",
+ "date": "2025-07-10T19:29:26Z"
+ },
+ {
+ "name": "binwiederhier/ntfy",
+ "version": "v2.13.0",
+ "date": "2025-07-10T19:27:54Z"
+ },
+ {
+ "name": "ollama/ollama",
+ "version": "v0.9.6",
+ "date": "2025-07-08T01:26:29Z"
+ },
+ {
+ "name": "keycloak/keycloak",
+ "version": "26.3.1",
+ "date": "2025-07-09T15:41:43Z"
+ },
+ {
+ "name": "msgbyte/tianji",
+ "version": "v1.23.4",
+ "date": "2025-07-10T18:13:38Z"
+ },
+ {
+ "name": "forgejo/forgejo",
+ "version": "v11.0.3",
+ "date": "2025-07-10T13:12:00Z"
+ },
+ {
+ "name": "crowdsecurity/crowdsec",
+ "version": "v1.6.10",
+ "date": "2025-07-10T12:04:30Z"
+ },
+ {
+ "name": "neo4j/neo4j",
+ "version": "5.26.9",
+ "date": "2025-07-10T10:04:29Z"
+ },
+ {
+ "name": "meilisearch/meilisearch",
+ "version": "prototype-incremental-vector-store-3",
+ "date": "2025-07-07T10:27:19Z"
+ },
+ {
+ "name": "firefly-iii/firefly-iii",
+ "version": "v6.2.20",
+ "date": "2025-07-02T04:03:37Z"
+ },
+ {
+ "name": "pocket-id/pocket-id",
+ "version": "v1.6.2",
+ "date": "2025-07-09T22:14:10Z"
+ },
+ {
+ "name": "NginxProxyManager/nginx-proxy-manager",
+ "version": "v2.12.6",
+ "date": "2025-07-09T21:52:15Z"
+ },
+ {
+ "name": "apache/tika",
+ "version": "3.2.1",
+ "date": "2025-07-09T20:47:29Z"
+ },
+ {
+ "name": "sabnzbd/sabnzbd",
+ "version": "4.5.2",
+ "date": "2025-07-09T19:08:28Z"
+ },
+ {
+ "name": "raydak-labs/configarr",
+ "version": "v1.13.6",
+ "date": "2025-07-09T17:23:01Z"
+ },
+ {
+ "name": "hargata/lubelog",
+ "version": "v1.4.9",
+ "date": "2025-07-09T16:27:46Z"
+ },
+ {
+ "name": "nicolargo/glances",
+ "version": "v4.3.3",
+ "date": "2025-07-09T15:35:44Z"
+ },
+ {
+ "name": "rclone/rclone",
+ "version": "v1.70.3",
+ "date": "2025-07-09T15:06:31Z"
+ },
+ {
+ "name": "home-assistant/operating-system",
+ "version": "16.0",
+ "date": "2025-07-09T13:28:43Z"
+ },
+ {
+ "name": "element-hq/synapse",
+ "version": "v1.133.0",
+ "date": "2025-07-01T15:13:42Z"
+ },
+ {
+ "name": "AdguardTeam/AdGuardHome",
+ "version": "v0.107.63",
+ "date": "2025-06-26T14:34:19Z"
+ },
+ {
+ "name": "fuma-nama/fumadocs",
+ "version": "fumadocs-ui@15.6.3",
+ "date": "2025-07-09T09:28:42Z"
+ },
+ {
+ "name": "cockpit-project/cockpit",
+ "version": "342",
+ "date": "2025-07-09T08:48:21Z"
+ },
+ {
+ "name": "esphome/esphome",
+ "version": "2025.6.3",
+ "date": "2025-07-03T01:07:26Z"
+ },
+ {
+ "name": "henrygd/beszel",
+ "version": "v0.11.1",
+ "date": "2025-04-29T01:14:35Z"
+ },
+ {
+ "name": "Prowlarr/Prowlarr",
+ "version": "v1.37.0.5076",
+ "date": "2025-06-04T11:04:53Z"
+ },
+ {
+ "name": "Radarr/Radarr",
+ "version": "v5.26.2.10099",
+ "date": "2025-06-11T20:10:39Z"
+ },
+ {
+ "name": "ipfs/kubo",
+ "version": "v0.35.0",
+ "date": "2025-05-21T18:00:32Z"
+ },
+ {
+ "name": "grokability/snipe-it",
+ "version": "v8.1.18",
+ "date": "2025-07-08T20:36:37Z"
+ },
+ {
+ "name": "Stirling-Tools/Stirling-PDF",
+ "version": "v1.0.2",
+ "date": "2025-07-08T19:14:31Z"
+ },
+ {
+ "name": "TwiN/gatus",
+ "version": "v5.20.0",
+ "date": "2025-07-08T16:27:11Z"
+ },
+ {
+ "name": "bunkerity/bunkerweb",
+ "version": "v1.6.2",
+ "date": "2025-07-08T13:52:33Z"
+ },
+ {
+ "name": "wazuh/wazuh",
+ "version": "coverity-w28-4.13.0",
+ "date": "2025-07-08T11:25:24Z"
+ },
+ {
+ "name": "docker/compose",
+ "version": "v2.38.2",
+ "date": "2025-07-08T09:35:14Z"
+ },
+ {
+ "name": "Checkmk/checkmk",
+ "version": "v2.4.0p7",
+ "date": "2025-07-08T05:51:08Z"
+ },
+ {
+ "name": "VictoriaMetrics/VictoriaMetrics",
+ "version": "pmm-6401-v1.121.0",
+ "date": "2025-07-07T16:16:13Z"
+ },
+ {
+ "name": "photoprism/photoprism",
+ "version": "250707-d28b3101e",
+ "date": "2025-07-07T15:15:21Z"
+ },
+ {
+ "name": "traccar/traccar",
+ "version": "v6.8.1",
+ "date": "2025-07-07T14:40:11Z"
+ },
+ {
+ "name": "BookStackApp/BookStack",
+ "version": "v25.05.2",
+ "date": "2025-07-07T14:08:25Z"
+ },
+ {
+ "name": "nzbgetcom/nzbget",
+ "version": "v25.2",
+ "date": "2025-07-04T08:21:42Z"
+ },
+ {
+ "name": "morpheus65535/bazarr",
+ "version": "v1.5.2",
+ "date": "2025-05-11T16:40:55Z"
+ },
+ {
+ "name": "slskd/slskd",
+ "version": "0.23.1",
+ "date": "2025-07-06T23:57:52Z"
+ },
+ {
+ "name": "pelican-dev/panel",
+ "version": "v1.0.0-beta22",
+ "date": "2025-07-06T21:16:00Z"
+ },
+ {
+ "name": "pelican-dev/wings",
+ "version": "v1.0.0-beta14",
+ "date": "2025-07-06T21:07:07Z"
+ },
+ {
+ "name": "Luligu/matterbridge",
+ "version": "3.1.2",
+ "date": "2025-07-06T20:55:23Z"
+ },
+ {
+ "name": "bluenviron/mediamtx",
+ "version": "v1.13.0",
+ "date": "2025-07-06T19:23:55Z"
+ },
+ {
+ "name": "syncthing/syncthing",
+ "version": "v1.30.0",
+ "date": "2025-07-01T11:29:11Z"
+ },
+ {
+ "name": "Part-DB/Part-DB-server",
+ "version": "v1.17.2",
+ "date": "2025-07-06T12:21:52Z"
+ },
+ {
+ "name": "redis/redis",
+ "version": "8.0.3",
+ "date": "2025-07-06T12:19:24Z"
+ },
+ {
+ "name": "fallenbagel/jellyseerr",
+ "version": "preview-OIDC",
+ "date": "2025-07-06T00:51:06Z"
+ },
+ {
+ "name": "hyperion-project/hyperion.ng",
+ "version": "2.1.1",
+ "date": "2025-06-14T17:45:06Z"
+ },
+ {
+ "name": "Kareadita/Kavita",
+ "version": "v0.8.7",
+ "date": "2025-07-05T20:08:58Z"
+ },
+ {
+ "name": "cross-seed/cross-seed",
+ "version": "v6.12.7",
+ "date": "2025-06-18T03:44:24Z"
+ },
+ {
+ "name": "runtipi/runtipi",
+ "version": "v4.3.0",
+ "date": "2025-07-05T12:14:52Z"
+ },
+ {
+ "name": "theonedev/onedev",
+ "version": "v11.11.4",
+ "date": "2025-07-05T09:23:25Z"
+ },
+ {
+ "name": "linkwarden/linkwarden",
+ "version": "v2.11.3",
+ "date": "2025-07-05T04:34:46Z"
+ },
+ {
+ "name": "home-assistant/core",
+ "version": "2025.7.1",
+ "date": "2025-07-04T20:02:52Z"
+ },
+ {
+ "name": "emqx/emqx",
+ "version": "e6.0.0-M1.202507-alpha.1",
+ "date": "2025-07-04T14:58:23Z"
+ },
+ {
+ "name": "kimai/kimai",
+ "version": "2.37.0",
+ "date": "2025-07-04T14:49:43Z"
+ },
+ {
+ "name": "Graylog2/graylog2-server",
+ "version": "6.3.1",
+ "date": "2025-07-04T11:20:48Z"
+ },
+ {
+ "name": "cloudflare/cloudflared",
+ "version": "2025.7.0",
+ "date": "2025-07-03T17:08:15Z"
+ },
+ {
+ "name": "rabbitmq/rabbitmq-server",
+ "version": "v4.1.2",
+ "date": "2025-07-03T16:59:29Z"
+ },
+ {
+ "name": "influxdata/influxdb",
+ "version": "v3.2.1",
+ "date": "2025-07-03T16:09:19Z"
+ },
+ {
+ "name": "Dolibarr/dolibarr",
+ "version": "18.0.7",
+ "date": "2025-07-03T08:57:21Z"
+ },
+ {
+ "name": "actualbudget/actual",
+ "version": "v25.7.1",
+ "date": "2025-07-03T01:03:18Z"
+ },
+ {
+ "name": "Koenkk/zigbee2mqtt",
+ "version": "2.5.1",
+ "date": "2025-07-02T19:38:06Z"
+ },
+ {
+ "name": "glpi-project/glpi",
+ "version": "10.0.18",
+ "date": "2025-02-12T11:07:02Z"
+ },
+ {
+ "name": "apache/tomcat",
+ "version": "9.0.107",
+ "date": "2025-07-02T07:12:09Z"
+ },
+ {
+ "name": "qbittorrent/qBittorrent",
+ "version": "release-5.1.2",
+ "date": "2025-07-02T06:13:16Z"
+ },
+ {
+ "name": "diced/zipline",
+ "version": "v4.2.0",
+ "date": "2025-07-02T00:45:31Z"
+ },
+ {
+ "name": "sysadminsmedia/homebox",
+ "version": "v0.20.2",
+ "date": "2025-07-02T00:37:07Z"
},
{
"name": "Threadfin/Threadfin",
"version": "1.2.35",
"date": "2025-07-01T21:37:20Z"
},
- {
- "name": "apache/tomcat",
- "version": "10.1.43",
- "date": "2025-07-01T21:32:34Z"
- },
- {
- "name": "actualbudget/actual",
- "version": "v25.7.0",
- "date": "2025-07-01T21:02:27Z"
- },
- {
- "name": "home-assistant/core",
- "version": "2025.6.3",
- "date": "2025-06-24T13:00:12Z"
- },
- {
- "name": "TwiN/gatus",
- "version": "v5.19.0",
- "date": "2025-07-01T19:59:32Z"
- },
- {
- "name": "Koenkk/zigbee2mqtt",
- "version": "2.5.0",
- "date": "2025-07-01T18:28:01Z"
- },
{
"name": "hivemq/hivemq-community-edition",
"version": "2025.4",
@@ -49,46 +479,6 @@
"version": "v0.57.0",
"date": "2025-07-01T16:47:46Z"
},
- {
- "name": "jenkinsci/jenkins",
- "version": "jenkins-2.517",
- "date": "2025-07-01T16:08:23Z"
- },
- {
- "name": "element-hq/synapse",
- "version": "v1.133.0",
- "date": "2025-07-01T15:13:42Z"
- },
- {
- "name": "sysadminsmedia/homebox",
- "version": "v0.20.1",
- "date": "2025-07-01T14:18:32Z"
- },
- {
- "name": "keycloak/keycloak",
- "version": "26.3.0",
- "date": "2025-07-01T13:18:12Z"
- },
- {
- "name": "syncthing/syncthing",
- "version": "v1.30.0",
- "date": "2025-07-01T11:29:11Z"
- },
- {
- "name": "Checkmk/checkmk",
- "version": "v2.2.0p44-rc1",
- "date": "2025-07-01T11:10:25Z"
- },
- {
- "name": "rcourtman/Pulse",
- "version": "v99.99.99",
- "date": "2025-07-01T08:26:41Z"
- },
- {
- "name": "Jackett/Jackett",
- "version": "v0.22.2101",
- "date": "2025-07-01T05:56:59Z"
- },
{
"name": "zabbix/zabbix",
"version": "7.4.0",
@@ -99,126 +489,36 @@
"version": "v0.15.0-rc3",
"date": "2025-07-01T04:09:37Z"
},
- {
- "name": "wazuh/wazuh",
- "version": "coverity-w27-4.13.0",
- "date": "2025-07-01T03:17:32Z"
- },
- {
- "name": "NginxProxyManager/nginx-proxy-manager",
- "version": "v2.12.4",
- "date": "2025-07-01T01:45:42Z"
- },
{
"name": "MagicMirrorOrg/MagicMirror",
"version": "v2.32.0",
"date": "2025-06-30T22:12:48Z"
},
- {
- "name": "docker/compose",
- "version": "v2.38.1",
- "date": "2025-06-30T20:07:35Z"
- },
{
"name": "jhuckaby/Cronicle",
"version": "v0.9.81",
"date": "2025-06-30T16:40:33Z"
},
- {
- "name": "ollama/ollama",
- "version": "v0.9.4-rc6",
- "date": "2025-06-30T15:59:03Z"
- },
- {
- "name": "prometheus/prometheus",
- "version": "v2.53.5",
- "date": "2025-06-30T11:01:12Z"
- },
- {
- "name": "n8n-io/n8n",
- "version": "n8n@1.100.0",
- "date": "2025-06-23T12:48:35Z"
- },
{
"name": "jupyter/notebook",
"version": "v7.4.4",
"date": "2025-06-30T13:04:22Z"
},
- {
- "name": "Graylog2/graylog2-server",
- "version": "6.3.0",
- "date": "2025-06-30T11:26:45Z"
- },
- {
- "name": "grokability/snipe-it",
- "version": "v8.1.17",
- "date": "2025-06-30T11:26:27Z"
- },
- {
- "name": "documenso/documenso",
- "version": "v1.12.0-rc.8",
- "date": "2025-06-30T09:47:37Z"
- },
{
"name": "PrivateBin/PrivateBin",
"version": "1.7.8",
"date": "2025-06-30T09:00:54Z"
},
- {
- "name": "fuma-nama/fumadocs",
- "version": "fumadocs-mdx@11.6.10",
- "date": "2025-06-30T07:07:36Z"
- },
- {
- "name": "mattermost/mattermost",
- "version": "preview-v0.1",
- "date": "2025-06-27T14:35:47Z"
- },
{
"name": "typesense/typesense",
"version": "v29.0",
"date": "2025-06-30T03:52:33Z"
},
- {
- "name": "firefly-iii/firefly-iii",
- "version": "v6.2.19",
- "date": "2025-06-28T06:53:45Z"
- },
{
"name": "dgtlmoon/changedetection.io",
"version": "0.50.5",
"date": "2025-06-29T08:54:47Z"
},
- {
- "name": "emqx/emqx",
- "version": "e5.9.1-rc.1",
- "date": "2025-06-29T07:27:21Z"
- },
- {
- "name": "theonedev/onedev",
- "version": "v11.11.2",
- "date": "2025-06-29T01:40:39Z"
- },
- {
- "name": "linkwarden/linkwarden",
- "version": "v2.11.2",
- "date": "2025-06-28T17:33:38Z"
- },
- {
- "name": "msgbyte/tianji",
- "version": "v1.22.5",
- "date": "2025-06-28T16:06:19Z"
- },
- {
- "name": "Luligu/matterbridge",
- "version": "3.1.0",
- "date": "2025-06-28T09:02:38Z"
- },
- {
- "name": "esphome/esphome",
- "version": "2025.6.2",
- "date": "2025-06-28T03:47:16Z"
- },
{
"name": "plexguide/Huntarr.io",
"version": "8.1.11",
@@ -229,96 +529,26 @@
"version": "v3.2.4",
"date": "2025-06-28T02:47:31Z"
},
- {
- "name": "pocket-id/pocket-id",
- "version": "v1.5.0",
- "date": "2025-06-27T22:04:32Z"
- },
- {
- "name": "homarr-labs/homarr",
- "version": "v1.26.0",
- "date": "2025-06-27T19:15:24Z"
- },
{
"name": "goauthentik/authentik",
"version": "version/2025.6.3",
"date": "2025-06-27T14:01:06Z"
},
- {
- "name": "rclone/rclone",
- "version": "v1.70.2",
- "date": "2025-06-27T13:21:17Z"
- },
- {
- "name": "sabnzbd/sabnzbd",
- "version": "4.5.1",
- "date": "2025-04-11T09:57:47Z"
- },
- {
- "name": "FlowiseAI/Flowise",
- "version": "flowise@3.0.3",
- "date": "2025-06-27T09:53:57Z"
- },
- {
- "name": "nzbgetcom/nzbget",
- "version": "v25.1",
- "date": "2025-06-27T09:14:14Z"
- },
- {
- "name": "cockpit-project/cockpit",
- "version": "341.1",
- "date": "2025-06-27T08:50:16Z"
- },
- {
- "name": "MediaBrowser/Emby.Releases",
- "version": "4.9.1.2",
- "date": "2025-06-26T22:08:00Z"
- },
- {
- "name": "home-assistant/operating-system",
- "version": "15.2",
- "date": "2025-04-14T15:37:12Z"
- },
{
"name": "netbox-community/netbox",
"version": "v4.3.3",
"date": "2025-06-26T18:42:56Z"
},
- {
- "name": "apache/tika",
- "version": "3.2.1-rc2",
- "date": "2025-06-26T17:10:25Z"
- },
{
"name": "tailscale/tailscale",
"version": "v1.84.3",
"date": "2025-06-26T16:31:57Z"
},
- {
- "name": "traefik/traefik",
- "version": "v3.5.0-rc1",
- "date": "2025-06-26T15:08:43Z"
- },
- {
- "name": "meilisearch/meilisearch",
- "version": "prototype-no-simd-x86-arroy-0",
- "date": "2025-06-26T14:54:18Z"
- },
- {
- "name": "AdguardTeam/AdGuardHome",
- "version": "v0.107.63",
- "date": "2025-06-26T14:34:19Z"
- },
{
"name": "node-red/node-red",
"version": "4.1.0-beta.2",
"date": "2025-06-26T14:23:26Z"
},
- {
- "name": "Dolibarr/dolibarr",
- "version": "18.0.7",
- "date": "2025-06-26T09:16:33Z"
- },
{
"name": "gristlabs/grist-core",
"version": "v1.6.1",
@@ -329,31 +559,16 @@
"version": "v4.101.2",
"date": "2025-06-25T21:18:52Z"
},
- {
- "name": "influxdata/influxdb",
- "version": "v3.2.0",
- "date": "2025-06-25T17:31:48Z"
- },
{
"name": "wavelog/wavelog",
"version": "2.0.5",
"date": "2025-06-25T14:53:31Z"
},
- {
- "name": "bunkerity/bunkerweb",
- "version": "testing",
- "date": "2025-06-16T18:10:42Z"
- },
{
"name": "moghtech/komodo",
"version": "v1.18.4",
"date": "2025-06-25T00:06:56Z"
},
- {
- "name": "duplicati/duplicati",
- "version": "v2.1.0.120-2.1.0.120_canary_2025-06-24",
- "date": "2025-06-24T22:39:50Z"
- },
{
"name": "evcc-io/evcc",
"version": "0.204.5",
@@ -369,26 +584,11 @@
"version": "v2.18.0",
"date": "2025-06-24T08:29:55Z"
},
- {
- "name": "fallenbagel/jellyseerr",
- "version": "preview-fix-proxy-axios",
- "date": "2025-06-24T08:50:22Z"
- },
{
"name": "minio/minio",
"version": "RELEASE.2025-06-13T11-33-47Z",
"date": "2025-06-23T20:58:42Z"
},
- {
- "name": "runtipi/runtipi",
- "version": "v4.2.1",
- "date": "2025-06-03T20:04:28Z"
- },
- {
- "name": "VictoriaMetrics/VictoriaMetrics",
- "version": "pmm-6401-v1.120.0",
- "date": "2025-06-23T15:12:12Z"
- },
{
"name": "gotson/komga",
"version": "1.22.0",
@@ -399,21 +599,11 @@
"version": "2025.6.22",
"date": "2025-06-22T22:41:11Z"
},
- {
- "name": "qbittorrent/qBittorrent",
- "version": "release-5.1.1",
- "date": "2025-06-22T21:41:17Z"
- },
{
"name": "clusterzx/paperless-ai",
"version": "v3.0.7",
"date": "2025-06-22T17:49:29Z"
},
- {
- "name": "TandoorRecipes/recipes",
- "version": "1.5.35",
- "date": "2025-06-22T08:30:10Z"
- },
{
"name": "inventree/InvenTree",
"version": "0.17.14",
@@ -459,16 +649,6 @@
"version": "v0.21.0",
"date": "2025-06-19T11:54:59Z"
},
- {
- "name": "neo4j/neo4j",
- "version": "2025.05.1",
- "date": "2025-06-19T11:28:36Z"
- },
- {
- "name": "redis/redis",
- "version": "8.2-m01-int2",
- "date": "2025-06-12T08:52:10Z"
- },
{
"name": "prometheus-pve/prometheus-pve-exporter",
"version": "v3.5.5",
@@ -479,11 +659,6 @@
"version": "v0.21.0",
"date": "2025-06-18T21:43:27Z"
},
- {
- "name": "ipfs/kubo",
- "version": "v0.35.0",
- "date": "2025-05-21T18:00:32Z"
- },
{
"name": "pterodactyl/panel",
"version": "v1.11.11",
@@ -504,26 +679,11 @@
"version": "v5.6.0",
"date": "2025-06-18T12:19:54Z"
},
- {
- "name": "zwave-js/zwave-js-ui",
- "version": "v10.7.0",
- "date": "2025-06-18T11:57:05Z"
- },
- {
- "name": "forgejo/forgejo",
- "version": "v11.0.2",
- "date": "2025-06-18T09:38:19Z"
- },
{
"name": "silverbulletmd/silverbullet",
"version": "2.0.0-pre3",
"date": "2025-06-18T08:01:24Z"
},
- {
- "name": "cross-seed/cross-seed",
- "version": "v6.12.7",
- "date": "2025-06-18T03:44:24Z"
- },
{
"name": "grafana/grafana",
"version": "v11.5.6",
@@ -534,31 +694,6 @@
"version": "v2.1.5",
"date": "2025-06-17T18:04:11Z"
},
- {
- "name": "BookStackApp/BookStack",
- "version": "v25.05.1",
- "date": "2025-06-17T14:38:04Z"
- },
- {
- "name": "cloudflare/cloudflared",
- "version": "2025.6.1",
- "date": "2025-06-17T12:45:39Z"
- },
- {
- "name": "crowdsecurity/crowdsec",
- "version": "v1.6.9",
- "date": "2025-06-17T11:54:50Z"
- },
- {
- "name": "glpi-project/glpi",
- "version": "10.0.18",
- "date": "2025-02-12T11:07:02Z"
- },
- {
- "name": "morpheus65535/bazarr",
- "version": "v1.5.2",
- "date": "2025-05-11T16:40:55Z"
- },
{
"name": "donaldzou/WGDashboard",
"version": "v4.2.4",
@@ -569,11 +704,6 @@
"version": "2.402",
"date": "2025-06-17T05:20:42Z"
},
- {
- "name": "kimai/kimai",
- "version": "2.36.1",
- "date": "2025-06-16T19:20:54Z"
- },
{
"name": "open-webui/open-webui",
"version": "v0.6.15",
@@ -599,11 +729,6 @@
"version": "cli/v0.25.0",
"date": "2025-06-15T17:48:29Z"
},
- {
- "name": "Prowlarr/Prowlarr",
- "version": "v1.37.0.5076",
- "date": "2025-06-04T11:04:53Z"
- },
{
"name": "Readarr/Readarr",
"version": "v2.0.0.4645",
@@ -614,21 +739,6 @@
"version": "v2.12.4.4658",
"date": "2025-06-09T17:27:45Z"
},
- {
- "name": "Radarr/Radarr",
- "version": "v5.26.2.10099",
- "date": "2025-06-11T20:10:39Z"
- },
- {
- "name": "traccar/traccar",
- "version": "v6.7.3",
- "date": "2025-06-15T05:46:17Z"
- },
- {
- "name": "hyperion-project/hyperion.ng",
- "version": "2.1.1",
- "date": "2025-06-14T17:45:06Z"
- },
{
"name": "advplyr/audiobookshelf",
"version": "v2.25.1",
@@ -649,21 +759,11 @@
"version": "v2025-06-12",
"date": "2025-06-12T20:59:47Z"
},
- {
- "name": "zitadel/zitadel",
- "version": "v3.3.0",
- "date": "2025-06-12T06:54:48Z"
- },
{
"name": "autobrr/autobrr",
"version": "v1.63.1",
"date": "2025-06-11T11:05:42Z"
},
- {
- "name": "steveiliop56/tinyauth",
- "version": "v3.4.1",
- "date": "2025-06-11T07:53:44Z"
- },
{
"name": "OctoPrint/OctoPrint",
"version": "1.11.2",
@@ -709,11 +809,6 @@
"version": "v1.10.0",
"date": "2025-06-07T08:31:48Z"
},
- {
- "name": "diced/zipline",
- "version": "v4.1.2",
- "date": "2025-06-06T17:44:58Z"
- },
{
"name": "ioBroker/ioBroker",
"version": "2025-05-31",
@@ -739,11 +834,6 @@
"version": "mariadb-11.8.2",
"date": "2025-06-04T13:35:16Z"
},
- {
- "name": "rabbitmq/rabbitmq-server",
- "version": "v4.1.1",
- "date": "2025-06-04T19:10:05Z"
- },
{
"name": "intri-in/manage-my-damn-life-nextjs",
"version": "v0.7.1",
@@ -779,11 +869,6 @@
"version": "v0.14.1",
"date": "2024-08-29T22:32:51Z"
},
- {
- "name": "binwiederhier/ntfy",
- "version": "v2.12.0",
- "date": "2025-05-30T00:26:27Z"
- },
{
"name": "release-argus/Argus",
"version": "0.26.3",
@@ -804,11 +889,6 @@
"version": "5.10.0",
"date": "2025-05-28T05:48:20Z"
},
- {
- "name": "bluenviron/mediamtx",
- "version": "v1.12.3",
- "date": "2025-05-27T20:43:10Z"
- },
{
"name": "dani-garcia/vaultwarden",
"version": "1.34.1",
@@ -839,16 +919,6 @@
"version": "0.5",
"date": "2025-05-21T20:19:14Z"
},
- {
- "name": "Stirling-Tools/Stirling-PDF",
- "version": "v0.46.2",
- "date": "2025-05-20T11:21:04Z"
- },
- {
- "name": "Part-DB/Part-DB-server",
- "version": "v1.17.1",
- "date": "2025-05-18T21:06:41Z"
- },
{
"name": "sbondCo/Watcharr",
"version": "v2.1.0",
@@ -869,16 +939,6 @@
"version": "v25.05.2",
"date": "2025-05-17T12:53:29Z"
},
- {
- "name": "Paymenter/Paymenter",
- "version": "v1.1.1",
- "date": "2025-05-17T10:10:36Z"
- },
- {
- "name": "Ombi-app/Ombi",
- "version": "v4.47.1",
- "date": "2025-01-05T21:14:23Z"
- },
{
"name": "motioneye-project/motioneye",
"version": "0.42.1",
@@ -894,26 +954,11 @@
"version": "v2.18.1",
"date": "2025-05-12T07:16:12Z"
},
- {
- "name": "outline/outline",
- "version": "v0.84.0",
- "date": "2025-05-11T15:50:48Z"
- },
{
"name": "owncast/owncast",
"version": "v0.2.3",
"date": "2025-05-10T21:14:45Z"
},
- {
- "name": "pelican-dev/wings",
- "version": "v1.0.0-beta13",
- "date": "2025-05-09T23:14:41Z"
- },
- {
- "name": "pelican-dev/panel",
- "version": "v1.0.0-beta21",
- "date": "2025-05-09T23:14:23Z"
- },
{
"name": "getumbrel/umbrel",
"version": "1.4.2",
@@ -939,26 +984,11 @@
"version": "3.5.0",
"date": "2025-05-05T16:28:24Z"
},
- {
- "name": "raydak-labs/configarr",
- "version": "v1.13.5",
- "date": "2025-05-03T09:48:44Z"
- },
{
"name": "WordPress/WordPress",
"version": "6.8.1",
"date": "2025-04-30T16:44:16Z"
},
- {
- "name": "hargata/lubelog",
- "version": "v1.4.7",
- "date": "2025-04-29T15:00:18Z"
- },
- {
- "name": "henrygd/beszel",
- "version": "v0.11.1",
- "date": "2025-04-29T01:14:35Z"
- },
{
"name": "deluge-torrent/deluge",
"version": "deluge-2.2.0",
@@ -974,11 +1004,6 @@
"version": "v2.6.3",
"date": "2025-04-27T09:05:42Z"
},
- {
- "name": "photoprism/photoprism",
- "version": "250426-27ec7a128",
- "date": "2025-04-26T11:51:39Z"
- },
{
"name": "TechnitiumSoftware/DnsServer",
"version": "v13.6.0",
@@ -1009,11 +1034,6 @@
"version": "v4.3.0",
"date": "2025-04-21T17:44:40Z"
},
- {
- "name": "Kareadita/Kavita",
- "version": "v0.8.6.2",
- "date": "2025-04-20T16:55:38Z"
- },
{
"name": "caddyserver/caddy",
"version": "v2.10.0",
@@ -1024,11 +1044,6 @@
"version": "v0.4.15",
"date": "2024-12-19T03:19:49Z"
},
- {
- "name": "slskd/slskd",
- "version": "0.22.5",
- "date": "2025-04-15T02:52:26Z"
- },
{
"name": "Tautulli/Tautulli",
"version": "v2.15.2",
@@ -1094,11 +1109,6 @@
"version": "v1.34.0",
"date": "2025-03-26T08:48:34Z"
},
- {
- "name": "LibreTranslate/LibreTranslate",
- "version": "v1.6.5",
- "date": "2025-03-25T20:27:29Z"
- },
{
"name": "nextcloud/nextcloudpi",
"version": "v1.55.4",
@@ -1109,11 +1119,6 @@
"version": "v2.5.307",
"date": "2025-03-24T01:33:31Z"
},
- {
- "name": "nicolargo/glances",
- "version": "v4.3.1",
- "date": "2025-03-23T09:02:54Z"
- },
{
"name": "Donkie/Spoolman",
"version": "v0.22.1",
@@ -1154,11 +1159,6 @@
"version": "v0.28.1",
"date": "2025-03-07T15:41:35Z"
},
- {
- "name": "eclipse-mosquitto/mosquitto",
- "version": "v2.0.21",
- "date": "2025-03-06T16:24:56Z"
- },
{
"name": "toniebox-reverse-engineering/teddycloud",
"version": "tc_v0.6.4",
diff --git a/frontend/public/json/zigbee2mqtt.json b/frontend/public/json/zigbee2mqtt.json
index 281d57242..ce3770f17 100644
--- a/frontend/public/json/zigbee2mqtt.json
+++ b/frontend/public/json/zigbee2mqtt.json
@@ -21,7 +21,7 @@
"resources": {
"cpu": 2,
"ram": 1024,
- "hdd": 4,
+ "hdd": 5,
"os": "debian",
"version": "12"
}
diff --git a/install/agentdvr-install.sh b/install/agentdvr-install.sh
index c29c7badb..d7756a706 100644
--- a/install/agentdvr-install.sh
+++ b/install/agentdvr-install.sh
@@ -14,11 +14,12 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y apt-transport-https
-$STD apt-get install -y alsa-utils
-$STD apt-get install -y libxext-dev
-$STD apt-get install -y fontconfig
-$STD apt-get install -y libva-drm2
+$STD apt-get install -y \
+ apt-transport-https \
+ alsa-utils \
+ libxext-dev \
+ fontconfig \
+ libva-drm2
msg_ok "Installed Dependencies"
msg_info "Installing AgentDVR"
@@ -27,7 +28,6 @@ RELEASE=$(curl -fsSL "https://www.ispyconnect.com/api/Agent/DownloadLocation4?pl
cd /opt/agentdvr/agent
curl -fsSL "$RELEASE" -o $(basename "$RELEASE")
$STD unzip Agent_LinuxARM64*.zip
-rm -rf Agent_Linux64*.zip
chmod +x ./Agent
msg_ok "Installed AgentDVR"
@@ -54,6 +54,7 @@ motd_ssh
customize
msg_info "Cleaning up"
+rm -rf Agent_LinuxARM64*.zip
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/alpine-docker-install.sh b/install/alpine-docker-install.sh
index 721b42b9c..5f3a380bd 100644
--- a/install/alpine-docker-install.sh
+++ b/install/alpine-docker-install.sh
@@ -67,5 +67,14 @@ if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
msg_ok "Installed Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
fi
+read -r -p "${TAB3}Would you like to expose the Docker TCP socket? " prompt
+if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
+ msg_info "Exposing Docker TCP socket"
+ $STD mkdir -p /etc/docker
+ echo '{ "hosts": ["unix:///var/run/docker.sock", "tcp://0.0.0.0:2375"] }' > /etc/docker/daemon.json
+ $STD rc-service docker restart
+ msg_ok "Exposed Docker TCP socket at tcp://+:2375"
+fi
+
motd_ssh
customize
diff --git a/install/alpine-komodo-install.sh b/install/alpine-komodo-install.sh
index d1db508e8..d6bf0c733 100644
--- a/install/alpine-komodo-install.sh
+++ b/install/alpine-komodo-install.sh
@@ -28,8 +28,7 @@ msg_ok "Enabled Docker Service"
echo "${TAB3}Choose the database for Komodo installation:"
echo "${TAB3}1) MongoDB (recommended)"
-echo "${TAB3}2) SQLite"
-echo "${TAB3}3) PostgreSQL"
+echo "${TAB3}2) FerretDB"
read -rp "${TAB3}Enter your choice (default: 1): " DB_CHOICE
DB_CHOICE=${DB_CHOICE:-1}
@@ -38,10 +37,7 @@ case $DB_CHOICE in
DB_COMPOSE_FILE="mongo.compose.yaml"
;;
2)
- DB_COMPOSE_FILE="sqlite.compose.yaml"
- ;;
-3)
- DB_COMPOSE_FILE="postgres.compose.yaml"
+ DB_COMPOSE_FILE="ferretdb.compose.yaml"
;;
*)
echo "Invalid choice. Defaulting to MongoDB."
diff --git a/install/alpine-tinyauth-install.sh b/install/alpine-tinyauth-install.sh
index c70c60ade..cc3bf0c6a 100644
--- a/install/alpine-tinyauth-install.sh
+++ b/install/alpine-tinyauth-install.sh
@@ -24,13 +24,13 @@ RELEASE=$(curl -s https://api.github.com/repos/steveiliop56/tinyauth/releases/la
curl -fsSL "https://github.com/steveiliop56/tinyauth/releases/download/v${RELEASE}/tinyauth-amd64" -o /opt/tinyauth/tinyauth
chmod +x /opt/tinyauth/tinyauth
-PASSWORD=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
-USER=$(htpasswd -Bbn "tinyauth" "${PASSWORD}")
+PASS=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
+USER=$(htpasswd -Bbn "tinyauth" "${PASS}")
-cat <<EOF > /opt/tinyauth/credentials.txt
+cat <<EOF >/opt/tinyauth/credentials.txt
Tinyauth Credentials
Username: tinyauth
-Password: ${PASSWORD}
+Password: ${PASS}
EOF
echo "${RELEASE}" >/opt/tinyauth_version.txt
diff --git a/install/authelia-install.sh b/install/authelia-install.sh
index ea3d03c33..e55f193f4 100644
--- a/install/authelia-install.sh
+++ b/install/authelia-install.sh
@@ -13,13 +13,9 @@ setting_up_container
network_check
update_os
-msg_info "Installing Authelia"
-RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_arm64.deb" -o "authelia_${RELEASE}_arm64.deb"
-$STD dpkg -i "authelia_${RELEASE}_arm64.deb"
-msg_ok "Install Authelia completed"
+fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"
-read -p "${TAB3}Enter your domain (ex. example.com): " DOMAIN
+read -rp "${TAB3}Enter your domain (ex. example.com): " DOMAIN
msg_info "Setting Authelia up"
touch /etc/authelia/emails.txt
@@ -72,7 +68,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f "authelia_${RELEASE}_arm64.deb"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/babybuddy-install.sh b/install/babybuddy-install.sh
index 1821f1845..1c7158823 100644
--- a/install/babybuddy-install.sh
+++ b/install/babybuddy-install.sh
@@ -24,13 +24,10 @@ $STD apt-get install -y \
msg_ok "Installed Dependencies"
setup_uv
+fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"
msg_info "Installing Babybuddy"
-RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-temp_file=$(mktemp)
-mkdir -p /opt/{babybuddy,data}
-curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
+mkdir -p /opt/data
cd /opt/babybuddy
$STD uv venv .venv
$STD source .venv/bin/activate
@@ -102,7 +99,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/baikal-install.sh b/install/baikal-install.sh
index 80f823050..28d87a95b 100644
--- a/install/baikal-install.sh
+++ b/install/baikal-install.sh
@@ -13,14 +13,10 @@ setting_up_container
network_check
update_os
-msg_info "Installing Dependencies"
-$STD apt-get install -y \
- apache2 \
- libapache2-mod-php \
- php-{pgsql,dom}
-msg_ok "Installed Dependencies"
-
PG_VERSION="16" setup_postgresql
+PHP_APACHE="YES" PHP_MODULE="pgsql" PHP_VERSION="8.2" setup_php
+setup_composer
+fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"
msg_info "Setting up PostgreSQL Database"
DB_NAME=baikal
@@ -36,11 +32,9 @@ $STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER TEMP
} >>~/baikal.creds
msg_ok "Set up PostgreSQL Database"
-msg_info "Installing Baikal"
-RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-cd /opt
-curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o "baikal-${RELEASE}.zip"
-$STD unzip "baikal-${RELEASE}.zip"
+msg_info "Configuring Baikal"
+cd /opt/baikal
+$STD composer install
cat <<EOF >/opt/baikal/config/baikal.yaml
database:
backend: pgsql
@@ -51,7 +45,6 @@ database:
EOF
chown -R www-data:www-data /opt/baikal/
chmod -R 755 /opt/baikal/
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed Baikal"
msg_info "Creating Service"
@@ -90,7 +83,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf "/opt/baikal-${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/barcode-buddy-install.sh b/install/barcode-buddy-install.sh
index 139af1d16..c3d787abc 100644
--- a/install/barcode-buddy-install.sh
+++ b/install/barcode-buddy-install.sh
@@ -14,22 +14,15 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y \
- apache2 \
- redis \
- php-{curl,date,json,mbstring,redis,sqlite3,sockets} \
- libapache2-mod-php
+$STD apt-get install -y redis
msg_ok "Installed Dependencies"
-msg_info "Installing barcodebuddy"
-RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-cd /opt
-curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
-$STD unzip "v${RELEASE}.zip"
-mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
+PHP_VERSION="8.2" PHP_APACHE="YES" PHP_MODULE="redis,sqlite3" setup_php
+fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
+
+msg_info "Configuring barcodebuddy"
chown -R www-data:www-data /opt/barcodebuddy/data
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
-msg_ok "Installed barcodebuddy"
+msg_ok "Configured barcodebuddy"
msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/barcodebuddy.service
@@ -73,7 +66,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf "/opt/v${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/bitmagnet-install.sh b/install/bitmagnet-install.sh
index cbd069e7b..f105ac027 100644
--- a/install/bitmagnet-install.sh
+++ b/install/bitmagnet-install.sh
@@ -22,17 +22,10 @@ msg_ok "Installed Dependencies"
PG_VERSION="16" setup_postgresql
setup_go
+fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-msg_info "Installing bitmagnet v${RELEASE}"
-mkdir -p /opt/bitmagnet
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
-cd /opt/bitmagnet
-VREL=v$RELEASE
-$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
-chmod +x bitmagnet
+msg_info "Setting up database"
POSTGRES_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
$STD sudo -u postgres psql -c "ALTER USER postgres WITH PASSWORD '$POSTGRES_PASSWORD';"
$STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"
@@ -41,8 +34,14 @@ $STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"
echo ""
echo "postgres user password: $POSTGRES_PASSWORD"
} >>~/postgres.creds
-echo "${RELEASE}" >/opt/bitmagnet_version.txt
-msg_ok "Installed bitmagnet v${RELEASE}"
+msg_ok "Database set up"
+
+msg_info "Configuring bitmagnet v${RELEASE}"
+cd /opt/bitmagnet
+VREL=v$RELEASE
+$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
+chmod +x bitmagnet
+msg_ok "Configured bitmagnet v${RELEASE}"
read -r -p "${TAB3}Enter your TMDB API key if you have one: " tmdbapikey
@@ -72,7 +71,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/bookstack-install.sh b/install/bookstack-install.sh
index fa92a1f50..6091d545a 100644
--- a/install/bookstack-install.sh
+++ b/install/bookstack-install.sh
@@ -16,13 +16,12 @@ update_os
msg_info "Installing Dependencies (Patience)"
$STD apt-get install -y \
apache2 \
- unzip \
- php8.2-{mbstring,gd,fpm,curl,intl,ldap,tidy,bz2,mysql,zip,xml} \
- composer \
- libapache2-mod-php \
make
msg_ok "Installed Dependencies"
+PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
+
+setup_composer
setup_mariadb
msg_info "Setting up Database"
@@ -40,13 +39,10 @@ $STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUS
} >>~/bookstack.creds
msg_ok "Set up database"
-msg_info "Setup Bookstack (Patience)"
+fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
LOCAL_IP="$(hostname -I | awk '{print $1}')"
-cd /opt
-RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
-$STD unzip v${RELEASE}.zip
-mv BookStack-${RELEASE} /opt/bookstack
+
+msg_info "Configuring Bookstack (Patience)"
cd /opt/bookstack
cp .env.example .env
sudo sed -i "s|APP_URL=.*|APP_URL=http://$LOCAL_IP|g" /opt/bookstack/.env
@@ -61,9 +57,8 @@ chmod -R 755 /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
chmod -R 640 /opt/bookstack/.env
$STD a2enmod rewrite
-$STD a2enmod php8.2
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Bookstack"
+$STD a2enmod php8.3
+msg_ok "Configured Bookstack"
msg_info "Creating Service"
cat <<EOF >/etc/apache2/sites-available/bookstack.conf
@@ -112,7 +107,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf /opt/v${RELEASE}.zip
$STD apt-get autoremove
$STD apt-get autoclean
msg_ok "Cleaned"
diff --git a/install/bunkerweb-install.sh b/install/bunkerweb-install.sh
index c1ebfd6e2..2bd0dcbc6 100644
--- a/install/bunkerweb-install.sh
+++ b/install/bunkerweb-install.sh
@@ -18,19 +18,12 @@ $STD apt-get install -y apt-transport-https
$STD apt-get install -y lsb-release
msg_ok "Installed Dependencies"
-msg_info "Installing Nginx"
-curl -fsSL "https://nginx.org/keys/nginx_signing.key" | gpg --dearmor >/usr/share/keyrings/nginx-archive-keyring.gpg
-echo "deb [signed-by=/usr/share/keyrings/nginx-archive-keyring.gpg] http://nginx.org/packages/debian $(lsb_release -cs) nginx" >/etc/apt/sources.list.d/nginx.list
-$STD apt-get update
-$STD apt-get install -y nginx=1.26.3*
-msg_ok "Installed Nginx"
-
RELEASE=$(curl -fsSL https://api.github.com/repos/bunkerity/bunkerweb/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Installing BunkerWeb v${RELEASE} (Patience)"
-curl -fsSL "https://repo.bunkerweb.io/bunkerity/bunkerweb/gpgkey" | gpg --dearmor >/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg
-echo "deb [signed-by=/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg] https://repo.bunkerweb.io/bunkerity/bunkerweb/debian/ bookworm main" >/etc/apt/sources.list.d/bunkerity_bunkerweb.list
-$STD apt-get update
-$STD apt-get install -y bunkerweb=${RELEASE}
+curl -fsSL -o install-bunkerweb.sh https://github.com/bunkerity/bunkerweb/raw/v${RELEASE}/misc/install-bunkerweb.sh
+chmod +x install-bunkerweb.sh
+$STD ./install-bunkerweb.sh --yes
+$STD apt-mark unhold bunkerweb nginx
cat <<EOF >/etc/apt/preferences.d/bunkerweb
Package: bunkerweb
Pin: version ${RELEASE}
diff --git a/install/bytestash-install.sh b/install/bytestash-install.sh
index a8be713a7..b732978a0 100644
--- a/install/bytestash-install.sh
+++ b/install/bytestash-install.sh
@@ -14,22 +14,17 @@ network_check
update_os
NODE_VERSION="22" setup_nodejs
+fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"
msg_info "Installing ByteStash"
JWT_SECRET=$(openssl rand -base64 32 | tr -d '/+=')
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-mv ByteStash-${RELEASE} /opt/bytestash
cd /opt/bytestash/server
$STD npm install
cd /opt/bytestash/client
$STD npm install
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed ByteStash"
-read -p "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg
+read -rp "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/bytestash-backend.service
@@ -73,7 +68,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/changedetection-install.sh b/install/changedetection-install.sh
index ac2abd286..7e0bcb057 100644
--- a/install/changedetection-install.sh
+++ b/install/changedetection-install.sh
@@ -116,7 +116,7 @@ Wants=browserless.service
Type=simple
WorkingDirectory=/opt/changedetection
Environment=WEBDRIVER_URL=http://127.0.0.1:4444/wd/hub
-Environment=PLAYWRIGHT_DRIVER_URL=ws://localhost:3000/chrome?launch={"defaultViewport":{"height":720,"width":1280},"headless":false,"stealth":true}&blockAds=true
+Environment=PLAYWRIGHT_DRIVER_URL=ws://localhost:3000/chrome?launch=eyJkZWZhdWx0Vmlld3BvcnQiOnsiaGVpZ2h0Ijo3MjAsIndpZHRoIjoxMjgwfSwiaGVhZGxlc3MiOmZhbHNlLCJzdGVhbHRoIjp0cnVlfQ==&blockAds=true
ExecStart=changedetection.io -d /opt/changedetection -p 5000
[Install]
WantedBy=multi-user.target
diff --git a/install/cloudflare-ddns-install.sh b/install/cloudflare-ddns-install.sh
index 0171554bf..379217e61 100644
--- a/install/cloudflare-ddns-install.sh
+++ b/install/cloudflare-ddns-install.sh
@@ -15,16 +15,15 @@ update_os
setup_go
-msg_info "Configure Application"
var_cf_api_token="default"
-read -rp "Enter the Cloudflare API token: " var_cf_api_token
+read -rp "${TAB3}Enter the Cloudflare API token: " var_cf_api_token
var_cf_domains="default"
-read -rp "Enter the domains separated with a comma (*.example.org,www.example.org) " var_cf_domains
+read -rp "${TAB3}Enter the domains separated with a comma (*.example.org,www.example.org) " var_cf_domains
var_cf_proxied="false"
while true; do
- read -rp "Proxied? (y/n): " answer
+ read -rp "${TAB3}Proxied? (y/n): " answer
case "$answer" in
[Yy]*)
var_cf_proxied="true"
@@ -39,7 +38,7 @@ while true; do
done
var_cf_ip6_provider="none"
while true; do
- read -rp "Enable IPv6 support? (y/n): " answer
+ read -rp "${TAB3}Enable IPv6 support? (y/n): " answer
case "$answer" in
[Yy]*)
var_cf_ip6_provider="auto"
diff --git a/install/commafeed-install.sh b/install/commafeed-install.sh
index 92cfbbbbb..11910e700 100644
--- a/install/commafeed-install.sh
+++ b/install/commafeed-install.sh
@@ -17,22 +17,8 @@ msg_info "Installing Dependencies"
$STD apt-get install -y rsync
msg_ok "Installed Dependencies"
-msg_info "Installing Azul Zulu"
-curl -fsSL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xB1998361219BD9C9" -o "/etc/apt/trusted.gpg.d/zulu-repo.asc"
-curl -fsSL "https://cdn.azul.com/zulu/bin/zulu-repo_1.0.0-3_all.deb" -o "zulu-repo_1.0.0-3_all.deb"
-$STD dpkg -i zulu-repo_1.0.0-3_all.deb
-$STD apt-get update
-$STD apt-get -y install zulu17-jdk
-msg_ok "Installed Azul Zulu"
-
-RELEASE=$(curl -fsSL https://api.github.com/repos/Athou/commafeed/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
-msg_info "Installing CommaFeed ${RELEASE}"
-mkdir /opt/commafeed
-curl -fsSL "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip" -o "commafeed-${RELEASE}-h2-jvm.zip"
-$STD unzip commafeed-${RELEASE}-h2-jvm.zip
-mv commafeed-${RELEASE}-h2/* /opt/commafeed/
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
-msg_ok "Installed CommaFeed ${RELEASE}"
+JAVA_VERSION="17" setup_java
+fetch_and_deploy_gh_release "commafeed" "Athou/commafeed" "prebuild" "latest" "/opt/commafeed" "commafeed-*-h2-jvm.zip"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/commafeed.service
@@ -55,7 +41,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf commafeed-${RELEASE}-h2 commafeed-${RELEASE}-h2-jvm.zip zulu-repo_1.0.0-3_all.deb
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/cryptpad-install.sh b/install/cryptpad-install.sh
index 877228764..a79ec99b1 100644
--- a/install/cryptpad-install.sh
+++ b/install/cryptpad-install.sh
@@ -14,20 +14,15 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y \
- git
+$STD apt-get install -y git
msg_ok "Installed Dependencies"
NODE_VERSION="22" setup_nodejs
-read -p "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
+read -rp "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
+fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"
msg_info "Setup ${APPLICATION}"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-mv cryptpad-$RELEASE /opt/cryptpad
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
@@ -39,7 +34,6 @@ sed -i "80s#//httpAddress: 'localhost'#httpAddress: '0.0.0.0'#g" /opt/cryptpad/c
if [[ "$onlyoffice" =~ ^[Yy]$ ]]; then
$STD bash -c "./install-onlyoffice.sh --accept-license"
fi
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Setup ${APPLICATION}"
msg_info "Creating Service"
@@ -69,7 +63,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/dashy-install.sh b/install/dashy-install.sh
index b2da3d820..e72493135 100644
--- a/install/dashy-install.sh
+++ b/install/dashy-install.sh
@@ -14,15 +14,12 @@ network_check
update_os
NODE_VERSION="22" setup_nodejs
+fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"
-RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
msg_info "Installing Dashy ${RELEASE} (Patience)"
-mkdir -p /opt/dashy
-curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
cd /opt/dashy
$STD npm install
$STD npm run build
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed Dashy ${RELEASE}"
msg_info "Creating Service"
diff --git a/install/docker-install.sh b/install/docker-install.sh
index aee1023ce..7ce145531 100644
--- a/install/docker-install.sh
+++ b/install/docker-install.sh
@@ -29,7 +29,17 @@ echo -e '{\n "log-driver": "journald"\n}' >/etc/docker/daemon.json
$STD sh <(curl -fsSL https://get.docker.com)
msg_ok "Installed Docker $DOCKER_LATEST_VERSION"
-read -r -p "${TAB3}Would you like to add Portainer? " prompt
+read -r -p "${TAB3}Install Docker Compose v2 plugin? " prompt_compose
+if [[ ${prompt_compose,,} =~ ^(y|yes)$ ]]; then
+ msg_info "Installing Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
+ mkdir -p /usr/local/lib/docker/cli-plugins
+ curl -fsSL "https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_LATEST_VERSION}/docker-compose-$(uname -s)-$(uname -m)" \
+ -o /usr/local/lib/docker/cli-plugins/docker-compose
+ chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
+ msg_ok "Installed Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
+fi
+
+read -r -p "${TAB3}Would you like to add Portainer (UI)? " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Portainer $PORTAINER_LATEST_VERSION"
docker volume create portainer_data >/dev/null
@@ -43,9 +53,9 @@ if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
portainer/portainer-ce:latest
msg_ok "Installed Portainer $PORTAINER_LATEST_VERSION"
else
- read -r -p "${TAB3}Would you like to add the Portainer Agent? " prompt
- if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
- msg_info "Installing Portainer agent $PORTAINER_AGENT_LATEST_VERSION"
+ read -r -p "${TAB3}Would you like to install the Portainer Agent (for remote management)? " prompt_agent
+ if [[ ${prompt_agent,,} =~ ^(y|yes)$ ]]; then
+ msg_info "Installing Portainer Agent $PORTAINER_AGENT_LATEST_VERSION"
$STD docker run -d \
-p 9001:9001 \
--name portainer_agent \
@@ -57,6 +67,43 @@ else
fi
fi
+read -r -p "${TAB3}Expose Docker TCP socket (insecure) ? [n = No, l = Local only (127.0.0.1), a = All interfaces (0.0.0.0)] : " socket_choice
+case "${socket_choice,,}" in
+ l)
+ socket="tcp://127.0.0.1:2375"
+ ;;
+ a)
+ socket="tcp://0.0.0.0:2375"
+ ;;
+ *)
+ socket=""
+ ;;
+esac
+
+if [[ -n "$socket" ]]; then
+ msg_info "Enabling Docker TCP socket on $socket"
+ $STD apt-get install -y jq
+
+ tmpfile=$(mktemp)
+ jq --arg sock "$socket" '. + { "hosts": ["unix:///var/run/docker.sock", $sock] }' /etc/docker/daemon.json > "$tmpfile" && mv "$tmpfile" /etc/docker/daemon.json
+
+ mkdir -p /etc/systemd/system/docker.service.d
+ cat <<EOF > /etc/systemd/system/docker.service.d/override.conf
+[Service]
+ExecStart=
+ExecStart=/usr/bin/dockerd
+EOF
+
+ $STD systemctl daemon-reexec
+ $STD systemctl daemon-reload
+
+ if systemctl restart docker; then
+ msg_ok "Docker TCP socket available on $socket"
+ else
+ msg_error "Docker failed to restart. Check journalctl -xeu docker.service"
+ exit 1
+ fi
+fi
motd_ssh
customize
diff --git a/install/docmost-install.sh b/install/docmost-install.sh
index c6f925ebe..eeddadb9d 100644
--- a/install/docmost-install.sh
+++ b/install/docmost-install.sh
@@ -22,6 +22,7 @@ msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
PG_VERSION="16" setup_postgresql
+fetch_and_deploy_gh_release "docmost" "docmost/docmost"
msg_info "Setting up PostgreSQL"
DB_NAME="docmost_db"
@@ -40,12 +41,7 @@ $STD sudo -u postgres psql -c "ALTER ROLE $DB_USER SET timezone TO 'UTC'"
} >>~/docmost.creds
msg_ok "Set up PostgreSQL"
-msg_info "Installing Docmost (Patience)"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o ""$temp_file""
-tar -xzf "$temp_file"
-mv docmost-${RELEASE} /opt/docmost
+msg_info "Configuring Docmost (Patience)"
cd /opt/docmost
mv .env.example .env
mkdir data
@@ -56,8 +52,7 @@ sed -i -e "s|APP_SECRET=.*|APP_SECRET=$(openssl rand -base64 32 | tr -dc 'a-zA-Z
export NODE_OPTIONS="--max-old-space-size=2048"
$STD pnpm install
$STD pnpm build
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Docmost"
+msg_ok "Configured Docmost"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/docmost.service
@@ -81,7 +76,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/emby-install.sh b/install/emby-install.sh
index f8fd7b8cf..5b3b6d639 100644
--- a/install/emby-install.sh
+++ b/install/emby-install.sh
@@ -24,17 +24,15 @@ if [[ "$CTTYPE" == "0" ]]; then
fi
msg_ok "Set Up Hardware Acceleration"
-LATEST=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
+fetch_and_deploy_gh_release "emby" "MediaBrowser/Emby.Releases" "binary"
-msg_info "Installing Emby"
-curl -fsSL "https://github.com/MediaBrowser/Emby.Releases/releases/download/${LATEST}/emby-server-deb_${LATEST}_arm64.deb" -o "emby-server-deb_${LATEST}_arm64.deb"
-$STD dpkg -i emby-server-deb_${LATEST}_arm64.deb
+msg_info "Configuring Emby"
if [[ "$CTTYPE" == "0" ]]; then
sed -i -e 's/^ssl-cert:x:104:$/render:x:104:root,emby/' -e 's/^render:x:108:root,emby$/ssl-cert:x:108:/' /etc/group
else
sed -i -e 's/^ssl-cert:x:104:$/render:x:104:emby/' -e 's/^render:x:108:emby$/ssl-cert:x:108:/' /etc/group
fi
-msg_ok "Installed Emby"
+msg_ok "Configured Emby"
motd_ssh
customize
@@ -42,5 +40,4 @@ customize
msg_info "Cleaning up"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
-rm emby-server-deb_${LATEST}_arm64.deb
msg_ok "Cleaned"
diff --git a/install/emqx-install.sh b/install/emqx-install.sh
index 7ddd7b0da..991ba8748 100644
--- a/install/emqx-install.sh
+++ b/install/emqx-install.sh
@@ -13,16 +13,39 @@ setting_up_container
network_check
update_os
+msg_info "Installing dependencies"
+$STD apt-get install -y ca-certificates
+msg_ok "Installed dependencies"
+
+msg_info "Fetching latest EMQX Enterprise version"
+LATEST_VERSION=$(curl -fsSL https://www.emqx.com/en/downloads/enterprise | grep -oP '/en/downloads/enterprise/v\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n1)
+if [[ -z "$LATEST_VERSION" ]]; then
+ msg_error "Failed to determine latest EMQX version"
+ exit 1
+fi
+msg_ok "Latest version: v$LATEST_VERSION"
+
+DOWNLOAD_URL="https://www.emqx.com/en/downloads/enterprise/v$LATEST_VERSION/emqx-enterprise-${LATEST_VERSION}-debian12-amd64.deb"
+DEB_FILE="/tmp/emqx-enterprise-${LATEST_VERSION}-debian12-amd64.deb"
+
+msg_info "Downloading EMQX v$LATEST_VERSION"
+$STD curl -fsSL -o "$DEB_FILE" "$DOWNLOAD_URL"
+msg_ok "Downloaded EMQX"
+
msg_info "Installing EMQX"
-$STD bash <(curl -fsSL https://packagecloud.io/install/repositories/emqx/emqx/script.deb.sh)
-$STD apt-get install -y emqx
-$STD systemctl enable --now emqx
+$STD apt-get install -y "$DEB_FILE"
+echo "$LATEST_VERSION" >~/.emqx
msg_ok "Installed EMQX"
+msg_info "Starting EMQX service"
+$STD systemctl enable -q --now emqx
+msg_ok "Enabled EMQX service"
+
motd_ssh
customize
msg_info "Cleaning up"
-apt-get autoremove >/dev/null
-apt-get autoclean >/dev/null
+rm -f "$DEB_FILE"
+$STD apt-get autoremove
+$STD apt-get autoclean
msg_ok "Cleaned"
diff --git a/install/ersatztv-install.sh b/install/ersatztv-install.sh
index a4040356c..cbf427e84 100644
--- a/install/ersatztv-install.sh
+++ b/install/ersatztv-install.sh
@@ -13,19 +13,7 @@ setting_up_container
network_check
update_os
-msg_info "Installing Dependencies"
-$STD apt-get install -y ca-certificates
-msg_ok "Installed Dependencies"
-
-msg_info "Installing FFmpeg (Patience)"
-cd /usr/local/bin
-curl -fsSL "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-arm64-static.tar.xz" -o "ffmpeg-release-arm64-static.tar.xz"
-$STD tar -xvf ffmpeg-release-arm64-static.tar.xz
-rm -f ffmpeg-*.tar.xz
-cd ffmpeg-*
-mv ffmpeg ffprobe /usr/local/bin/
-rm -rf /usr/local/bin/ffmpeg-*
-msg_ok "Installed FFmpeg"
+FFMPEG_VERSION="latest" FFMPEG_TYPE="medium" setup_ffmpeg
msg_info "Setting Up Hardware Acceleration"
$STD apt-get -y install {va-driver-all,ocl-icd-libopencl1,intel-opencl-icd,vainfo,intel-gpu-tools}
@@ -38,15 +26,7 @@ if [[ "$CTTYPE" == "0" ]]; then
fi
msg_ok "Set Up Hardware Acceleration"
-msg_info "Installing ErsatzTV"
-temp_file=$(mktemp)
-cd /opt
-RELEASE=$(curl -fsSL https://api.github.com/repos/ErsatzTV/ErsatzTV/releases | grep -oP '"tag_name": "\K[^"]+' | head -n 1)
-curl -fsSL "https://github.com/ErsatzTV/ErsatzTV/releases/download/${RELEASE}/ErsatzTV-${RELEASE}-linux-arm64.tar.gz" -o "$temp_file"
-tar -xzf "$temp_file"
-mv /opt/ErsatzTV-${RELEASE}-linux-arm64 /opt/ErsatzTV
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed ErsatzTV"
+fetch_and_deploy_gh_release "ersatztv" "ErsatzTV/ErsatzTV" "prebuild" "latest" "/opt/ErsatzTV" "*linux-arm64.tar.gz"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/ersatzTV.service
@@ -57,8 +37,8 @@ After=multi-user.target
[Service]
Type=simple
User=root
-WorkingDirectory=/opt/ErsatzTV
-ExecStart=/opt/ErsatzTV/ErsatzTV
+WorkingDirectory=/opt/ErsatzTV
+ExecStart=/opt/ErsatzTV/ErsatzTV
Restart=always
RestartSec=30
@@ -72,7 +52,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f ${temp_file}
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/esphome-install.sh b/install/esphome-install.sh
index 4ec235769..2793aaca8 100644
--- a/install/esphome-install.sh
+++ b/install/esphome-install.sh
@@ -29,6 +29,11 @@ $STD /opt/esphome/.venv/bin/python -m pip install --upgrade pip
$STD /opt/esphome/.venv/bin/python -m pip install esphome tornado esptool
msg_ok "Setup and Installed ESPHome"
+msg_info "Linking esphome to /usr/local/bin"
+rm -f /usr/local/bin/esphome
+ln -s /opt/esphome/.venv/bin/esphome /usr/local/bin/esphome
+msg_ok "Linked esphome binary"
+
msg_info "Creating Service"
mkdir -p /root/config
cat <<EOF >/etc/systemd/system/esphomeDashboard.service
diff --git a/install/excalidraw-install.sh b/install/excalidraw-install.sh
index 4fd1ac868..10431e239 100644
--- a/install/excalidraw-install.sh
+++ b/install/excalidraw-install.sh
@@ -14,21 +14,15 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y \
- xdg-utils
+$STD apt-get install -y xdg-utils
msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
+fetch_and_deploy_gh_release "excalidraw" "excalidraw/excalidraw"
-msg_info "Setup Excalidraw"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/excalidraw/excalidraw/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/excalidraw/excalidraw/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar xzf $temp_file
-mv excalidraw-${RELEASE} /opt/excalidraw
+msg_info "Configuring Excalidraw"
cd /opt/excalidraw
$STD yarn
-echo "${RELEASE}" >/opt/excalidraw_version.txt
msg_ok "Setup Excalidraw"
msg_info "Creating Service"
@@ -53,7 +47,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/firefly-install.sh b/install/firefly-install.sh
index 7d4f405bc..83362b8fa 100644
--- a/install/firefly-install.sh
+++ b/install/firefly-install.sh
@@ -13,18 +13,10 @@ setting_up_container
network_check
update_os
-msg_info "Installing Dependencies"
-curl -fsSLo /usr/share/keyrings/deb.sury.org-php.gpg https://packages.sury.org/php/apt.gpg
-echo "deb [signed-by=/usr/share/keyrings/deb.sury.org-php.gpg] https://packages.sury.org/php/ bookworm main" >/etc/apt/sources.list.d/php.list
-$STD apt-get update
-$STD apt-get install -y \
- apache2 \
- libapache2-mod-php8.4 \
- php8.4-{bcmath,cli,intl,curl,zip,gd,xml,mbstring,mysql} \
- composer
-msg_ok "Installed Dependencies"
-
+PHP_VERSION="8.4" PHP_APACHE="YES" PHP_MODULE="mysql" setup_php
+setup_composer
setup_mariadb
+LOCAL_IP=$(hostname -I | awk '{print $1}')
msg_info "Setting up database"
DB_NAME=firefly
@@ -41,21 +33,15 @@ mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRI
} >>~/firefly.creds
msg_ok "Set up database"
-msg_info "Installing Firefly III (Patience)"
-LOCAL_IP=$(hostname -I | awk '{print $1}')
-RELEASE=$(curl -fsSL https://api.github.com/repos/firefly-iii/firefly-iii/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
-cd /opt
-curl -fsSL "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz" -o "FireflyIII-v${RELEASE}.tar.gz"
-mkdir -p /opt/firefly
-tar -xzf FireflyIII-v${RELEASE}.tar.gz -C /opt/firefly
+fetch_and_deploy_gh_release "firefly" "firefly-iii/firefly-iii" "prebuild" "latest" "/opt/firefly" "FireflyIII-*.zip"
+
+msg_info "Configuring Firefly III (Patience)"
chown -R www-data:www-data /opt/firefly
chmod -R 775 /opt/firefly/storage
cd /opt/firefly
cp .env.example .env
sed -i "s/DB_HOST=.*/DB_HOST=localhost/" /opt/firefly/.env
sed -i "s/DB_PASSWORD=.*/DB_PASSWORD=$DB_PASS/" /opt/firefly/.env
-echo "export COMPOSER_ALLOW_SUPERUSER=1" >>~/.bashrc
-source ~/.bashrc
$STD composer install --no-dev --no-plugins --no-interaction
$STD php artisan firefly:upgrade-database
$STD php artisan firefly:correct-database
@@ -69,8 +55,7 @@ tar -xzf "DataImporter-v${IMPORTER_RELEASE}.tar.gz" -C /opt/firefly/dataimporter
cp /opt/firefly/dataimporter/.env.example /opt/firefly/dataimporter/.env
sed -i "s#FIREFLY_III_URL=#FIREFLY_III_URL=http://${LOCAL_IP}#g" /opt/firefly/dataimporter/.env
chown -R www-data:www-data /opt/firefly
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Firefly III"
+msg_ok "Configured Firefly III"
msg_info "Creating Service"
cat <<EOF >/etc/apache2/sites-available/firefly.conf
@@ -112,7 +97,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf "/opt/FireflyIII-v${RELEASE}.tar.gz"
rm -rf "/opt/DataImporter-v${IMPORTER_RELEASE}.tar.gz"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
diff --git a/install/fluid-calendar-install.sh b/install/fluid-calendar-install.sh
index ab95279f9..11827e9e7 100644
--- a/install/fluid-calendar-install.sh
+++ b/install/fluid-calendar-install.sh
@@ -14,17 +14,11 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y \
- zip \
- postgresql-common
+$STD apt-get install -y zip
msg_ok "Installed Dependencies"
-msg_info "Installing Additional Dependencies"
-curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
-echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" >/etc/apt/sources.list.d/nodesource.list
-echo "YES" | /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh &>/dev/null
-$STD apt-get install -y postgresql-17 nodejs
-msg_ok "Installed Additional Dependencies"
+PG_VERSION="17" setup_postgresql
+NODE_VERSION="20" setup_nodejs
msg_info "Setting up Postgresql Database"
DB_NAME="fluiddb"
@@ -44,14 +38,9 @@ $STD sudo -u postgres psql -c "ALTER USER $DB_USER WITH SUPERUSER;"
} >>~/$APPLICATION.creds
msg_ok "Set up Postgresql Database"
-msg_info "Setup ${APPLICATION}"
-tmp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/dotnetfactory/fluid-calendar/archive/refs/tags/v${RELEASE}.zip" -o "$tmp_file"
-$STD unzip $tmp_file
-mv ${APPLICATION}-${RELEASE}/ /opt/${APPLICATION}
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
+fetch_and_deploy_gh_release "fluid-calendar" "dotnetfactory/fluid-calendar"
+msg_info "Configuring ${APPLICATION}"
cat <<EOF >/opt/fluid-calendar/.env
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:5432/${DB_NAME}"
@@ -72,7 +61,7 @@ $STD npm install --legacy-peer-deps
$STD npm run prisma:generate
$STD npx prisma migrate deploy
$STD npm run build:os
-msg_ok "Setup ${APPLICATION}"
+msg_ok "Configuring ${APPLICATION}"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/fluid-calendar.service
@@ -95,7 +84,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f $tmp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/freshrss-install.sh b/install/freshrss-install.sh
index 31a25c561..37b0ef02f 100644
--- a/install/freshrss-install.sh
+++ b/install/freshrss-install.sh
@@ -13,13 +13,7 @@ setting_up_container
network_check
update_os
-msg_info "Installing Dependencies"
-$STD apt-get install -y \
- apache2 \
- php-{curl,dom,json,ctype,pgsql,gmp,mbstring,iconv,zip} \
- libapache2-mod-php
-msg_ok "Installed Dependencies"
-
+PHP_VERSION="8.2" PHP_MODULE="curl,xml,mbstring,intl,zip,pgsql,gmp" PHP_APACHE="YES" setup_php
PG_VERSION="16" setup_postgresql
msg_info "Setting up PostgreSQL"
@@ -36,17 +30,14 @@ $STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER TEMP
} >>~/freshrss.creds
msg_ok "Set up PostgreSQL"
-msg_info "Installing FreshRSS"
-RELEASE=$(curl -fsSL https://api.github.com/repos/FreshRSS/FreshRSS/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-cd /opt
-curl -fsSL "https://github.com/FreshRSS/FreshRSS/archive/refs/tags/${RELEASE}.zip" -o "${RELEASE}.zip"
-$STD unzip "${RELEASE}.zip"
-mv "/opt/FreshRSS-${RELEASE}" /opt/freshrss
+fetch_and_deploy_gh_release "freshrss" "FreshRSS/FreshRSS"
+
+msg_info "Configuring FreshRSS"
cd /opt/freshrss
chown -R www-data:www-data /opt/freshrss
chmod -R g+rX /opt/freshrss
chmod -R g+w /opt/freshrss/data/
-msg_ok "Installed FreshRSS"
+msg_ok "Configured FreshRSS"
msg_info "Setting up cron job for feed refresh"
cat <<EOF >/etc/cron.d/freshrss-actualize
@@ -83,7 +74,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf "/opt/${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/gatus-install.sh b/install/gatus-install.sh
index b857bacad..f0b70959b 100644
--- a/install/gatus-install.sh
+++ b/install/gatus-install.sh
@@ -20,20 +20,15 @@ $STD apt-get install -y \
msg_ok "Installed Dependencies"
setup_go
+fetch_and_deploy_gh_release "gatus" "TwiN/gatus"
-RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-msg_info "Setting up gatus v${RELEASE}"
-temp_file=$(mktemp)
-mkdir -p /opt/gatus
-curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
+msg_info "Configuring gatus"
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv config.yaml config
-echo "${RELEASE}" >/opt/gatus_version.txt
-msg_ok "Done setting up gatus"
+msg_ok "Configured gatus"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/gatus.service
@@ -58,10 +53,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
-
-motd_ssh
-customize
diff --git a/install/ghost-install.sh b/install/ghost-install.sh
index 5a522afaf..c9bd705ba 100644
--- a/install/ghost-install.sh
+++ b/install/ghost-install.sh
@@ -16,19 +16,19 @@ update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
nginx \
- ca-certificates
+ ca-certificates \
+ libjemalloc2
msg_ok "Installed Dependencies"
-setup_mariadb
+setup_mysql
msg_info "Configuring Database"
DB_NAME=ghost
DB_USER=ghostuser
DB_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
-$STD mariadb -u root -e "CREATE DATABASE $DB_NAME;"
-$STD mariadb -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';"
-$STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;"
-
+$STD mysql -u root -e "CREATE DATABASE $DB_NAME;"
+$STD mysql -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';"
+$STD mysql -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;"
{
echo "Ghost-Credentials"
echo "Ghost Database User: $DB_USER"
@@ -37,7 +37,7 @@ $STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUS
} >>~/ghost.creds
msg_ok "Configured MySQL"
-NODE_VERSION="20" setup_nodejs
+NODE_VERSION="22" setup_nodejs
msg_info "Installing Ghost CLI"
$STD npm install ghost-cli@latest -g
diff --git a/install/gitea-install.sh b/install/gitea-install.sh
index 582d4c65c..d48360fe0 100644
--- a/install/gitea-install.sh
+++ b/install/gitea-install.sh
@@ -19,10 +19,9 @@ $STD apt-get install -y git
$STD apt-get install -y sqlite3
msg_ok "Installed Dependencies"
-msg_info "Installing Gitea"
-RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
-curl -fsSL "https://github.com/go-gitea/gitea/releases/download/v$RELEASE/gitea-$RELEASE-linux-arm64" -o "gitea-$RELEASE-linux-arm64"
-mv gitea* /usr/local/bin/gitea
+fetch_and_deploy_gh_release "gitea" "go-gitea/gitea" "singlefile" "latest" "/usr/local/bin" "gitea-*-linux-arm64"
+
+msg_info "Configuring Gitea"
chmod +x /usr/local/bin/gitea
adduser --system --group --disabled-password --shell /bin/bash --home /etc/gitea gitea >/dev/null
mkdir -p /var/lib/gitea/{custom,data,log}
@@ -31,7 +30,7 @@ chmod -R 750 /var/lib/gitea/
chown root:gitea /etc/gitea
chmod 770 /etc/gitea
sudo -u gitea ln -s /var/lib/gitea/data/.ssh/ /etc/gitea/.ssh
-msg_ok "Installed Gitea"
+msg_ok "Configured Gitea"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/gitea.service
diff --git a/install/gitea-mirror-install.sh b/install/gitea-mirror-install.sh
index 54c3b5d75..0f8915017 100644
--- a/install/gitea-mirror-install.sh
+++ b/install/gitea-mirror-install.sh
@@ -3,7 +3,7 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Author: CrazyWolf13
# License: MIT | https://github.com/asylumexp/Proxmox/raw/main/LICENSE
-# Source: https://github.com/arunavo4/gitea-mirror
+# Source: https://github.com/RayLabsHQ/gitea-mirror
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@@ -28,7 +28,7 @@ ln -sf /opt/bun/bin/bun /usr/local/bin/bun
ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
msg_ok "Installed Bun"
-fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
+fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"
msg_info "Installing gitea-mirror"
cd /opt/gitea-mirror
diff --git a/install/glance-install.sh b/install/glance-install.sh
index ba987e8dc..8715f1e55 100644
--- a/install/glance-install.sh
+++ b/install/glance-install.sh
@@ -13,12 +13,9 @@ setting_up_container
network_check
update_os
-msg_info "Installing Glance"
-RELEASE=$(curl -fsSL https://api.github.com/repos/glanceapp/glance/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-cd /opt
-curl -fsSL "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-arm64.tar.gz" -o "glance-linux-arm64.tar.gz"
-mkdir -p /opt/glance
-tar -xzf glance-linux-arm64.tar.gz -C /opt/glance
+fetch_and_deploy_gh_release "glance" "glanceapp/glance" "prebuild" "latest" "/opt/glance" "glance-linux-arm64.tar.gz"
+
+msg_info "Configuring Glance"
cat <<EOF >/opt/glance/glance.yml
pages:
- name: Startpage
@@ -39,9 +36,7 @@ pages:
- title: Helper Scripts
url: https://github.com/community-scripts/ProxmoxVE
EOF
-
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Glance"
+msg_ok "Configured Glance"
msg_info "Creating Service"
service_path="/etc/systemd/system/glance.service"
@@ -67,7 +62,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf /opt/glance-linux-arm64.tar.gz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/habitica-install.sh b/install/habitica-install.sh
index ade1916fe..aea4c4c59 100644
--- a/install/habitica-install.sh
+++ b/install/habitica-install.sh
@@ -22,18 +22,16 @@ curl -fsSL "http://ports.ubuntu.com/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu
$STD dpkg -i libssl1.1_1.1.1f-1ubuntu2_arm64.deb
msg_ok "Installed Dependencies"
-NODE_VERSION="20" setup_nodejs
+NODE_VERSION="20" NODE_MODULE="gulp-cli,mocha" setup_nodejs
+fetch_and_deploy_gh_release "habitica" "HabitRPG/habitica" "tarball" "latest" "/opt/habitica"
msg_info "Setup ${APPLICATION}"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/HabitRPG/habitica/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-mv habitica-${RELEASE}/ /opt/habitica
cd /opt/habitica
$STD npm i
+$STD npm run postinstall
+$STD npm run client:build
+$STD gulp build:prod
cp config.json.example config.json
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Setup ${APPLICATION}"
msg_info "Creating Service"
@@ -91,7 +89,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/immich-install.sh b/install/immich-install.sh
index 4a75d0e5f..26c39994e 100644
--- a/install/immich-install.sh
+++ b/install/immich-install.sh
@@ -66,6 +66,7 @@ $STD apt-get install --no-install-recommends -y \
mesa-vulkan-drivers \
ocl-icd-libopencl1 \
tini \
+ libaom-dev \
zlib1g
$STD apt-get install -y \
libgdk-pixbuf-2.0-dev librsvg2-dev libtool
@@ -136,7 +137,6 @@ if [[ -f ~/.openvino ]]; then
fi
msg_ok "Packages from Testing Repo Installed"
-# Fix default DB collation issue after libc update
$STD sudo -u postgres psql -c "ALTER DATABASE postgres REFRESH COLLATION VERSION;"
$STD sudo -u postgres psql -c "ALTER DATABASE $DB_NAME REFRESH COLLATION VERSION;"
@@ -201,7 +201,7 @@ $STD cmake --preset=release-noplugins \
-DWITH_LIBSHARPYUV=ON \
-DWITH_LIBDE265=ON \
-DWITH_AOM_DECODER=OFF \
- -DWITH_AOM_ENCODER=OFF \
+ -DWITH_AOM_ENCODER=ON \
-DWITH_X265=OFF \
-DWITH_EXAMPLES=OFF \
..
@@ -237,7 +237,8 @@ $STD make clean
cd "$STAGING_DIR"
SOURCE=$SOURCE_DIR/libvips
-: "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
+# : "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
+: "${LIBVIPS_REVISION:=8fa37a64547e392d3808eed8d72adab7e02b3d00}"
$STD git clone https://github.com/libvips/libvips.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBVIPS_REVISION"
@@ -284,6 +285,10 @@ cd "$SRC_DIR"
cp -a server/{node_modules,dist,bin,resources,package.json,package-lock.json,start*.sh} "$APP_DIR"/
cp -a web/build "$APP_DIR"/www
cp LICENSE "$APP_DIR"
+cd "$APP_DIR"
+export SHARP_FORCE_GLOBAL_LIBVIPS=true
+$STD npm install sharp
+rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
msg_ok "Installed Immich Web Components"
cd "$SRC_DIR"/machine-learning
@@ -307,8 +312,6 @@ ln -s "$UPLOAD_DIR" "$APP_DIR"/upload
ln -s "$UPLOAD_DIR" "$ML_DIR"/upload
msg_info "Installing Immich CLI"
-$STD npm install --build-from-source sharp
-rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-aarch64}
$STD npm i -g @immich/cli
msg_ok "Installed Immich CLI"
diff --git a/install/komodo-install.sh b/install/komodo-install.sh
index 05106d0c2..5191452b6 100644
--- a/install/komodo-install.sh
+++ b/install/komodo-install.sh
@@ -39,8 +39,7 @@ msg_ok "Installed Docker"
echo "${TAB3}Choose the database for Komodo installation:"
echo "${TAB3}1) MongoDB (recommended)"
-echo "${TAB3}2) SQLite"
-echo "${TAB3}3) PostgreSQL"
+echo "${TAB3}2) FerretDB"
read -rp "${TAB3}Enter your choice (default: 1): " DB_CHOICE
DB_CHOICE=${DB_CHOICE:-1}
@@ -49,10 +48,7 @@ case $DB_CHOICE in
DB_COMPOSE_FILE="mongo.compose.yaml"
;;
2)
- DB_COMPOSE_FILE="sqlite.compose.yaml"
- ;;
-3)
- DB_COMPOSE_FILE="postgres.compose.yaml"
+ DB_COMPOSE_FILE="ferretdb.compose.yaml"
;;
*)
echo "Invalid choice. Defaulting to MongoDB."
diff --git a/install/mafl-install.sh b/install/mafl-install.sh
index 2fde82673..cf4478f27 100644
--- a/install/mafl-install.sh
+++ b/install/mafl-install.sh
@@ -14,22 +14,17 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y make
-$STD apt-get install -y g++
-$STD apt-get install -y gcc
-$STD apt-get install -y ca-certificates
+$STD apt-get install -y \
+ ca-certificates \
+ build-essential
msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
+fetch_and_deploy_gh_release "mafl" "hywax/mafl"
-RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Installing Mafl v${RELEASE}"
-curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
-tar -xzf v${RELEASE}.tar.gz
mkdir -p /opt/mafl/data
curl -fsSL "https://raw.githubusercontent.com/hywax/mafl/main/.example/config.yml" -o "/opt/mafl/data/config.yml"
-mv mafl-${RELEASE}/* /opt/mafl
-rm -rf mafl-${RELEASE}
cd /opt/mafl
export NUXT_TELEMETRY_DISABLED=true
$STD yarn install
diff --git a/install/mqtt-install.sh b/install/mqtt-install.sh
index 34bbece23..74e562d6e 100644
--- a/install/mqtt-install.sh
+++ b/install/mqtt-install.sh
@@ -15,7 +15,7 @@ update_os
msg_info "Installing Mosquitto MQTT Broker"
source /etc/os-release
-curl -fsSL http://repo.mosquitto.org/debian/mosquitto-repo.gpg.key >/usr/share/keyrings/mosquitto-repo.gpg.key
+curl -fsSL http://repo.mosquitto.org/debian/mosquitto-repo.gpg >/usr/share/keyrings/mosquitto-repo.gpg.key
chmod go+r /usr/share/keyrings/mosquitto-repo.gpg.key
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/mosquitto-repo.gpg.key] http://repo.mosquitto.org/debian ${VERSION_CODENAME} main" >/etc/apt/sources.list.d/mosquitto.list
$STD apt-get update
diff --git a/install/outline-install.sh b/install/outline-install.sh
index d243cee9d..5fd5f3bfe 100644
--- a/install/outline-install.sh
+++ b/install/outline-install.sh
@@ -55,7 +55,7 @@ sed -i 's/NODE_ENV=production/NODE_ENV=development/g' /opt/outline/.env
sed -i "s/generate_a_new_key/${SECRET_KEY}/g" /opt/outline/.env
sed -i "s/user:pass@postgres/${DB_USER}:${DB_PASS}@localhost/g" /opt/outline/.env
sed -i 's/redis:6379/localhost:6379/g' /opt/outline/.env
-sed -i "32s#URL=#URL=http://${LOCAL_IP}#g" /opt/outline/.env
+sed -i "5s#URL=#URL=http://${LOCAL_IP}#g" /opt/outline/.env
sed -i 's/FORCE_HTTPS=true/FORCE_HTTPS=false/g' /opt/outline/.env
$STD yarn install --frozen-lockfile
export NODE_OPTIONS="--max-old-space-size=3584"
diff --git a/install/planka-install.sh b/install/planka-install.sh
index d46600ef5..989dc8e96 100644
--- a/install/planka-install.sh
+++ b/install/planka-install.sh
@@ -45,12 +45,12 @@ fetch_and_deploy_gh_release "planka" "plankanban/planka" "prebuild" "latest" "/o
msg_info "Configuring PLANKA"
LOCAL_IP=$(hostname -I | awk '{print $1}')
SECRET_KEY=$(openssl rand -hex 64)
-cd /opt/planka/planka
+cd /opt/planka
$STD npm install
cp .env.sample .env
-sed -i "s#http://localhost:1337#http://$LOCAL_IP:1337#g" /opt/planka/planka/.env
-sed -i "s#postgres@localhost#planka:$DB_PASS@localhost#g" /opt/planka/planka/.env
-sed -i "s#notsecretkey#$SECRET_KEY#g" /opt/planka/planka/.env
+sed -i "s#http://localhost:1337#http://$LOCAL_IP:1337#g" /opt/planka/.env
+sed -i "s#postgres@localhost#planka:$DB_PASS@localhost#g" /opt/planka/.env
+sed -i "s#notsecretkey#$SECRET_KEY#g" /opt/planka/.env
$STD npm run db:init
msg_ok "Configured PLANKA"
@@ -84,7 +84,7 @@ Description=planka Service
After=network.target
[Service]
-WorkingDirectory=/opt/planka/planka
+WorkingDirectory=/opt/planka
ExecStart=/usr/bin/npm start --prod
Restart=always
diff --git a/install/stirling-pdf-install.sh b/install/stirling-pdf-install.sh
index e3abf7daf..ce12f9d68 100644
--- a/install/stirling-pdf-install.sh
+++ b/install/stirling-pdf-install.sh
@@ -15,7 +15,6 @@ update_os
msg_info "Installing Dependencies (Patience)"
$STD apt-get install -y \
- git \
automake \
autoconf \
libtool \
@@ -25,10 +24,26 @@ $STD apt-get install -y \
make \
g++ \
unpaper \
- ocrmypdf \
+ fonts-urw-base35 \
+ qpdf \
poppler-utils
msg_ok "Installed Dependencies"
+PYTHON_VERSION="3.12" setup_uv
+JAVA_VERSION="21" setup_java
+
+read -r -p "${TAB3}Do you want to install Stirling-PDF with Login? (no/n = without Login) [Y/n] " response
+response=${response,,} # Convert to lowercase
+login_mode="false"
+if [[ "$response" == "y" || "$response" == "yes" || -z "$response" ]]; then
+ USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF-with-login.jar"
+ mv /opt/Stirling-PDF/Stirling-PDF-with-login.jar /opt/Stirling-PDF/Stirling-PDF.jar
+ touch ~/.Stirling-PDF-login
+ login_mode="true"
+else
+ USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF.jar"
+fi
+
msg_info "Installing LibreOffice Components"
$STD apt-get install -y \
libreoffice-writer \
@@ -37,32 +52,35 @@ $STD apt-get install -y \
libreoffice-core \
libreoffice-common \
libreoffice-base-core \
- python3-uno
+ libreoffice-script-provider-python \
+ libreoffice-java-common \
+ unoconv \
+ pngquant \
+ weasyprint
msg_ok "Installed LibreOffice Components"
msg_info "Installing Python Dependencies"
-$STD apt-get install -y \
- python3 \
- python3-pip
-rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
-$STD pip3 install \
- uno \
+mkdir -p /tmp/stirling-pdf
+$STD uv venv /opt/.venv
+export PATH="/opt/.venv/bin:$PATH"
+source /opt/.venv/bin/activate
+$STD uv pip install --upgrade pip
+$STD uv pip install \
opencv-python-headless \
- unoconv \
- pngquant \
- WeasyPrint
+ ocrmypdf \
+ pillow \
+ pdf2image
+
+$STD apt-get install -y python3-uno python3-pip
+$STD pip3 install --break-system-packages unoserver
+ln -sf /opt/.venv/bin/python3 /usr/local/bin/python3
+ln -sf /opt/.venv/bin/pip /usr/local/bin/pip
msg_ok "Installed Python Dependencies"
-msg_info "Installing Azul Zulu"
-curl -fsSL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xB1998361219BD9C9" -o "/etc/apt/trusted.gpg.d/zulu-repo.asc"
-curl -fsSL "https://cdn.azul.com/zulu/bin/zulu-repo_1.0.0-3_all.deb" -o "/zulu-repo_1.0.0-3_all.deb"
-$STD dpkg -i zulu-repo_1.0.0-3_all.deb
-$STD apt-get update
-$STD apt-get -y install zulu17-jdk
-msg_ok "Installed Azul Zulu"
-
msg_info "Installing JBIG2"
-$STD git clone https://github.com/agl/jbig2enc /opt/jbig2enc
+$STD curl -fsSL -o /tmp/jbig2enc.tar.gz https://github.com/agl/jbig2enc/archive/refs/tags/0.30.tar.gz
+mkdir -p /opt/jbig2enc
+tar -xzf /tmp/jbig2enc.tar.gz -C /opt/jbig2enc --strip-components=1
cd /opt/jbig2enc
$STD bash ./autogen.sh
$STD bash ./configure
@@ -74,23 +92,46 @@ msg_info "Installing Language Packs (Patience)"
$STD apt-get install -y 'tesseract-ocr-*'
msg_ok "Installed Language Packs"
-msg_info "Installing Stirling-PDF (Additional Patience)"
-RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
-tar -xzf v${RELEASE}.tar.gz
-cd Stirling-PDF-$RELEASE
-chmod +x ./gradlew
-$STD ./gradlew build
-mkdir -p /opt/Stirling-PDF
-touch /opt/Stirling-PDF/.env
-mv ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
-mv scripts /opt/Stirling-PDF/
-ln -s /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
-ln -s /usr/share/tesseract-ocr/5/tessdata/ /usr/share/tessdata
-msg_ok "Installed Stirling-PDF"
+msg_info "Creating Environment Variables"
+cat <<EOF >/opt/Stirling-PDF/.env
+# Java tuning
+JAVA_BASE_OPTS="-XX:+UnlockExperimentalVMOptions -XX:MaxRAMPercentage=75 -XX:InitiatingHeapOccupancyPercent=20 -XX:+G1PeriodicGCInvokesConcurrent -XX:G1PeriodicGCInterval=10000 -XX:+UseStringDeduplication -XX:G1PeriodicGCSystemLoadThreshold=70"
+JAVA_CUSTOM_OPTS=""
+
+# LibreOffice
+PATH=/opt/.venv/bin:/usr/lib/libreoffice/program:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+UNO_PATH=/usr/lib/libreoffice/program
+URE_BOOTSTRAP=file:///usr/lib/libreoffice/program/fundamentalrc
+PYTHONPATH=/usr/lib/libreoffice/program:/opt/.venv/lib/python3.12/site-packages
+LD_LIBRARY_PATH=/usr/lib/libreoffice/program
+
+STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf
+TMPDIR=/tmp/stirling-pdf
+TEMP=/tmp/stirling-pdf
+TMP=/tmp/stirling-pdf
+
+# Paths
+PATH=/opt/.venv/bin:/usr/lib/libreoffice/program:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+EOF
+
+if [[ "$login_mode" == "true" ]]; then
+ cat <<EOF >>/opt/Stirling-PDF/.env
+# activate Login
+DISABLE_ADDITIONAL_FEATURES=false
+SECURITY_ENABLELOGIN=true
+
+# login credentials
+SECURITY_INITIALLOGIN_USERNAME=admin
+SECURITY_INITIALLOGIN_PASSWORD=stirling
+EOF
+fi
+msg_ok "Created Environment Variables"
+
+msg_info "Refreshing Font Cache"
+$STD fc-cache -fv
+msg_ok "Font Cache Updated"
msg_info "Creating Service"
-# Create LibreOffice listener service
cat <<EOF >/etc/systemd/system/libreoffice-listener.service
[Unit]
Description=LibreOffice Headless Listener Service
@@ -107,14 +148,6 @@ Restart=always
WantedBy=multi-user.target
EOF
-# Set up environment variables
-cat <<EOF >/opt/Stirling-PDF/.env
-PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/libreoffice/program
-UNO_PATH=/usr/lib/libreoffice/program
-PYTHONPATH=/usr/lib/python3/dist-packages:/usr/lib/libreoffice/program
-LD_LIBRARY_PATH=/usr/lib/libreoffice/program
-EOF
-
cat <<EOF >/etc/systemd/system/stirlingpdf.service
[Unit]
Description=Stirling-PDF service
@@ -137,16 +170,32 @@ RestartSec=10
WantedBy=multi-user.target
EOF
-# Enable and start services
+cat <<EOF >/etc/systemd/system/unoserver.service
+[Unit]
+Description=UnoServer RPC Interface
+After=libreoffice-listener.service
+Requires=libreoffice-listener.service
+
+[Service]
+Type=simple
+ExecStart=/usr/local/bin/unoserver --port 2003 --interface 127.0.0.1
+Restart=always
+EnvironmentFile=/opt/Stirling-PDF/.env
+
+[Install]
+WantedBy=multi-user.target
+EOF
+
systemctl enable -q --now libreoffice-listener
systemctl enable -q --now stirlingpdf
+systemctl enable -q --now unoserver
msg_ok "Created Service"
motd_ssh
customize
msg_info "Cleaning up"
-rm -rf v${RELEASE}.tar.gz /zulu-repo_1.0.0-3_all.deb
+rm -f /tmp/jbig2enc.tar.gz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/threadfin-install.sh b/install/threadfin-install.sh
index a717caebf..e432ed8b1 100644
--- a/install/threadfin-install.sh
+++ b/install/threadfin-install.sh
@@ -14,16 +14,12 @@ network_check
update_os
msg_info "Installing Dependencies"
-$STD apt-get install -y ffmpeg
-$STD apt-get install -y vlc
+$STD apt-get install -y \
+ ffmpeg \
+ vlc
msg_ok "Installed Dependencies"
-msg_info "Installing Threadfin"
-mkdir -p /opt/threadfin
-curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_arm64" -o "/opt/threadfin/threadfin"
-chmod +x /opt/threadfin/threadfin
-
-msg_ok "Installed Threadfin"
+fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_arm64"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/threadfin.service
diff --git a/install/trilium-install.sh b/install/trilium-install.sh
index a08d0ad1c..a88212bc0 100644
--- a/install/trilium-install.sh
+++ b/install/trilium-install.sh
@@ -3,7 +3,7 @@
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/asylumexp/Proxmox/raw/main/LICENSE
-# Source: https://triliumnext.github.io/Docs/
+# Source: https://github.com/TriliumNext/Trilium
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@@ -13,14 +13,7 @@ setting_up_container
network_check
update_os
-msg_info "Setup TriliumNext"
-cd /opt
-RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/trilium/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/TriliumNext/trilium/releases/download/v${RELEASE}/TriliumNextNotes-Server-v${RELEASE}-linux-arm64.tar.xz" -o "TriliumNextNotes-Server-v${RELEASE}-linux-arm64.tar.xz"
-tar -xf TriliumNextNotes-Server-v${RELEASE}-linux-arm64.tar.xz
-mv TriliumNextNotes-Server-$RELEASE-linux-arm64 /opt/trilium
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Setup TriliumNext"
+fetch_and_deploy_gh_release "Trilium" "TriliumNext/Trilium" "prebuild" "latest" "/opt/trilium" "TriliumNotes-Server-*linux-arm64.tar.xz"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/trilium.service
@@ -46,7 +39,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf /opt/TriliumNextNotes-Server-${RELEASE}-linux-arm64.tar.xz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/zigbee2mqtt-install.sh b/install/zigbee2mqtt-install.sh
index 661dd2742..1a1a1e61b 100644
--- a/install/zigbee2mqtt-install.sh
+++ b/install/zigbee2mqtt-install.sh
@@ -19,17 +19,15 @@ $STD apt-get install -y \
make \
g++ \
gcc \
- ca-certificates
+ ca-certificates \
+ jq
msg_ok "Installed Dependencies"
-NODE_VERSION="22" NODE_MODULE="pnpm@latest" setup_nodejs
+NODE_VERSION="24" NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/Koenkk/zigbee2mqtt/master/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
+
+fetch_and_deploy_gh_release "Zigbee2MQTT" "Koenkk/zigbee2mqtt" "tarball" "latest" "/opt/zigbee2mqtt"
msg_info "Setting up Zigbee2MQTT"
-cd /opt
-RELEASE=$(curl -fsSL https://api.github.com/repos/Koenkk/zigbee2mqtt/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-curl -fsSL "https://github.com/Koenkk/zigbee2mqtt/archive/refs/tags/${RELEASE}.zip" -o "${RELEASE}.zip"
-$STD unzip ${RELEASE}.zip
-mv zigbee2mqtt-${RELEASE} /opt/zigbee2mqtt
cd /opt/zigbee2mqtt/data
mv configuration.example.yaml configuration.yaml
cd /opt/zigbee2mqtt
@@ -60,7 +58,6 @@ motd_ssh
customize
msg_info "Cleaning up"
-rm -rf /opt/${RELEASE}.zip
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
diff --git a/install/zipline-install.sh b/install/zipline-install.sh
index 0c2266e95..e9326ac7d 100644
--- a/install/zipline-install.sh
+++ b/install/zipline-install.sh
@@ -50,9 +50,9 @@ CORE_HOSTNAME=0.0.0.0
CORE_PORT=3000
CORE_RETURN_HTTPS=false
DATASOURCE_TYPE=local
-DATASOURCE_LOCAL_DIRECTORY=/opt/zipline-upload
+DATASOURCE_LOCAL_DIRECTORY=/opt/zipline-uploads
EOF
-mkdir -p /opt/zipline-upload
+mkdir -p /opt/zipline-uploads
$STD pnpm install
$STD pnpm build
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
diff --git a/misc/alpine-install.func b/misc/alpine-install.func
index a2a804bf2..57a8eb019 100644
--- a/misc/alpine-install.func
+++ b/misc/alpine-install.func
@@ -83,11 +83,6 @@ update_os() {
msg_info "Updating Container OS"
$STD apk -U upgrade
msg_ok "Updated Container OS"
-
- msg_info "Installing core dependencies"
- $STD apk update
- $STD apk add newt curl openssh nano mc ncurses gpg
- msg_ok "Core dependencies installed"
}
# This function modifies the message of the day (motd) and SSH settings
diff --git a/misc/build.func b/misc/build.func
index c3307f6f7..1069dc528 100644
--- a/misc/build.func
+++ b/misc/build.func
@@ -303,13 +303,12 @@ echo_default() {
fi
# Output the selected values with icons
- echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}"
- echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}"
+ echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
+ echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os ($var_version)${CL}"
echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}${CORE_COUNT}${CL}"
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
- echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
if [ "$VERB" == "yes" ]; then
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}Enabled${CL}"
fi
@@ -1101,7 +1100,9 @@ build_container() {
# This executes create_lxc.sh and creates the container and .conf file
bash -c "$(curl -fsSL https://raw.githubusercontent.com/asylumexp/Proxmox/main/misc/create_lxc.sh)" $?
- LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
+ LXC_CONFIG="/etc/pve/lxc/${CTID}.conf"
+
+ # USB passthrough for privileged LXC (CT_TYPE=0)
if [ "$CT_TYPE" == "0" ]; then
cat <<EOF >>"$LXC_CONFIG"
# USB passthrough
@@ -1117,38 +1118,82 @@ lxc.mount.entry: /dev/ttyACM1 dev/ttyACM1 none bind,optional,create=
EOF
fi
- if [ "$CT_TYPE" == "0" ]; then
- if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
- cat <<EOF >>"$LXC_CONFIG"
-# VAAPI hardware transcoding
-lxc.cgroup2.devices.allow: c 226:0 rwm
-lxc.cgroup2.devices.allow: c 226:128 rwm
-lxc.cgroup2.devices.allow: c 29:0 rwm
-lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file
-lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir
-lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file
-EOF
+ # VAAPI passthrough for privileged containers or known apps
+ VAAPI_APPS=(
+ "immich"
+ "Channels"
+ "Emby"
+ "ErsatzTV"
+ "Frigate"
+ "Jellyfin"
+ "Plex"
+ "Scrypted"
+ "Tdarr"
+ "Unmanic"
+ "Ollama"
+ "FileFlows"
+ "Open WebUI"
+ )
+
+ is_vaapi_app=false
+ for vaapi_app in "${VAAPI_APPS[@]}"; do
+ if [[ "$APP" == "$vaapi_app" ]]; then
+ is_vaapi_app=true
+ break
fi
- else
- if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
- if [[ -e "/dev/dri/renderD128" ]]; then
- if [[ -e "/dev/dri/card0" ]]; then
- cat <>"$LXC_CONFIG"
-# VAAPI hardware transcoding
-dev0: /dev/dri/card0,gid=44
-dev1: /dev/dri/renderD128,gid=104
-EOF
- else
- cat <>"$LXC_CONFIG"
-# VAAPI hardware transcoding
-dev0: /dev/dri/card1,gid=44
-dev1: /dev/dri/renderD128,gid=104
-EOF
- fi
+ done
+
+ if ([ "$CT_TYPE" == "0" ] || [ "$is_vaapi_app" == "true" ]) &&
+ ([[ -e /dev/dri/renderD128 ]] || [[ -e /dev/dri/card0 ]] || [[ -e /dev/fb0 ]]); then
+
+ echo ""
+ msg_custom "βοΈ " "\e[96m" "Configuring VAAPI passthrough for LXC container"
+ if [ "$CT_TYPE" != "0" ]; then
+ msg_custom "β οΈ " "\e[33m" "Container is unprivileged β VAAPI passthrough may not work without additional host configuration (e.g., idmap)."
+ fi
+ msg_custom "βΉοΈ " "\e[96m" "VAAPI enables GPU hardware acceleration (e.g., for video transcoding in Jellyfin or Plex)."
+ echo ""
+ read -rp "β€ Automatically mount all available VAAPI devices? [Y/n]: " VAAPI_ALL
+
+ if [[ "$VAAPI_ALL" =~ ^[Yy]$|^$ ]]; then
+ if [ "$CT_TYPE" == "0" ]; then
+ # Privileged container - all devices may be passed through directly
+ [[ -e /dev/dri/renderD128 ]] && {
+ echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
+ echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
+ }
+ [[ -e /dev/dri/card0 ]] && {
+ echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"
+ echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"
+ }
+ [[ -e /dev/fb0 ]] && {
+ echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
+ echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
+ }
+ [[ -d /dev/dri ]] && {
+ echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
+ }
+ else
+ # Unprivileged container - only devX entries for the UI
+ [[ -e /dev/dri/card0 ]] && echo "dev0: /dev/dri/card0,gid=44" >>"$LXC_CONFIG"
+ [[ -e /dev/dri/card1 ]] && echo "dev0: /dev/dri/card1,gid=44" >>"$LXC_CONFIG"
+ [[ -e /dev/dri/renderD128 ]] && echo "dev1: /dev/dri/renderD128,gid=104" >>"$LXC_CONFIG"
fi
fi
+
+ fi
+ if [ "$CT_TYPE" == "1" ] && [ "$is_vaapi_app" == "true" ]; then
+ if [[ -e /dev/dri/card0 ]]; then
+ echo "dev0: /dev/dri/card0,gid=44" >>"$LXC_CONFIG"
+ elif [[ -e /dev/dri/card1 ]]; then
+ echo "dev0: /dev/dri/card1,gid=44" >>"$LXC_CONFIG"
+ fi
+ if [[ -e /dev/dri/renderD128 ]]; then
+ echo "dev1: /dev/dri/renderD128,gid=104" >>"$LXC_CONFIG"
+ fi
fi
+ # TUN device passthrough
if [ "$ENABLE_TUN" == "yes" ]; then
cat <<EOF >>"$LXC_CONFIG"
lxc.cgroup2.devices.allow: c 10:200 rwm
@@ -1178,10 +1223,13 @@ EOF'
locale-gen >/dev/null && \
export LANG=\$locale_line"
+ if [[ -z "${tz:-}" ]]; then
+ tz=$(timedatectl show --property=Timezone --value 2>/dev/null || echo "Etc/UTC")
+ fi
if pct exec "$CTID" -- test -e "/usr/share/zoneinfo/$tz"; then
- pct exec "$CTID" -- bash -c "echo $tz >/etc/timezone && ln -sf /usr/share/zoneinfo/$tz /etc/localtime"
+ pct exec "$CTID" -- bash -c "tz='$tz'; echo \"\$tz\" >/etc/timezone && ln -sf \"/usr/share/zoneinfo/\$tz\" /etc/localtime"
else
- msg_info "Skipping timezone setup β zone '$tz' not found in container"
+ msg_warn "Skipping timezone setup β zone '$tz' not found in container"
fi
pct exec "$CTID" -- bash -c "apt-get update >/dev/null && apt-get install -y sudo curl mc gnupg2 >/dev/null"
@@ -1261,7 +1309,9 @@ api_exit_script() {
fi
}
-trap 'api_exit_script' EXIT
+if command -v pveversion >/dev/null 2>&1; then
+ trap 'api_exit_script' EXIT
+fi
trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM
diff --git a/misc/core.func b/misc/core.func
index 880b0555f..57f545159 100644
--- a/misc/core.func
+++ b/misc/core.func
@@ -1,30 +1,6 @@
# Copyright (c) 2021-2025 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/asylumexp/Proxmox/main/LICENSE
-# if ! declare -f wait_for >/dev/null; then
-# echo "[DEBUG] Undefined function 'wait_for' used from: ${BASH_SOURCE[*]}" >&2
-# wait_for() {
-# echo "[DEBUG] Fallback: wait_for called with: $*" >&2
-# true
-# }
-# fi
-
-trap 'on_error $? $LINENO' ERR
-trap 'on_exit' EXIT
-trap 'on_interrupt' INT
-trap 'on_terminate' TERM
-
-if ! declare -f wait_for >/dev/null; then
- wait_for() {
- true
- }
-fi
-
-declare -A MSG_INFO_SHOWN=()
-SPINNER_PID=""
-SPINNER_ACTIVE=0
-SPINNER_MSG=""
-
# ------------------------------------------------------------------------------
# Loads core utility groups once (colors, formatting, icons, defaults).
# ------------------------------------------------------------------------------
@@ -43,101 +19,51 @@ load_functions() {
# add more
}
-on_error() {
- local exit_code="$1"
- local lineno="$2"
+# ============================================================================
+# Error & Signal Handling β robust, universal, subshell-safe
+# ============================================================================
- stop_spinner
-
- case "$exit_code" in
- 1) msg_error "Generic error occurred (line $lineno)" ;;
- 2) msg_error "Shell misuse (line $lineno)" ;;
- 126) msg_error "Command cannot execute (line $lineno)" ;;
- 127) msg_error "Command not found (line $lineno)" ;;
- 128) msg_error "Invalid exit argument (line $lineno)" ;;
- 130) msg_error "Script aborted by user (CTRL+C)" ;;
- 143) msg_error "Script terminated by SIGTERM" ;;
- *) msg_error "Script failed at line $lineno with exit code $exit_code" ;;
- esac
-
- msg_error "Please make an issue on GitHub if you believe this is a script bug."
- exit "$exit_code"
-}
-
-on_exit() {
- cleanup_spinner || true
- [[ "${VERBOSE:-no}" == "yes" ]] && msg_info "Script exited"
-}
-
-on_interrupt() {
- msg_error "Interrupted by user (CTRL+C)"
- exit 130
-}
-
-on_terminate() {
- msg_error "Terminated by signal (TERM)"
- exit 143
-}
-
-setup_trap_abort_handling() {
- trap '__handle_signal_abort SIGINT' SIGINT
- trap '__handle_signal_abort SIGTERM' SIGTERM
- trap '__handle_unexpected_error $?' ERR
-}
-
-__handle_signal_abort() {
- local signal="$1"
- echo
- [ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
-
- case "$signal" in
- SIGINT)
- msg_error "Script aborted by user (CTRL+C)"
- exit 130
+_tool_error_hint() {
+ local cmd="$1"
+ local code="$2"
+ case "$cmd" in
+ curl)
+ case "$code" in
+ 6) echo "Curl: Could not resolve host (DNS problem)" ;;
+ 7) echo "Curl: Failed to connect to host (connection refused)" ;;
+ 22) echo "Curl: HTTP error (404/403 etc)" ;;
+ 28) echo "Curl: Operation timeout" ;;
+ *) echo "Curl: Unknown error ($code)" ;;
+ esac
;;
- SIGTERM)
- msg_error "Script terminated (SIGTERM)"
- exit 143
+ wget)
+ echo "Wget failed β URL unreachable or permission denied"
;;
- *)
- msg_error "Script interrupted (unknown signal: $signal)"
- exit 1
+ systemctl)
+ echo "Systemd unit failure β check service name and permissions"
;;
+ jq)
+ echo "jq parse error β malformed JSON or missing key"
+ ;;
+ mariadb | mysql)
+ echo "MySQL/MariaDB command failed β check credentials or DB"
+ ;;
+ unzip)
+ echo "unzip failed β corrupt file or missing permission"
+ ;;
+ tar)
+ echo "tar failed β invalid format or missing binary"
+ ;;
+ node | npm | pnpm | yarn)
+ echo "Node tool failed β check version compatibility or package.json"
+ ;;
+ *) echo "" ;;
esac
}
-__handle_unexpected_error() {
- local exit_code="$1"
- echo
- [ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
-
- case "$exit_code" in
- 1)
- msg_error "Generic error occurred (exit code 1)"
- ;;
- 2)
- msg_error "Misuse of shell builtins (exit code 2)"
- ;;
- 126)
- msg_error "Command invoked cannot execute (exit code 126)"
- ;;
- 127)
- msg_error "Command not found (exit code 127)"
- ;;
- 128)
- msg_error "Invalid exit argument (exit code 128)"
- ;;
- 130)
- msg_error "Script aborted by user (CTRL+C)"
- ;;
- 143)
- msg_error "Script terminated by SIGTERM"
- ;;
- *)
- msg_error "Unexpected error occurred (exit code $exit_code)"
- ;;
- esac
- exit "$exit_code"
+catch_errors() {
+ set -Eeuo pipefail
+ trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}
# ------------------------------------------------------------------------------
@@ -154,6 +80,13 @@ color() {
CL=$(echo "\033[m")
}
+# Special for spinner and colorized output via printf
+color_spinner() {
+ CS_YW=$'\033[33m'
+ CS_YWB=$'\033[93m'
+ CS_CL=$'\033[m'
+}
+
# ------------------------------------------------------------------------------
# Defines formatting helpers like tab, bold, and line reset sequences.
# ------------------------------------------------------------------------------
@@ -197,6 +130,7 @@ icons() {
ADVANCED="${TAB}π§©${TAB}${CL}"
FUSE="${TAB}ποΈ${TAB}${CL}"
HOURGLASS="${TAB}β³${TAB}"
+
}
# ------------------------------------------------------------------------------
@@ -228,7 +162,7 @@ silent() {
# Function to download & save header files
get_header() {
local app_name=$(echo "${APP,,}" | tr -d ' ')
- local app_type=${APP_TYPE:-ct} # Default 'ct'
+ local app_type=${APP_TYPE:-ct}
local header_url="https://raw.githubusercontent.com/asylumexp/Proxmox/main/${app_type}/headers/${app_name}"
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
@@ -258,77 +192,39 @@ header_info() {
fi
}
-# ------------------------------------------------------------------------------
-# Performs a curl request with retry logic and inline feedback.
-# ------------------------------------------------------------------------------
-
-run_curl() {
- if [ "$VERBOSE" = "no" ]; then
- $STD curl "$@"
- else
- curl "$@"
+ensure_tput() {
+ if ! command -v tput >/dev/null 2>&1; then
+ if grep -qi 'alpine' /etc/os-release; then
+ apk add --no-cache ncurses >/dev/null 2>&1
+ elif command -v apt-get >/dev/null 2>&1; then
+ apt-get update -qq >/dev/null
+ apt-get install -y -qq ncurses-bin >/dev/null 2>&1
+ fi
fi
}
-curl_handler() {
- set +e
- trap 'set -e' RETURN
- local args=()
- local url=""
- local max_retries=3
- local delay=2
- local attempt=1
- local exit_code
- local has_output_file=false
- local result=""
+is_alpine() {
+ local os_id="${var_os:-${PCT_OSTYPE:-}}"
- # Parse arguments
- for arg in "$@"; do
- if [[ "$arg" != -* && -z "$url" ]]; then
- url="$arg"
- fi
- [[ "$arg" == "-o" || "$arg" == --output ]] && has_output_file=true
- args+=("$arg")
- done
-
- if [[ -z "$url" ]]; then
- msg_error "No valid URL or option entered for curl_handler"
- return 1
+ if [[ -z "$os_id" && -f /etc/os-release ]]; then
+ os_id="$(
+ . /etc/os-release 2>/dev/null
+ echo "${ID:-}"
+ )"
fi
- $STD msg_info "Fetching: $url"
+ [[ "$os_id" == "alpine" ]]
+}
- while [[ $attempt -le $max_retries ]]; do
- if $has_output_file; then
- $STD run_curl "${args[@]}"
- exit_code=$?
- else
- result=$(run_curl "${args[@]}")
- exit_code=$?
- fi
-
- if [[ $exit_code -eq 0 ]]; then
- $STD msg_ok "Fetched: $url"
- $has_output_file || printf '%s' "$result"
- return 0
- fi
-
- if ((attempt >= max_retries)); then
- # Read error log if it exists
- if [ -s /tmp/curl_error.log ]; then
- local curl_stderr
- curl_stderr=$(&2
- sleep "$delay"
- ((attempt++))
- done
- set -e
+is_verbose_mode() {
+ local verbose="${VERBOSE:-${var_verbose:-no}}"
+ local tty_status
+ if [[ -t 2 ]]; then
+ tty_status="interactive"
+ else
+ tty_status="not-a-tty"
+ fi
+ [[ "$verbose" != "no" || ! -t 2 ]]
}
# ------------------------------------------------------------------------------
@@ -373,144 +269,92 @@ fatal() {
kill -INT $$
}
-# Ensure POSIX compatibility across Alpine and Debian/Ubuntu
-# === Spinner Start ===
-# Trap cleanup on various signals
-trap 'cleanup_spinner' EXIT INT TERM HUP
-
-spinner_frames=('β ' 'β ' 'β Ή' 'β Έ' 'β Ό' 'β ΄' 'β ¦' 'β §' 'β ' 'β ')
-
-# === Spinner Start ===
-start_spinner() {
- local msg="$1"
- local spin_i=0
- local interval=0.1
-
- stop_spinner
- SPINNER_MSG="$msg"
- SPINNER_ACTIVE=1
-
- {
- while [[ "$SPINNER_ACTIVE" -eq 1 ]]; do
- if [[ -t 2 ]]; then
- printf "\r\e[2K%s %b" "${TAB}${spinner_frames[spin_i]}${TAB}" "${YW}${SPINNER_MSG}${CL}" >&2
- else
- printf "%s...\n" "$SPINNER_MSG" >&2
- break
- fi
- spin_i=$(((spin_i + 1) % ${#spinner_frames[@]}))
- sleep "$interval"
- done
- } &
-
- local pid=$!
- if ps -p "$pid" >/dev/null 2>&1; then
- SPINNER_PID="$pid"
- else
- SPINNER_ACTIVE=0
- SPINNER_PID=""
- fi
+spinner() {
+ local chars=(β β β Ή β Έ β Ό β ΄ β ¦ β § β β )
+ local i=0
+ while true; do
+ local index=$((i++ % ${#chars[@]}))
+ printf "\r\033[2K%s %b" "${CS_YWB}${chars[$index]}${CS_CL}" "${CS_YWB}${SPINNER_MSG:-}${CS_CL}"
+ sleep 0.1
+ done
+}
+
+clear_line() {
+ tput cr 2>/dev/null || echo -en "\r"
+ tput el 2>/dev/null || echo -en "\033[K"
}
-# === Spinner Stop ===
stop_spinner() {
- if [[ "$SPINNER_ACTIVE" -eq 1 && -n "$SPINNER_PID" ]]; then
- SPINNER_ACTIVE=0
+ local pid="${SPINNER_PID:-}"
+ [[ -z "$pid" && -f /tmp/.spinner.pid ]] && pid=$(/dev/null; then
- kill "$SPINNER_PID" 2>/dev/null || true
- for _ in $(seq 1 10); do
- sleep 0.05
- kill -0 "$SPINNER_PID" 2>/dev/null || break
- done
+ if [[ -n "$pid" && "$pid" =~ ^[0-9]+$ ]]; then
+ if kill "$pid" 2>/dev/null; then
+ sleep 0.05
+ kill -9 "$pid" 2>/dev/null || true
+ wait "$pid" 2>/dev/null || true
fi
-
- if [[ "$SPINNER_PID" =~ ^[0-9]+$ ]]; then
- ps -p "$SPINNER_PID" -o pid= >/dev/null 2>&1 && wait "$SPINNER_PID" 2>/dev/null || true
- fi
-
- printf "\r\e[2K" >&2
- SPINNER_PID=""
+ rm -f /tmp/.spinner.pid
fi
-}
-cleanup_spinner() {
- stop_spinner
+ unset SPINNER_PID SPINNER_MSG
+ stty sane 2>/dev/null || true
}
msg_info() {
local msg="$1"
- [[ -z "$msg" || -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
+ [[ -z "$msg" ]] && return
+
+ if ! declare -p MSG_INFO_SHOWN &>/dev/null || ! declare -A MSG_INFO_SHOWN &>/dev/null; then
+ declare -gA MSG_INFO_SHOWN=()
+ fi
+ [[ -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
MSG_INFO_SHOWN["$msg"]=1
stop_spinner
+ SPINNER_MSG="$msg"
- if [[ "${VERBOSE:-no}" == "no" && -t 2 ]]; then
- start_spinner "$msg"
- else
+ if is_verbose_mode || is_alpine; then
+ local HOURGLASS="${TAB}β³${TAB}"
printf "\r\e[2K%s %b" "$HOURGLASS" "${YW}${msg}${CL}" >&2
+ return
fi
+
+ color_spinner
+ spinner &
+ SPINNER_PID=$!
+ echo "$SPINNER_PID" >/tmp/.spinner.pid
+ disown "$SPINNER_PID" 2>/dev/null || true
}
msg_ok() {
local msg="$1"
[[ -z "$msg" ]] && return
stop_spinner
- printf "\r\e[2K%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
+ clear_line
+ printf "%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
unset MSG_INFO_SHOWN["$msg"]
}
msg_error() {
- local msg="$1"
- [[ -z "$msg" ]] && return
stop_spinner
- printf "\r\e[2K%s %b\n" "$CROSS" "${RD}${msg}${CL}" >&2
+ local msg="$1"
+ echo -e "${BFR:-} ${CROSS:-βοΈ} ${RD}${msg}${CL}"
}
msg_warn() {
- local msg="$1"
- [[ -z "$msg" ]] && return
stop_spinner
- printf "\r\e[2K%s %b\n" "$INFO" "${YWB}${msg}${CL}" >&2
- unset MSG_INFO_SHOWN["$msg"]
+ local msg="$1"
+ echo -e "${BFR:-} ${INFO:-βΉοΈ} ${YWB}${msg}${CL}"
}
msg_custom() {
local symbol="${1:-"[*]"}"
- local color="${2:-"\e[36m"}" # Default: Cyan
+ local color="${2:-"\e[36m"}"
local msg="${3:-}"
-
[[ -z "$msg" ]] && return
- stop_spinner 2>/dev/null || true
- printf "\r\e[2K%s %b\n" "$symbol" "${color}${msg}${CL:-\e[0m}" >&2
-}
-
-msg_progress() {
- local current="$1"
- local total="$2"
- local label="$3"
- local width=40
- local filled percent bar empty
- local fill_char="#"
- local empty_char="-"
-
- if ! [[ "$current" =~ ^[0-9]+$ ]] || ! [[ "$total" =~ ^[0-9]+$ ]] || [[ "$total" -eq 0 ]]; then
- printf "\r\e[2K%s %b\n" "$CROSS" "${RD}Invalid progress input${CL}" >&2
- return
- fi
-
- percent=$(((current * 100) / total))
- filled=$(((current * width) / total))
- empty=$((width - filled))
-
- bar=$(printf "%${filled}s" | tr ' ' "$fill_char")
- bar+=$(printf "%${empty}s" | tr ' ' "$empty_char")
-
- printf "\r\e[2K%s [%s] %3d%% %s" "${TAB}" "$bar" "$percent" "$label" >&2
-
- if [[ "$current" -eq "$total" ]]; then
- printf "\n" >&2
- fi
+ stop_spinner
+ echo -e "${BFR:-} ${symbol} ${color}${msg}${CL:-\e[0m}"
}
run_container_safe() {
@@ -561,3 +405,5 @@ check_or_create_swap() {
return 1
fi
}
+
+trap 'stop_spinner' EXIT INT TERM
diff --git a/misc/create_lxc.sh b/misc/create_lxc.sh
index f13906951..c35e45667 100644
--- a/misc/create_lxc.sh
+++ b/misc/create_lxc.sh
@@ -20,36 +20,67 @@ fi
# This sets error handling options and defines the error_handler function to handle errors
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
+trap on_exit EXIT
+trap on_interrupt INT
+trap on_terminate TERM
+
+function on_exit() {
+ local exit_code="$?"
+ [[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
+ exit "$exit_code"
+}
-# This function handles errors
function error_handler() {
- printf "\e[?25h"
+
local exit_code="$?"
local line_number="$1"
local command="$2"
- local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
- echo -e "\n$error_message\n"
- exit 200
+ printf "\e[?25h"
+ echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
+ exit "$exit_code"
+}
+
+function on_interrupt() {
+ echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
+ exit 130
+}
+
+function on_terminate() {
+ echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
+ exit 143
+}
+
+function check_storage_support() {
+ local CONTENT="$1"
+ local -a VALID_STORAGES=()
+
+ while IFS= read -r line; do
+ local STORAGE=$(awk '{print $1}' <<<"$line")
+ [[ "$STORAGE" == "storage" || -z "$STORAGE" ]] && continue
+ VALID_STORAGES+=("$STORAGE")
+ done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1')
+
+ [[ ${#VALID_STORAGES[@]} -gt 0 ]]
}
# This checks for the presence of valid Container Storage and Template Storage locations
msg_info "Validating Storage"
-VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
-if [ -z "$VALIDCT" ]; then
- msg_error "Unable to detect a valid Container Storage location."
+if ! check_storage_support "rootdir"; then
+
+ msg_error "No valid storage found for 'rootdir' (Container)."
exit 1
fi
-VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
-if [ -z "$VALIDTMP" ]; then
- msg_error "Unable to detect a valid Template Storage location."
+if ! check_storage_support "vztmpl"; then
+
+ msg_error "No valid storage found for 'vztmpl' (Template)."
exit 1
fi
+msg_ok "Validated Storage (rootdir / vztmpl)."
# This function is used to select the storage class and determine the corresponding storage content type and label.
function select_storage() {
- local CLASS=$1
- local CONTENT
- local CONTENT_LABEL
+ local CLASS=$1 CONTENT CONTENT_LABEL
+
case $CLASS in
container)
CONTENT='rootdir'
@@ -59,51 +90,72 @@ function select_storage() {
CONTENT='vztmpl'
CONTENT_LABEL='Container template'
;;
- *) false || {
- msg_error "Invalid storage class."
- exit 201
- } ;;
+ iso)
+ CONTENT='iso'
+ CONTENT_LABEL='ISO image'
+ ;;
+ images)
+ CONTENT='images'
+ CONTENT_LABEL='VM Disk image'
+ ;;
+ backup)
+ CONTENT='backup'
+ CONTENT_LABEL='Backup'
+ ;;
+ snippets)
+ CONTENT='snippets'
+ CONTENT_LABEL='Snippets'
+ ;;
+ *)
+ msg_error "Invalid storage class '$CLASS'"
+ return 1
+ ;;
esac
- # Collect storage options
- local -a MENU
- local MSG_MAX_LENGTH=0
+local -a MENU
+ local -A STORAGE_MAP
+ local COL_WIDTH=0
- while read -r TAG TYPE _ _ _ FREE _; do
- local TYPE_PADDED
- local FREE_FMT
-
- TYPE_PADDED=$(printf "%-10s" "$TYPE")
- FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.2f <<<"$FREE")B
- local ITEM="Type: $TYPE_PADDED Free: $FREE_FMT"
-
- ((${#ITEM} + 2 > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=$((${#ITEM} + 2))
-
- MENU+=("$TAG" "$ITEM" "OFF")
+ while read -r TAG TYPE _ TOTAL USED FREE _; do
+ [[ -n "$TAG" && -n "$TYPE" ]] || continue
+ local DISPLAY="${TAG} (${TYPE})"
+ local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED")
+ local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE")
+ local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B"
+ STORAGE_MAP["$DISPLAY"]="$TAG"
+ MENU+=("$DISPLAY" "$INFO" "OFF")
+ ((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY}
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')
- local OPTION_COUNT=$((${#MENU[@]} / 3))
+ if [ ${#MENU[@]} -eq 0 ]; then
+ msg_error "No storage found for content type '$CONTENT'."
+ return 2
+ fi
- # Auto-select if only one option available
- if [[ "$OPTION_COUNT" -eq 1 ]]; then
- echo "${MENU[0]}"
+ if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
+ STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}"
return 0
fi
- # Display selection menu
- local STORAGE
- while [[ -z "${STORAGE:+x}" ]]; do
- STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
- "Select the storage pool to use for the ${CONTENT_LABEL,,}.\nUse the spacebar to make a selection.\n" \
- 16 $((MSG_MAX_LENGTH + 23)) 6 \
- "${MENU[@]}" 3>&1 1>&2 2>&3) || {
- msg_error "Storage selection cancelled."
- exit 202
- }
- done
+ local WIDTH=$((COL_WIDTH + 42))
+ while true; do
+ local DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \
+ --title "Storage Pools" \
+ --radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \
+ 16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3)
- echo "$STORAGE"
+ [[ $? -ne 0 ]] && return 3
+
+ if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then
+ whiptail --msgbox "No valid storage selected. Please try again." 8 58
+ continue
+ fi
+
+ STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}"
+ return 0
+ done
}
+
# Test if required variables are set
[[ "${CTID:-}" ]] || {
msg_error "You need to set 'CTID' variable."
@@ -128,13 +180,55 @@ if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
exit 206
fi
-# Get template storage
-TEMPLATE_STORAGE=$(select_storage template)
-msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
+# DEFAULT_FILE="/usr/local/community-scripts/default_storage"
+# if [[ -f "$DEFAULT_FILE" ]]; then
+# source "$DEFAULT_FILE"
+# if [[ -n "$TEMPLATE_STORAGE" && -n "$CONTAINER_STORAGE" ]]; then
+# msg_info "Using default storage configuration from: $DEFAULT_FILE"
+# msg_ok "Template Storage: ${BL}$TEMPLATE_STORAGE${CL} ${GN}|${CL} Container Storage: ${BL}$CONTAINER_STORAGE${CL}"
+# else
+# msg_warn "Default storage file exists but is incomplete β falling back to manual selection"
+# TEMPLATE_STORAGE=$(select_storage template)
+# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
+# CONTAINER_STORAGE=$(select_storage container)
+# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
+# fi
+# else
+# # TEMPLATE STORAGE SELECTION
+# # Template Storage
+# while true; do
+# TEMPLATE_STORAGE=$(select_storage template)
+# if [[ -n "$TEMPLATE_STORAGE" ]]; then
+# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
+# break
+# fi
+# msg_warn "No valid template storage selected. Please try again."
+# done
-# Get container storage
-CONTAINER_STORAGE=$(select_storage container)
-msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
+# while true; do
+# CONTAINER_STORAGE=$(select_storage container)
+# if [[ -n "$CONTAINER_STORAGE" ]]; then
+# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
+# break
+# fi
+# msg_warn "No valid container storage selected. Please try again."
+# done
+
+# fi
+
+while true; do
+ if select_storage template; then
+ TEMPLATE_STORAGE="$STORAGE_RESULT"
+ break
+ fi
+done
+
+while true; do
+ if select_storage container; then
+ CONTAINER_STORAGE="$STORAGE_RESULT"
+ break
+ fi
+done
# Check free space on selected container storage
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
@@ -158,7 +252,7 @@ fi
TEMPLATE_SEARCH="${PCT_OSTYPE}-${PCT_OSVERSION:-}"
msg_info "Updating LXC Template List"
-if ! timeout 15 pveam update >/dev/null 2>&1; then
+if ! pveam update >/dev/null 2>&1; then
TEMPLATE_FALLBACK=$(pveam list "$TEMPLATE_STORAGE" | awk "/$TEMPLATE_SEARCH/ {print \$2}" | sort -t - -k 2 -V | tail -n1)
if [[ -z "$TEMPLATE_FALLBACK" ]]; then
msg_error "Failed to update LXC template list and no local template matching '$TEMPLATE_SEARCH' found."
@@ -231,7 +325,7 @@ fi
msg_ok "LXC Template is ready to use."
-msg_ok "LXC Template '$TEMPLATE' is ready to use."
+msg_info "Creating LXC Container"
# Check and fix subuid/subgid
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
@@ -242,12 +336,15 @@ PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
# Secure creation of the LXC container with lock and template check
lockfile="/tmp/template.${TEMPLATE}.lock"
-exec 9>"$lockfile"
+exec 9>"$lockfile" >/dev/null 2>&1 || {
+ msg_error "Failed to create lock file '$lockfile'."
+ exit 200
+}
flock -w 60 9 || {
msg_error "Timeout while waiting for template lock"
exit 211
}
-msg_info "Creating LXC Container"
+
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed. Checking if template is corrupted or incomplete."
@@ -279,16 +376,23 @@ if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[
sleep 1 # I/O-Sync-Delay
msg_ok "Re-downloaded LXC Template"
+fi
- if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
- msg_error "Container creation failed after re-downloading template."
- exit 200
+if ! pct list | awk '{print $1}' | grep -qx "$CTID"; then
+ msg_error "Container ID $CTID not listed in 'pct list' β unexpected failure."
+ exit 215
+fi
+
+if ! grep -q '^rootfs:' "/etc/pve/lxc/$CTID.conf"; then
+ msg_error "RootFS entry missing in container config β storage not correctly assigned."
+ exit 216
+fi
+
+if grep -q '^hostname:' "/etc/pve/lxc/$CTID.conf"; then
+ CT_HOSTNAME=$(grep '^hostname:' "/etc/pve/lxc/$CTID.conf" | awk '{print $2}')
+ if [[ ! "$CT_HOSTNAME" =~ ^[a-z0-9-]+$ ]]; then
+ msg_warn "Hostname '$CT_HOSTNAME' contains invalid characters β may cause issues with networking or DNS."
fi
fi
-if ! pct status "$CTID" &>/dev/null; then
- msg_error "Container not found after pct create β assuming failure."
- exit 210
-fi
-
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."
diff --git a/misc/install.func b/misc/install.func
index ac2de2162..04e38d977 100644
--- a/misc/install.func
+++ b/misc/install.func
@@ -61,9 +61,11 @@ setting_up_container() {
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
systemctl disable -q --now systemd-networkd-wait-online.service
msg_ok "Set up Container OS"
- msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
+ #msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
+ msg_ok "Network Connected: ${BL}$(hostname -I)"
}
+# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
network_check() {
set +e
@@ -71,6 +73,7 @@ network_check() {
ipv4_connected=false
ipv6_connected=false
sleep 1
+
# Check IPv4 connectivity to Google, Cloudflare & Quad9 DNS servers.
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
msg_ok "IPv4 Internet Connected"
@@ -99,25 +102,26 @@ network_check() {
fi
# DNS resolution checks for GitHub-related domains (IPv4 and/or IPv6)
- GITHUB_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com")
- GITHUB_STATUS="GitHub DNS:"
+ GIT_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com" "git.community-scripts.org")
+ GIT_STATUS="Git DNS:"
DNS_FAILED=false
- for HOST in "${GITHUB_HOSTS[@]}"; do
+ for HOST in "${GIT_HOSTS[@]}"; do
RESOLVEDIP=$(getent hosts "$HOST" | awk '{ print $1 }' | grep -E '(^([0-9]{1,3}\.){3}[0-9]{1,3}$)|(^[a-fA-F0-9:]+$)' | head -n1)
if [[ -z "$RESOLVEDIP" ]]; then
- GITHUB_STATUS+="$HOST:($DNSFAIL)"
+ GIT_STATUS+="$HOST:($DNSFAIL)"
DNS_FAILED=true
else
- GITHUB_STATUS+=" $HOST:($DNSOK)"
+ GIT_STATUS+=" $HOST:($DNSOK)"
fi
done
if [[ "$DNS_FAILED" == true ]]; then
- fatal "$GITHUB_STATUS"
+ fatal "$GIT_STATUS"
else
- msg_ok "$GITHUB_STATUS"
+ msg_ok "$GIT_STATUS"
fi
+
set -e
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}
diff --git a/misc/tools.func b/misc/tools.func
index 65009a92a..9e52df8e7 100644
--- a/misc/tools.func
+++ b/misc/tools.func
@@ -54,9 +54,14 @@ function setup_nodejs() {
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" \
>/etc/apt/sources.list.d/nodesource.list
+ sleep 2
if ! apt-get update >/dev/null 2>&1; then
- msg_error "Failed to update APT repositories after adding NodeSource"
- exit 1
+ msg_warn "APT update failed β retrying in 5s"
+ sleep 5
+ if ! apt-get update >/dev/null 2>&1; then
+ msg_error "Failed to update APT repositories after adding NodeSource"
+ exit 1
+ fi
fi
if ! apt-get install -y nodejs >/dev/null 2>&1; then
@@ -239,10 +244,14 @@ setup_mariadb() {
DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"
+ if ! curl -fsI http://mirror.mariadb.org/repo/ >/dev/null; then
+ msg_error "MariaDB mirror not reachable"
+ return 1
+ fi
+
msg_info "Setting up MariaDB $MARIADB_VERSION"
# grab dynamic latest LTS version
if [[ "$MARIADB_VERSION" == "latest" ]]; then
- $STD msg_info "Resolving latest GA MariaDB version"
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
grep -vE 'rc/|rolling/' |
@@ -253,7 +262,6 @@ setup_mariadb() {
msg_error "Could not determine latest GA MariaDB version"
return 1
fi
- $STD msg_ok "Latest GA MariaDB version is $MARIADB_VERSION"
fi
local CURRENT_VERSION=""
@@ -278,7 +286,6 @@ setup_mariadb() {
$STD msg_info "Setup MariaDB $MARIADB_VERSION"
fi
- $STD msg_info "Setting up MariaDB Repository"
curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg
@@ -366,25 +373,6 @@ function setup_mysql() {
# PHP_MAX_EXECUTION_TIME - (default: 300)
# ------------------------------------------------------------------------------
-# ------------------------------------------------------------------------------
-# Installs PHP with selected modules and configures Apache/FPM support.
-#
-# Description:
-# - Adds Sury PHP repo if needed
-# - Installs default and user-defined modules
-# - Patches php.ini for CLI, Apache, and FPM as needed
-#
-# Variables:
-# PHP_VERSION - PHP version to install (default: 8.4)
-# PHP_MODULE - Additional comma-separated modules
-# PHP_APACHE - Set YES to enable PHP with Apache
-# PHP_FPM - Set YES to enable PHP-FPM
-# PHP_MEMORY_LIMIT - (default: 512M)
-# PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
-# PHP_POST_MAX_SIZE - (default: 128M)
-# PHP_MAX_EXECUTION_TIME - (default: 300)
-# ------------------------------------------------------------------------------
-
function setup_php() {
local PHP_VERSION="${PHP_VERSION:-8.4}"
local PHP_MODULE="${PHP_MODULE:-}"
@@ -433,6 +421,13 @@ function setup_php() {
fi
local MODULE_LIST="php${PHP_VERSION}"
+ for pkg in $MODULE_LIST; do
+ if ! apt-cache show "$pkg" >/dev/null 2>&1; then
+ msg_error "Package not found: $pkg"
+ exit 1
+ fi
+ done
+
IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
for mod in "${MODULES[@]}"; do
MODULE_LIST+=" php${PHP_VERSION}-${mod}"
@@ -441,11 +436,17 @@ function setup_php() {
if [[ "$PHP_FPM" == "YES" ]]; then
MODULE_LIST+=" php${PHP_VERSION}-fpm"
fi
+ if [[ "$PHP_APACHE" == "YES" ]]; then
+ $STD apt-get install -y apache2 libapache2-mod-php${PHP_VERSION}
+ $STD systemctl restart apache2 || true
+ fi
if [[ "$PHP_APACHE" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
if [[ -f /etc/apache2/mods-enabled/php${CURRENT_PHP}.load ]]; then
$STD a2dismod php${CURRENT_PHP} || true
fi
+ $STD a2enmod php${PHP_VERSION}
+ $STD systemctl restart apache2 || true
fi
if [[ "$PHP_FPM" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
@@ -456,10 +457,6 @@ function setup_php() {
$STD apt-get install -y $MODULE_LIST
msg_ok "Setup PHP $PHP_VERSION"
- if [[ "$PHP_APACHE" == "YES" ]]; then
- $STD systemctl restart apache2 || true
- fi
-
if [[ "$PHP_FPM" == "YES" ]]; then
$STD systemctl enable php${PHP_VERSION}-fpm
$STD systemctl restart php${PHP_VERSION}-fpm
@@ -649,6 +646,15 @@ function setup_mongodb() {
DISTRO_ID=$(awk -F= '/^ID=/{ gsub(/"/,"",$2); print $2 }' /etc/os-release)
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{ print $2 }' /etc/os-release)
+ # Check AVX support
+ if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
+ local major="${MONGO_VERSION%%.*}"
+ if ((major > 5)); then
+ msg_error "MongoDB ${MONGO_VERSION} requires AVX support, which is not available on this system."
+ return 1
+ fi
+ fi
+
case "$DISTRO_ID" in
ubuntu) MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu" ;;
debian) MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu" ;;
@@ -751,6 +757,7 @@ function fetch_and_deploy_gh_release() {
local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile
local version="${4:-latest}"
local target="${5:-/opt/$app}"
+ local asset_pattern="${6:-}"
local app_lc=$(echo "${app,,}" | tr -d ' ')
local version_file="$HOME/.${app_lc}"
@@ -813,6 +820,7 @@ function fetch_and_deploy_gh_release() {
msg_info "Fetching GitHub release: $app ($version)"
+ ### Tarball Mode ###
if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
url=$(echo "$json" | jq -r '.tarball_url // empty')
[[ -z "$url" ]] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
@@ -833,6 +841,7 @@ function fetch_and_deploy_gh_release() {
cp -r "$unpack_dir"/* "$target/"
shopt -u dotglob nullglob
+ ### Binary Mode ###
elif [[ "$mode" == "binary" ]]; then
local arch
arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
@@ -842,12 +851,14 @@ function fetch_and_deploy_gh_release() {
local assets url_match=""
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')
- if [[ -n "$6" ]]; then
+ # If explicit filename pattern is provided (param $6), match that first
+ if [[ -n "$asset_pattern" ]]; then
for u in $assets; do
- [[ "$u" =~ $6 || "$u" == *"$6" ]] && url_match="$u" && break
+ [[ "$u" =~ $asset_pattern || "$u" == *"$asset_pattern" ]] && url_match="$u" && break
done
fi
+ # If no match via explicit pattern, fall back to architecture heuristic
if [[ -z "$url_match" ]]; then
for u in $assets; do
if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
@@ -857,6 +868,7 @@ function fetch_and_deploy_gh_release() {
done
fi
+ # Fallback: any .deb file
if [[ -z "$url_match" ]]; then
for u in $assets; do
[[ "$u" =~ \.deb$ ]] && url_match="$u" && break
@@ -885,8 +897,10 @@ function fetch_and_deploy_gh_release() {
}
}
+ ### Prebuild Mode ###
elif [[ "$mode" == "prebuild" ]]; then
- local pattern="$6"
+ local pattern="${6%\"}"
+ pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
@@ -895,7 +909,13 @@ function fetch_and_deploy_gh_release() {
local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
- [[ "$u" =~ $pattern || "$u" == *"$pattern" ]] && asset_url="$u" && break
+ filename_candidate="${u##*/}"
+ case "$filename_candidate" in
+ $pattern)
+ asset_url="$u"
+ break
+ ;;
+ esac
done
[[ -z "$asset_url" ]] && {
@@ -911,22 +931,46 @@ function fetch_and_deploy_gh_release() {
return 1
}
+ local unpack_tmp
+ unpack_tmp=$(mktemp -d)
mkdir -p "$target"
+
if [[ "$filename" == *.zip ]]; then
if ! command -v unzip &>/dev/null; then
$STD apt-get install -y unzip
fi
- $STD unzip "$tmpdir/$filename" -d "$target"
- elif [[ "$filename" == *.tar.gz ]]; then
- tar -xzf "$tmpdir/$filename" -C "$target"
+ unzip -q "$tmpdir/$filename" -d "$unpack_tmp"
+ elif [[ "$filename" == *.tar.* ]]; then
+ tar -xf "$tmpdir/$filename" -C "$unpack_tmp"
else
msg_error "Unsupported archive format: $filename"
- rm -rf "$tmpdir"
+ rm -rf "$tmpdir" "$unpack_tmp"
return 1
fi
+ local top_dirs
+ top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l)
+
+ if [[ "$top_dirs" -eq 1 ]]; then
+ # Strip leading folder
+ local inner_dir
+ inner_dir=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d)
+ shopt -s dotglob nullglob
+ cp -r "$inner_dir"/* "$target/"
+ shopt -u dotglob nullglob
+ else
+ # Copy all contents
+ shopt -s dotglob nullglob
+ cp -r "$unpack_tmp"/* "$target/"
+ shopt -u dotglob nullglob
+ fi
+
+ rm -rf "$unpack_tmp"
+
+ ### Singlefile Mode ###
elif [[ "$mode" == "singlefile" ]]; then
- local pattern="$6"
+ local pattern="${6%\"}"
+ pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
@@ -935,7 +979,13 @@ function fetch_and_deploy_gh_release() {
local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
- [[ "$u" =~ $pattern || "$u" == *"$pattern" ]] && asset_url="$u" && break
+ filename_candidate="${u##*/}"
+ case "$filename_candidate" in
+ $pattern)
+ asset_url="$u"
+ break
+ ;;
+ esac
done
[[ -z "$asset_url" ]] && {
@@ -946,13 +996,20 @@ function fetch_and_deploy_gh_release() {
filename="${asset_url##*/}"
mkdir -p "$target"
- curl $download_timeout -fsSL -o "$target/$app" "$asset_url" || {
+
+ local use_filename="${USE_ORIGINAL_FILENAME:-false}"
+ local target_file="$app"
+ [[ "$use_filename" == "true" ]] && target_file="$filename"
+
+ curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
msg_error "Download failed: $asset_url"
rm -rf "$tmpdir"
return 1
}
- chmod +x "$target/$app"
+ if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
+ chmod +x "$target/$target_file"
+ fi
else
msg_error "Unknown mode: $mode"
@@ -1631,3 +1688,154 @@ function setup_imagemagick() {
ensure_usr_local_bin_persist
msg_ok "Setup ImageMagick $VERSION"
}
+
+# ------------------------------------------------------------------------------
+# Installs FFmpeg from source or prebuilt binary (Debian/Ubuntu only).
+#
+# Description:
+# - Downloads and builds FFmpeg from GitHub (https://github.com/FFmpeg/FFmpeg)
+# - Supports specific version override via FFMPEG_VERSION (e.g. n7.1.1)
+# - Supports build profile via FFMPEG_TYPE:
+# - minimal : x264, vpx, mp3 only
+# - medium : adds subtitles, fonts, opus, vorbis
+# - full : adds dav1d, svt-av1, zlib, numa
+# - binary : downloads static build (johnvansickle.com)
+# - Defaults to latest stable version and full feature set
+#
+# Notes:
+# - Requires: curl, jq, build-essential, and matching codec libraries
+# - Result is installed to /usr/local/bin/ffmpeg
+# ------------------------------------------------------------------------------
+
+function setup_ffmpeg() {
+ local TMP_DIR
+ TMP_DIR=$(mktemp -d)
+ local GITHUB_REPO="FFmpeg/FFmpeg"
+ local VERSION="${FFMPEG_VERSION:-latest}"
+ local TYPE="${FFMPEG_TYPE:-full}"
+ local BIN_PATH="/usr/local/bin/ffmpeg"
+
+ # Binary fallback mode
+ if [[ "$TYPE" == "binary" ]]; then
+ msg_info "Installing FFmpeg (static binary)"
+ curl -fsSL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o "$TMP_DIR/ffmpeg.tar.xz"
+ tar -xf "$TMP_DIR/ffmpeg.tar.xz" -C "$TMP_DIR"
+ local EXTRACTED_DIR
+ EXTRACTED_DIR=$(find "$TMP_DIR" -maxdepth 1 -type d -name "ffmpeg-*")
+ cp "$EXTRACTED_DIR/ffmpeg" "$BIN_PATH"
+ cp "$EXTRACTED_DIR/ffprobe" /usr/local/bin/ffprobe
+ chmod +x "$BIN_PATH" /usr/local/bin/ffprobe
+ rm -rf "$TMP_DIR"
+ msg_ok "Installed FFmpeg binary ($($BIN_PATH -version | head -n1))"
+ return
+ fi
+
+ if ! command -v jq &>/dev/null; then
+ $STD apt-get update
+ $STD apt-get install -y jq
+ fi
+
+ # Auto-detect latest stable version if none specified
+ if [[ "$VERSION" == "latest" || -z "$VERSION" ]]; then
+ msg_info "Resolving latest FFmpeg tag"
+ VERSION=$(curl -fsSL "https://api.github.com/repos/${GITHUB_REPO}/tags" |
+ jq -r '.[].name' |
+ grep -E '^n[0-9]+\.[0-9]+\.[0-9]+$' |
+ sort -V | tail -n1)
+ fi
+
+ if [[ -z "$VERSION" ]]; then
+ msg_error "Could not determine FFmpeg version"
+ rm -rf "$TMP_DIR"
+ return 1
+ fi
+
+ msg_info "Installing FFmpeg ${VERSION} ($TYPE)"
+
+ # Dependency selection
+ local DEPS=(build-essential yasm nasm pkg-config)
+ case "$TYPE" in
+ minimal)
+ DEPS+=(libx264-dev libvpx-dev libmp3lame-dev)
+ ;;
+ medium)
+ DEPS+=(libx264-dev libvpx-dev libmp3lame-dev libfreetype6-dev libass-dev libopus-dev libvorbis-dev)
+ ;;
+ full)
+ DEPS+=(
+ libx264-dev libx265-dev libvpx-dev libmp3lame-dev
+ libfreetype6-dev libass-dev libopus-dev libvorbis-dev
+ libdav1d-dev libsvtav1-dev zlib1g-dev libnuma-dev
+ )
+ ;;
+ *)
+ msg_error "Invalid FFMPEG_TYPE: $TYPE"
+ rm -rf "$TMP_DIR"
+ return 1
+ ;;
+ esac
+
+ $STD apt-get update
+ $STD apt-get install -y "${DEPS[@]}"
+
+ curl -fsSL "https://github.com/${GITHUB_REPO}/archive/refs/tags/${VERSION}.tar.gz" -o "$TMP_DIR/ffmpeg.tar.gz"
+ tar -xzf "$TMP_DIR/ffmpeg.tar.gz" -C "$TMP_DIR"
+ cd "$TMP_DIR/FFmpeg-"* || {
+ msg_error "Source extraction failed"
+ rm -rf "$TMP_DIR"
+ return 1
+ }
+
+ local args=(
+ --enable-gpl
+ --enable-shared
+ --enable-nonfree
+ --disable-static
+ --enable-libx264
+ --enable-libvpx
+ --enable-libmp3lame
+ )
+
+ if [[ "$TYPE" != "minimal" ]]; then
+ args+=(--enable-libfreetype --enable-libass --enable-libopus --enable-libvorbis)
+ fi
+
+ if [[ "$TYPE" == "full" ]]; then
+ args+=(--enable-libx265 --enable-libdav1d --enable-zlib)
+ fi
+
+ if [[ ${#args[@]} -eq 0 ]]; then
+ msg_error "FFmpeg configure args array is empty β aborting."
+ rm -rf "$TMP_DIR"
+ return 1
+ fi
+
+ ./configure "${args[@]}" >"$TMP_DIR/configure.log" 2>&1 || {
+ msg_error "FFmpeg ./configure failed (see $TMP_DIR/configure.log)"
+ cat "$TMP_DIR/configure.log" | tail -n 20
+ rm -rf "$TMP_DIR"
+ return 1
+ }
+
+ $STD make -j"$(nproc)"
+ $STD make install
+ echo "/usr/local/lib" >/etc/ld.so.conf.d/ffmpeg.conf
+ ldconfig
+
+ ldconfig -p | grep libavdevice >/dev/null || {
+ msg_error "libavdevice not registered with dynamic linker"
+ return 1
+ }
+
+ if ! command -v ffmpeg &>/dev/null; then
+ msg_error "FFmpeg installation failed"
+ rm -rf "$TMP_DIR"
+ return 1
+ fi
+
+ local FINAL_VERSION
+ FINAL_VERSION=$(ffmpeg -version | head -n1 | awk '{print $3}')
+ rm -rf "$TMP_DIR"
+ ensure_usr_local_bin_persist
+ msg_ok "Setup FFmpeg $FINAL_VERSION"
+}
diff --git a/tools/pve/add-iptag.sh b/tools/pve/add-iptag.sh
index ba81b838e..21b0d7104 100644
--- a/tools/pve/add-iptag.sh
+++ b/tools/pve/add-iptag.sh
@@ -3,7 +3,6 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz) && Desert_Gamer
# License: MIT
-# Source: https://github.com/gitsang/iptag
function header_info {
clear
@@ -22,7 +21,7 @@ header_info
APP="IP-Tag"
hostname=$(hostname)
-# Farbvariablen
+# Color variables
YW=$(echo "\033[33m")
GN=$(echo "\033[1;92m")
RD=$(echo "\033[01;31m")
@@ -32,13 +31,7 @@ HOLD=" "
CM=" βοΈ ${CL}"
CROSS=" βοΈ ${CL}"
-# This function enables error handling in the script by setting options and defining a trap for the ERR signal.
-catch_errors() {
- set -Eeuo pipefail
- trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
-}
-
-# This function is called when an error occurs. It receives the exit code, line number, and command that caused the error, and displays an error message.
+# Error handler for displaying error messages
error_handler() {
if [ -n "$SPINNER_PID" ] && ps -p $SPINNER_PID >/dev/null; then
kill $SPINNER_PID >/dev/null
@@ -51,7 +44,7 @@ error_handler() {
echo -e "\n$error_message\n"
}
-# This function displays a spinner.
+# Spinner for progress indication
spinner() {
local frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')
local spin_i=0
@@ -67,7 +60,7 @@ spinner() {
done
}
-# This function displays an informational message with a yellow color.
+# Info message
msg_info() {
local msg="$1"
echo -ne "${TAB}${YW}${HOLD}${msg}${HOLD}"
@@ -75,7 +68,7 @@ msg_info() {
SPINNER_PID=$!
}
-# This function displays a success message with a green color.
+# Success message
msg_ok() {
if [ -n "$SPINNER_PID" ] && ps -p $SPINNER_PID >/dev/null; then
kill $SPINNER_PID >/dev/null
@@ -85,7 +78,7 @@ msg_ok() {
echo -e "${BFR}${CM}${GN}${msg}${CL}"
}
-# This function displays a error message with a red color.
+# Error message
msg_error() {
if [ -n "$SPINNER_PID" ] && ps -p $SPINNER_PID >/dev/null; then
kill $SPINNER_PID >/dev/null
@@ -124,17 +117,138 @@ migrate_config() {
update_installation() {
msg_info "Updating IP-Tag Scripts"
systemctl stop iptag.service &>/dev/null
+ msg_ok "Stopped IP-Tag service"
# Create directory if it doesn't exist
if [[ ! -d "/opt/iptag" ]]; then
mkdir -p /opt/iptag
fi
- # Migrate config if needed
- migrate_config
+ # Create new config file (check if exists and ask user)
+ if [[ -f "/opt/iptag/iptag.conf" ]]; then
+ echo -e "\n${YW}Configuration file already exists.${CL}"
+ while true; do
+ read -p "Do you want to replace it with defaults? (y/n): " yn
+ case $yn in
+ [Yy]*)
+ msg_info "Replacing configuration file"
+ generate_config >/opt/iptag/iptag.conf
+ msg_ok "Configuration file replaced with defaults"
+ break
+ ;;
+ [Nn]*)
+ echo -e "${GN}✔️ Keeping existing configuration file${CL}"
+ break
+ ;;
+ *)
+ echo -e "${RD}Please answer yes or no.${CL}"
+ ;;
+ esac
+ done
+ else
+ msg_info "Creating new configuration file"
+ generate_config >/opt/iptag/iptag.conf
+ msg_ok "Created new configuration file at /opt/iptag/iptag.conf"
+ fi
# Update main script
- cat <<'EOF' >/opt/iptag/iptag
+ msg_info "Updating main script"
+ generate_main_script >/opt/iptag/iptag
+ chmod +x /opt/iptag/iptag
+ msg_ok "Updated main script"
+
+ # Update service file
+ msg_info "Updating service file"
+ generate_service >/lib/systemd/system/iptag.service
+ msg_ok "Updated service file"
+
+ msg_info "Creating manual run command"
+ cat <<'EOF' >/usr/local/bin/iptag-run
+#!/usr/bin/env bash
+CONFIG_FILE="/opt/iptag/iptag.conf"
+SCRIPT_FILE="/opt/iptag/iptag"
+if [[ ! -f "$SCRIPT_FILE" ]]; then
+ echo "❌ Main script not found: $SCRIPT_FILE"
+ exit 1
+fi
+export FORCE_SINGLE_RUN=true
+exec "$SCRIPT_FILE"
+EOF
+ chmod +x /usr/local/bin/iptag-run
+ msg_ok "Created iptag-run executable - You can execute this manually by entering ‘iptag-run’ in the Proxmox host, so the script is executed by hand."
+
+ msg_info "Restarting service"
+ systemctl daemon-reload &>/dev/null
+ systemctl enable -q --now iptag.service &>/dev/null
+ msg_ok "Updated IP-Tag Scripts"
+}
+
+# Generate configuration file content
+generate_config() {
+ cat <&2
+ fi
+}
+
+# Color constants
+readonly RED='\033[0;31m'
+readonly GREEN='\033[0;32m'
+readonly YELLOW='\033[0;33m'
+readonly BLUE='\033[0;34m'
+readonly PURPLE='\033[0;35m'
+readonly CYAN='\033[0;36m'
+readonly WHITE='\033[1;37m'
+readonly GRAY='\033[0;37m'
+readonly NC='\033[0m' # No Color
+
+# Logging functions with colors
+log_success() {
+ echo -e "${GREEN}✓${NC} $*"
+}
+
+log_info() {
+ echo -e "${BLUE}ℹ${NC} $*"
+}
+
+log_warning() {
+ echo -e "${YELLOW}⚠${NC} $*"
+}
+
+log_error() {
+ echo -e "${RED}✗${NC} $*"
+}
+
+log_change() {
+ echo -e "${CYAN}~${NC} $*"
+}
+
+log_unchanged() {
+ echo -e "${GRAY}=${NC} $*"
}
# Check if IP is in CIDR
ip_in_cidr() {
local ip="$1" cidr="$2"
- ipcalc -c "$ip" "$cidr" >/dev/null 2>&1 || return 1
-
- local network prefix ip_parts net_parts
- network=$(echo "$cidr" | cut -d/ -f1)
- prefix=$(echo "$cidr" | cut -d/ -f2)
- IFS=. read -r -a ip_parts <<< "$ip"
- IFS=. read -r -a net_parts <<< "$network"
-
- case $prefix in
- 8) [[ "${ip_parts[0]}" == "${net_parts[0]}" ]] ;;
- 16) [[ "${ip_parts[0]}.${ip_parts[1]}" == "${net_parts[0]}.${net_parts[1]}" ]] ;;
- 24) [[ "${ip_parts[0]}.${ip_parts[1]}.${ip_parts[2]}" == "${net_parts[0]}.${net_parts[1]}.${net_parts[2]}" ]] ;;
- 32) [[ "$ip" == "$network" ]] ;;
- *) return 1 ;;
- esac
+ debug_log "ip_in_cidr: checking '$ip' against '$cidr'"
+
+ # Manual CIDR check - more reliable method
+ debug_log "ip_in_cidr: using manual check (bypassing ipcalc)"
+ local network prefix
+ IFS='/' read -r network prefix <<< "$cidr"
+
+ # Convert IP and network to integers for comparison
+ local ip_int net_int mask
+ IFS='.' read -r a b c d <<< "$ip"
+ ip_int=$(( (a << 24) + (b << 16) + (c << 8) + d ))
+
+ IFS='.' read -r a b c d <<< "$network"
+ net_int=$(( (a << 24) + (b << 16) + (c << 8) + d ))
+
+ # Create subnet mask
+ mask=$(( 0xFFFFFFFF << (32 - prefix) ))
+
+ # Apply mask and compare
+ local ip_masked=$((ip_int & mask))
+ local net_masked=$((net_int & mask))
+
+ debug_log "ip_in_cidr: IP=$ip ($ip_int), Network=$network ($net_int), Prefix=$prefix"
+ debug_log "ip_in_cidr: Mask=$mask (hex: $(printf '0x%08x' $mask))"
+ debug_log "ip_in_cidr: IP&Mask=$ip_masked ($(printf '%d.%d.%d.%d' $((ip_masked>>24&255)) $((ip_masked>>16&255)) $((ip_masked>>8&255)) $((ip_masked&255))))"
+ debug_log "ip_in_cidr: Net&Mask=$net_masked ($(printf '%d.%d.%d.%d' $((net_masked>>24&255)) $((net_masked>>16&255)) $((net_masked>>8&255)) $((net_masked&255))))"
+
+ if (( ip_masked == net_masked )); then
+ debug_log "ip_in_cidr: manual check PASSED - IP is in CIDR"
+ return 0
+ else
+ debug_log "ip_in_cidr: manual check FAILED - IP is NOT in CIDR"
+ return 1
+ fi
}
# Format IP address according to the configuration
format_ip_tag() {
local ip="$1"
+ [[ -z "$ip" ]] && return
local format="${TAG_FORMAT:-$DEFAULT_TAG_FORMAT}"
-
case "$format" in
"last_octet") echo "${ip##*.}" ;;
"last_two_octets") echo "${ip#*.*.}" ;;
@@ -192,7 +363,17 @@ ip_in_cidrs() {
local ip="$1" cidrs="$2"
[[ -z "$cidrs" ]] && return 1
local IFS=' '
- for cidr in $cidrs; do ip_in_cidr "$ip" "$cidr" && return 0; done
+ debug_log "Checking IP '$ip' against CIDRs: '$cidrs'"
+ for cidr in $cidrs; do
+ debug_log "Testing IP '$ip' against CIDR '$cidr'"
+ if ip_in_cidr "$ip" "$cidr"; then
+ debug_log "IP '$ip' matches CIDR '$cidr' - PASSED"
+ return 0
+ else
+ debug_log "IP '$ip' does not match CIDR '$cidr'"
+ fi
+ done
+ debug_log "IP '$ip' failed all CIDR checks"
return 1
}
@@ -209,76 +390,225 @@ is_valid_ipv4() {
return 0
}
-lxc_status_changed() {
- current_lxc_status=$(pct list 2>/dev/null)
- if [ "${last_lxc_status}" == "${current_lxc_status}" ]; then
- return 1
- else
- last_lxc_status="${current_lxc_status}"
- return 0
- fi
-}
-
-vm_status_changed() {
- current_vm_status=$(qm list 2>/dev/null)
- if [ "${last_vm_status}" == "${current_vm_status}" ]; then
- return 1
- else
- last_vm_status="${current_vm_status}"
- return 0
- fi
-}
-
-fw_net_interface_changed() {
- current_net_interface=$(ifconfig | grep "^fw")
- if [ "${last_net_interface}" == "${current_net_interface}" ]; then
- return 1
- else
- last_net_interface="${current_net_interface}"
- return 0
- fi
-}
-
-# Get VM IPs using MAC addresses and ARP table
+# Get VM IPs using multiple methods with performance optimizations
get_vm_ips() {
- local vmid=$1 ips="" macs found_ip=false
- qm status "$vmid" 2>/dev/null | grep -q "status: running" || return
-
- macs=$(qm config "$vmid" 2>/dev/null | grep -E 'net[0-9]+' | grep -oE '[a-fA-F0-9]{2}(:[a-fA-F0-9]{2}){5}')
- [[ -z "$macs" ]] && return
-
- for mac in $macs; do
- local ip
- ip=$(arp -an 2>/dev/null | grep -i "$mac" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}')
- [[ -n "$ip" ]] && { ips+="$ip "; found_ip=true; }
- done
-
- if ! $found_ip; then
- local agent_ip
- agent_ip=$(qm agent "$vmid" network-get-interfaces 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' || true)
- [[ -n "$agent_ip" ]] && ips+="$agent_ip "
+ local vmid=$1 ips=""
+ local vm_config="/etc/pve/qemu-server/${vmid}.conf"
+ [[ ! -f "$vm_config" ]] && return
+
+ debug_log "vm $vmid: starting optimized IP detection"
+
+ # Check if VM is running first (avoid expensive operations for stopped VMs)
+ local vm_status=""
+ if command -v qm >/dev/null 2>&1; then
+ vm_status=$(qm status "$vmid" 2>/dev/null | awk '{print $2}')
fi
-
- echo "${ips% }"
+
+ if [[ "$vm_status" != "running" ]]; then
+ debug_log "vm $vmid: not running (status: $vm_status), skipping expensive detection"
+ return
+ fi
+
+ # Cache for this execution
+ local cache_file="/tmp/iptag_vm_${vmid}_cache"
+ local cache_ttl=60 # 60 seconds cache
+
+ # Check cache first
+ if [[ -f "$cache_file" ]] && [[ $(($(date +%s) - $(stat -c %Y "$cache_file" 2>/dev/null || echo 0))) -lt $cache_ttl ]]; then
+ local cached_ips=$(cat "$cache_file" 2>/dev/null)
+ if [[ -n "$cached_ips" ]]; then
+ debug_log "vm $vmid: using cached IPs: $cached_ips"
+ echo "$cached_ips"
+ return
+ fi
+ fi
+
+ # Method 1: Quick ARP table lookup (fastest)
+ local mac_addresses=$(grep -E "^net[0-9]+:" "$vm_config" | grep -oE "([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}" | head -3)
+ debug_log "vm $vmid: found MACs: $mac_addresses"
+
+ # Quick ARP check without forced refresh (most common case)
+ for mac in $mac_addresses; do
+ local mac_lower=$(echo "$mac" | tr '[:upper:]' '[:lower:]')
+ local ip=$(ip neighbor show | grep "$mac_lower" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1)
+ if [[ -n "$ip" && "$ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found IP $ip via quick ARP for MAC $mac_lower"
+ ips+="$ip "
+ fi
+ done
+
+ # Early exit if we found IPs via ARP
+ if [[ -n "$ips" ]]; then
+ local unique_ips=$(echo "$ips" | tr ' ' '\n' | sort -u | tr '\n' ' ')
+ unique_ips="${unique_ips% }"
+ debug_log "vm $vmid: early exit with IPs: '$unique_ips'"
+ echo "$unique_ips" > "$cache_file"
+ echo "$unique_ips"
+ return
+ fi
+
+ # Method 2: QM guest agent (fast if available)
+ if command -v qm >/dev/null 2>&1; then
+ local qm_ips=$(timeout 3 qm guest cmd "$vmid" network-get-interfaces 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v "127.0.0.1" | head -2)
+ for qm_ip in $qm_ips; do
+ if [[ "$qm_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found IP $qm_ip via qm guest cmd"
+ ips+="$qm_ip "
+ fi
+ done
+ fi
+
+ # Early exit if we found IPs via QM
+ if [[ -n "$ips" ]]; then
+ local unique_ips=$(echo "$ips" | tr ' ' '\n' | sort -u | tr '\n' ' ')
+ unique_ips="${unique_ips% }"
+ debug_log "vm $vmid: early exit with QM IPs: '$unique_ips'"
+ echo "$unique_ips" > "$cache_file"
+ echo "$unique_ips"
+ return
+ fi
+
+ # Method 3: DHCP leases check (medium cost)
+ for mac in $mac_addresses; do
+ local mac_lower=$(echo "$mac" | tr '[:upper:]' '[:lower:]')
+
+ for dhcp_file in "/var/lib/dhcp/dhcpd.leases" "/var/lib/dhcpcd5/dhcpcd.leases" "/tmp/dhcp.leases"; do
+ if [[ -f "$dhcp_file" ]]; then
+ local dhcp_ip=$(timeout 2 grep -A 10 "ethernet $mac_lower" "$dhcp_file" 2>/dev/null | grep "binding state active" -A 5 | grep -oE "([0-9]{1,3}\.){3}[0-9]{1,3}" | head -1)
+ if [[ -n "$dhcp_ip" && "$dhcp_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found IP $dhcp_ip via DHCP leases for MAC $mac_lower"
+ ips+="$dhcp_ip "
+ break 2
+ fi
+ fi
+ done
+ done
+
+ # Early exit if we found IPs via DHCP
+ if [[ -n "$ips" ]]; then
+ local unique_ips=$(echo "$ips" | tr ' ' '\n' | sort -u | tr '\n' ' ')
+ unique_ips="${unique_ips% }"
+ debug_log "vm $vmid: early exit with DHCP IPs: '$unique_ips'"
+ echo "$unique_ips" > "$cache_file"
+ echo "$unique_ips"
+ return
+ fi
+
+ # Method 4: Limited network discovery (expensive - only if really needed)
+ debug_log "vm $vmid: falling back to limited network discovery"
+
+ for mac in $mac_addresses; do
+ local mac_lower=$(echo "$mac" | tr '[:upper:]' '[:lower:]')
+
+ # Get bridge interfaces
+ local bridges=$(grep -E "^net[0-9]+:" "$vm_config" | grep -oE "bridge=\w+" | cut -d= -f2 | head -1)
+ for bridge in $bridges; do
+ if [[ -n "$bridge" && -d "/sys/class/net/$bridge" ]]; then
+ # Get bridge IP range
+ local bridge_ip=$(ip addr show "$bridge" 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}/[0-9]+' | head -1)
+ if [[ -n "$bridge_ip" ]]; then
+ local network=$(echo "$bridge_ip" | cut -d'/' -f1)
+ debug_log "vm $vmid: limited scan on bridge $bridge network $bridge_ip"
+
+ # Force ARP refresh with broadcast ping (limited)
+ IFS='.' read -r a b c d <<< "$network"
+ local broadcast="$a.$b.$c.255"
+ timeout 1 ping -c 1 -b "$broadcast" >/dev/null 2>&1 || true
+
+ # Check ARP again after refresh
+ sleep 0.5
+ local ip=$(ip neighbor show | grep "$mac_lower" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1)
+ if [[ -n "$ip" && "$ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found IP $ip via ARP after broadcast for MAC $mac_lower"
+ ips+="$ip "
+ break 2
+ fi
+
+ # Only do very limited ping scan (reduced range)
+ IFS='.' read -r a b c d <<< "$network"
+ local base_net="$a.$b.$c"
+
+ # Try only most common ranges (much smaller than before)
+ for last_octet in {100..105} {200..205}; do
+ local test_ip="$base_net.$last_octet"
+
+ # Very quick ping test (reduced timeout)
+ if timeout 0.2 ping -c 1 -W 1 "$test_ip" >/dev/null 2>&1; then
+ # Check if this IP corresponds to our MAC
+ sleep 0.1
+ local found_mac=$(ip neighbor show "$test_ip" 2>/dev/null | grep -oE "([0-9a-f]{2}:){5}[0-9a-f]{2}")
+ if [[ "$found_mac" == "$mac_lower" ]]; then
+ debug_log "vm $vmid: found IP $test_ip via limited ping scan for MAC $mac_lower"
+ ips+="$test_ip "
+ break 2
+ fi
+ fi
+ done
+
+ # Skip extended scanning entirely (too expensive)
+ debug_log "vm $vmid: skipping extended scan to preserve CPU"
+ fi
+ fi
+ done
+ done
+
+ # Method 5: Static configuration check (fast)
+ if [[ -z "$ips" ]]; then
+ debug_log "vm $vmid: checking for static IP configuration"
+
+ # Check cloud-init configuration if exists
+ local cloudinit_file="/var/lib/vz/snippets/${vmid}-cloud-init.yml"
+ if [[ -f "$cloudinit_file" ]]; then
+ local static_ip=$(grep -E "addresses?:" "$cloudinit_file" 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1)
+ if [[ -n "$static_ip" && "$static_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found static IP $static_ip in cloud-init config"
+ ips+="$static_ip "
+ fi
+ fi
+
+ # Check VM config for any IP hints
+ local config_ip=$(grep -E "(ip=|gw=)" "$vm_config" 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1)
+ if [[ -n "$config_ip" && "$config_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "vm $vmid: found IP hint $config_ip in VM config"
+ ips+="$config_ip "
+ fi
+ fi
+
+ # Remove duplicates and cache result
+ local unique_ips=$(echo "$ips" | tr ' ' '\n' | sort -u | tr '\n' ' ')
+ unique_ips="${unique_ips% }"
+
+ # Cache the result (even if empty)
+ echo "$unique_ips" > "$cache_file"
+
+ debug_log "vm $vmid: final optimized IPs: '$unique_ips'"
+ echo "$unique_ips"
}
-# Update tags
+# Update tags for container or VM
update_tags() {
- local type="$1" vmid="$2" config_cmd="pct"
- [[ "$type" == "vm" ]] && config_cmd="qm"
-
+ local type="$1" vmid="$2"
local current_ips_full
+
if [[ "$type" == "lxc" ]]; then
- current_ips_full=$(lxc-info -n "${vmid}" -i 2>/dev/null | grep -E "^IP:" | awk '{print $2}')
+ current_ips_full=$(get_lxc_ips "${vmid}")
+ while IFS= read -r line; do
+ [[ "$line" == tags:* ]] && current_tags_raw="${line#tags: }" && break
+ done < <(pct config "$vmid" 2>/dev/null)
else
current_ips_full=$(get_vm_ips "${vmid}")
+ local vm_config="/etc/pve/qemu-server/${vmid}.conf"
+ if [[ -f "$vm_config" ]]; then
+ local current_tags_raw=$(grep "^tags:" "$vm_config" 2>/dev/null | cut -d: -f2 | sed 's/^[[:space:]]*//')
+ fi
fi
- [[ -z "$current_ips_full" ]] && return
local current_tags=() next_tags=() current_ip_tags=()
- mapfile -t current_tags < <($config_cmd config "${vmid}" 2>/dev/null | grep tags | awk '{print $2}' | sed 's/;/\n/g')
+ if [[ -n "$current_tags_raw" ]]; then
+ mapfile -t current_tags < <(echo "$current_tags_raw" | sed 's/;/\n/g')
+ fi
- # Separate IP and non-IP tags
+ # Separate IP/numeric and user tags
for tag in "${current_tags[@]}"; do
if is_valid_ipv4 "${tag}" || [[ "$tag" =~ ^[0-9]+(\.[0-9]+)*$ ]]; then
current_ip_tags+=("${tag}")
@@ -287,48 +617,185 @@ update_tags() {
fi
done
- local formatted_ips=() needs_update=false added_ips=()
- for ip in ${current_ips_full}; do
- if is_valid_ipv4 "$ip" && ip_in_cidrs "$ip" "${CIDR_LIST[*]}"; then
- local formatted_ip=$(format_ip_tag "$ip")
- formatted_ips+=("$formatted_ip")
- if [[ ! " ${current_ip_tags[*]} " =~ " ${formatted_ip} " ]]; then
- needs_update=true
- added_ips+=("$formatted_ip")
- next_tags+=("$formatted_ip")
+ # Generate new IP tags from current IPs
+ local formatted_ips=()
+ debug_log "$type $vmid current_ips_full: '$current_ips_full'"
+ debug_log "$type $vmid CIDR_LIST: ${CIDR_LIST[*]}"
+ for ip in $current_ips_full; do
+ [[ -z "$ip" ]] && continue
+ debug_log "$type $vmid processing IP: '$ip'"
+ if is_valid_ipv4 "$ip"; then
+ debug_log "$type $vmid IP '$ip' is valid"
+ if ip_in_cidrs "$ip" "${CIDR_LIST[*]}"; then
+ debug_log "$type $vmid IP '$ip' passed CIDR check"
+ local formatted_ip=$(format_ip_tag "$ip")
+ debug_log "$type $vmid formatted '$ip' -> '$formatted_ip'"
+ [[ -n "$formatted_ip" ]] && formatted_ips+=("$formatted_ip")
+ else
+ debug_log "$type $vmid IP '$ip' failed CIDR check"
fi
+ else
+ debug_log "$type $vmid IP '$ip' is invalid"
fi
done
+ debug_log "$type $vmid final formatted_ips: ${formatted_ips[*]}"
- [[ ${#formatted_ips[@]} -eq 0 ]] && return
+ # If LXC and no IPs detected, do not touch tags at all
+ if [[ "$type" == "lxc" && ${#formatted_ips[@]} -eq 0 ]]; then
+ log_unchanged "LXC ${GRAY}${vmid}${NC}: No IP detected, tags unchanged"
+ return
+ fi
- # Add existing IP tags that are still valid
- for tag in "${current_ip_tags[@]}"; do
- if [[ " ${formatted_ips[*]} " =~ " ${tag} " ]]; then
- if [[ ! " ${next_tags[*]} " =~ " ${tag} " ]]; then
- next_tags+=("$tag")
- fi
- fi
+ # Add new IP tags
+ for new_ip in "${formatted_ips[@]}"; do
+ next_tags+=("$new_ip")
done
- if [[ "$needs_update" == true ]]; then
- echo "${type^} ${vmid}: adding IP tags: ${added_ips[*]}"
- $config_cmd set "${vmid}" -tags "$(IFS=';'; echo "${next_tags[*]}")" &>/dev/null
- elif [[ ${#current_ip_tags[@]} -gt 0 ]]; then
- echo "${type^} ${vmid}: IP tags already set: ${current_ip_tags[*]}"
+ # Update tags if there are changes
+ local old_tags_str=$(IFS=';'; echo "${current_tags[*]}")
+ local new_tags_str=$(IFS=';'; echo "${next_tags[*]}")
+
+ debug_log "$type $vmid old_tags: '$old_tags_str'"
+ debug_log "$type $vmid new_tags: '$new_tags_str'"
+ debug_log "$type $vmid tags_equal: $([[ "$old_tags_str" == "$new_tags_str" ]] && echo true || echo false)"
+
+ if [[ "$old_tags_str" != "$new_tags_str" ]]; then
+ # Determine what changed
+ local old_ip_tags_count=${#current_ip_tags[@]}
+ local new_ip_tags_count=${#formatted_ips[@]}
+
+ # Build detailed change message
+ local change_details=""
+
+ if [[ $old_ip_tags_count -eq 0 ]]; then
+ change_details="added ${new_ip_tags_count} IP tag(s): [${GREEN}${formatted_ips[*]}${NC}]"
+ else
+ # Compare old and new IP tags
+ local added_tags=() removed_tags=() common_tags=()
+
+ # Find removed tags
+ for old_tag in "${current_ip_tags[@]}"; do
+ local found=false
+ for new_tag in "${formatted_ips[@]}"; do
+ if [[ "$old_tag" == "$new_tag" ]]; then
+ found=true
+ break
+ fi
+ done
+ if [[ "$found" == false ]]; then
+ removed_tags+=("$old_tag")
+ else
+ common_tags+=("$old_tag")
+ fi
+ done
+
+ # Find added tags
+ for new_tag in "${formatted_ips[@]}"; do
+ local found=false
+ for old_tag in "${current_ip_tags[@]}"; do
+ if [[ "$new_tag" == "$old_tag" ]]; then
+ found=true
+ break
+ fi
+ done
+ if [[ "$found" == false ]]; then
+ added_tags+=("$new_tag")
+ fi
+ done
+
+ # Build change message
+ local change_parts=()
+ if [[ ${#added_tags[@]} -gt 0 ]]; then
+ change_parts+=("added [${GREEN}${added_tags[*]}${NC}]")
+ fi
+ if [[ ${#removed_tags[@]} -gt 0 ]]; then
+ change_parts+=("removed [${YELLOW}${removed_tags[*]}${NC}]")
+ fi
+ if [[ ${#common_tags[@]} -gt 0 ]]; then
+ change_parts+=("kept [${GRAY}${common_tags[*]}${NC}]")
+ fi
+
+ change_details=$(IFS=', '; echo "${change_parts[*]}")
+ fi
+
+ log_change "${type^^} ${CYAN}${vmid}${NC}: ${change_details}"
+
+ if [[ "$type" == "lxc" ]]; then
+ pct set "${vmid}" -tags "$(IFS=';'; echo "${next_tags[*]}")" &>/dev/null
+ else
+ local vm_config="/etc/pve/qemu-server/${vmid}.conf"
+ if [[ -f "$vm_config" ]]; then
+ sed -i '/^tags:/d' "$vm_config"
+ if [[ ${#next_tags[@]} -gt 0 ]]; then
+ echo "tags: $(IFS=';'; echo "${next_tags[*]}")" >> "$vm_config"
+ fi
+ fi
+ fi
else
- echo "${type^} ${vmid}: setting initial IP tags: ${formatted_ips[*]}"
- $config_cmd set "${vmid}" -tags "$(IFS=';'; echo "${formatted_ips[*]}")" &>/dev/null
+ # Tags unchanged
+ local ip_count=${#formatted_ips[@]}
+ local status_msg=""
+
+ if [[ $ip_count -eq 0 ]]; then
+ status_msg="No IPs detected"
+ elif [[ $ip_count -eq 1 ]]; then
+ status_msg="IP tag [${GRAY}${formatted_ips[0]}${NC}] unchanged"
+ else
+ status_msg="${ip_count} IP tags [${GRAY}${formatted_ips[*]}${NC}] unchanged"
+ fi
+
+ log_unchanged "${type^^} ${GRAY}${vmid}${NC}: ${status_msg}"
+ fi
+}
+
+# Update all instances of specified type
+update_all_tags() {
+ local type="$1" vmids count=0
+
+ if [[ "$type" == "lxc" ]]; then
+ vmids=($(pct list 2>/dev/null | grep -v VMID | awk '{print $1}'))
+ else
+ local all_vm_configs=($(ls /etc/pve/qemu-server/*.conf 2>/dev/null | sed 's/.*\/\([0-9]*\)\.conf/\1/' | sort -n))
+ vmids=("${all_vm_configs[@]}")
+ fi
+
+ count=${#vmids[@]}
+ [[ $count -eq 0 ]] && return
+
+ # Display processing header with color
+ if [[ "$type" == "lxc" ]]; then
+ log_info "Processing ${WHITE}${count}${NC} LXC container(s) in parallel"
+
+ # Clean up old cache files before processing LXC
+ cleanup_vm_cache
+
+ # Process LXC containers in parallel for better performance
+ process_lxc_parallel "${vmids[@]}"
+ else
+ log_info "Processing ${WHITE}${count}${NC} virtual machine(s) in parallel"
+
+ # Clean up old cache files before processing VMs
+ cleanup_vm_cache
+
+ # Process VMs in parallel for better performance
+ process_vms_parallel "${vmids[@]}"
+ fi
+
+ # Add completion message
+ if [[ "$type" == "lxc" ]]; then
+ log_success "Completed processing LXC containers"
+ else
+ log_success "Completed processing virtual machines"
fi
}
# Check if status changed
-check_status() {
+check_status_changed() {
local type="$1" current
case "$type" in
"lxc") current=$(pct list 2>/dev/null | grep -v VMID) ;;
- "vm") current=$(qm list 2>/dev/null | grep -v VMID) ;;
- "fw") current=$(ifconfig 2>/dev/null | grep "^fw") ;;
+ "vm") current=$(ls -la /etc/pve/qemu-server/*.conf 2>/dev/null) ;;
+ "fw") current=$(ip link show type bridge 2>/dev/null) ;;
esac
local last_var="last_${type}_status"
[[ "${!last_var}" == "$current" ]] && return 1
@@ -336,101 +803,440 @@ check_status() {
return 0
}
-# Update all instances
-update_all() {
- local type="$1" list_cmd="pct" vmids count=0
- [[ "$type" == "vm" ]] && list_cmd="qm"
-
- vmids=$($list_cmd list 2>/dev/null | grep -v VMID | awk '{print $1}')
- for vmid in $vmids; do ((count++)); done
-
- echo "Found ${count} running ${type}s"
- [[ $count -eq 0 ]] && return
-
- for vmid in $vmids; do
- update_tags "$type" "$vmid"
- done
-}
-
# Main check function
check() {
local current_time changes_detected=false
current_time=$(date +%s)
- for type in "lxc" "vm"; do
- local interval_var="${type^^}_STATUS_CHECK_INTERVAL"
- local last_check_var="last_${type}_check_time"
- local last_update_var="last_update_${type}_time"
-
- if [[ "${!interval_var}" -gt 0 ]] && (( current_time - ${!last_check_var} >= ${!interval_var} )); then
- echo "Checking ${type^^} status..."
- eval "${last_check_var}=\$current_time"
- if check_status "$type"; then
- changes_detected=true
- update_all "$type"
- eval "${last_update_var}=\$current_time"
- fi
- fi
+ local update_lxc=false
+ local update_vm=false
- if (( current_time - ${!last_update_var} >= FORCE_UPDATE_INTERVAL )); then
- echo "Force updating ${type} tags..."
- changes_detected=true
- update_all "$type"
- eval "${last_update_var}=\$current_time"
- fi
- done
+ # Periodic cache cleanup (every 10 minutes)
+ local time_since_last_cleanup=$((current_time - ${last_cleanup_time:-0}))
+ if [[ $time_since_last_cleanup -ge 600 ]]; then
+ cleanup_vm_cache
+ last_cleanup_time=$current_time
+ debug_log "Performed periodic cache cleanup"
+ fi
- if [[ "${FW_NET_INTERFACE_CHECK_INTERVAL}" -gt 0 ]] && \
- (( current_time - last_fw_check_time >= FW_NET_INTERFACE_CHECK_INTERVAL )); then
- echo "Checking network interfaces..."
- last_fw_check_time=$current_time
- if check_status "fw"; then
- changes_detected=true
- update_all "lxc"
- update_all "vm"
- last_update_lxc_time=$current_time
- last_update_vm_time=$current_time
+ # Check LXC status
+ local time_since_last_lxc_check=$((current_time - last_lxc_status_check_time))
+ if [[ "${LXC_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
+ [[ "$time_since_last_lxc_check" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
+ last_lxc_status_check_time=$current_time
+ if check_status_changed "lxc"; then
+ update_lxc=true
+ log_warning "LXC status changes detected"
fi
fi
- $changes_detected || echo "No changes detected in system status"
+ # Check VM status
+ local time_since_last_vm_check=$((current_time - last_vm_status_check_time))
+ if [[ "${VM_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
+ [[ "$time_since_last_vm_check" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
+ last_vm_status_check_time=$current_time
+ if check_status_changed "vm"; then
+ update_vm=true
+ log_warning "VM status changes detected"
+ fi
+ fi
+
+ # Check network interface changes
+ local time_since_last_fw_check=$((current_time - last_fw_net_interface_check_time))
+ if [[ "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" -gt 0 ]] && \
+ [[ "$time_since_last_fw_check" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
+ last_fw_net_interface_check_time=$current_time
+ if check_status_changed "fw"; then
+ update_lxc=true
+ update_vm=true
+ log_warning "Network interface changes detected"
+ fi
+ fi
+
+ # Force update if interval exceeded
+ for type in "lxc" "vm"; do
+ local last_update_var="last_update_${type}_time"
+ local time_since_last_update=$((current_time - ${!last_update_var}))
+ if [[ $time_since_last_update -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
+ if [[ "$type" == "lxc" ]]; then
+ update_lxc=true
+ log_info "Scheduled LXC update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
+ else
+ update_vm=true
+ log_info "Scheduled VM update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
+ fi
+ eval "${last_update_var}=${current_time}"
+ fi
+ done
+
+ # Final execution
+ $update_lxc && update_all_tags "lxc"
+ $update_vm && update_all_tags "vm"
}
# Initialize time variables
declare -g last_lxc_status="" last_vm_status="" last_fw_status=""
-declare -g last_lxc_check_time=0 last_vm_check_time=0 last_fw_check_time=0
-declare -g last_update_lxc_time=0 last_update_vm_time=0
+declare -g last_lxc_status_check_time=0 last_vm_status_check_time=0 last_fw_net_interface_check_time=0
+declare -g last_update_lxc_time=0 last_update_vm_time=0 last_cleanup_time=0
# Main loop
main() {
+ # Display startup message
+ echo -e "\n${PURPLE}──────────────────────────────────────────────────${NC}"
+ log_success "IP-Tag service started successfully"
+ echo -e "${BLUE}ℹ${NC} Loop interval: ${WHITE}${LOOP_INTERVAL:-$DEFAULT_CHECK_INTERVAL}${NC} seconds"
+ echo -e "${BLUE}ℹ${NC} Debug mode: ${WHITE}${DEBUG:-false}${NC}"
+ echo -e "${BLUE}ℹ${NC} Tag format: ${WHITE}${TAG_FORMAT:-$DEFAULT_TAG_FORMAT}${NC}"
+ echo -e "${BLUE}ℹ${NC} Allowed CIDRs: ${WHITE}${CIDR_LIST[*]}${NC}"
+ echo -e "${PURPLE}──────────────────────────────────────────────────${NC}\n"
+
+ if [[ "$FORCE_SINGLE_RUN" == "true" ]]; then
+ check
+ exit 0
+ fi
+
while true; do
check
- sleep "${LOOP_INTERVAL:-$DEFAULT_CHECK_INTERVAL}"
+ sleep "${LOOP_INTERVAL:-300}"
done
}
+
+# Cache cleanup function
+cleanup_vm_cache() {
+ local cache_dir="/tmp"
+ local vm_cache_ttl=${VM_IP_CACHE_TTL:-120}
+ local lxc_cache_ttl=${LXC_IP_CACHE_TTL:-120}
+ local status_cache_ttl=${LXC_STATUS_CACHE_TTL:-30}
+ local current_time=$(date +%s)
+
+ debug_log "Starting extreme cache cleanup"
+
+ # Clean VM cache files
+ for cache_file in "$cache_dir"/iptag_vm_*_cache; do
+ if [[ -f "$cache_file" ]]; then
+ local file_time=$(stat -c %Y "$cache_file" 2>/dev/null || echo 0)
+ if [[ $((current_time - file_time)) -gt $vm_cache_ttl ]]; then
+ rm -f "$cache_file" 2>/dev/null
+ debug_log "Cleaned up expired VM cache file: $cache_file"
+ fi
+ fi
+ done
+
+ # Clean LXC IP cache files
+ for cache_file in "$cache_dir"/iptag_lxc_*_cache; do
+ if [[ -f "$cache_file" ]]; then
+ local file_time=$(stat -c %Y "$cache_file" 2>/dev/null || echo 0)
+ if [[ $((current_time - file_time)) -gt $lxc_cache_ttl ]]; then
+ rm -f "$cache_file" 2>/dev/null
+ # Also clean meta files
+ rm -f "${cache_file}.meta" 2>/dev/null
+ debug_log "Cleaned up expired LXC cache file: $cache_file"
+ fi
+ fi
+ done
+
+ # Clean LXC status cache files (shorter TTL)
+ for cache_file in "$cache_dir"/iptag_lxc_status_*_cache; do
+ if [[ -f "$cache_file" ]]; then
+ local file_time=$(stat -c %Y "$cache_file" 2>/dev/null || echo 0)
+ if [[ $((current_time - file_time)) -gt $status_cache_ttl ]]; then
+ rm -f "$cache_file" 2>/dev/null
+ debug_log "Cleaned up expired LXC status cache: $cache_file"
+ fi
+ fi
+ done
+
+ # Clean LXC PID cache files (60 second TTL)
+ for cache_file in "$cache_dir"/iptag_lxc_pid_*_cache; do
+ if [[ -f "$cache_file" ]]; then
+ local file_time=$(stat -c %Y "$cache_file" 2>/dev/null || echo 0)
+ if [[ $((current_time - file_time)) -gt 60 ]]; then
+ rm -f "$cache_file" 2>/dev/null
+ debug_log "Cleaned up expired LXC PID cache: $cache_file"
+ fi
+ fi
+ done
+
+ # Clean any orphaned meta files
+ for meta_file in "$cache_dir"/iptag_*.meta; do
+ if [[ -f "$meta_file" ]]; then
+ local base_file="${meta_file%.meta}"
+ if [[ ! -f "$base_file" ]]; then
+ rm -f "$meta_file" 2>/dev/null
+ debug_log "Cleaned up orphaned meta file: $meta_file"
+ fi
+ fi
+ done
+
+ debug_log "Completed extreme cache cleanup"
+}
+
+# Parallel VM processing function
+process_vms_parallel() {
+ local vm_list=("$@")
+ local max_parallel=${MAX_PARALLEL_VM_CHECKS:-5}
+ local job_count=0
+ local pids=()
+ local pid_start_times=()
+
+ for vmid in "${vm_list[@]}"; do
+ if [[ $job_count -ge $max_parallel ]]; then
+ local pid_to_wait="${pids[0]}"
+ local start_time="${pid_start_times[0]}"
+ local waited=0
+ while kill -0 "$pid_to_wait" 2>/dev/null && [[ $waited -lt 10 ]]; do
+ sleep 1
+ ((waited++))
+ done
+ if kill -0 "$pid_to_wait" 2>/dev/null; then
+ kill -9 "$pid_to_wait" 2>/dev/null
+ log_warning "VM parallel: killed stuck process $pid_to_wait after 10s timeout"
+ else
+ wait "$pid_to_wait"
+ fi
+ pids=("${pids[@]:1}")
+ pid_start_times=("${pid_start_times[@]:1}")
+ ((job_count--))
+ fi
+ # Start background job
+ (update_tags "vm" "$vmid") &
+ pids+=($!)
+ pid_start_times+=("$(date +%s)")
+ ((job_count++))
+ done
+ for i in "${!pids[@]}"; do
+ local pid="${pids[$i]}"
+ local waited=0
+ while kill -0 "$pid" 2>/dev/null && [[ $waited -lt 10 ]]; do
+ sleep 1
+ ((waited++))
+ done
+ if kill -0 "$pid" 2>/dev/null; then
+ kill -9 "$pid" 2>/dev/null
+ log_warning "VM parallel: killed stuck process $pid after 10s timeout"
+ else
+ wait "$pid"
+ fi
+ done
+}
+
+# Parallel LXC processing function
+# Run update_tags for every LXC container ID passed as an argument, using
+# background jobs capped at MAX_PARALLEL_LXC_CHECKS (default 2). Jobs still
+# alive after a 10s wait are SIGKILLed. For more than 5 containers, the
+# statuses from a single `pct list` call are pre-written into the per-CT
+# /tmp cache files that get_lxc_ips reads, replacing one `pct status` call
+# per container.
+process_lxc_parallel() {
+  local lxc_list=("$@")
+  local max_parallel=${MAX_PARALLEL_LXC_CHECKS:-2}
+  local job_count=0
+  local pids=()
+
+  # Wait up to 10s for one background job; force-kill it on timeout.
+  _lxc_reap_job() {
+    local pid="$1" waited=0
+    while kill -0 "$pid" 2>/dev/null && [[ $waited -lt 10 ]]; do
+      sleep 1
+      ((waited++))
+    done
+    if kill -0 "$pid" 2>/dev/null; then
+      kill -9 "$pid" 2>/dev/null
+      log_warning "LXC parallel: killed stuck process $pid after 10s timeout"
+    else
+      wait "$pid"
+    fi
+  }
+
+  debug_log "Starting parallel LXC processing: ${#lxc_list[@]} containers, max_parallel=$max_parallel"
+
+  if [[ ${#lxc_list[@]} -gt 5 ]]; then
+    debug_log "Pre-loading LXC statuses for ${#lxc_list[@]} containers"
+    local all_statuses
+    all_statuses=$(pct list 2>/dev/null)
+    for vmid in "${lxc_list[@]}"; do
+      # Exact-match the VMID column; a prefix grep ("^$vmid") would also
+      # match containers 100/101/... when looking up container 10.
+      local status
+      status=$(echo "$all_statuses" | awk -v id="$vmid" '$1 == id {print $2}')
+      if [[ -n "$status" ]]; then
+        local status_cache_file="/tmp/iptag_lxc_status_${vmid}_cache"
+        echo "$status" > "$status_cache_file" 2>/dev/null &
+      fi
+    done
+    wait
+    debug_log "Completed batch status pre-loading"
+  fi
+
+  for vmid in "${lxc_list[@]}"; do
+    if [[ $job_count -ge $max_parallel ]]; then
+      # Throttle: reap the oldest job before launching another.
+      _lxc_reap_job "${pids[0]}"
+      pids=("${pids[@]:1}")
+      ((job_count--))
+    fi
+    # Start background job with higher priority
+    (update_tags "lxc" "$vmid") &
+    pids+=($!)
+    ((job_count++))
+  done
+
+  # Drain all remaining jobs.
+  for pid in "${pids[@]}"; do
+    _lxc_reap_job "$pid"
+  done
+  debug_log "Completed parallel LXC processing"
+}
+
+# Optimized LXC IP detection with caching and alternative methods
+get_lxc_ips() {
+ local vmid=$1
+ local status_cache_file="/tmp/iptag_lxc_status_${vmid}_cache"
+ local status_cache_ttl=${LXC_STATUS_CACHE_TTL:-30}
+
+ debug_log "lxc $vmid: starting extreme optimized IP detection"
+
+ # Check status cache first (avoid expensive pct status calls)
+ local lxc_status=""
+ if [[ -f "$status_cache_file" ]] && [[ $(($(date +%s) - $(stat -c %Y "$status_cache_file" 2>/dev/null || echo 0))) -lt $status_cache_ttl ]]; then
+ lxc_status=$(cat "$status_cache_file" 2>/dev/null)
+ debug_log "lxc $vmid: using cached status: $lxc_status"
+ else
+ lxc_status=$(pct status "${vmid}" 2>/dev/null | awk '{print $2}')
+ echo "$lxc_status" > "$status_cache_file" 2>/dev/null
+ debug_log "lxc $vmid: fetched fresh status: $lxc_status"
+ fi
+
+ if [[ "$lxc_status" != "running" ]]; then
+ debug_log "lxc $vmid: not running (status: $lxc_status)"
+ return
+ fi
+
+ local ips=""
+ local method_used=""
+
+ # EXTREME Method 1: Direct Proxmox config inspection (super fast)
+ debug_log "lxc $vmid: trying direct Proxmox config inspection"
+ local pve_lxc_config="/etc/pve/lxc/${vmid}.conf"
+ if [[ -f "$pve_lxc_config" ]]; then
+ local static_ip=$(grep -E "^net[0-9]+:" "$pve_lxc_config" 2>/dev/null | grep -oE 'ip=([0-9]{1,3}\.){3}[0-9]{1,3}' | cut -d'=' -f2 | head -1)
+ debug_log "lxc $vmid: [CONFIG] static_ip='$static_ip' (from $pve_lxc_config)"
+ if [[ -n "$static_ip" && "$static_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "lxc $vmid: found static IP $static_ip in Proxmox config"
+ ips="$static_ip"
+ method_used="proxmox_config"
+ fi
+ else
+ debug_log "lxc $vmid: [CONFIG] config file not found: $pve_lxc_config"
+ fi
+
+ # EXTREME Method 2: Direct network namespace inspection (fastest dynamic)
+ if [[ -z "$ips" ]]; then
+ debug_log "lxc $vmid: trying optimized namespace inspection"
+ local ns_file="/var/lib/lxc/${vmid}/rootfs/proc/net/fib_trie"
+ if [[ -f "$ns_file" ]]; then
+ local ns_ip=$(timeout 1 grep -m1 -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' "$ns_file" 2>/dev/null | grep -v '127.0.0.1' | head -1)
+ debug_log "lxc $vmid: [NAMESPACE] ns_ip='$ns_ip'"
+ if [[ -n "$ns_ip" ]] && is_valid_ipv4 "$ns_ip"; then
+ debug_log "lxc $vmid: found IP $ns_ip via namespace inspection"
+ ips="$ns_ip"
+ method_used="namespace"
+ fi
+ else
+ debug_log "lxc $vmid: [NAMESPACE] ns_file not found: $ns_file"
+ fi
+ fi
+
+ # EXTREME Method 3: Batch ARP table lookup (if namespace failed)
+ if [[ -z "$ips" ]]; then
+ debug_log "lxc $vmid: trying batch ARP lookup"
+ local bridge_name=""; local mac_addr=""
+ if [[ -f "$pve_lxc_config" ]]; then
+ bridge_name=$(grep -Eo 'bridge=[^,]+' "$pve_lxc_config" | head -1 | cut -d'=' -f2)
+ mac_addr=$(grep -Eo 'hwaddr=([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}' "$pve_lxc_config" | head -1 | cut -d'=' -f2)
+ debug_log "lxc $vmid: [ARP] bridge_name='$bridge_name' mac_addr='$mac_addr' (from $pve_lxc_config)"
+ fi
+ if [[ -z "$bridge_name" || -z "$mac_addr" ]]; then
+ local lxc_config="/var/lib/lxc/${vmid}/config"
+ if [[ -f "$lxc_config" ]]; then
+ [[ -z "$bridge_name" ]] && bridge_name=$(grep "lxc.net.0.link" "$lxc_config" 2>/dev/null | cut -d'=' -f2 | tr -d ' ')
+ [[ -z "$mac_addr" ]] && mac_addr=$(grep "lxc.net.0.hwaddr" "$lxc_config" 2>/dev/null | cut -d'=' -f2 | tr -d ' ')
+ debug_log "lxc $vmid: [ARP] bridge_name='$bridge_name' mac_addr='$mac_addr' (from $lxc_config)"
+ else
+ debug_log "lxc $vmid: [ARP] lxc config not found: $lxc_config"
+ fi
+ fi
+ if [[ -n "$bridge_name" && -n "$mac_addr" ]]; then
+ local bridge_ip=$(ip neighbor show dev "$bridge_name" 2>/dev/null | grep "$mac_addr" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1)
+ debug_log "lxc $vmid: [ARP] bridge_ip='$bridge_ip'"
+ if [[ -n "$bridge_ip" && "$bridge_ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
+ debug_log "lxc $vmid: found IP $bridge_ip via ARP table"
+ ips="$bridge_ip"
+ method_used="arp_table"
+ fi
+ fi
+ fi
+
+ # EXTREME Method 4: Fast process namespace (if ARP failed)
+ if [[ -z "$ips" ]] && [[ "${LXC_SKIP_SLOW_METHODS:-true}" != "true" ]]; then
+ debug_log "lxc $vmid: trying fast process namespace"
+ local pid_cache_file="/tmp/iptag_lxc_pid_${vmid}_cache"
+ local container_pid=""
+ if [[ -f "$pid_cache_file" ]] && [[ $(($(date +%s) - $(stat -c %Y "$pid_cache_file" 2>/dev/null || echo 0))) -lt 60 ]]; then
+ container_pid=$(cat "$pid_cache_file" 2>/dev/null)
+ else
+ container_pid=$(pct list 2>/dev/null | grep "^$vmid" | awk '{print $3}')
+ [[ -n "$container_pid" && "$container_pid" != "-" ]] && echo "$container_pid" > "$pid_cache_file"
+ fi
+ debug_log "lxc $vmid: [PROCESS_NS] container_pid='$container_pid'"
+ if [[ -n "$container_pid" && "$container_pid" != "-" ]]; then
+ local ns_ip=$(timeout 1 nsenter -t "$container_pid" -n ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
+ debug_log "lxc $vmid: [PROCESS_NS] ns_ip='$ns_ip'"
+ if [[ -n "$ns_ip" ]] && is_valid_ipv4 "$ns_ip"; then
+ debug_log "lxc $vmid: found IP $ns_ip via process namespace"
+ ips="$ns_ip"
+ method_used="process_ns"
+ fi
+ fi
+ fi
+
+ # Fallback: always do lxc-attach/pct exec with timeout if nothing found
+ if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
+ debug_log "lxc $vmid: trying fallback lxc-attach (forced)"
+ local attach_ip=""
+ attach_ip=$(timeout 7s lxc-attach -n "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
+ local attach_status=$?
+ debug_log "lxc $vmid: [LXC_ATTACH] attach_ip='$attach_ip' status=$attach_status"
+ if [[ $attach_status -eq 124 ]]; then
+ debug_log "lxc $vmid: lxc-attach timed out after 7s"
+ fi
+ if [[ -n "$attach_ip" ]] && is_valid_ipv4 "$attach_ip"; then
+ debug_log "lxc $vmid: found IP $attach_ip via lxc-attach (forced)"
+ ips="$attach_ip"
+ method_used="lxc_attach_forced"
+ fi
+ fi
+ if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
+ debug_log "lxc $vmid: trying fallback pct exec (forced)"
+ local pct_ip=""
+ pct_ip=$(timeout 7s pct exec "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
+ local pct_status=$?
+ debug_log "lxc $vmid: [PCT_EXEC] pct_ip='$pct_ip' status=$pct_status"
+ if [[ $pct_status -eq 124 ]]; then
+ debug_log "lxc $vmid: pct exec timed out after 7s"
+ fi
+ if [[ -n "$pct_ip" ]] && is_valid_ipv4 "$pct_ip"; then
+ debug_log "lxc $vmid: found IP $pct_ip via pct exec (forced)"
+ ips="$pct_ip"
+ method_used="pct_exec_forced"
+ fi
+ fi
+
+ debug_log "lxc $vmid: [RESULT] ips='$ips' method='$method_used'"
+ echo "$ips"
+}
+
main
EOF
- chmod +x /opt/iptag/iptag
-
- # Update service file
- cat </lib/systemd/system/iptag.service
-[Unit]
-Description=IP-Tag service
-After=network.target
-
-[Service]
-Type=simple
-ExecStart=/opt/iptag/iptag
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-
- systemctl daemon-reload &>/dev/null
- systemctl enable -q --now iptag.service &>/dev/null
- msg_ok "Updated IP-Tag Scripts"
}
# Main installation process
@@ -497,30 +1303,7 @@ migrate_config
msg_info "Setup Default Config"
if [[ ! -f /opt/iptag/iptag.conf ]]; then
- cat </opt/iptag/iptag.conf
-# Configuration file for LXC IP tagging
-
-# List of allowed CIDRs
-CIDR_LIST=(
- 192.168.0.0/16
- 172.16.0.0/12
- 10.0.0.0/8
- 100.64.0.0/10
-)
-
-# Tag format options:
-# - "full": full IP address (e.g., 192.168.0.100)
-# - "last_octet": only the last octet (e.g., 100)
-# - "last_two_octets": last two octets (e.g., 0.100)
-TAG_FORMAT="full"
-
-# Interval settings (in seconds)
-LOOP_INTERVAL=60
-VM_STATUS_CHECK_INTERVAL=60
-FW_NET_INTERFACE_CHECK_INTERVAL=60
-LXC_STATUS_CHECK_INTERVAL=60
-FORCE_UPDATE_INTERVAL=1800
-EOF
+ generate_config >/opt/iptag/iptag.conf
msg_ok "Setup default config"
else
msg_ok "Default config already exists"
@@ -528,288 +1311,16 @@ fi
msg_info "Setup Main Function"
if [[ ! -f /opt/iptag/iptag ]]; then
- cat <<'EOF' >/opt/iptag/iptag
-#!/bin/bash
-# =============== CONFIGURATION =============== #
-readonly CONFIG_FILE="/opt/iptag/iptag.conf"
-readonly DEFAULT_TAG_FORMAT="full"
-readonly DEFAULT_CHECK_INTERVAL=60
-
-# Load the configuration file if it exists
-if [ -f "$CONFIG_FILE" ]; then
- # shellcheck source=./iptag.conf
- source "$CONFIG_FILE"
-fi
-
-# Convert IP to integer for comparison
-ip_to_int() {
- local ip="$1"
- local a b c d
- IFS=. read -r a b c d <<< "${ip}"
- echo "$((a << 24 | b << 16 | c << 8 | d))"
-}
-
-# Check if IP is in CIDR
-ip_in_cidr() {
- local ip="$1" cidr="$2"
- ipcalc -c "$ip" "$cidr" >/dev/null 2>&1 || return 1
-
- local network prefix ip_parts net_parts
- network=$(echo "$cidr" | cut -d/ -f1)
- prefix=$(echo "$cidr" | cut -d/ -f2)
- IFS=. read -r -a ip_parts <<< "$ip"
- IFS=. read -r -a net_parts <<< "$network"
-
- case $prefix in
- 8) [[ "${ip_parts[0]}" == "${net_parts[0]}" ]] ;;
- 16) [[ "${ip_parts[0]}.${ip_parts[1]}" == "${net_parts[0]}.${net_parts[1]}" ]] ;;
- 24) [[ "${ip_parts[0]}.${ip_parts[1]}.${ip_parts[2]}" == "${net_parts[0]}.${net_parts[1]}.${net_parts[2]}" ]] ;;
- 32) [[ "$ip" == "$network" ]] ;;
- *) return 1 ;;
- esac
-}
-
-# Format IP address according to the configuration
-format_ip_tag() {
- local ip="$1"
- local format="${TAG_FORMAT:-$DEFAULT_TAG_FORMAT}"
-
- case "$format" in
- "last_octet") echo "${ip##*.}" ;;
- "last_two_octets") echo "${ip#*.*.}" ;;
- *) echo "$ip" ;;
- esac
-}
-
-# Check if IP is in any CIDRs
-ip_in_cidrs() {
- local ip="$1" cidrs="$2"
- [[ -z "$cidrs" ]] && return 1
- local IFS=' '
- for cidr in $cidrs; do
- ip_in_cidr "$ip" "$cidr" && return 0
- done
- return 1
-}
-
-# Check if IP is valid
-is_valid_ipv4() {
- local ip="$1"
- [[ "$ip" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]] || return 1
-
- local IFS='.' parts
- read -ra parts <<< "$ip"
- for part in "${parts[@]}"; do
- (( part >= 0 && part <= 255 )) || return 1
- done
- return 0
-}
-
-# Get VM IPs using MAC addresses and ARP table
-get_vm_ips() {
- local vmid=$1 ips="" macs found_ip=false
- qm status "$vmid" 2>/dev/null | grep -q "status: running" || return
-
- macs=$(qm config "$vmid" 2>/dev/null | grep -E 'net[0-9]+' | grep -oE '[a-fA-F0-9]{2}(:[a-fA-F0-9]{2}){5}')
- [[ -z "$macs" ]] && return
-
- for mac in $macs; do
- local ip
- ip=$(arp -an 2>/dev/null | grep -i "$mac" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}')
- [[ -n "$ip" ]] && { ips+="$ip "; found_ip=true; }
- done
-
- if ! $found_ip; then
- local agent_ip
- agent_ip=$(qm agent "$vmid" network-get-interfaces 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' || true)
- [[ -n "$agent_ip" ]] && ips+="$agent_ip "
- fi
-
- echo "${ips% }"
-}
-
-# Update tags for container or VM
-update_tags() {
- local type="$1" vmid="$2" config_cmd="pct"
- [[ "$type" == "vm" ]] && config_cmd="qm"
-
- local current_ips_full
- if [[ "$type" == "lxc" ]]; then
- current_ips_full=$(lxc-info -n "${vmid}" -i 2>/dev/null | grep -E "^IP:" | awk '{print $2}')
- else
- current_ips_full=$(get_vm_ips "${vmid}")
- fi
- [[ -z "$current_ips_full" ]] && return
-
- local current_tags=() next_tags=() current_ip_tags=()
- mapfile -t current_tags < <($config_cmd config "${vmid}" 2>/dev/null | grep tags | awk '{print $2}' | sed 's/;/\n/g')
-
- # Separate IP and non-IP tags
- for tag in "${current_tags[@]}"; do
- if is_valid_ipv4 "${tag}" || [[ "$tag" =~ ^[0-9]+(\.[0-9]+)*$ ]]; then
- current_ip_tags+=("${tag}")
- else
- next_tags+=("${tag}")
- fi
- done
-
- local formatted_ips=() needs_update=false added_ips=()
- for ip in ${current_ips_full}; do
- if is_valid_ipv4 "$ip" && ip_in_cidrs "$ip" "${CIDR_LIST[*]}"; then
- local formatted_ip=$(format_ip_tag "$ip")
- formatted_ips+=("$formatted_ip")
- if [[ ! " ${current_ip_tags[*]} " =~ " ${formatted_ip} " ]]; then
- needs_update=true
- added_ips+=("$formatted_ip")
- next_tags+=("$formatted_ip")
- fi
- fi
- done
-
- [[ ${#formatted_ips[@]} -eq 0 ]] && return
-
- # Add existing IP tags that are still valid
- for tag in "${current_ip_tags[@]}"; do
- if [[ " ${formatted_ips[*]} " =~ " ${tag} " ]]; then
- if [[ ! " ${next_tags[*]} " =~ " ${tag} " ]]; then
- next_tags+=("$tag")
- fi
- fi
- done
-
- if [[ "$needs_update" == true ]]; then
- echo "${type^} ${vmid}: adding IP tags: ${added_ips[*]}"
- $config_cmd set "${vmid}" -tags "$(IFS=';'; echo "${next_tags[*]}")" &>/dev/null
- fi
-}
-
-# Update all instances of specified type
-update_all_tags() {
- local type="$1" list_cmd="pct" vmids count=0
- [[ "$type" == "vm" ]] && list_cmd="qm"
-
- vmids=$($list_cmd list 2>/dev/null | grep -v VMID | awk '{print $1}')
- for vmid in $vmids; do ((count++)); done
-
- echo "Found ${count} running ${type}s"
- [[ $count -eq 0 ]] && return
-
- for vmid in $vmids; do
- update_tags "$type" "$vmid"
- done
-}
-
-# Check if status changed
-check_status_changed() {
- local type="$1" current
- case "$type" in
- "lxc") current=$(pct list 2>/dev/null | grep -v VMID) ;;
- "vm") current=$(qm list 2>/dev/null | grep -v VMID) ;;
- "fw") current=$(ifconfig 2>/dev/null | grep "^fw") ;;
- esac
- local last_var="last_${type}_status"
- [[ "${!last_var}" == "$current" ]] && return 1
- eval "$last_var='$current'"
- return 0
-}
-
-# Main check function
-check() {
- local current_time changes_detected=false
- current_time=$(date +%s)
-
- # Check LXC status
- local time_since_last_lxc_check=$((current_time - last_lxc_status_check_time))
- if [[ "${LXC_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
- [[ "${time_since_last_lxc_check}" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
- echo "Checking LXC status..."
- last_lxc_status_check_time=${current_time}
- if check_status_changed "lxc"; then
- changes_detected=true
- update_all_tags "lxc"
- last_update_lxc_time=${current_time}
- fi
- fi
-
- # Check VM status
- local time_since_last_vm_check=$((current_time - last_vm_status_check_time))
- if [[ "${VM_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
- [[ "${time_since_last_vm_check}" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
- echo "Checking VM status..."
- last_vm_status_check_time=${current_time}
- if check_status_changed "vm"; then
- changes_detected=true
- update_all_tags "vm"
- last_update_vm_time=${current_time}
- fi
- fi
-
- # Check network interface changes
- local time_since_last_fw_check=$((current_time - last_fw_net_interface_check_time))
- if [[ "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" -gt 0 ]] && \
- [[ "${time_since_last_fw_check}" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
- echo "Checking network interfaces..."
- last_fw_net_interface_check_time=${current_time}
- if check_status_changed "fw"; then
- changes_detected=true
- update_all_tags "lxc"
- update_all_tags "vm"
- last_update_lxc_time=${current_time}
- last_update_vm_time=${current_time}
- fi
- fi
-
- # Force update if needed
- for type in "lxc" "vm"; do
- local last_update_var="last_update_${type}_time"
- local time_since_last_update=$((current_time - ${!last_update_var}))
- if [[ ${time_since_last_update} -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
- echo "Force updating ${type} tags..."
- changes_detected=true
- update_all_tags "$type"
- eval "${last_update_var}=${current_time}"
- fi
- done
-
- $changes_detected || echo "No changes detected in system status"
-}
-
-# Initialize time variables
-declare -g last_lxc_status="" last_vm_status="" last_fw_status=""
-declare -g last_lxc_status_check_time=0 last_vm_status_check_time=0 last_fw_net_interface_check_time=0
-declare -g last_update_lxc_time=0 last_update_vm_time=0
-
-# Main loop
-main() {
- while true; do
- check
- sleep "${LOOP_INTERVAL:-$DEFAULT_CHECK_INTERVAL}"
- done
-}
-
-main
-EOF
+ generate_main_script >/opt/iptag/iptag
+ chmod +x /opt/iptag/iptag
msg_ok "Setup Main Function"
else
msg_ok "Main Function already exists"
fi
-chmod +x /opt/iptag/iptag
msg_info "Creating Service"
if [[ ! -f /lib/systemd/system/iptag.service ]]; then
- cat </lib/systemd/system/iptag.service
-[Unit]
-Description=IP-Tag service
-After=network.target
-
-[Service]
-Type=simple
-ExecStart=/opt/iptag/iptag
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
+ generate_service >/lib/systemd/system/iptag.service
msg_ok "Created Service"
else
msg_ok "Service already exists."
@@ -821,5 +1332,28 @@ msg_info "Starting Service"
systemctl daemon-reload &>/dev/null
systemctl enable -q --now iptag.service &>/dev/null
msg_ok "Started Service"
+
+msg_info "Restarting Service with optimizations"
+systemctl restart iptag.service &>/dev/null
+msg_ok "Service restarted with CPU optimizations"
+
+msg_info "Creating manual run command"
+# Small wrapper so the tagger can be invoked on demand from the PVE host.
+# FORCE_SINGLE_RUN makes the main script perform one pass instead of
+# entering its daemon loop.
+cat <<'EOF' >/usr/local/bin/iptag-run
+#!/usr/bin/env bash
+CONFIG_FILE="/opt/iptag/iptag.conf"
+SCRIPT_FILE="/opt/iptag/iptag"
+if [[ ! -f "$SCRIPT_FILE" ]]; then
+  echo "Error: Main script not found: $SCRIPT_FILE"
+  exit 1
+fi
+export FORCE_SINGLE_RUN=true
+exec "$SCRIPT_FILE"
+EOF
+chmod +x /usr/local/bin/iptag-run
+msg_ok "Created iptag-run executable - You can execute this manually by entering 'iptag-run' in the Proxmox host, so the script is executed by hand."
+
SPINNER_PID=""
echo -e "\n${APP} installation completed successfully! ${CL}\n"
+
+# Proper script termination
+exit 0