#!/bin/bash

# ==============================================================================
# HELPER FUNCTIONS FOR PACKAGE MANAGEMENT
# ==============================================================================
#
# This file provides unified helper functions for robust package installation
# and repository management across Debian/Ubuntu OS upgrades.
#
# Key Features:
# - Automatic retry logic for transient APT/network failures
# - Unified keyring cleanup from all 3 locations
# - Legacy installation cleanup (nvm, rbenv, rustup)
# - OS-upgrade-safe repository preparation
# - Service pattern matching for multi-version tools
#
# Usage in install scripts:
#   source /dev/stdin <<< "$FUNCTIONS"   # Load from build.func
#   prepare_repository_setup "mysql"
#   install_packages_with_retry "mysql-server" "mysql-client"
#
# Quick Reference (Core Helpers):
#   cleanup_tool_keyrings()       - Remove keyrings from all 3 locations
#   stop_all_services()           - Stop services by pattern (e.g. "php*-fpm")
#   verify_tool_version()         - Validate installed version matches expected
#   cleanup_legacy_install()      - Remove nvm, rbenv, rustup, etc.
#   prepare_repository_setup()    - Clean up repos + keyrings + validate APT
#   install_packages_with_retry() - Install with up to 3 attempts and APT refresh
#   upgrade_packages_with_retry() - Upgrade with up to 3 attempts and APT refresh
#
# ==============================================================================

# ------------------------------------------------------------------------------
# Cache installed version to avoid repeated checks
# ------------------------------------------------------------------------------
cache_installed_version() {
  local app="$1"
  local version="$2"
  mkdir -p /var/cache/app-versions
  echo "$version" >"/var/cache/app-versions/${app}_version.txt"
}

get_cached_version() {
  local app="$1"
  mkdir -p /var/cache/app-versions
  if [[ -f "/var/cache/app-versions/${app}_version.txt" ]]; then
    cat "/var/cache/app-versions/${app}_version.txt"
    return 0
  fi
  return 0
}
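
# Illustrative usage of the version cache (sketch only; "myapp" is a placeholder):
#   cache_installed_version "myapp" "1.2.3"
#   cached=$(get_cached_version "myapp")   # -> "1.2.3", empty if never cached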

# ------------------------------------------------------------------------------
# Clean up ALL keyring locations for a tool (unified helper)
# Usage: cleanup_tool_keyrings "mariadb" "mysql" "postgresql"
# ------------------------------------------------------------------------------
cleanup_tool_keyrings() {
  local tool_patterns=("$@")

  for pattern in "${tool_patterns[@]}"; do
    rm -f /usr/share/keyrings/${pattern}*.gpg \
      /etc/apt/keyrings/${pattern}*.gpg \
      /etc/apt/trusted.gpg.d/${pattern}*.gpg 2>/dev/null || true
  done
}

# ------------------------------------------------------------------------------
# Stop and disable all service instances matching a pattern
# Usage: stop_all_services "php*-fpm" "mysql" "mariadb"
# ------------------------------------------------------------------------------
stop_all_services() {
  local service_patterns=("$@")

  for pattern in "${service_patterns[@]}"; do
    # Find all matching services
    systemctl list-units --type=service --all 2>/dev/null |
      grep -oE "${pattern}[^ ]*\.service" |
      sort -u |
      while read -r service; do
        $STD systemctl stop "$service" 2>/dev/null || true
        $STD systemctl disable "$service" 2>/dev/null || true
      done
  done
}

# ------------------------------------------------------------------------------
# Verify installed tool version matches expected version
# Returns: 0 if match, 1 if mismatch (with warning)
# Usage: verify_tool_version "nodejs" "22" "$(node -v | grep -oP '^v\K[0-9]+')"
# ------------------------------------------------------------------------------
verify_tool_version() {
  local tool_name="$1"
  local expected_version="$2"
  local installed_version="$3"

  # Extract major version for comparison
  local expected_major="${expected_version%%.*}"
  local installed_major="${installed_version%%.*}"

  if [[ "$installed_major" != "$expected_major" ]]; then
    msg_warn "$tool_name version mismatch: expected $expected_version, got $installed_version"
    return 1
  fi

  return 0
}

# ------------------------------------------------------------------------------
# Clean up legacy installation methods (nvm, rbenv, rustup, etc.)
# Usage: cleanup_legacy_install "nodejs" -> removes nvm
# ------------------------------------------------------------------------------
cleanup_legacy_install() {
  local tool_name="$1"

  case "$tool_name" in
    nodejs | node)
      if [[ -d "$HOME/.nvm" ]]; then
        msg_info "Removing legacy nvm installation"
        rm -rf "$HOME/.nvm" "$HOME/.npm" "$HOME/.bower" "$HOME/.config/yarn" 2>/dev/null || true
        sed -i '/NVM_DIR/d' "$HOME/.bashrc" "$HOME/.profile" 2>/dev/null || true
        msg_ok "Legacy nvm installation removed"
      fi
      ;;
    ruby)
      if [[ -d "$HOME/.rbenv" ]]; then
        msg_info "Removing legacy rbenv installation"
        rm -rf "$HOME/.rbenv" 2>/dev/null || true
        sed -i '/rbenv/d' "$HOME/.bashrc" "$HOME/.profile" 2>/dev/null || true
        msg_ok "Legacy rbenv installation removed"
      fi
      ;;
    rust)
      if [[ -d "$HOME/.cargo" ]] || [[ -d "$HOME/.rustup" ]]; then
        msg_info "Removing legacy rustup installation"
        rm -rf "$HOME/.cargo" "$HOME/.rustup" 2>/dev/null || true
        sed -i '/cargo/d' "$HOME/.bashrc" "$HOME/.profile" 2>/dev/null || true
        msg_ok "Legacy rustup installation removed"
      fi
      ;;
    go | golang)
      if [[ -d "$HOME/go" ]]; then
        msg_info "Removing legacy Go workspace"
        # Keep user code, just remove GOPATH env
        sed -i '/GOPATH/d' "$HOME/.bashrc" "$HOME/.profile" 2>/dev/null || true
        msg_ok "Legacy Go workspace cleaned"
      fi
      ;;
  esac
}

# ------------------------------------------------------------------------------
# Unified repository preparation before setup
# Cleans up old repos, keyrings, and ensures APT is working
# Usage: prepare_repository_setup "mariadb" "mysql"
# ------------------------------------------------------------------------------
prepare_repository_setup() {
  local repo_names=("$@")

  # Clean up all old repository files
  for repo in "${repo_names[@]}"; do
    cleanup_old_repo_files "$repo"
  done

  # Clean up all keyrings
  cleanup_tool_keyrings "${repo_names[@]}"

  # Ensure APT is in working state
  ensure_apt_working || return 1

  return 0
}

# ------------------------------------------------------------------------------
# Install packages with retry logic
# Usage: install_packages_with_retry "mysql-server" "mysql-client"
# ------------------------------------------------------------------------------
install_packages_with_retry() {
  local packages=("$@")
  local max_retries=2
  local retry=0

  while [[ $retry -le $max_retries ]]; do
    if $STD apt install -y "${packages[@]}" 2>/dev/null; then
      return 0
    fi

    retry=$((retry + 1))
    if [[ $retry -le $max_retries ]]; then
      msg_warn "Package installation failed, retrying ($retry/$max_retries)..."
      sleep 2
      $STD apt update 2>/dev/null || true
    fi
  done

  return 1
}

# ------------------------------------------------------------------------------
# Upgrade specific packages with retry logic
# Usage: upgrade_packages_with_retry "mariadb-server" "mariadb-client"
# ------------------------------------------------------------------------------
upgrade_packages_with_retry() {
  local packages=("$@")
  local max_retries=2
  local retry=0

  while [[ $retry -le $max_retries ]]; do
    if $STD apt install --only-upgrade -y "${packages[@]}" 2>/dev/null; then
      return 0
    fi

    retry=$((retry + 1))
    if [[ $retry -le $max_retries ]]; then
      msg_warn "Package upgrade failed, retrying ($retry/$max_retries)..."
      sleep 2
      $STD apt update 2>/dev/null || true
    fi
  done

  return 1
}
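
# Typical install-script flow using the helpers above (illustrative sketch;
# the package names are examples only):
#   prepare_repository_setup "mariadb"
#   install_packages_with_retry "mariadb-server" "mariadb-client"
#   upgrade_packages_with_retry "mariadb-server"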

# ------------------------------------------------------------------------------
# Check if tool is already installed and optionally verify exact version
# Returns: 0 if installed (with optional version match), 1 if not installed
# Usage: is_tool_installed "mariadb" "11.4" || echo "Not installed"
# ------------------------------------------------------------------------------
is_tool_installed() {
  local tool_name="$1"
  local required_version="${2:-}"
  local installed_version=""

  case "$tool_name" in
    mariadb)
      if command -v mariadb >/dev/null 2>&1; then
        installed_version=$(mariadb --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1)
      fi
      ;;
    mysql)
      if command -v mysql >/dev/null 2>&1; then
        installed_version=$(mysql --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1)
      fi
      ;;
    mongodb | mongod)
      if command -v mongod >/dev/null 2>&1; then
        installed_version=$(mongod --version 2>/dev/null | awk '/db version/{print $3}' | cut -d. -f1,2)
      fi
      ;;
    node | nodejs)
      if command -v node >/dev/null 2>&1; then
        installed_version=$(node -v 2>/dev/null | grep -oP '^v\K[0-9]+')
      fi
      ;;
    php)
      if command -v php >/dev/null 2>&1; then
        installed_version=$(php -v 2>/dev/null | awk '/^PHP/{print $2}' | cut -d. -f1,2)
      fi
      ;;
    postgres | postgresql)
      if command -v psql >/dev/null 2>&1; then
        installed_version=$(psql --version 2>/dev/null | awk '{print $3}' | cut -d. -f1)
      fi
      ;;
    ruby)
      if command -v ruby >/dev/null 2>&1; then
        installed_version=$(ruby --version 2>/dev/null | awk '{print $2}' | cut -d. -f1,2)
      fi
      ;;
    rust | rustc)
      if command -v rustc >/dev/null 2>&1; then
        installed_version=$(rustc --version 2>/dev/null | awk '{print $2}')
      fi
      ;;
    go | golang)
      if command -v go >/dev/null 2>&1; then
        installed_version=$(go version 2>/dev/null | awk '{print $3}' | sed 's/go//')
      fi
      ;;
    clickhouse)
      if command -v clickhouse >/dev/null 2>&1; then
        installed_version=$(clickhouse --version 2>/dev/null | awk '{print $2}')
      fi
      ;;
  esac

  if [[ -z "$installed_version" ]]; then
    return 1 # Not installed
  fi

  if [[ -n "$required_version" && "$installed_version" != "$required_version" ]]; then
    echo "$installed_version"
    return 1 # Version mismatch
  fi

  echo "$installed_version"
  return 0 # Installed and version matches (if specified)
}
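
# Example check before (re)installing (sketch; assumes msg_* helpers are loaded
# and the version number is a placeholder):
#   if ver=$(is_tool_installed "mariadb" "11.4"); then
#     msg_ok "MariaDB ${ver} already installed"
#   fi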

# ------------------------------------------------------------------------------
# Remove old tool version completely (purge + cleanup repos)
# Usage: remove_old_tool_version "mariadb" "repository-name"
# ------------------------------------------------------------------------------
remove_old_tool_version() {
  local tool_name="$1"
  local repo_name="${2:-$tool_name}"

  case "$tool_name" in
    mariadb)
      stop_all_services "mariadb"
      $STD apt purge -y 'mariadb*' >/dev/null 2>&1 || true
      cleanup_tool_keyrings "mariadb"
      ;;
    mysql)
      stop_all_services "mysql"
      $STD apt purge -y 'mysql*' >/dev/null 2>&1 || true
      rm -rf /var/lib/mysql 2>/dev/null || true
      cleanup_tool_keyrings "mysql"
      ;;
    mongodb)
      stop_all_services "mongod"
      $STD apt purge -y 'mongodb*' >/dev/null 2>&1 || true
      rm -rf /var/lib/mongodb 2>/dev/null || true
      cleanup_tool_keyrings "mongodb"
      ;;
    node | nodejs)
      $STD apt purge -y nodejs npm >/dev/null 2>&1 || true
      # Clean up npm global modules
      if command -v npm >/dev/null 2>&1; then
        npm list -g 2>/dev/null | grep -oE '^ \S+' | awk '{print $1}' | while read -r module; do
          npm uninstall -g "$module" >/dev/null 2>&1 || true
        done
      fi
      cleanup_legacy_install "nodejs"
      cleanup_tool_keyrings "nodesource"
      ;;
    php)
      stop_all_services "php.*-fpm"
      $STD apt purge -y 'php*' >/dev/null 2>&1 || true
      rm -rf /etc/php 2>/dev/null || true
      cleanup_tool_keyrings "deb.sury.org-php" "php"
      ;;
    postgresql)
      stop_all_services "postgresql"
      $STD apt purge -y 'postgresql*' >/dev/null 2>&1 || true
      # Keep data directory for safety (can be removed manually if needed)
      # rm -rf /var/lib/postgresql 2>/dev/null || true
      cleanup_tool_keyrings "postgresql" "pgdg"
      ;;
    java)
      $STD apt purge -y 'temurin*' 'adoptium*' 'openjdk*' >/dev/null 2>&1 || true
      cleanup_tool_keyrings "adoptium"
      ;;
    ruby)
      cleanup_legacy_install "ruby"
      $STD apt purge -y 'ruby*' >/dev/null 2>&1 || true
      ;;
    rust)
      cleanup_legacy_install "rust"
      ;;
    go | golang)
      rm -rf /usr/local/go 2>/dev/null || true
      cleanup_legacy_install "golang"
      ;;
    clickhouse)
      stop_all_services "clickhouse-server"
      $STD apt purge -y 'clickhouse*' >/dev/null 2>&1 || true
      rm -rf /var/lib/clickhouse 2>/dev/null || true
      cleanup_tool_keyrings "clickhouse"
      ;;
  esac

  # Clean up old repository files (both .list and .sources)
  cleanup_old_repo_files "$repo_name"

  return 0
}

# ------------------------------------------------------------------------------
# Determine if tool update/upgrade is needed
# Returns: 0 (update needed), 1 (already up-to-date)
# Usage: if should_update_tool "mariadb" "11.4"; then ... fi
# ------------------------------------------------------------------------------
should_update_tool() {
  local tool_name="$1"
  local target_version="$2"
  local current_version=""

  # Get currently installed version
  current_version=$(is_tool_installed "$tool_name" 2>/dev/null) || return 0 # Not installed = needs install

  # If versions are identical, no update needed
  if [[ "$current_version" == "$target_version" ]]; then
    return 1 # No update needed
  fi

  return 0 # Update needed
}
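
# Example upgrade decision (illustrative sketch; version numbers are placeholders):
#   if should_update_tool "mariadb" "11.4"; then
#     remove_old_tool_version "mariadb"
#     # ...then register the new repository and install the target version
#   fi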

# ------------------------------------------------------------------------------
# Unified repository management for tools
# Handles adding, updating, and verifying tool repositories
# Usage: manage_tool_repository "mariadb" "11.4" "https://repo..." "GPG_key_url"
# Supports: mariadb, mongodb, nodejs, postgresql, php, mysql
# ------------------------------------------------------------------------------
manage_tool_repository() {
  local tool_name="$1"
  local version="$2"
  local repo_url="$3"
  local gpg_key_url="${4:-}"
  local distro_id repo_component suite

  distro_id=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')

  case "$tool_name" in
    mariadb)
      if [[ -z "$repo_url" || -z "$gpg_key_url" ]]; then
        msg_error "MariaDB repository requires repo_url and gpg_key_url"
        return 1
      fi

      # Clean old repos first
      cleanup_old_repo_files "mariadb"

      # Get suite for fallback handling
      local distro_codename
      distro_codename=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
      suite=$(get_fallback_suite "$distro_id" "$distro_codename" "$repo_url/$distro_id")

      # Setup new repository using deb822 format
      setup_deb822_repo "mariadb" "$gpg_key_url" "$repo_url/$distro_id" "$suite" "main" "amd64 arm64" || return 1
      return 0
      ;;

    mongodb)
      if [[ -z "$repo_url" || -z "$gpg_key_url" ]]; then
        msg_error "MongoDB repository requires repo_url and gpg_key_url"
        return 1
      fi

      # Clean old repos first
      cleanup_old_repo_files "mongodb"

      # Import GPG key
      mkdir -p /etc/apt/keyrings
      if ! curl -fsSL "$gpg_key_url" | gpg --dearmor --yes -o "/etc/apt/keyrings/mongodb-server-${version}.gpg" 2>/dev/null; then
        msg_error "Failed to download MongoDB GPG key"
        return 1
      fi

      # Setup repository
      local distro_codename
      distro_codename=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

      # Suite mapping with fallback for newer releases not yet supported by upstream
      if [[ "$distro_id" == "debian" ]]; then
        case "$distro_codename" in
          trixie | forky | sid)
            # Testing/unstable releases fall back to the latest stable suite
            suite="bookworm"
            ;;
          bookworm)
            suite="bookworm"
            ;;
          bullseye)
            suite="bullseye"
            ;;
          *)
            # Unknown release: fall back to the latest stable suite
            msg_warn "Unknown Debian release '${distro_codename}', using bookworm"
            suite="bookworm"
            ;;
        esac
      elif [[ "$distro_id" == "ubuntu" ]]; then
        case "$distro_codename" in
          oracular | plucky)
            # Newer releases fall back to the latest LTS
            suite="noble"
            ;;
          noble)
            suite="noble"
            ;;
          jammy)
            suite="jammy"
            ;;
          focal)
            suite="focal"
            ;;
          *)
            # Unknown release: fall back to the latest LTS
            msg_warn "Unknown Ubuntu release '${distro_codename}', using noble"
            suite="noble"
            ;;
        esac
      else
        # For other distros, try generic fallback
        suite=$(get_fallback_suite "$distro_id" "$distro_codename" "$repo_url")
      fi

      repo_component="main"
      [[ "$distro_id" == "ubuntu" ]] && repo_component="multiverse"

      cat <<EOF >/etc/apt/sources.list.d/mongodb-org-${version}.sources
Types: deb
URIs: ${repo_url}
Suites: ${suite}/mongodb-org/${version}
Components: ${repo_component}
Architectures: amd64 arm64
Signed-By: /etc/apt/keyrings/mongodb-server-${version}.gpg
EOF
      return 0
      ;;

    nodejs)
      if [[ -z "$repo_url" || -z "$gpg_key_url" ]]; then
        msg_error "Node.js repository requires repo_url and gpg_key_url"
        return 1
      fi

      cleanup_old_repo_files "nodesource"

      # NodeSource uses deb822 format with GPG from repo
      local distro_codename
      distro_codename=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

      # Create keyring directory first
      mkdir -p /etc/apt/keyrings

      # Download GPG key from NodeSource
      curl -fsSL "$gpg_key_url" | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg || {
        msg_error "Failed to import NodeSource GPG key"
        return 1
      }

      cat <<EOF >/etc/apt/sources.list.d/nodesource.sources
Types: deb
URIs: $repo_url
Suites: nodistro
Components: main
Architectures: amd64 arm64
Signed-By: /etc/apt/keyrings/nodesource.gpg
EOF
      return 0
      ;;

    php)
      if [[ -z "$gpg_key_url" ]]; then
        msg_error "PHP repository requires gpg_key_url"
        return 1
      fi

      cleanup_old_repo_files "php"

      # Download and install keyring
      curl -fsSLo /tmp/debsuryorg-archive-keyring.deb "$gpg_key_url" || {
        msg_error "Failed to download PHP keyring"
        return 1
      }
      dpkg -i /tmp/debsuryorg-archive-keyring.deb >/dev/null 2>&1 || {
        msg_error "Failed to install PHP keyring"
        rm -f /tmp/debsuryorg-archive-keyring.deb
        return 1
      }
      rm -f /tmp/debsuryorg-archive-keyring.deb

      # Setup repository
      local distro_codename
      distro_codename=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
      cat <<EOF >/etc/apt/sources.list.d/php.sources
Types: deb
URIs: https://packages.sury.org/php
Suites: $distro_codename
Components: main
Architectures: amd64 arm64
Signed-By: /usr/share/keyrings/deb.sury.org-php.gpg
EOF
      return 0
      ;;

    postgresql)
      if [[ -z "$gpg_key_url" ]]; then
        msg_error "PostgreSQL repository requires gpg_key_url"
        return 1
      fi

      cleanup_old_repo_files "postgresql"

      # Create keyring directory first
      mkdir -p /etc/apt/keyrings

      # Import PostgreSQL key
      curl -fsSL "$gpg_key_url" | gpg --dearmor -o /etc/apt/keyrings/postgresql.gpg || {
        msg_error "Failed to import PostgreSQL GPG key"
        return 1
      }

      # Setup repository
      local distro_codename
      distro_codename=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
      cat <<EOF >/etc/apt/sources.list.d/postgresql.sources
Types: deb
URIs: http://apt.postgresql.org/pub/repos/apt
Suites: $distro_codename-pgdg
Components: main
Architectures: amd64 arm64
Signed-By: /etc/apt/keyrings/postgresql.gpg
EOF
      return 0
      ;;

    *)
      msg_error "Unknown tool repository: $tool_name"
      return 1
      ;;
  esac

  return 0
}
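
# Example repository registration (illustrative sketch; the URLs below are
# placeholders, not real endpoints):
#   manage_tool_repository "mariadb" "11.4" \
#     "https://mirror.example.com/mariadb/repo/11.4" \
#     "https://example.com/keys/mariadb-release-signing-key.pgp"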

# ------------------------------------------------------------------------------
# Unified package upgrade function (with apt update caching)
# ------------------------------------------------------------------------------
upgrade_package() {
  local package="$1"

  # Use same caching logic as ensure_dependencies
  local apt_cache_file="/var/cache/apt-update-timestamp"
  local current_time=$(date +%s)
  local last_update=0

  if [[ -f "$apt_cache_file" ]]; then
    last_update=$(cat "$apt_cache_file" 2>/dev/null || echo 0)
  fi

  if ((current_time - last_update > 300)); then
    $STD apt update || {
      msg_warn "APT update failed in upgrade_package - continuing with cached packages"
    }
    echo "$current_time" >"$apt_cache_file"
  fi

  $STD apt install --only-upgrade -y "$package" || {
    msg_warn "Failed to upgrade $package"
    return 1
  }
}

# ------------------------------------------------------------------------------
# Repository availability check
# ------------------------------------------------------------------------------
verify_repo_available() {
  local repo_url="$1"
  local suite="$2"

  if curl -fsSL --max-time 10 "${repo_url}/dists/${suite}/Release" &>/dev/null; then
    return 0
  fi
  return 1
}

# ------------------------------------------------------------------------------
# Ensure dependencies are installed (with apt update caching)
# ------------------------------------------------------------------------------
ensure_dependencies() {
  local deps=("$@")
  local missing=()

  for dep in "${deps[@]}"; do
    if ! command -v "$dep" &>/dev/null && ! is_package_installed "$dep"; then
      missing+=("$dep")
    fi
  done

  if [[ ${#missing[@]} -gt 0 ]]; then
    # Only run apt update if not done recently (within last 5 minutes)
    local apt_cache_file="/var/cache/apt-update-timestamp"
    local current_time=$(date +%s)
    local last_update=0

    if [[ -f "$apt_cache_file" ]]; then
      last_update=$(cat "$apt_cache_file" 2>/dev/null || echo 0)
    fi

    if ((current_time - last_update > 300)); then
      # Ensure orphaned sources are cleaned before updating
      cleanup_orphaned_sources 2>/dev/null || true

      if ! $STD apt update; then
        ensure_apt_working || return 1
      fi
      echo "$current_time" >"$apt_cache_file"
    fi

    $STD apt install -y "${missing[@]}" || {
      msg_error "Failed to install dependencies: ${missing[*]}"
      return 1
    }
  fi
}

# ------------------------------------------------------------------------------
# Smart version comparison
# ------------------------------------------------------------------------------
version_gt() {
  test "$(printf '%s\n' "$@" | sort -V | head -n 1)" != "$1"
}
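
# Semantics: returns 0 (true) only if $1 is strictly greater than $2 under
# sort -V ordering. For example:
#   version_gt "2.10.1" "2.9.9"  # -> true  (returns 0)
#   version_gt "1.0" "1.0"       # -> false (returns 1)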

# ------------------------------------------------------------------------------
# Get system architecture (normalized)
# ------------------------------------------------------------------------------
get_system_arch() {
  local arch_type="${1:-dpkg}" # dpkg, uname, or both
  local arch

  case "$arch_type" in
    dpkg)
      arch=$(dpkg --print-architecture 2>/dev/null)
      ;;
    uname)
      arch=$(uname -m)
      [[ "$arch" == "x86_64" ]] && arch="amd64"
      [[ "$arch" == "aarch64" ]] && arch="arm64"
      ;;
    both | *)
      arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
      [[ "$arch" == "x86_64" ]] && arch="amd64"
      [[ "$arch" == "aarch64" ]] && arch="arm64"
      ;;
  esac

  echo "$arch"
}

# ------------------------------------------------------------------------------
# Create temporary directory with automatic cleanup
# ------------------------------------------------------------------------------
create_temp_dir() {
  local tmp_dir=$(mktemp -d)
  # Set trap to cleanup on EXIT, ERR, INT, TERM
  trap "rm -rf '$tmp_dir'" EXIT ERR INT TERM
  echo "$tmp_dir"
}

# ------------------------------------------------------------------------------
# Check if package is installed (faster than dpkg -l | grep)
# ------------------------------------------------------------------------------
is_package_installed() {
  local package="$1"
  dpkg-query -W -f='${Status}' "$package" 2>/dev/null | grep -q "^install ok installed$"
}

# ------------------------------------------------------------------------------
# GitHub API call with authentication and rate limit handling
# ------------------------------------------------------------------------------
github_api_call() {
  local url="$1"
  local output_file="${2:-/dev/stdout}"
  local max_retries=3
  local retry_delay=2

  local header_args=()
  [[ -n "${GITHUB_TOKEN:-}" ]] && header_args=(-H "Authorization: Bearer $GITHUB_TOKEN")

  for attempt in $(seq 1 $max_retries); do
    local http_code
    http_code=$(curl -fsSL -w "%{http_code}" -o "$output_file" \
      -H "Accept: application/vnd.github+json" \
      -H "X-GitHub-Api-Version: 2022-11-28" \
      "${header_args[@]}" \
      "$url" 2>/dev/null || echo "000")

    case "$http_code" in
      200)
        return 0
        ;;
      403)
        # Rate limit - check if we can retry
        if [[ $attempt -lt $max_retries ]]; then
          msg_warn "GitHub API rate limit, waiting ${retry_delay}s... (attempt $attempt/$max_retries)"
          sleep "$retry_delay"
          retry_delay=$((retry_delay * 2))
          continue
        fi
        msg_error "GitHub API rate limit exceeded. Set GITHUB_TOKEN to increase limits."
        return 1
        ;;
      404)
        msg_error "GitHub API endpoint not found: $url"
        return 1
        ;;
      *)
        if [[ $attempt -lt $max_retries ]]; then
          sleep "$retry_delay"
          continue
        fi
        msg_error "GitHub API call failed with HTTP $http_code"
        return 1
        ;;
    esac
  done

  return 1
}
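
# Example call (sketch; the repository path is a placeholder):
#   github_api_call "https://api.github.com/repos/owner/repo/releases/latest" "/tmp/release.json"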

should_upgrade() {
  local current="$1"
  local target="$2"

  [[ -z "$current" ]] && return 0
  version_gt "$target" "$current" && return 0
  return 1
}

# ------------------------------------------------------------------------------
# Get OS information (cached for performance)
# ------------------------------------------------------------------------------
get_os_info() {
  local field="${1:-all}" # id, codename, version, version_id, all

  # Cache OS info to avoid repeated file reads
  if [[ -z "${_OS_ID:-}" ]]; then
    export _OS_ID=$(awk -F= '/^ID=/{gsub(/"/,"",$2); print $2}' /etc/os-release)
    export _OS_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{gsub(/"/,"",$2); print $2}' /etc/os-release)
    export _OS_VERSION=$(awk -F= '/^VERSION_ID=/{gsub(/"/,"",$2); print $2}' /etc/os-release)
    export _OS_VERSION_FULL=$(awk -F= '/^VERSION=/{gsub(/"/,"",$2); print $2}' /etc/os-release)
  fi

  case "$field" in
    id) echo "$_OS_ID" ;;
    codename) echo "$_OS_CODENAME" ;;
    version) echo "$_OS_VERSION" ;;
    version_id) echo "$_OS_VERSION" ;;
    version_full) echo "$_OS_VERSION_FULL" ;;
    all) echo "ID=$_OS_ID CODENAME=$_OS_CODENAME VERSION=$_OS_VERSION" ;;
    *) echo "$_OS_ID" ;;
  esac
}

# ------------------------------------------------------------------------------
# Check if running on specific OS
# ------------------------------------------------------------------------------
is_debian() {
  [[ "$(get_os_info id)" == "debian" ]]
}

is_ubuntu() {
  [[ "$(get_os_info id)" == "ubuntu" ]]
}

is_alpine() {
  [[ "$(get_os_info id)" == "alpine" ]]
}

# ------------------------------------------------------------------------------
# Get Debian/Ubuntu major version
# ------------------------------------------------------------------------------
get_os_version_major() {
  local version=$(get_os_info version)
  echo "${version%%.*}"
}

# ------------------------------------------------------------------------------
# Download file with retry logic and progress
# ------------------------------------------------------------------------------
download_file() {
  local url="$1"
  local output="$2"
  local max_retries="${3:-3}"
  local show_progress="${4:-false}"

  local curl_opts=(-fsSL)
  [[ "$show_progress" == "true" ]] && curl_opts=(-fL#)

  for attempt in $(seq 1 $max_retries); do
    if curl "${curl_opts[@]}" -o "$output" "$url"; then
      return 0
    fi

    if [[ $attempt -lt $max_retries ]]; then
      msg_warn "Download failed, retrying... (attempt $attempt/$max_retries)"
      sleep 2
    fi
  done

  msg_error "Failed to download: $url"
  return 1
}

# ------------------------------------------------------------------------------
# Get fallback suite for repository (comprehensive mapping)
# ------------------------------------------------------------------------------
get_fallback_suite() {
  local distro_id="$1"
  local distro_codename="$2"
  local repo_base_url="$3"

  # Check if current codename works
  if verify_repo_available "$repo_base_url" "$distro_codename"; then
    echo "$distro_codename"
    return 0
  fi

  # Comprehensive fallback mappings
  case "$distro_id" in
    debian)
      case "$distro_codename" in
        # Debian 13 (Trixie) → Debian 12 (Bookworm)
        trixie | forky | sid)
          echo "bookworm"
          ;;
        # Debian 12 (Bookworm) stays
        bookworm)
          echo "bookworm"
          ;;
        # Debian 11 (Bullseye) stays
        bullseye)
          echo "bullseye"
          ;;
        # Unknown → latest stable
        *)
          echo "bookworm"
          ;;
      esac
      ;;
    ubuntu)
      case "$distro_codename" in
        # Ubuntu 24.10 (Oracular) → 24.04 LTS (Noble)
        oracular | plucky)
          echo "noble"
          ;;
        # Ubuntu 24.04 LTS (Noble) stays
        noble)
          echo "noble"
          ;;
        # Ubuntu 23.10 (Mantic) → 22.04 LTS (Jammy)
        mantic | lunar)
          echo "jammy"
          ;;
        # Ubuntu 22.04 LTS (Jammy) stays
        jammy)
          echo "jammy"
          ;;
        # Ubuntu 20.04 LTS (Focal) stays
        focal)
          echo "focal"
          ;;
        # Unknown → latest LTS
        *)
          echo "jammy"
          ;;
      esac
      ;;
    *)
      echo "$distro_codename"
      ;;
  esac
}
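
# Example (sketch): on Debian trixie with an upstream repo that only publishes
# bookworm, this returns "bookworm"; if the repo already publishes trixie, the
# original codename is kept. The URL below is a placeholder:
#   suite=$(get_fallback_suite "debian" "trixie" "https://example.com/repo/debian")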

# ------------------------------------------------------------------------------
# Verify package source and version
# ------------------------------------------------------------------------------
verify_package_source() {
  local package="$1"
  local expected_version="$2"

  if apt-cache policy "$package" 2>/dev/null | grep -q "$expected_version"; then
    return 0
  fi
  return 1
}

# ------------------------------------------------------------------------------
# Check if running on LTS version
# ------------------------------------------------------------------------------
is_lts_version() {
  local os_id=$(get_os_info id)
  local codename=$(get_os_info codename)

  if [[ "$os_id" == "ubuntu" ]]; then
    case "$codename" in
      focal | jammy | noble) return 0 ;; # 20.04, 22.04, 24.04
      *) return 1 ;;
    esac
  elif [[ "$os_id" == "debian" ]]; then
    # Debian releases are all "stable"
    case "$codename" in
      bullseye | bookworm | trixie) return 0 ;;
      *) return 1 ;;
    esac
  fi

  return 1
}

# ------------------------------------------------------------------------------
# Get optimal number of parallel jobs (cached)
# ------------------------------------------------------------------------------
get_parallel_jobs() {
  if [[ -z "${_PARALLEL_JOBS:-}" ]]; then
    local cpu_count=$(nproc 2>/dev/null || echo 1)
    local mem_gb=$(free -g | awk '/^Mem:/{print $2}')

    # Limit by available memory (assume 1GB per job for compilation)
    local max_by_mem=$((mem_gb > 0 ? mem_gb : 1))
    local max_jobs=$((cpu_count < max_by_mem ? cpu_count : max_by_mem))

    # At least 1, at most cpu_count
    export _PARALLEL_JOBS=$((max_jobs > 0 ? max_jobs : 1))
  fi
  echo "$_PARALLEL_JOBS"
}

# ------------------------------------------------------------------------------
# Get default PHP version for OS
# ------------------------------------------------------------------------------
get_default_php_version() {
  local os_id=$(get_os_info id)
  local os_version=$(get_os_version_major)

  case "$os_id" in
    debian)
      case "$os_version" in
        13) echo "8.3" ;; # Debian 13 (Trixie)
        12) echo "8.2" ;; # Debian 12 (Bookworm)
        11) echo "7.4" ;; # Debian 11 (Bullseye)
        *) echo "8.2" ;;
      esac
      ;;
    ubuntu)
      case "$os_version" in
        24) echo "8.3" ;; # Ubuntu 24.04 LTS (Noble)
        22) echo "8.1" ;; # Ubuntu 22.04 LTS (Jammy)
        20) echo "7.4" ;; # Ubuntu 20.04 LTS (Focal)
        *) echo "8.1" ;;
      esac
      ;;
    *)
      echo "8.2"
      ;;
  esac
}

# ------------------------------------------------------------------------------
# Get default Python version for OS
# ------------------------------------------------------------------------------
get_default_python_version() {
  local os_id=$(get_os_info id)
  local os_version=$(get_os_version_major)

  case "$os_id" in
    debian)
      case "$os_version" in
        13) echo "3.12" ;; # Debian 13 (Trixie)
        12) echo "3.11" ;; # Debian 12 (Bookworm)
        11) echo "3.9" ;;  # Debian 11 (Bullseye)
        *) echo "3.11" ;;
      esac
      ;;
    ubuntu)
      case "$os_version" in
        24) echo "3.12" ;; # Ubuntu 24.04 LTS
        22) echo "3.10" ;; # Ubuntu 22.04 LTS
        20) echo "3.8" ;;  # Ubuntu 20.04 LTS
        *) echo "3.10" ;;
      esac
      ;;
    *)
      echo "3.11"
      ;;
  esac
}

# ------------------------------------------------------------------------------
# Get default Node.js LTS version
# ------------------------------------------------------------------------------
get_default_nodejs_version() {
  # Always return current LTS (as of 2025)
  echo "22"
}

# ------------------------------------------------------------------------------
# Check if package manager is locked
# ------------------------------------------------------------------------------
is_apt_locked() {
  if fuser /var/lib/dpkg/lock-frontend &>/dev/null ||
    fuser /var/lib/apt/lists/lock &>/dev/null ||
    fuser /var/cache/apt/archives/lock &>/dev/null; then
    return 0
  fi
  return 1
}

# ------------------------------------------------------------------------------
# Wait for apt to be available
# ------------------------------------------------------------------------------
wait_for_apt() {
  local max_wait="${1:-300}" # 5 minutes default
  local waited=0

  while is_apt_locked; do
    if [[ $waited -ge $max_wait ]]; then
      msg_error "Timeout waiting for apt to be available"
      return 1
    fi

    sleep 5
    waited=$((waited + 5))
  done

  return 0
}

# ------------------------------------------------------------------------------
# Cleanup old repository files (migration helper)
# ------------------------------------------------------------------------------
cleanup_old_repo_files() {
  local app="$1"

  # Remove old-style .list files (including backups)
  rm -f /etc/apt/sources.list.d/"${app}"*.list
  rm -f /etc/apt/sources.list.d/"${app}"*.list.save
  rm -f /etc/apt/sources.list.d/"${app}"*.list.distUpgrade
  rm -f /etc/apt/sources.list.d/"${app}"*.list.dpkg-*

  # Remove old GPG keys from trusted.gpg.d
  rm -f /etc/apt/trusted.gpg.d/"${app}"*.gpg

  # Remove keyrings from /etc/apt/keyrings
  rm -f /etc/apt/keyrings/"${app}"*.gpg

  # Remove ALL .sources files for this app (including the main one)
  # This ensures no orphaned .sources files reference deleted keyrings
  rm -f /etc/apt/sources.list.d/"${app}"*.sources
}

# ------------------------------------------------------------------------------
# Cleanup orphaned .sources files that reference missing keyrings
# This prevents APT signature verification errors
# Call this at the start of any setup function to ensure APT is in a clean state
# ------------------------------------------------------------------------------
cleanup_orphaned_sources() {
  local sources_dir="/etc/apt/sources.list.d"
  local keyrings_dir="/etc/apt/keyrings"

  [[ ! -d "$sources_dir" ]] && return 0

  while IFS= read -r -d '' sources_file; do
    local basename_file
    basename_file=$(basename "$sources_file")

    # NEVER remove debian.sources - this is the standard Debian repository
    if [[ "$basename_file" == "debian.sources" ]]; then
      continue
    fi

    # Extract Signed-By path from .sources file
    local keyring_path
    keyring_path=$(grep -E '^Signed-By:' "$sources_file" 2>/dev/null | awk '{print $2}')

    # If keyring doesn't exist, remove the .sources file
    if [[ -n "$keyring_path" ]] && [[ ! -f "$keyring_path" ]]; then
      rm -f "$sources_file"
    fi
  done < <(find "$sources_dir" -name "*.sources" -print0 2>/dev/null)

  # Also check for broken symlinks in keyrings directory
  if [[ -d "$keyrings_dir" ]]; then
    find "$keyrings_dir" -type l ! -exec test -e {} \; -delete 2>/dev/null || true
  fi
}

# ------------------------------------------------------------------------------
# Ensure APT is in a working state before installing packages
# This should be called at the start of any setup function
# ------------------------------------------------------------------------------
ensure_apt_working() {
  # Clean up orphaned sources first
  cleanup_orphaned_sources

  # Try to update package lists
  if ! $STD apt update; then
    # More aggressive cleanup
    rm -f /etc/apt/sources.list.d/*.sources 2>/dev/null || true
    cleanup_orphaned_sources

    # Try again
    if ! $STD apt update; then
      msg_error "Cannot update package lists - APT is critically broken"
      return 1
    fi
  fi

  return 0
}

# ------------------------------------------------------------------------------
# Standardized deb822 repository setup
# Validates all parameters and fails safely if any are empty
# ------------------------------------------------------------------------------
setup_deb822_repo() {
  local name="$1"
  local gpg_url="$2"
  local repo_url="$3"
  local suite="$4"
  local component="${5:-main}"
  local architectures="${6:-amd64 arm64}"

  # Validate required parameters
  if [[ -z "$name" || -z "$gpg_url" || -z "$repo_url" || -z "$suite" ]]; then
    msg_error "setup_deb822_repo: missing required parameters (name=$name, gpg=$gpg_url, repo=$repo_url, suite=$suite)"
    return 1
  fi

  # Cleanup old configs for this app
  cleanup_old_repo_files "$name"

  # Cleanup any orphaned .sources files from other apps
  cleanup_orphaned_sources

  # Ensure keyring directory exists
  mkdir -p /etc/apt/keyrings || {
    msg_error "Failed to create /etc/apt/keyrings directory"
    return 1
  }

  # Download GPG key (with --yes to avoid interactive prompts)
  curl -fsSL "$gpg_url" | gpg --dearmor --yes -o "/etc/apt/keyrings/${name}.gpg" 2>/dev/null || {
    msg_error "Failed to download or import GPG key for ${name} from $gpg_url"
    return 1
  }

  # Create deb822 sources file
  cat <<EOF >/etc/apt/sources.list.d/${name}.sources
Types: deb
URIs: $repo_url
Suites: $suite
Components: $component
Architectures: $architectures
Signed-By: /etc/apt/keyrings/${name}.gpg
EOF

  # Use cached apt update
  local apt_cache_file="/var/cache/apt-update-timestamp"
  local current_time=$(date +%s)
  local last_update=0

  if [[ -f "$apt_cache_file" ]]; then
    last_update=$(cat "$apt_cache_file" 2>/dev/null || echo 0)
  fi

  # For repo changes, always update but respect short-term cache (30s)
  if ((current_time - last_update > 30)); then
    $STD apt update
    echo "$current_time" >"$apt_cache_file"
  fi
}
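
# Example deb822 setup (illustrative sketch; the URLs and tool name are
# placeholders, not a real repository):
#   setup_deb822_repo "mytool" \
#     "https://example.com/keys/mytool.asc" \
#     "https://example.com/apt" \
#     "$(get_os_info codename)" "main" "amd64 arm64"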

# ------------------------------------------------------------------------------
# Package version hold/unhold helpers
# ------------------------------------------------------------------------------
hold_package_version() {
  local package="$1"
  $STD apt-mark hold "$package"
}

unhold_package_version() {
  local package="$1"
  $STD apt-mark unhold "$package"
}

# ------------------------------------------------------------------------------
# Safe service restart with verification
# ------------------------------------------------------------------------------
safe_service_restart() {
  local service="$1"

  if systemctl is-active --quiet "$service"; then
    $STD systemctl restart "$service"
  else
    $STD systemctl start "$service"
  fi

  if ! systemctl is-active --quiet "$service"; then
    msg_error "Failed to start $service"
    systemctl status "$service" --no-pager
    return 1
  fi
  return 0
}

# ------------------------------------------------------------------------------
# Enable and start service (with error handling)
# ------------------------------------------------------------------------------
enable_and_start_service() {
  local service="$1"

  if ! systemctl enable "$service" &>/dev/null; then
    return 1
  fi

  if ! systemctl start "$service" &>/dev/null; then
    msg_error "Failed to start $service"
    systemctl status "$service" --no-pager
    return 1
  fi

  return 0
}

# ------------------------------------------------------------------------------
# Check if service is enabled
# ------------------------------------------------------------------------------
is_service_enabled() {
  local service="$1"
  systemctl is-enabled --quiet "$service" 2>/dev/null
}

# ------------------------------------------------------------------------------
# Check if service is running
# ------------------------------------------------------------------------------
is_service_running() {
  local service="$1"
  systemctl is-active --quiet "$service" 2>/dev/null
}

# ------------------------------------------------------------------------------
# Extract version from JSON (GitHub releases)
# ------------------------------------------------------------------------------
extract_version_from_json() {
  local json="$1"
  local field="${2:-tag_name}"
  local strip_v="${3:-true}"

  ensure_dependencies jq

  local version
  version=$(echo "$json" | jq -r ".${field} // empty")

  if [[ -z "$version" ]]; then
    return 1
  fi

  if [[ "$strip_v" == "true" ]]; then
    echo "${version#v}"
  else
    echo "$version"
  fi
}

# ------------------------------------------------------------------------------
# Get latest GitHub release version
# ------------------------------------------------------------------------------
get_latest_github_release() {
  local repo="$1"
  local strip_v="${2:-true}"
  local temp_file=$(mktemp)

  if ! github_api_call "https://api.github.com/repos/${repo}/releases/latest" "$temp_file"; then
    rm -f "$temp_file"
    return 1
  fi

  local version
  version=$(extract_version_from_json "$(cat "$temp_file")" "tag_name" "$strip_v")
  rm -f "$temp_file"

  if [[ -z "$version" ]]; then
    return 1
  fi

  echo "$version"
}
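
# Example (sketch; "owner/repo" is a placeholder):
#   latest=$(get_latest_github_release "owner/repo") || exit 1
#   echo "Latest release: $latest"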

# ------------------------------------------------------------------------------
# Debug logging (only if DEBUG=1)
# ------------------------------------------------------------------------------
debug_log() {
  [[ "${DEBUG:-0}" == "1" ]] && echo "[DEBUG] $*" >&2
}

# ------------------------------------------------------------------------------
# Performance timing helper
# ------------------------------------------------------------------------------
start_timer() {
  echo $(date +%s)
}

end_timer() {
  local start_time="$1"
  local label="${2:-Operation}"
  local end_time=$(date +%s)
  local duration=$((end_time - start_time))
}

# ------------------------------------------------------------------------------
# GPG key fingerprint verification
# ------------------------------------------------------------------------------
verify_gpg_fingerprint() {
  local key_file="$1"
  local expected_fingerprint="$2"

  local actual_fingerprint
  actual_fingerprint=$(gpg --show-keys --with-fingerprint --with-colons "$key_file" 2>&1 | grep -m1 '^fpr:' | cut -d: -f10)

  if [[ "$actual_fingerprint" == "$expected_fingerprint" ]]; then
    return 0
  fi

  msg_error "GPG fingerprint mismatch! Expected: $expected_fingerprint, Got: $actual_fingerprint"
  return 1
}

# ==============================================================================
# EXISTING FUNCTIONS
# ==============================================================================

# ------------------------------------------------------------------------------
# Checks for new GitHub release (latest tag).
#
# Description:
# - Queries the GitHub API for the latest release tag
# - Compares it to a local cached version (~/.<app>)
# - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
#
# Usage:
#   if check_for_gh_release "flaresolverr" "FlareSolverr/FlareSolverr" [optional] "v1.1.1"; then
#     # trigger update...
#   fi
#   exit 0
# } (closing brace of update_script, not of this function)
#
# Notes:
# - Requires `jq` (auto-installed if missing)
# - Does not modify anything, only checks version state
# - Does not support pre-releases
# ------------------------------------------------------------------------------
check_for_gh_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional
  local app_lc="${app,,}"
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"

  # DNS check
  if ! getent hosts api.github.com >/dev/null 2>&1; then
    msg_error "Network error: cannot resolve api.github.com"
    return 1
  fi

  ensure_dependencies jq

  # Fetch releases and exclude drafts/prereleases
  local releases_json
  releases_json=$(curl -fsSL --max-time 20 \
    -H 'Accept: application/vnd.github+json' \
    -H 'X-GitHub-Api-Version: 2022-11-28' \
    "https://api.github.com/repos/${source}/releases") || {
    msg_error "Unable to fetch releases for ${app}"
    return 1
  }

  mapfile -t raw_tags < <(jq -r '.[] | select(.draft==false and .prerelease==false) | .tag_name' <<<"$releases_json")
  if ((${#raw_tags[@]} == 0)); then
    msg_error "No stable releases found for ${app}"
    return 1
  fi

  local clean_tags=()
  for t in "${raw_tags[@]}"; do
    clean_tags+=("${t#v}")
  done

  local latest_raw="${raw_tags[0]}"
  local latest_clean="${clean_tags[0]}"

  # Currently installed version (stored without leading v)
  local current=""
  if [[ -f "$current_file" ]]; then
    current="$(<"$current_file")"
  else
    # Migration: search for any /opt/*_version.txt
    local legacy_files
    mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
    if ((${#legacy_files[@]} == 1)); then
      current="$(<"${legacy_files[0]}")"
      echo "${current#v}" >"$current_file"
      rm -f "${legacy_files[0]}"
    fi
  fi
  current="${current#v}"

  # Pinned version handling
  if [[ -n "$pinned_version_in" ]]; then
    local pin_clean="${pinned_version_in#v}"
    local match_raw=""
    for i in "${!clean_tags[@]}"; do
      if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
        match_raw="${raw_tags[$i]}"
        break
      fi
    done

    if [[ -z "$match_raw" ]]; then
      msg_error "Pinned version ${pinned_version_in} not found upstream"
      return 1
    fi

    if [[ "$current" != "$pin_clean" ]]; then
      CHECK_UPDATE_RELEASE="$match_raw"
      msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
      return 0
    fi

    msg_ok "No update available: ${app} is already at pinned version ${pin_clean}"
    return 1
  fi

  # No pinning → use latest
  if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
    CHECK_UPDATE_RELEASE="$latest_raw"
    msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
    return 0
  fi

  msg_ok "No update available: ${app} (${latest_clean})"
  return 1
}
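
# Example update-script flow (illustrative sketch; app and repo names are
# placeholders):
#   if check_for_gh_release "myapp" "myuser/myapp"; then
#     msg_info "Updating myapp to ${CHECK_UPDATE_RELEASE}"
#     fetch_and_deploy_gh_release "myapp" "myuser/myapp" "tarball" "${CHECK_UPDATE_RELEASE}"
#   fi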

# ------------------------------------------------------------------------------
# Creates and installs self-signed certificates.
#
# Description:
# - Create a self-signed certificate with option to override application name
#
# Variables:
# APP - Application name (default: $APPLICATION variable)
# ------------------------------------------------------------------------------
create_self_signed_cert() {
  local APP_NAME="${1:-${APPLICATION}}"
  local CERT_DIR="/etc/ssl/${APP_NAME}"
  local CERT_KEY="${CERT_DIR}/${APP_NAME}.key"
  local CERT_CRT="${CERT_DIR}/${APP_NAME}.crt"

  if [[ -f "$CERT_CRT" && -f "$CERT_KEY" ]]; then
    return 0
  fi

  # Use ensure_dependencies for cleaner handling
  ensure_dependencies openssl || {
    msg_error "Failed to install OpenSSL"
    return 1
  }

  mkdir -p "$CERT_DIR"
  $STD openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 \
    -subj "/C=US/ST=State/L=City/O=Organization/CN=${APP_NAME}" \
    -keyout "$CERT_KEY" \
    -out "$CERT_CRT" || {
    msg_error "Failed to create self-signed certificate"
    return 1
  }

  chmod 600 "$CERT_KEY"
  chmod 644 "$CERT_CRT"
}

# ------------------------------------------------------------------------------
# Downloads file with optional progress indicator using pv.
#
# Arguments:
# $1 - URL
# $2 - Destination path
# ------------------------------------------------------------------------------

function download_with_progress() {
  local url="$1"
  local output="$2"
  if [ -n "$SPINNER_PID" ] && ps -p "$SPINNER_PID" >/dev/null; then kill "$SPINNER_PID" >/dev/null; fi

  ensure_dependencies pv
  set -o pipefail

  # Read the Content-Length from the HTTP headers
  local content_length
  content_length=$(curl -fsSLI "$url" | awk '/Content-Length/ {print $2}' | tr -d '\r' || true)

  if [[ -z "$content_length" ]]; then
    if ! curl -fL# -o "$output" "$url"; then
      msg_error "Download failed"
      return 1
    fi
  else
    if ! curl -fsSL "$url" | pv -s "$content_length" >"$output"; then
      msg_error "Download failed"
      return 1
    fi
  fi
}

# ------------------------------------------------------------------------------
# Ensures /usr/local/bin is permanently in system PATH.
#
# Description:
# - Adds to /etc/profile.d if not present
# ------------------------------------------------------------------------------

function ensure_usr_local_bin_persist() {
  local PROFILE_FILE="/etc/profile.d/custom_path.sh"

  if [[ ! -f "$PROFILE_FILE" ]] && ! command -v pveversion &>/dev/null; then
    echo 'export PATH="/usr/local/bin:$PATH"' >"$PROFILE_FILE"
    chmod +x "$PROFILE_FILE"
  fi
}

# ------------------------------------------------------------------------------
# Downloads and deploys latest GitHub release (source, binary, tarball, asset).
#
# Description:
# - Fetches latest release metadata from GitHub API
# - Supports the following modes:
#   - tarball:    Source code tarball (default if omitted)
#   - source:     Alias for tarball (same behavior)
#   - binary:     .deb package install (arch-dependent)
#   - prebuild:   Prebuilt .tar.gz archive (e.g. Go binaries)
#   - singlefile: Standalone binary (no archive, direct chmod +x install)
# - Handles download, extraction/installation and version tracking in ~/.<app>
#
# Parameters:
# $1 APP            - Application name (used for install path and version file)
# $2 REPO           - GitHub repository in form user/repo
# $3 MODE           - Release type:
#                       tarball    → source tarball (.tar.gz)
#                       binary     → .deb file (auto-arch matched)
#                       prebuild   → prebuilt archive (e.g. tar.gz)
#                       singlefile → standalone binary (chmod +x)
# $4 VERSION        - Optional release tag (default: latest)
# $5 TARGET_DIR     - Optional install path (default: /opt/<app>)
# $6 ASSET_FILENAME - Required for:
#                       prebuild   → archive filename or pattern
#                       singlefile → binary filename or pattern
#
# Optional:
# - Set GITHUB_TOKEN env var to increase API rate limit (recommended for CI/CD).
#
# Examples:
# # 1. Minimal: Fetch and deploy source tarball
# fetch_and_deploy_gh_release "myapp" "myuser/myapp"
#
# # 2. Binary install via .deb asset (architecture auto-detected)
# fetch_and_deploy_gh_release "myapp" "myuser/myapp" "binary"
#
# # 3. Prebuilt archive (.tar.gz) with asset filename match
# fetch_and_deploy_gh_release "hanko" "teamhanko/hanko" "prebuild" "latest" "/opt/hanko" "hanko_Linux_x86_64.tar.gz"
#
# # 4. Single binary (chmod +x) like Argus, Promtail etc.
# fetch_and_deploy_gh_release "argus" "release-argus/Argus" "singlefile" "0.26.3" "/opt/argus" "Argus-.*linux-amd64"
# ------------------------------------------------------------------------------

function fetch_and_deploy_gh_release() {
  local app="$1"
  local repo="$2"
  local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile
  local version="${4:-latest}"
  local target="${5:-/opt/$app}"
  local asset_pattern="${6:-}"

  local app_lc=$(echo "${app,,}" | tr -d ' ')
  local version_file="$HOME/.${app_lc}"

  local api_timeout="--connect-timeout 10 --max-time 60"
  local download_timeout="--connect-timeout 15 --max-time 900"

  local current_version=""
  [[ -f "$version_file" ]] && current_version=$(<"$version_file")

  ensure_dependencies jq

  local api_url="https://api.github.com/repos/$repo/releases"
  [[ "$version" != "latest" ]] && api_url="$api_url/tags/$version" || api_url="$api_url/latest"
  local header=()
  [[ -n "${GITHUB_TOKEN:-}" ]] && header=(-H "Authorization: token $GITHUB_TOKEN")

  # DNS pre-check
  local gh_host
  gh_host=$(awk -F/ '{print $3}' <<<"$api_url")
  if ! getent hosts "$gh_host" &>/dev/null; then
    msg_error "DNS resolution failed for $gh_host – check /etc/resolv.conf or networking"
    return 1
  fi

  local max_retries=3 retry_delay=2 attempt=1 success=false resp http_code

  while ((attempt <= max_retries)); do
    resp=$(curl $api_timeout -fsSL -w "%{http_code}" -o /tmp/gh_rel.json "${header[@]}" "$api_url") && success=true && break
    sleep "$retry_delay"
    ((attempt++))
  done

  if ! $success; then
    msg_error "Failed to fetch release metadata from $api_url after $max_retries attempts"
    return 1
  fi

  http_code="${resp:(-3)}"
  [[ "$http_code" != "200" ]] && {
    msg_error "GitHub API returned HTTP $http_code"
    return 1
  }

  local json tag_name
  json=$(</tmp/gh_rel.json)
  tag_name=$(echo "$json" | jq -r '.tag_name // .name // empty')
  [[ "$tag_name" =~ ^v ]] && version="${tag_name:1}" || version="$tag_name"

  if [[ "$current_version" == "$version" ]]; then
    $STD msg_ok "$app is already up-to-date (v$version)"
    return 0
  fi

  local tmpdir
  tmpdir=$(mktemp -d) || return 1
  local filename="" url=""

  msg_info "Fetching GitHub release: $app ($version)"

  local clean_install=false
  [[ -n "${CLEAN_INSTALL:-}" && "$CLEAN_INSTALL" == "1" ]] && clean_install=true

  ### Tarball Mode ###
  if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
    url=$(echo "$json" | jq -r '.tarball_url // empty')
    [[ -z "$url" ]] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
    filename="${app_lc}-${version}.tar.gz"

    curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url" || {
      msg_error "Download failed: $url"
      rm -rf "$tmpdir"
      return 1
    }

    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }
    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

  ### Binary Mode ###
  elif [[ "$mode" == "binary" ]]; then
    local arch
    arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
    [[ "$arch" == "x86_64" ]] && arch="amd64"
    [[ "$arch" == "aarch64" ]] && arch="arm64"

    local assets url_match=""
    assets=$(echo "$json" | jq -r '.assets[].browser_download_url')

    # If explicit filename pattern is provided (param $6), match that first
    if [[ -n "$asset_pattern" ]]; then
      for u in $assets; do
        case "${u##*/}" in
          $asset_pattern)
            url_match="$u"
            break
            ;;
        esac
      done
    fi

    # If no match via explicit pattern, fall back to architecture heuristic
    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
          url_match="$u"
          break
        fi
      done
fi
|
||
|
||
# Fallback: any .deb file
|
||
if [[ -z "$url_match" ]]; then
|
||
for u in $assets; do
|
||
[[ "$u" =~ \.deb$ ]] && url_match="$u" && break
|
||
done
|
||
fi
|
||
|
||
if [[ -z "$url_match" ]]; then
|
||
msg_error "No suitable .deb asset found for $app"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
fi
|
||
|
||
filename="${url_match##*/}"
|
||
curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || {
|
||
msg_error "Download failed: $url_match"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
chmod 644 "$tmpdir/$filename"
|
||
$STD apt install -y "$tmpdir/$filename" || {
|
||
$STD dpkg -i "$tmpdir/$filename" || {
|
||
msg_error "Both apt and dpkg installation failed"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
}
|
||
|
||
### Prebuild Mode ###
|
||
elif [[ "$mode" == "prebuild" ]]; then
|
||
local pattern="${6%\"}"
|
||
pattern="${pattern#\"}"
|
||
[[ -z "$pattern" ]] && {
|
||
msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
local asset_url=""
|
||
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
|
||
filename_candidate="${u##*/}"
|
||
case "$filename_candidate" in
|
||
$pattern)
|
||
asset_url="$u"
|
||
break
|
||
;;
|
||
esac
|
||
done
|
||
|
||
[[ -z "$asset_url" ]] && {
|
||
msg_error "No asset matching '$pattern' found"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
filename="${asset_url##*/}"
|
||
curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || {
|
||
msg_error "Download failed: $asset_url"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
local unpack_tmp
|
||
unpack_tmp=$(mktemp -d)
|
||
mkdir -p "$target"
|
||
if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
|
||
rm -rf "${target:?}/"*
|
||
fi
|
||
|
||
if [[ "$filename" == *.zip ]]; then
|
||
ensure_dependencies unzip
|
||
unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
|
||
msg_error "Failed to extract ZIP archive"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
}
|
||
elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then
|
||
tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
|
||
msg_error "Failed to extract TAR archive"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
}
|
||
else
|
||
msg_error "Unsupported archive format: $filename"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
fi
|
||
|
||
local top_dirs
|
||
top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l)
|
||
local top_entries inner_dir
|
||
top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
|
||
if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
|
||
# Strip leading folder
|
||
inner_dir="$top_entries"
|
||
shopt -s dotglob nullglob
|
||
if compgen -G "$inner_dir/*" >/dev/null; then
|
||
cp -r "$inner_dir"/* "$target/" || {
|
||
msg_error "Failed to copy contents from $inner_dir to $target"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
}
|
||
else
|
||
msg_error "Inner directory is empty: $inner_dir"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
fi
|
||
shopt -u dotglob nullglob
|
||
else
|
||
# Copy all contents
|
||
shopt -s dotglob nullglob
|
||
if compgen -G "$unpack_tmp/*" >/dev/null; then
|
||
cp -r "$unpack_tmp"/* "$target/" || {
|
||
msg_error "Failed to copy contents to $target"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
}
|
||
else
|
||
msg_error "Unpacked archive is empty"
|
||
rm -rf "$tmpdir" "$unpack_tmp"
|
||
return 1
|
||
fi
|
||
shopt -u dotglob nullglob
|
||
fi
|
||
|
||
### Singlefile Mode ###
|
||
elif [[ "$mode" == "singlefile" ]]; then
|
||
local pattern="${6%\"}"
|
||
pattern="${pattern#\"}"
|
||
[[ -z "$pattern" ]] && {
|
||
msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
local asset_url=""
|
||
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
|
||
filename_candidate="${u##*/}"
|
||
case "$filename_candidate" in
|
||
$pattern)
|
||
asset_url="$u"
|
||
break
|
||
;;
|
||
esac
|
||
done
|
||
|
||
[[ -z "$asset_url" ]] && {
|
||
msg_error "No asset matching '$pattern' found"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
filename="${asset_url##*/}"
|
||
mkdir -p "$target"
|
||
|
||
local use_filename="${USE_ORIGINAL_FILENAME:-false}"
|
||
local target_file="$app"
|
||
[[ "$use_filename" == "true" ]] && target_file="$filename"
|
||
|
||
curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
|
||
msg_error "Download failed: $asset_url"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
}
|
||
|
||
if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
|
||
chmod +x "$target/$target_file"
|
||
fi
|
||
|
||
else
|
||
msg_error "Unknown mode: $mode"
|
||
rm -rf "$tmpdir"
|
||
return 1
|
||
fi
|
||
|
||
echo "$version" >"$version_file"
|
||
msg_ok "Deployed: $app ($version)"
|
||
rm -rf "$tmpdir"
|
||
}
|
||
|
||
# ------------------------------------------------------------------------------
|
||
# Loads LOCAL_IP from persistent store or detects if missing.
|
||
#
|
||
# Description:
|
||
# - Loads from /run/local-ip.env or performs runtime lookup
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function import_local_ip() {
|
||
local IP_FILE="/run/local-ip.env"
|
||
if [[ -f "$IP_FILE" ]]; then
|
||
# shellcheck disable=SC1090
|
||
source "$IP_FILE"
|
||
fi
|
||
|
||
if [[ -z "${LOCAL_IP:-}" ]]; then
|
||
get_current_ip() {
|
||
local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
|
||
local ip
|
||
|
||
for target in "${targets[@]}"; do
|
||
if [[ "$target" == "default" ]]; then
|
||
ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
|
||
else
|
||
ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
|
||
fi
|
||
if [[ -n "$ip" ]]; then
|
||
echo "$ip"
|
||
return 0
|
||
fi
|
||
done
|
||
|
||
return 1
|
||
}
|
||
|
||
LOCAL_IP="$(get_current_ip || true)"
|
||
if [[ -z "$LOCAL_IP" ]]; then
|
||
msg_error "Could not determine LOCAL_IP"
|
||
return 1
|
||
fi
|
||
fi
|
||
|
||
export LOCAL_IP
|
||
}
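
# Example (illustrative; the port is arbitrary):
#   import_local_ip
#   echo "Web UI: http://${LOCAL_IP}:8080"
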
# ------------------------------------------------------------------------------
|
||
# Installs Adminer (Debian/Ubuntu via APT, Alpine via direct download).
|
||
#
|
||
# Description:
|
||
# - Adds Adminer to Apache or web root
|
||
# - Supports Alpine and Debian-based systems
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_adminer() {
|
||
if grep -qi alpine /etc/os-release; then
|
||
msg_info "Setup Adminer (Alpine)"
|
||
mkdir -p /var/www/localhost/htdocs/adminer
|
||
curl -fsSL https://github.com/vrana/adminer/releases/latest/download/adminer.php \
|
||
-o /var/www/localhost/htdocs/adminer/index.php || {
|
||
msg_error "Failed to download Adminer"
|
||
return 1
|
||
}
|
||
cache_installed_version "adminer" "latest-alpine"
|
||
msg_ok "Setup Adminer (Alpine)"
|
||
else
|
||
msg_info "Setup Adminer (Debian/Ubuntu)"
|
||
ensure_dependencies adminer
|
||
$STD a2enconf adminer || {
|
||
msg_error "Failed to enable Adminer Apache config"
|
||
return 1
|
||
}
|
||
$STD systemctl reload apache2 || {
|
||
msg_error "Failed to reload Apache"
|
||
return 1
|
||
}
|
||
local VERSION
|
||
VERSION=$(dpkg -s adminer 2>/dev/null | grep '^Version:' | awk '{print $2}')
|
||
cache_installed_version "adminer" "${VERSION:-unknown}"
|
||
msg_ok "Setup Adminer (Debian/Ubuntu)"
|
||
fi
|
||
}
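
# Example (illustrative; the Debian/Ubuntu path assumes Apache is already installed):
#   setup_adminer
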
# ------------------------------------------------------------------------------
|
||
# Installs or updates Composer globally (robust, idempotent).
|
||
#
|
||
# - Installs to /usr/local/bin/composer
|
||
# - Removes old binaries/symlinks in /usr/bin, /bin, /root/.composer, etc.
|
||
# - Ensures /usr/local/bin is in PATH (permanent)
|
||
# - Auto-updates to latest version
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_composer() {
|
||
local COMPOSER_BIN="/usr/local/bin/composer"
|
||
export COMPOSER_ALLOW_SUPERUSER=1
|
||
|
||
# Get currently installed version
|
||
local INSTALLED_VERSION=""
|
||
if [[ -x "$COMPOSER_BIN" ]]; then
|
||
INSTALLED_VERSION=$("$COMPOSER_BIN" --version 2>/dev/null | awk '{print $3}')
|
||
fi
|
||
|
||
# Scenario 1: Already installed - just self-update
|
||
if [[ -n "$INSTALLED_VERSION" ]]; then
|
||
msg_info "Update Composer $INSTALLED_VERSION"
|
||
$STD "$COMPOSER_BIN" self-update --no-interaction || true
|
||
local UPDATED_VERSION
|
||
UPDATED_VERSION=$("$COMPOSER_BIN" --version 2>/dev/null | awk '{print $3}')
|
||
cache_installed_version "composer" "$UPDATED_VERSION"
|
||
msg_ok "Update Composer $UPDATED_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Fresh install
|
||
msg_info "Setup Composer"
|
||
|
||
for old in /usr/bin/composer /bin/composer /root/.composer/vendor/bin/composer; do
|
||
[[ -e "$old" && "$old" != "$COMPOSER_BIN" ]] && rm -f "$old"
|
||
done
|
||
|
||
ensure_usr_local_bin_persist
|
||
export PATH="/usr/local/bin:$PATH"
|
||
|
||
curl -fsSL https://getcomposer.org/installer -o /tmp/composer-setup.php || {
|
||
msg_error "Failed to download Composer installer"
|
||
return 1
|
||
}
|
||
|
||
$STD php /tmp/composer-setup.php --install-dir=/usr/local/bin --filename=composer || {
|
||
msg_error "Failed to install Composer"
|
||
rm -f /tmp/composer-setup.php
|
||
return 1
|
||
}
|
||
rm -f /tmp/composer-setup.php
|
||
|
||
if [[ ! -x "$COMPOSER_BIN" ]]; then
|
||
msg_error "Composer installation failed"
|
||
return 1
|
||
fi
|
||
|
||
chmod +x "$COMPOSER_BIN"
|
||
$STD "$COMPOSER_BIN" self-update --no-interaction || true
|
||
|
||
local FINAL_VERSION
|
||
FINAL_VERSION=$("$COMPOSER_BIN" --version 2>/dev/null | awk '{print $3}')
|
||
cache_installed_version "composer" "$FINAL_VERSION"
|
||
msg_ok "Setup Composer"
|
||
}
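
# Example (illustrative; assumes PHP is already present):
#   setup_composer
#   composer --version
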
# ------------------------------------------------------------------------------
|
||
# Installs FFmpeg from source or prebuilt binary (Debian/Ubuntu only).
|
||
#
|
||
# Description:
|
||
# - Downloads and builds FFmpeg from GitHub (https://github.com/FFmpeg/FFmpeg)
|
||
# - Supports specific version override via FFMPEG_VERSION (e.g. n7.1.1)
|
||
# - Supports build profile via FFMPEG_TYPE:
|
||
# - minimal : x264, vpx, mp3 only
|
||
# - medium : adds subtitles, fonts, opus, vorbis
|
||
# - full : adds dav1d, svt-av1, zlib, numa
|
||
# - binary : downloads static build (johnvansickle.com)
|
||
# - Defaults to latest stable version and full feature set
|
||
#
|
||
# Notes:
|
||
# - Requires: curl, jq, build-essential, and matching codec libraries
|
||
# - Result is installed to /usr/local/bin/ffmpeg
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_ffmpeg() {
|
||
local TMP_DIR=$(mktemp -d)
|
||
local GITHUB_REPO="FFmpeg/FFmpeg"
|
||
local VERSION="${FFMPEG_VERSION:-latest}"
|
||
local TYPE="${FFMPEG_TYPE:-full}"
|
||
local BIN_PATH="/usr/local/bin/ffmpeg"
|
||
|
||
# Get currently installed version
|
||
local INSTALLED_VERSION=""
|
||
if command -v ffmpeg &>/dev/null; then
|
||
INSTALLED_VERSION=$(ffmpeg -version 2>/dev/null | head -n1 | awk '{print $3}')
|
||
fi
|
||
|
||
msg_info "Setup FFmpeg ${VERSION} ($TYPE)"
|
||
|
||
# Binary fallback mode
|
||
if [[ "$TYPE" == "binary" ]]; then
|
||
curl -fsSL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o "$TMP_DIR/ffmpeg.tar.xz" || {
|
||
msg_error "Failed to download FFmpeg binary"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
tar -xf "$TMP_DIR/ffmpeg.tar.xz" -C "$TMP_DIR" || {
|
||
msg_error "Failed to extract FFmpeg binary"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
local EXTRACTED_DIR
|
||
EXTRACTED_DIR=$(find "$TMP_DIR" -maxdepth 1 -type d -name "ffmpeg-*")
|
||
cp "$EXTRACTED_DIR/ffmpeg" "$BIN_PATH"
|
||
cp "$EXTRACTED_DIR/ffprobe" /usr/local/bin/ffprobe
|
||
chmod +x "$BIN_PATH" /usr/local/bin/ffprobe
|
||
local FINAL_VERSION=$($BIN_PATH -version 2>/dev/null | head -n1 | awk '{print $3}')
|
||
rm -rf "$TMP_DIR"
|
||
cache_installed_version "ffmpeg" "$FINAL_VERSION"
|
||
ensure_usr_local_bin_persist
|
||
[[ -n "$INSTALLED_VERSION" ]] && msg_ok "Upgrade FFmpeg $INSTALLED_VERSION → $FINAL_VERSION" || msg_ok "Setup FFmpeg $FINAL_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
ensure_dependencies jq
|
||
|
||
# Auto-detect latest stable version if none specified
|
||
if [[ "$VERSION" == "latest" || -z "$VERSION" ]]; then
|
||
local ffmpeg_tags
|
||
ffmpeg_tags=$(curl -fsSL --max-time 15 "https://api.github.com/repos/${GITHUB_REPO}/tags" 2>/dev/null || echo "")
|
||
|
||
if [[ -z "$ffmpeg_tags" ]]; then
|
||
msg_warn "Could not fetch FFmpeg versions from GitHub, trying binary fallback"
|
||
VERSION="" # Will trigger binary fallback below
|
||
else
|
||
VERSION=$(echo "$ffmpeg_tags" | jq -r '.[].name' 2>/dev/null |
|
||
grep -E '^n[0-9]+\.[0-9]+\.[0-9]+$' |
|
||
sort -V | tail -n1 || echo "")
|
||
fi
|
||
fi
|
||
|
||
if [[ -z "$VERSION" ]]; then
|
||
msg_info "Could not determine FFmpeg source version, using pre-built binary"
|
||
VERSION="" # Will use binary fallback
|
||
fi
|
||
|
||
# Dependency selection
|
||
local DEPS=(build-essential yasm nasm pkg-config)
|
||
case "$TYPE" in
|
||
minimal)
|
||
DEPS+=(libx264-dev libvpx-dev libmp3lame-dev)
|
||
;;
|
||
medium)
|
||
DEPS+=(libx264-dev libvpx-dev libmp3lame-dev libfreetype6-dev libass-dev libopus-dev libvorbis-dev)
|
||
;;
|
||
full)
|
||
DEPS+=(
|
||
libx264-dev libx265-dev libvpx-dev libmp3lame-dev
|
||
libfreetype6-dev libass-dev libopus-dev libvorbis-dev
|
||
libdav1d-dev libsvtav1-dev zlib1g-dev libnuma-dev
|
||
libva-dev libdrm-dev
|
||
)
|
||
;;
|
||
*)
|
||
msg_error "Invalid FFMPEG_TYPE: $TYPE"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
;;
|
||
esac
|
||
|
||
ensure_dependencies "${DEPS[@]}"
|
||
|
||
# Try to download source if VERSION is set
|
||
if [[ -n "$VERSION" ]]; then
|
||
curl -fsSL "https://github.com/${GITHUB_REPO}/archive/refs/tags/${VERSION}.tar.gz" -o "$TMP_DIR/ffmpeg.tar.gz" || {
|
||
msg_warn "Failed to download FFmpeg source ${VERSION}, falling back to pre-built binary"
|
||
VERSION=""
|
||
}
|
||
fi
|
||
|
||
# If no source download (either VERSION empty or download failed), use binary
|
||
if [[ -z "$VERSION" ]]; then
|
||
msg_info "Setup FFmpeg from pre-built binary"
|
||
curl -fsSL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o "$TMP_DIR/ffmpeg.tar.xz" || {
|
||
msg_error "Failed to download FFmpeg pre-built binary"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
tar -xJf "$TMP_DIR/ffmpeg.tar.xz" -C "$TMP_DIR" || {
|
||
msg_error "Failed to extract FFmpeg binary archive"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
if ! cp "$TMP_DIR/ffmpeg-"*/ffmpeg /usr/local/bin/ffmpeg 2>/dev/null; then
|
||
msg_error "Failed to install FFmpeg binary"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
cache_installed_version "ffmpeg" "static"
|
||
rm -rf "$TMP_DIR"
|
||
msg_ok "Setup FFmpeg from pre-built binary"
|
||
return 0
|
||
fi
|
||
|
||
tar -xzf "$TMP_DIR/ffmpeg.tar.gz" -C "$TMP_DIR" || {
|
||
msg_error "Failed to extract FFmpeg source"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
cd "$TMP_DIR/FFmpeg-"* || {
|
||
msg_error "Source extraction failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
local args=(
|
||
--enable-gpl
|
||
--enable-shared
|
||
--enable-nonfree
|
||
--disable-static
|
||
--enable-libx264
|
||
--enable-libvpx
|
||
--enable-libmp3lame
|
||
)
|
||
|
||
if [[ "$TYPE" != "minimal" ]]; then
|
||
args+=(--enable-libfreetype --enable-libass --enable-libopus --enable-libvorbis)
|
||
fi
|
||
|
||
if [[ "$TYPE" == "full" ]]; then
|
||
args+=(--enable-libx265 --enable-libdav1d --enable-zlib)
|
||
args+=(--enable-vaapi --enable-libdrm)
|
||
fi
|
||
|
||
if [[ ${#args[@]} -eq 0 ]]; then
|
||
msg_error "FFmpeg configure args array is empty"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
$STD ./configure "${args[@]}" || {
|
||
msg_error "FFmpeg configure failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make -j"$(nproc)" || {
|
||
msg_error "FFmpeg compilation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make install || {
|
||
msg_error "FFmpeg installation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
echo "/usr/local/lib" >/etc/ld.so.conf.d/ffmpeg.conf
|
||
$STD ldconfig
|
||
|
||
ldconfig -p 2>/dev/null | grep libavdevice >/dev/null || {
|
||
msg_error "libavdevice not registered with dynamic linker"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
if ! command -v ffmpeg &>/dev/null; then
|
||
msg_error "FFmpeg installation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
local FINAL_VERSION
|
||
FINAL_VERSION=$(ffmpeg -version 2>/dev/null | head -n1 | awk '{print $3}')
|
||
rm -rf "$TMP_DIR"
|
||
cache_installed_version "ffmpeg" "$FINAL_VERSION"
|
||
ensure_usr_local_bin_persist
|
||
[[ -n "$INSTALLED_VERSION" ]] && msg_ok "Upgrade FFmpeg $INSTALLED_VERSION → $FINAL_VERSION" || msg_ok "Setup FFmpeg $FINAL_VERSION"
|
||
}
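
# Examples (illustrative; version and profile values are assumptions):
#   setup_ffmpeg                                                 # latest stable, full profile
#   FFMPEG_VERSION="n7.1.1" FFMPEG_TYPE="medium" setup_ffmpeg    # pinned source build
#   FFMPEG_TYPE="binary" setup_ffmpeg                            # static build, no compile
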
# ------------------------------------------------------------------------------
|
||
# Installs Go (Golang) from official tarball.
|
||
#
|
||
# Description:
|
||
# - Determines system architecture
|
||
# - Downloads latest version if GO_VERSION not set
|
||
#
|
||
# Variables:
|
||
# GO_VERSION - Version to install (e.g. 1.22.2 or latest)
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_go() {
|
||
local ARCH
|
||
case "$(uname -m)" in
|
||
x86_64) ARCH="amd64" ;;
|
||
aarch64) ARCH="arm64" ;;
|
||
*)
|
||
msg_error "Unsupported architecture: $(uname -m)"
|
||
return 1
|
||
;;
|
||
esac
|
||
|
||
# Resolve "latest" version
|
||
local GO_VERSION="${GO_VERSION:-latest}"
|
||
if [[ "$GO_VERSION" == "latest" ]]; then
|
||
GO_VERSION=$(curl -fsSL https://go.dev/VERSION?m=text 2>/dev/null | head -n1 | sed 's/^go//') || {
|
||
msg_error "Could not determine latest Go version"
|
||
return 1
|
||
}
|
||
[[ -z "$GO_VERSION" ]] && {
|
||
msg_error "Latest Go version is empty"
|
||
return 1
|
||
}
|
||
fi
|
||
|
||
local GO_BIN="/usr/local/bin/go"
|
||
local GO_INSTALL_DIR="/usr/local/go"
|
||
|
||
# Get currently installed version
|
||
local CURRENT_VERSION=""
|
||
if [[ -x "$GO_BIN" ]]; then
|
||
CURRENT_VERSION=$("$GO_BIN" version 2>/dev/null | awk '{print $3}' | sed 's/go//')
|
||
fi
|
||
|
||
# Scenario 1: Already at target version
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$GO_VERSION" ]]; then
|
||
cache_installed_version "go" "$GO_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Different version or not installed
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" != "$GO_VERSION" ]]; then
|
||
msg_info "Upgrade Go from $CURRENT_VERSION to $GO_VERSION"
|
||
remove_old_tool_version "go"
|
||
else
|
||
msg_info "Setup Go $GO_VERSION"
|
||
fi
|
||
|
||
local TARBALL="go${GO_VERSION}.linux-${ARCH}.tar.gz"
|
||
local URL="https://go.dev/dl/${TARBALL}"
|
||
local TMP_TAR=$(mktemp)
|
||
|
||
curl -fsSL "$URL" -o "$TMP_TAR" || {
|
||
msg_error "Failed to download Go $GO_VERSION"
|
||
rm -f "$TMP_TAR"
|
||
return 1
|
||
}
|
||
|
||
$STD tar -C /usr/local -xzf "$TMP_TAR" || {
|
||
msg_error "Failed to extract Go tarball"
|
||
rm -f "$TMP_TAR"
|
||
return 1
|
||
}
|
||
|
||
ln -sf /usr/local/go/bin/go /usr/local/bin/go
|
||
ln -sf /usr/local/go/bin/gofmt /usr/local/bin/gofmt
|
||
rm -f "$TMP_TAR"
|
||
|
||
cache_installed_version "go" "$GO_VERSION"
|
||
ensure_usr_local_bin_persist
|
||
msg_ok "Setup Go $GO_VERSION"
|
||
}
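
# Examples (illustrative; the pinned version is an assumption):
#   setup_go                       # resolve and install the latest release
#   GO_VERSION="1.22.2" setup_go   # pin a specific version
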
# ------------------------------------------------------------------------------
|
||
# Installs or updates Ghostscript (gs) from source.
|
||
#
|
||
# Description:
|
||
# - Fetches latest release
|
||
# - Builds and installs system-wide
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_gs() {
|
||
local TMP_DIR=$(mktemp -d)
|
||
local CURRENT_VERSION=$(gs --version 2>/dev/null || echo "0")
|
||
|
||
ensure_dependencies jq
|
||
|
||
local RELEASE_JSON
|
||
RELEASE_JSON=$(curl -fsSL --max-time 15 https://api.github.com/repos/ArtifexSoftware/ghostpdl-downloads/releases/latest 2>/dev/null || echo "")
|
||
|
||
if [[ -z "$RELEASE_JSON" ]]; then
|
||
msg_warn "Cannot fetch latest Ghostscript version from GitHub API"
|
||
# Try to get from current version
|
||
if command -v gs &>/dev/null; then
|
||
gs --version | head -n1
|
||
cache_installed_version "ghostscript" "$CURRENT_VERSION"
|
||
return 0
|
||
fi
|
||
msg_error "Cannot determine Ghostscript version and no existing installation found"
|
||
return 1
|
||
fi
|
||
local LATEST_VERSION
|
||
LATEST_VERSION=$(echo "$RELEASE_JSON" | jq -r '.tag_name' | sed 's/^gs//')
|
||
local LATEST_VERSION_DOTTED
|
||
LATEST_VERSION_DOTTED=$(echo "$RELEASE_JSON" | jq -r '.name' | grep -o '[0-9]\+\.[0-9]\+\.[0-9]\+')
|
||
|
||
if [[ -z "$LATEST_VERSION" || -z "$LATEST_VERSION_DOTTED" ]]; then
|
||
msg_warn "Could not determine latest Ghostscript version from GitHub - checking system"
|
||
# Fallback: try to use system version or return error
|
||
if [[ "$CURRENT_VERSION" == "0" ]]; then
|
||
msg_error "Ghostscript not installed and cannot determine latest version"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
rm -rf "$TMP_DIR"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 1: Already at latest version
|
||
if [[ -n "$LATEST_VERSION_DOTTED" ]] && dpkg --compare-versions "$CURRENT_VERSION" ge "$LATEST_VERSION_DOTTED" 2>/dev/null; then
|
||
cache_installed_version "ghostscript" "$LATEST_VERSION_DOTTED"
|
||
rm -rf "$TMP_DIR"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: New install or upgrade
|
||
if [[ "$CURRENT_VERSION" != "0" && "$CURRENT_VERSION" != "$LATEST_VERSION_DOTTED" ]]; then
|
||
msg_info "Upgrade Ghostscript from $CURRENT_VERSION to $LATEST_VERSION_DOTTED"
|
||
else
|
||
msg_info "Setup Ghostscript $LATEST_VERSION_DOTTED"
|
||
fi
|
||
|
||
curl -fsSL "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs${LATEST_VERSION}/ghostscript-${LATEST_VERSION_DOTTED}.tar.gz" -o "$TMP_DIR/ghostscript.tar.gz" || {
|
||
msg_error "Failed to download Ghostscript"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
if ! tar -xzf "$TMP_DIR/ghostscript.tar.gz" -C "$TMP_DIR"; then
|
||
msg_error "Failed to extract Ghostscript archive"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
# Verify directory exists before cd
|
||
if [[ ! -d "$TMP_DIR/ghostscript-${LATEST_VERSION_DOTTED}" ]]; then
|
||
msg_error "Ghostscript source directory not found: $TMP_DIR/ghostscript-${LATEST_VERSION_DOTTED}"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
cd "$TMP_DIR/ghostscript-${LATEST_VERSION_DOTTED}" || {
|
||
msg_error "Failed to enter Ghostscript source directory"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
ensure_dependencies build-essential libpng-dev zlib1g-dev
|
||
|
||
$STD ./configure || {
|
||
msg_error "Ghostscript configure failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make -j"$(nproc)" || {
|
||
msg_error "Ghostscript compilation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make install || {
|
||
msg_error "Ghostscript installation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
hash -r
|
||
if [[ ! -x "$(command -v gs)" ]]; then
|
||
if [[ -x /usr/local/bin/gs ]]; then
|
||
ln -sf /usr/local/bin/gs /usr/bin/gs
|
||
fi
|
||
fi
|
||
|
||
rm -rf "$TMP_DIR"
|
||
cache_installed_version "ghostscript" "$LATEST_VERSION_DOTTED"
|
||
ensure_usr_local_bin_persist
|
||
msg_ok "Setup Ghostscript $LATEST_VERSION_DOTTED"
|
||
}
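
# Example (illustrative): build or update Ghostscript to the latest upstream release.
#   setup_gs
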
# ------------------------------------------------------------------------------
# Sets up Hardware Acceleration on Debian or Ubuntu.
#
# Description:
# - Determines the CPU/GPU/APU vendor
# - Installs the correct libraries and packages
# - Sets up Hardware Acceleration
#
# Notes:
# - Some components are fetched from Intel repositories because they are not available in the Debian repositories.
# ------------------------------------------------------------------------------
function setup_hwaccel() {
|
||
msg_info "Setup Hardware Acceleration"
|
||
|
||
if ! command -v lspci &>/dev/null; then
|
||
$STD apt -y update || {
|
||
msg_error "Failed to update package list"
|
||
return 1
|
||
}
|
||
$STD apt -y install pciutils || {
|
||
msg_error "Failed to install pciutils"
|
||
return 1
|
||
}
|
||
fi
|
||
|
||
# Detect GPU vendor (Intel, AMD, NVIDIA)
|
||
local gpu_vendor
|
||
gpu_vendor=$(lspci 2>/dev/null | grep -Ei 'vga|3d|display' | grep -Eo 'Intel|AMD|NVIDIA' | head -n1 || echo "")
|
||
|
||
# Detect CPU vendor (relevant for AMD APUs)
|
||
local cpu_vendor
|
||
cpu_vendor=$(lscpu 2>/dev/null | grep -i 'Vendor ID' | awk '{print $3}' || echo "")
|
||
|
||
if [[ -z "$gpu_vendor" && -z "$cpu_vendor" ]]; then
|
||
msg_error "No GPU or CPU vendor detected (missing lspci/lscpu output)"
|
||
return 1
|
||
fi
|
||
|
||
# Detect OS with fallbacks
|
||
local os_id os_codename
|
||
os_id=$(grep -oP '(?<=^ID=).+' /etc/os-release 2>/dev/null | tr -d '"' || grep '^ID=' /etc/os-release 2>/dev/null | cut -d'=' -f2 | tr -d '"' || echo "debian")
|
||
os_codename=$(grep -oP '(?<=^VERSION_CODENAME=).+' /etc/os-release 2>/dev/null | tr -d '"' || grep '^VERSION_CODENAME=' /etc/os-release 2>/dev/null | cut -d'=' -f2 | tr -d '"' || echo "unknown")
|
||
|
||
# Validate os_id
|
||
if [[ -z "$os_id" ]]; then
|
||
os_id="debian"
|
||
fi
|
||
|
||
# Determine if we are on a VM or LXC
|
||
local in_ct="${CTTYPE:-0}"
|
||
|
||
case "$gpu_vendor" in
|
||
Intel)
|
||
if [[ "$os_id" == "ubuntu" ]]; then
|
||
$STD apt -y install intel-opencl-icd || {
|
||
msg_error "Failed to install intel-opencl-icd"
|
||
return 1
|
||
}
|
||
else
|
||
# For Debian: fetch Intel GPU drivers from GitHub
|
||
fetch_and_deploy_gh_release "" "intel/intel-graphics-compiler" "binary" "latest" "" "intel-igc-core-2_*_amd64.deb" || {
|
||
msg_warn "Failed to deploy Intel IGC core 2"
|
||
}
|
||
fetch_and_deploy_gh_release "" "intel/intel-graphics-compiler" "binary" "latest" "" "intel-igc-opencl-2_*_amd64.deb" || {
|
||
msg_warn "Failed to deploy Intel IGC OpenCL 2"
|
||
}
|
||
fetch_and_deploy_gh_release "" "intel/compute-runtime" "binary" "latest" "" "libigdgmm12_*_amd64.deb" || {
|
||
msg_warn "Failed to deploy Intel GDGMM12"
|
||
}
|
||
fetch_and_deploy_gh_release "" "intel/compute-runtime" "binary" "latest" "" "intel-opencl-icd_*_amd64.deb" || {
|
||
msg_warn "Failed to deploy Intel OpenCL ICD"
|
||
}
|
||
fi
|
||
|
||
$STD apt -y install va-driver-all ocl-icd-libopencl1 vainfo intel-gpu-tools || {
|
||
msg_error "Failed to install Intel GPU dependencies"
|
||
return 1
|
||
}
|
||
;;
|
||
AMD)
|
||
$STD apt -y install mesa-va-drivers mesa-vdpau-drivers mesa-opencl-icd vainfo clinfo || {
|
||
msg_error "Failed to install AMD GPU dependencies"
|
||
return 1
|
||
}
|
||
|
||
# For AMD CPUs without discrete GPU (APUs)
|
||
if [[ "$cpu_vendor" == "AuthenticAMD" && -n "$gpu_vendor" ]]; then
|
||
$STD apt -y install libdrm-amdgpu1 firmware-amd-graphics || true
|
||
fi
|
||
;;
|
||
NVIDIA)
|
||
# NVIDIA needs manual driver setup - skip for now
|
||
msg_info "NVIDIA GPU detected - manual driver setup required"
|
||
;;
|
||
*)
|
||
# If no discrete GPU, but AMD CPU (e.g., Ryzen APU)
|
||
if [[ "$cpu_vendor" == "AuthenticAMD" ]]; then
|
||
$STD apt -y install mesa-opencl-icd ocl-icd-libopencl1 clinfo || {
|
||
msg_error "Failed to install Mesa OpenCL stack"
|
||
return 1
|
||
}
|
||
else
|
||
msg_warn "No supported GPU vendor detected - skipping GPU acceleration"
|
||
fi
|
||
;;
|
||
esac
|
||
|
||
if [[ "$in_ct" == "0" ]]; then
|
||
chgrp video /dev/dri 2>/dev/null || true
|
||
chmod 755 /dev/dri 2>/dev/null || true
|
||
chmod 660 /dev/dri/* 2>/dev/null || true
|
||
$STD adduser "$(id -u -n)" video
|
||
$STD adduser "$(id -u -n)" render
|
||
fi
|
||
|
||
cache_installed_version "hwaccel" "1.0"
|
||
msg_ok "Setup Hardware Acceleration"
|
||
}
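
# Example (illustrative): detect the GPU/CPU vendor and install the matching
# VA-API/OpenCL packages.
#   setup_hwaccel
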
# ------------------------------------------------------------------------------
|
||
# Installs ImageMagick 7 from source (Debian/Ubuntu only).
|
||
#
|
||
# Description:
|
||
# - Downloads the latest ImageMagick source tarball
|
||
# - Builds and installs ImageMagick to /usr/local
|
||
# - Configures dynamic linker (ldconfig)
|
||
#
|
||
# Notes:
|
||
# - Requires: build-essential, libtool, libjpeg-dev, libpng-dev, etc.
|
||
# ------------------------------------------------------------------------------
|
||
function setup_imagemagick() {
|
||
local TMP_DIR=$(mktemp -d)
|
||
local BINARY_PATH="/usr/local/bin/magick"
|
||
|
||
# Get currently installed version
|
||
local INSTALLED_VERSION=""
|
||
if command -v magick &>/dev/null; then
|
||
INSTALLED_VERSION=$(magick -version | awk '/^Version/ {print $3}')
|
||
fi
|
||
|
||
msg_info "Setup ImageMagick"
|
||
|
||
ensure_dependencies \
|
||
build-essential \
|
||
libtool \
|
||
libjpeg-dev \
|
||
libpng-dev \
|
||
libtiff-dev \
|
||
libwebp-dev \
|
||
libheif-dev \
|
||
libde265-dev \
|
||
libopenjp2-7-dev \
|
||
libxml2-dev \
|
||
liblcms2-dev \
|
||
libfreetype6-dev \
|
||
libraw-dev \
|
||
libfftw3-dev \
|
||
liblqr-1-0-dev \
|
||
libgsl-dev \
|
||
pkg-config \
|
||
ghostscript
|
||
|
||
curl -fsSL https://imagemagick.org/archive/ImageMagick.tar.gz -o "$TMP_DIR/ImageMagick.tar.gz" || {
|
||
msg_error "Failed to download ImageMagick"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
tar -xzf "$TMP_DIR/ImageMagick.tar.gz" -C "$TMP_DIR" || {
|
||
msg_error "Failed to extract ImageMagick"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
cd "$TMP_DIR"/ImageMagick-* || {
|
||
msg_error "Source extraction failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
|
||
$STD ./configure --disable-static || {
|
||
msg_error "ImageMagick configure failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make -j"$(nproc)" || {
|
||
msg_error "ImageMagick compilation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD make install || {
|
||
msg_error "ImageMagick installation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
}
|
||
$STD ldconfig /usr/local/lib
|
||
|
||
if [[ ! -x "$BINARY_PATH" ]]; then
|
||
msg_error "ImageMagick installation failed"
|
||
rm -rf "$TMP_DIR"
|
||
return 1
|
||
fi
|
||
|
||
local FINAL_VERSION
|
||
FINAL_VERSION=$("$BINARY_PATH" -version | awk '/^Version/ {print $3}')
|
||
rm -rf "$TMP_DIR"
|
||
cache_installed_version "imagemagick" "$FINAL_VERSION"
|
||
ensure_usr_local_bin_persist
|
||
|
||
if [[ -n "$INSTALLED_VERSION" ]]; then
|
||
msg_ok "Upgrade ImageMagick $INSTALLED_VERSION → $FINAL_VERSION"
|
||
else
|
||
msg_ok "Setup ImageMagick $FINAL_VERSION"
|
||
fi
|
||
}
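
# Example (illustrative): build ImageMagick 7 from source, then verify the binary.
#   setup_imagemagick
#   magick -version
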
# ------------------------------------------------------------------------------
# Installs Temurin JDK via Adoptium APT repository.
#
# Description:
# - Removes previous JDK if version mismatch
# - Installs or upgrades to specified JAVA_VERSION
#
# Variables:
# JAVA_VERSION - Temurin JDK version to install (e.g. 17, 21)
# ------------------------------------------------------------------------------

function setup_java() {
|
||
local JAVA_VERSION="${JAVA_VERSION:-21}"
|
||
local DISTRO_ID DISTRO_CODENAME
|
||
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
|
||
DISTRO_CODENAME=$(awk -F= '/VERSION_CODENAME/ { print $2 }' /etc/os-release)
|
||
local DESIRED_PACKAGE="temurin-${JAVA_VERSION}-jdk"
|
||
|
||
# Prepare repository (cleanup + validation)
|
||
prepare_repository_setup "adoptium" || {
|
||
msg_error "Failed to prepare Adoptium repository"
|
||
return 1
|
||
}
|
||
|
||
# Add repo if needed
|
||
if [[ ! -f /etc/apt/sources.list.d/adoptium.sources ]]; then
|
||
local SUITE
|
||
SUITE=$(get_fallback_suite "$DISTRO_ID" "$DISTRO_CODENAME" "https://packages.adoptium.net/artifactory/deb")
|
||
setup_deb822_repo \
|
||
"adoptium" \
|
||
"https://packages.adoptium.net/artifactory/api/gpg/key/public" \
|
||
"https://packages.adoptium.net/artifactory/deb" \
|
||
"$SUITE" \
|
||
"main" \
|
||
"amd64 arm64"
|
||
fi
|
||
|
||
# Get currently installed version
|
||
local INSTALLED_VERSION=""
|
||
if dpkg -l | grep -q "temurin-.*-jdk" 2>/dev/null; then
|
||
INSTALLED_VERSION=$(dpkg -l 2>/dev/null | awk '/temurin-.*-jdk/{print $2}' | grep -oP 'temurin-\K[0-9]+' | head -n1 || echo "")
|
||
fi
|
||
|
||
# Validate INSTALLED_VERSION is not empty if matched
|
||
local JDK_COUNT=$(dpkg -l 2>/dev/null | grep -c "temurin-.*-jdk" || echo "0")
|
||
if [[ -z "$INSTALLED_VERSION" && "$JDK_COUNT" -gt 0 ]]; then
|
||
msg_warn "Found Temurin JDK but cannot determine version"
|
||
INSTALLED_VERSION="0"
|
||
fi
|
||
|
||
# Scenario 1: Already at correct version
|
||
if [[ "$INSTALLED_VERSION" == "$JAVA_VERSION" ]]; then
|
||
msg_info "Update Temurin JDK $JAVA_VERSION"
|
||
ensure_apt_working || return 1
|
||
upgrade_packages_with_retry "$DESIRED_PACKAGE" || {
|
||
msg_error "Failed to update Temurin JDK"
|
||
return 1
|
||
}
|
||
cache_installed_version "temurin-jdk" "$JAVA_VERSION"
|
||
msg_ok "Update Temurin JDK $JAVA_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Different version - remove old and install new
|
||
if [[ -n "$INSTALLED_VERSION" ]]; then
|
||
msg_info "Upgrade Temurin JDK from $INSTALLED_VERSION to $JAVA_VERSION"
|
||
$STD apt purge -y "temurin-${INSTALLED_VERSION}-jdk" || true
|
||
else
|
||
msg_info "Setup Temurin JDK $JAVA_VERSION"
|
||
fi
|
||
|
||
ensure_apt_working || return 1
|
||
|
||
# Install with retry logic
|
||
install_packages_with_retry "$DESIRED_PACKAGE" || {
|
||
msg_error "Failed to install Temurin JDK $JAVA_VERSION"
|
||
return 1
|
||
}
|
||
|
||
cache_installed_version "temurin-jdk" "$JAVA_VERSION"
|
||
msg_ok "Setup Temurin JDK $JAVA_VERSION"
|
||
}
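
# Examples (illustrative; version values follow the docs above):
#   setup_java                     # defaults to Temurin JDK 21
#   JAVA_VERSION="17" setup_java   # pin a specific major version
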
# ------------------------------------------------------------------------------
|
||
# Installs a local IP updater script using networkd-dispatcher.
|
||
#
|
||
# Description:
|
||
# - Stores current IP in /run/local-ip.env
|
||
# - Automatically runs on network changes
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_local_ip_helper() {
|
||
local BASE_DIR="/usr/local/community-scripts/ip-management"
|
||
local SCRIPT_PATH="$BASE_DIR/update_local_ip.sh"
|
||
local IP_FILE="/run/local-ip.env"
|
||
local DISPATCHER_SCRIPT="/etc/networkd-dispatcher/routable.d/10-update-local-ip.sh"
|
||
|
||
# Check if already set up
|
||
if [[ -f "$SCRIPT_PATH" && -f "$DISPATCHER_SCRIPT" ]]; then
|
||
msg_info "Update Local IP Helper"
|
||
cache_installed_version "local-ip-helper" "1.0"
|
||
msg_ok "Update Local IP Helper"
|
||
else
|
||
msg_info "Setup Local IP Helper"
|
||
fi
|
||
|
||
mkdir -p "$BASE_DIR"
|
||
|
||
# Install networkd-dispatcher if not present
|
||
if ! dpkg -s networkd-dispatcher >/dev/null 2>&1; then
|
||
ensure_dependencies networkd-dispatcher || {
|
||
msg_error "Failed to install networkd-dispatcher"
|
||
return 1
|
||
}
|
||
fi
|
||
|
||
# Write update_local_ip.sh
|
||
cat <<'EOF' >"$SCRIPT_PATH"
|
||
#!/bin/bash
|
||
set -euo pipefail
|
||
|
||
IP_FILE="/run/local-ip.env"
|
||
mkdir -p "$(dirname "$IP_FILE")"
|
||
|
||
get_current_ip() {
|
||
local targets=("8.8.8.8" "1.1.1.1" "192.168.1.1" "10.0.0.1" "172.16.0.1" "default")
|
||
local ip
|
||
|
||
for target in "${targets[@]}"; do
|
||
if [[ "$target" == "default" ]]; then
|
||
ip=$(ip route get 1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
|
||
else
|
||
ip=$(ip route get "$target" 2>/dev/null | awk '{for(i=1;i<=NF;i++) if ($i=="src") print $(i+1)}')
|
||
fi
|
||
if [[ -n "$ip" ]]; then
|
||
echo "$ip"
|
||
return 0
|
||
fi
|
||
done
|
||
|
||
return 1
|
||
}
|
||
|
||
current_ip="$(get_current_ip)"
|
||
|
||
if [[ -z "$current_ip" ]]; then
|
||
echo "[ERROR] Could not detect local IP" >&2
|
||
exit 1
|
||
fi
|
||
|
||
if [[ -f "$IP_FILE" ]]; then
|
||
source "$IP_FILE"
|
||
[[ "$LOCAL_IP" == "$current_ip" ]] && exit 0
|
||
fi
|
||
|
||
echo "LOCAL_IP=$current_ip" > "$IP_FILE"
|
||
echo "[INFO] LOCAL_IP updated to $current_ip"
|
||
EOF
|
||
|
||
chmod +x "$SCRIPT_PATH"
|
||
|
||
# Install dispatcher hook
|
||
mkdir -p "$(dirname "$DISPATCHER_SCRIPT")"
|
||
cat <<EOF >"$DISPATCHER_SCRIPT"
|
||
#!/bin/bash
|
||
$SCRIPT_PATH
|
||
EOF
|
||
|
||
chmod +x "$DISPATCHER_SCRIPT"
|
||
systemctl enable -q --now networkd-dispatcher.service || {
|
||
msg_warn "Failed to enable networkd-dispatcher service"
|
||
}
|
||
|
||
cache_installed_version "local-ip-helper" "1.0"
|
||
msg_ok "Setup Local IP Helper"
|
||
}
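
# Example (illustrative): install the dispatcher hook, then read the stored address.
#   setup_local_ip_helper
#   source /run/local-ip.env && echo "$LOCAL_IP"
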
# ------------------------------------------------------------------------------
|
||
# Installs or updates MariaDB from official repo.
|
||
#
|
||
# Description:
|
||
# - Detects current MariaDB version and replaces it if necessary
|
||
# - Preserves existing database data
|
||
# - Dynamically determines latest GA version if "latest" is given
|
||
#
|
||
# Variables:
|
||
# MARIADB_VERSION - MariaDB version to install (e.g. 10.11, latest) (default: latest)
|
||
# ------------------------------------------------------------------------------
|
||
|
||
setup_mariadb() {
|
||
local MARIADB_VERSION="${MARIADB_VERSION:-latest}"
|
||
|
||
# Resolve "latest" to actual version
|
||
if [[ "$MARIADB_VERSION" == "latest" ]]; then
|
||
if ! curl -fsI --max-time 10 http://mirror.mariadb.org/repo/ >/dev/null 2>&1; then
|
||
msg_warn "MariaDB mirror not reachable - trying cached package list fallback"
|
||
# Fallback: try to use a known stable version
|
||
MARIADB_VERSION="12.0"
|
||
else
|
||
MARIADB_VERSION=$(curl -fsSL --max-time 15 http://mirror.mariadb.org/repo/ 2>/dev/null |
|
||
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
|
||
grep -vE 'rc/|rolling/' |
|
||
sed 's|/||' |
|
||
sort -Vr |
|
||
head -n1 || echo "")
|
||
|
||
if [[ -z "$MARIADB_VERSION" ]]; then
|
||
msg_warn "Could not parse latest GA MariaDB version from mirror - using fallback"
|
||
MARIADB_VERSION="12.0"
|
||
fi
|
||
fi
|
||
fi
|
||
|
||
# Get currently installed version
|
||
local CURRENT_VERSION=""
|
||
CURRENT_VERSION=$(is_tool_installed "mariadb" 2>/dev/null) || true
|
||
|
||
# Scenario 1: Already installed at target version - just update packages
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$MARIADB_VERSION" ]]; then
|
||
msg_info "Update MariaDB $MARIADB_VERSION"
|
||
|
||
# Ensure APT is working
|
||
ensure_apt_working || return 1
|
||
|
||
# Check if repository needs to be refreshed
|
||
if [[ -f /etc/apt/sources.list.d/mariadb.sources ]]; then
|
||
local REPO_VERSION=""
|
||
REPO_VERSION=$(grep -oP 'repo/\K[0-9]+\.[0-9]+' /etc/apt/sources.list.d/mariadb.sources 2>/dev/null || echo "")
|
||
if [[ -n "$REPO_VERSION" && "$REPO_VERSION" != "${MARIADB_VERSION%.*}" ]]; then
|
||
msg_warn "Repository version mismatch, updating..."
|
||
manage_tool_repository "mariadb" "$MARIADB_VERSION" "http://mirror.mariadb.org/repo/$MARIADB_VERSION" \
|
||
"https://mariadb.org/mariadb_release_signing_key.asc" || {
|
||
msg_error "Failed to update MariaDB repository"
|
||
return 1
|
||
}
|
||
fi
|
||
fi
|
||
|
||
# Perform upgrade with retry logic
|
||
ensure_apt_working || return 1
|
||
upgrade_packages_with_retry "mariadb-server" "mariadb-client" || {
|
||
msg_error "Failed to upgrade MariaDB packages"
|
||
return 1
|
||
}
|
||
cache_installed_version "mariadb" "$MARIADB_VERSION"
|
||
msg_ok "Update MariaDB $MARIADB_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Different version installed - clean upgrade
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" != "$MARIADB_VERSION" ]]; then
|
||
msg_info "Upgrade MariaDB from $CURRENT_VERSION to $MARIADB_VERSION"
|
||
remove_old_tool_version "mariadb"
|
||
fi
|
||
|
||
# Scenario 3: Fresh install or version change
|
||
msg_info "Setup MariaDB $MARIADB_VERSION"
|
||
|
||
# Prepare repository (cleanup + validation)
|
||
prepare_repository_setup "mariadb" || {
|
||
msg_error "Failed to prepare MariaDB repository"
|
||
return 1
|
||
}
|
||
|
||
# Install required dependencies first
|
||
local mariadb_deps=()
|
||
for dep in gawk rsync socat libdbi-perl pv; do
|
||
if apt-cache search "^${dep}$" 2>/dev/null | grep -q .; then
|
||
mariadb_deps+=("$dep")
|
||
fi
|
||
done
|
||
|
||
if [[ ${#mariadb_deps[@]} -gt 0 ]]; then
|
||
$STD apt install -y "${mariadb_deps[@]}" 2>/dev/null || true
|
||
fi
|
||
|
||
# Setup repository
|
||
manage_tool_repository "mariadb" "$MARIADB_VERSION" "http://mirror.mariadb.org/repo/$MARIADB_VERSION" \
|
||
"https://mariadb.org/mariadb_release_signing_key.asc" || {
|
||
msg_error "Failed to setup MariaDB repository"
|
||
return 1
|
||
}
|
||
|
||
# Set debconf selections for all potential versions
|
||
local MARIADB_MAJOR_MINOR
|
||
MARIADB_MAJOR_MINOR=$(echo "$MARIADB_VERSION" | awk -F. '{print $1"."$2}')
|
||
if [[ -n "$MARIADB_MAJOR_MINOR" ]]; then
|
||
echo "mariadb-server-$MARIADB_MAJOR_MINOR mariadb-server/feedback boolean false" | debconf-set-selections
|
||
fi
|
||
|
||
# Install packages with retry logic
|
||
export DEBIAN_FRONTEND=noninteractive
|
||
if ! install_packages_with_retry "mariadb-server" "mariadb-client"; then
|
||
# Fallback: try without specific version
|
||
msg_warn "Failed to install MariaDB packages from upstream repo, trying distro fallback..."
|
||
cleanup_old_repo_files "mariadb"
|
||
$STD apt update || {
|
||
msg_warn "APT update also failed, continuing with cache"
|
||
}
|
||
install_packages_with_retry "mariadb-server" "mariadb-client" || {
|
||
msg_error "Failed to install MariaDB packages (both upstream and distro)"
|
||
return 1
|
||
}
|
||
fi
|
||
|
||
cache_installed_version "mariadb" "$MARIADB_VERSION"
|
||
msg_ok "Setup MariaDB $MARIADB_VERSION"
|
||
}
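
# Examples (illustrative; version values follow the docs above):
#   setup_mariadb                           # latest GA release
#   MARIADB_VERSION="10.11" setup_mariadb   # pin a specific series
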
# ------------------------------------------------------------------------------
|
||
# Installs or updates MongoDB to specified major version.
|
||
#
|
||
# Description:
|
||
# - Preserves data across installations
|
||
# - Adds official MongoDB repo
|
||
#
|
||
# Variables:
|
||
# MONGO_VERSION - MongoDB major version to install (e.g. 7.0, 8.0)
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_mongodb() {
|
||
local MONGO_VERSION="${MONGO_VERSION:-8.0}"
|
||
local DISTRO_ID DISTRO_CODENAME
|
||
DISTRO_ID=$(get_os_info id)
|
||
DISTRO_CODENAME=$(get_os_info codename)
|
||
|
||
# Check AVX support
|
||
if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
|
||
local major="${MONGO_VERSION%%.*}"
|
||
if ((major > 5)); then
|
||
msg_error "MongoDB ${MONGO_VERSION} requires AVX support, which is not available on this system."
|
||
return 1
|
||
fi
|
||
fi
|
||
|
||
case "$DISTRO_ID" in
|
||
ubuntu)
|
||
MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu"
|
||
;;
|
||
debian)
|
||
MONGO_BASE_URL="https://repo.mongodb.org/apt/debian"
|
||
;;
|
||
*)
|
||
msg_error "Unsupported distribution: $DISTRO_ID"
|
||
return 1
|
||
;;
|
||
esac
|
||
|
||
# Get currently installed version
|
||
local INSTALLED_VERSION=""
|
||
INSTALLED_VERSION=$(is_tool_installed "mongodb" 2>/dev/null) || true
|
||
|
||
# Scenario 1: Already at target version - just update packages
|
||
if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" == "$MONGO_VERSION" ]]; then
|
||
msg_info "Update MongoDB $MONGO_VERSION"
|
||
|
||
ensure_apt_working || return 1
|
||
|
||
# Perform upgrade with retry logic
|
||
upgrade_packages_with_retry "mongodb-org" || {
|
||
msg_error "Failed to upgrade MongoDB"
|
||
return 1
|
||
}
|
||
cache_installed_version "mongodb" "$MONGO_VERSION"
|
||
msg_ok "Update MongoDB $MONGO_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Different version installed - clean upgrade
|
||
if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" != "$MONGO_VERSION" ]]; then
|
||
msg_info "Upgrade MongoDB from $INSTALLED_VERSION to $MONGO_VERSION"
|
||
remove_old_tool_version "mongodb"
|
||
else
|
||
msg_info "Setup MongoDB $MONGO_VERSION"
|
||
fi
|
||
|
||
cleanup_orphaned_sources
|
||
|
||
# Prepare repository (cleanup + validation)
|
||
prepare_repository_setup "mongodb" || {
|
||
msg_error "Failed to prepare MongoDB repository"
|
||
return 1
|
||
}
|
||
|
||
# Setup repository
|
||
manage_tool_repository "mongodb" "$MONGO_VERSION" "$MONGO_BASE_URL" \
|
||
"https://www.mongodb.org/static/pgp/server-${MONGO_VERSION}.asc" || {
|
||
msg_error "Failed to setup MongoDB repository"
|
||
return 1
|
||
}
|
||
|
||
# Wait for repo to settle
|
||
$STD apt update || {
|
||
msg_error "APT update failed — invalid MongoDB repo for ${DISTRO_ID}-${DISTRO_CODENAME}?"
|
||
return 1
|
||
}
|
||
|
||
# Install MongoDB with retry logic
|
||
install_packages_with_retry "mongodb-org" || {
|
||
msg_error "Failed to install MongoDB packages"
|
||
return 1
|
||
}
|
||
|
||
# Verify MongoDB was installed correctly
|
||
if ! command -v mongod >/dev/null 2>&1; then
|
||
msg_error "MongoDB binary not found after installation"
|
||
return 1
|
||
fi
|
||
|
||
mkdir -p /var/lib/mongodb
|
||
chown -R mongodb:mongodb /var/lib/mongodb
|
||
|
||
$STD systemctl enable mongod || {
|
||
msg_warn "Failed to enable mongod service"
|
||
}
|
||
safe_service_restart mongod
|
||
|
||
# Verify MongoDB version
|
||
local INSTALLED_VERSION
|
||
INSTALLED_VERSION=$(mongod --version 2>/dev/null | grep -oP 'db version v\K[0-9]+\.[0-9]+' | head -n1 || echo "0.0")
|
||
verify_tool_version "MongoDB" "$MONGO_VERSION" "$INSTALLED_VERSION" || true
|
||
|
||
cache_installed_version "mongodb" "$MONGO_VERSION"
|
||
msg_ok "Setup MongoDB $MONGO_VERSION"
|
||
}
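
# Examples (illustrative; version values follow the docs above):
#   setup_mongodb                       # defaults to 8.0
#   MONGO_VERSION="7.0" setup_mongodb   # pin a major version (AVX required for >5.x)
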
# ------------------------------------------------------------------------------
|
||
# Installs or upgrades MySQL and configures APT repo.
|
||
#
|
||
# Description:
|
||
# - Detects existing MySQL installation
|
||
# - Purges conflicting packages before installation
|
||
# - Supports clean upgrade
|
||
# - Handles Debian Trixie libaio1t64 transition
|
||
#
|
||
# Variables:
|
||
# MYSQL_VERSION - MySQL version to install (e.g. 5.7, 8.0) (default: 8.0)
|
||
# ------------------------------------------------------------------------------
|
||
|
||
function setup_mysql() {
|
||
local MYSQL_VERSION="${MYSQL_VERSION:-8.0}"
|
||
local DISTRO_ID DISTRO_CODENAME
|
||
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
|
||
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
|
||
|
||
# Get currently installed version
|
||
local CURRENT_VERSION=""
|
||
CURRENT_VERSION=$(is_tool_installed "mysql" 2>/dev/null) || true
|
||
|
||
# Scenario 1: Already at target version - just update packages
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$MYSQL_VERSION" ]]; then
|
||
msg_info "Update MySQL $MYSQL_VERSION"
|
||
|
||
ensure_apt_working || return 1
|
||
|
||
# Perform upgrade with retry logic (non-fatal if fails)
|
||
upgrade_packages_with_retry "mysql-server" "mysql-client" || true
|
||
|
||
cache_installed_version "mysql" "$MYSQL_VERSION"
|
||
msg_ok "Update MySQL $MYSQL_VERSION"
|
||
return 0
|
||
fi
|
||
|
||
# Scenario 2: Different version installed - clean upgrade
|
||
if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" != "$MYSQL_VERSION" ]]; then
|
||
msg_info "Upgrade MySQL from $CURRENT_VERSION to $MYSQL_VERSION"
|
||
remove_old_tool_version "mysql"
|
||
else
|
||
msg_info "Setup MySQL $MYSQL_VERSION"
|
||
fi
|
||
|
||
# Prepare repository (cleanup + validation)
|
||
prepare_repository_setup "mysql" || {
|
||
msg_error "Failed to prepare MySQL repository"
|
||
return 1
|
||
}
|
||
|
||
# Debian 13+ Fix: MySQL 8.0 incompatible with libaio1t64, use 8.4 LTS
|
||
if [[ "$DISTRO_ID" == "debian" && "$DISTRO_CODENAME" =~ ^(trixie|forky|sid)$ ]]; then
|
||
msg_info "Debian ${DISTRO_CODENAME} detected → using MySQL 8.4 LTS (libaio1t64 compatible)"
|
||
|
||
if ! curl -fsSL https://repo.mysql.com/RPM-GPG-KEY-mysql-2023 | gpg --dearmor -o /etc/apt/keyrings/mysql.gpg 2>/dev/null; then
|
||
msg_error "Failed to import MySQL GPG key"
|
||
return 1
|
||
fi
|
||
|
||
cat >/etc/apt/sources.list.d/mysql.sources <<'EOF'
|
||
Types: deb
|
||
URIs: https://repo.mysql.com/apt/debian/
|
||
Suites: bookworm
|
||
Components: mysql-8.4-lts
|
||
Architectures: amd64 arm64
|
||
Signed-By: /etc/apt/keyrings/mysql.gpg
|
||
EOF
|
||
|
||
$STD apt update || {
|
||
msg_error "Failed to update APT for MySQL 8.4 LTS"
|
||
return 1
|
||
}
|
||
|
||
# Install with retry logic
|
||
if ! install_packages_with_retry "mysql-community-server" "mysql-community-client"; then
|
||
msg_warn "MySQL 8.4 LTS installation failed – falling back to MariaDB"
|
||
cleanup_old_repo_files "mysql"
|
||
$STD apt update
|
||
install_packages_with_retry "mariadb-server" "mariadb-client" || {
|
||
msg_error "Failed to install database engine (MySQL/MariaDB fallback)"
|
||
return 1
|
||
}
|
||
msg_ok "Setup Database Engine (MariaDB fallback on Debian ${DISTRO_CODENAME})"
|
||
return 0
|
||
fi
|
||
|
||
cache_installed_version "mysql" "8.4"
|
||
msg_ok "Setup MySQL 8.4 LTS (Debian ${DISTRO_CODENAME})"
|
||
return 0
|
||
fi
|
||
|
||
# Standard setup for other distributions
|
||
local SUITE
|
||
if [[ "$DISTRO_ID" == "debian" ]]; then
|
||
case "$DISTRO_CODENAME" in
|
||
bookworm | bullseye) SUITE="$DISTRO_CODENAME" ;;
|
||
*) SUITE="bookworm" ;;
|
||
esac
|
||
else
|
||
SUITE=$(get_fallback_suite "$DISTRO_ID" "$DISTRO_CODENAME" "https://repo.mysql.com/apt/${DISTRO_ID}")
|
||
fi
|
||
|
||
# Setup repository
|
||
manage_tool_repository "mysql" "$MYSQL_VERSION" "https://repo.mysql.com/apt/${DISTRO_ID}" \
|
||
"https://repo.mysql.com/RPM-GPG-KEY-mysql-2023" || {
|
||
msg_error "Failed to setup MySQL repository"
|
||
return 1
|
||
}
|
||
|
||
ensure_apt_working || return 1
|
||
|
||
# Try multiple package names with retry logic
|
||
export DEBIAN_FRONTEND=noninteractive
|
||
local mysql_install_success=false
|
||
|
||
if apt-cache search "^mysql-server$" 2>/dev/null | grep -q . &&
|
||
install_packages_with_retry "mysql-server" "mysql-client"; then
|
||
mysql_install_success=true
|
||
elif apt-cache search "^mysql-community-server$" 2>/dev/null | grep -q . &&
|
||
install_packages_with_retry "mysql-community-server" "mysql-community-client"; then
|
||
mysql_install_success=true
|
||
elif apt-cache search "^mysql$" 2>/dev/null | grep -q . &&
|
||
install_packages_with_retry "mysql"; then
|
||
mysql_install_success=true
|
||
fi
|
||
|
||
if [[ "$mysql_install_success" == false ]]; then
|
||
msg_error "MySQL ${MYSQL_VERSION} package not available for suite ${SUITE}"
|
||
return 1
|
||
fi
|
||
|
||
# Verify mysql command is accessible
|
||
if ! command -v mysql >/dev/null 2>&1; then
|
||
hash -r
|
||
if ! command -v mysql >/dev/null 2>&1; then
|
||
msg_error "MySQL installed but mysql command still not found"
|
||
return 1
|
||
fi
|
||
fi
|
||
|
||
cache_installed_version "mysql" "$MYSQL_VERSION"
|
||
msg_ok "Setup MySQL $MYSQL_VERSION"
|
||
}
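
# Examples (illustrative; version values follow the docs above):
#   setup_mysql                      # defaults to MySQL 8.0
#   MYSQL_VERSION="5.7" setup_mysql  # pin a different series
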
# ------------------------------------------------------------------------------
|
||
# Installs Node.js and optional global modules.
|
||
#
|
||
# Description:
|
||
# - Installs specified Node.js version using NodeSource APT repo
|
||
# - Optionally installs or updates global npm modules
|
||
#
|
||
# Variables:
|
||
# NODE_VERSION - Node.js version to install (default: 22)
|
||
# NODE_MODULE - Comma-separated list of global modules (e.g. "yarn,@vue/cli@5.0.0")
|
||
# ------------------------------------------------------------------------------
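#
# Example (illustrative; reuses the documented sample module list):
#   NODE_VERSION="22" NODE_MODULE="yarn,@vue/cli@5.0.0" setup_nodejs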

function setup_nodejs() {
  local NODE_VERSION="${NODE_VERSION:-22}"
  local NODE_MODULE="${NODE_MODULE:-}"

  # ALWAYS clean up legacy installations first (nvm, etc.) to prevent conflicts
  cleanup_legacy_install "nodejs"

  # Get currently installed version
  local CURRENT_NODE_VERSION=""
  CURRENT_NODE_VERSION=$(is_tool_installed "nodejs" 2>/dev/null) || true

  # Ensure jq is available for JSON parsing
  if ! command -v jq &>/dev/null; then
    $STD apt update
    $STD apt install -y jq || {
      msg_error "Failed to install jq"
      return 1
    }
  fi

  # Scenario 1: Already installed at target version - just update packages/modules
  if [[ -n "$CURRENT_NODE_VERSION" && "$CURRENT_NODE_VERSION" == "$NODE_VERSION" ]]; then
    msg_info "Update Node.js $NODE_VERSION"

    ensure_apt_working || return 1

    # Just update npm to latest
    $STD npm install -g npm@latest 2>/dev/null || true

    cache_installed_version "nodejs" "$NODE_VERSION"
    msg_ok "Update Node.js $NODE_VERSION"
  else
    # Scenario 2: Different version installed - clean upgrade
    if [[ -n "$CURRENT_NODE_VERSION" && "$CURRENT_NODE_VERSION" != "$NODE_VERSION" ]]; then
      msg_info "Upgrade Node.js from $CURRENT_NODE_VERSION to $NODE_VERSION"
      remove_old_tool_version "nodejs"
    else
      msg_info "Setup Node.js $NODE_VERSION"
    fi

    # Remove ALL Debian nodejs packages BEFORE adding NodeSource repo
    if dpkg -l 2>/dev/null | grep -qE "^ii.*(nodejs|libnode|node-cjs|node-acorn|node-balanced|node-brace|node-minimatch|node-undici|node-xtend|node-corepack)"; then
      msg_info "Removing Debian-packaged Node.js and dependencies"
      $STD apt purge -y nodejs nodejs-doc libnode* node-* 2>/dev/null || true
      $STD apt autoremove -y 2>/dev/null || true
      $STD apt clean 2>/dev/null || true
    fi

    # Remove any APT pinning (not needed)
    rm -f /etc/apt/preferences.d/nodesource 2>/dev/null || true

    # Prepare repository (cleanup + validation)
    prepare_repository_setup "nodesource" || {
      msg_error "Failed to prepare Node.js repository"
      return 1
    }

    # Setup NodeSource repository
    manage_tool_repository "nodejs" "$NODE_VERSION" "https://deb.nodesource.com/node_${NODE_VERSION}.x" "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" || {
      msg_error "Failed to setup Node.js repository"
      return 1
    }

    # CRITICAL: Force APT cache refresh AFTER repository setup
    # This ensures NodeSource is the only nodejs source in APT cache
    $STD apt update

    # Install dependencies (NodeSource is now the only nodejs source)
    ensure_dependencies curl ca-certificates gnupg

    # Install Node.js from NodeSource
    install_packages_with_retry "nodejs" || {
      msg_error "Failed to install Node.js ${NODE_VERSION} from NodeSource"
      return 1
    }

    # Verify Node.js was installed correctly
    if ! command -v node >/dev/null 2>&1; then
      msg_error "Node.js binary not found after installation"
      return 1
    fi

    local INSTALLED_NODE_VERSION
    INSTALLED_NODE_VERSION=$(node -v 2>/dev/null | grep -oP '^v\K[0-9]+' || echo "0")
    verify_tool_version "Node.js" "$NODE_VERSION" "$INSTALLED_NODE_VERSION" || true

    # Verify npm is available (should come with NodeSource nodejs)
    if ! command -v npm >/dev/null 2>&1; then
      msg_error "npm not found after Node.js installation - repository issue?"
      return 1
    fi

    # Update to latest npm (with version check to avoid incompatibility)
    local NPM_VERSION
    NPM_VERSION=$(npm -v 2>/dev/null || echo "0")
    if [[ "$NPM_VERSION" != "0" ]]; then
      $STD npm install -g npm@latest 2>/dev/null || {
        msg_warn "Failed to update npm to latest version (continuing with bundled npm $NPM_VERSION)"
      }
    fi

    cache_installed_version "nodejs" "$NODE_VERSION"
    msg_ok "Setup Node.js $NODE_VERSION"
  fi

  export NODE_OPTIONS="--max-old-space-size=4096"

  # Ensure valid working directory for npm (avoids uv_cwd error)
  if [[ ! -d /opt ]]; then
    mkdir -p /opt
  fi
  cd /opt || {
    msg_error "Failed to set safe working directory before npm install"
    return 1
  }

  # Install global Node modules
  if [[ -n "$NODE_MODULE" ]]; then
    IFS=',' read -ra MODULES <<<"$NODE_MODULE"
    local failed_modules=0
    for mod in "${MODULES[@]}"; do
      local MODULE_NAME MODULE_REQ_VERSION MODULE_INSTALLED_VERSION
      if [[ "$mod" == @*/*@* ]]; then
        # Scoped package with version, e.g. @vue/cli-service@latest
        MODULE_NAME="${mod%@*}"
        MODULE_REQ_VERSION="${mod##*@}"
      elif [[ "$mod" == *"@"* ]]; then
        # Unscoped package with version, e.g. yarn@latest
        MODULE_NAME="${mod%@*}"
        MODULE_REQ_VERSION="${mod##*@}"
      else
        # No version specified
        MODULE_NAME="$mod"
        MODULE_REQ_VERSION="latest"
      fi

      # Check if the module is already installed
      if $STD npm list -g --depth=0 "$MODULE_NAME" 2>&1 | grep -q "$MODULE_NAME@"; then
        MODULE_INSTALLED_VERSION="$($STD npm list -g --depth=0 "$MODULE_NAME" 2>&1 | grep "$MODULE_NAME@" | awk -F@ '{print $2}' | tr -d '[:space:]')"
        if [[ "$MODULE_REQ_VERSION" != "latest" && "$MODULE_REQ_VERSION" != "$MODULE_INSTALLED_VERSION" ]]; then
          msg_info "Updating $MODULE_NAME from v$MODULE_INSTALLED_VERSION to v$MODULE_REQ_VERSION"
          if ! $STD npm install -g "${MODULE_NAME}@${MODULE_REQ_VERSION}" 2>/dev/null; then
            msg_warn "Failed to update $MODULE_NAME to version $MODULE_REQ_VERSION"
            ((failed_modules++))
            continue
          fi
        elif [[ "$MODULE_REQ_VERSION" == "latest" ]]; then
          msg_info "Updating $MODULE_NAME to latest version"
          if ! $STD npm install -g "${MODULE_NAME}@latest" 2>/dev/null; then
            msg_warn "Failed to update $MODULE_NAME to latest version"
            ((failed_modules++))
            continue
          fi
        fi
      else
        msg_info "Installing $MODULE_NAME@$MODULE_REQ_VERSION"
        if ! $STD npm install -g "${MODULE_NAME}@${MODULE_REQ_VERSION}" 2>/dev/null; then
          msg_warn "Failed to install $MODULE_NAME@$MODULE_REQ_VERSION"
          ((failed_modules++))
          continue
        fi
      fi
    done
    if [[ $failed_modules -eq 0 ]]; then
      msg_ok "Installed Node.js modules: $NODE_MODULE"
    else
      msg_warn "Installed Node.js modules with $failed_modules failure(s): $NODE_MODULE"
    fi
  fi
}

# ------------------------------------------------------------------------------
# Installs PHP with selected modules and configures Apache/FPM support.
#
# Description:
# - Adds Sury PHP repo if needed
# - Installs default and user-defined modules
# - Patches php.ini for CLI, Apache, and FPM as needed
#
# Variables:
#   PHP_VERSION - PHP version to install (default: 8.4)
#   PHP_MODULE - Additional comma-separated modules
#   PHP_APACHE - Set YES to enable PHP with Apache
#   PHP_FPM - Set YES to enable PHP-FPM
#   PHP_MEMORY_LIMIT - (default: 512M)
#   PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
#   PHP_POST_MAX_SIZE - (default: 128M)
#   PHP_MAX_EXECUTION_TIME - (default: 300)
# ------------------------------------------------------------------------------
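# Example usage (illustrative; the extra modules are assumptions and are only
# installed when a matching php<version>-<module> package exists):
#   PHP_VERSION="8.3" PHP_MODULE="redis,imagick" PHP_FPM="YES" setup_php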

function setup_php() {
  local PHP_VERSION="${PHP_VERSION:-8.4}"
  local PHP_MODULE="${PHP_MODULE:-}"
  local PHP_APACHE="${PHP_APACHE:-NO}"
  local PHP_FPM="${PHP_FPM:-NO}"
  local DISTRO_ID DISTRO_CODENAME
  DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
  DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

  local DEFAULT_MODULES="bcmath,cli,curl,gd,intl,mbstring,opcache,readline,xml,zip"
  local COMBINED_MODULES

  local PHP_MEMORY_LIMIT="${PHP_MEMORY_LIMIT:-512M}"
  local PHP_UPLOAD_MAX_FILESIZE="${PHP_UPLOAD_MAX_FILESIZE:-128M}"
  local PHP_POST_MAX_SIZE="${PHP_POST_MAX_SIZE:-128M}"
  local PHP_MAX_EXECUTION_TIME="${PHP_MAX_EXECUTION_TIME:-300}"

  # Merge default + user-defined modules
  if [[ -n "$PHP_MODULE" ]]; then
    COMBINED_MODULES="${DEFAULT_MODULES},${PHP_MODULE}"
  else
    COMBINED_MODULES="${DEFAULT_MODULES}"
  fi

  # Deduplicate
  COMBINED_MODULES=$(echo "$COMBINED_MODULES" | tr ',' '\n' | awk '!seen[$0]++' | paste -sd, -)

  # Get current PHP-CLI version
  local CURRENT_PHP=""
  CURRENT_PHP=$(is_tool_installed "php" 2>/dev/null) || true

  # Scenario 1: Already at target version - just update packages
  if [[ -n "$CURRENT_PHP" && "$CURRENT_PHP" == "$PHP_VERSION" ]]; then
    msg_info "Update PHP $PHP_VERSION"

    # Ensure Sury repo is available
    if [[ ! -f /etc/apt/sources.list.d/php.sources ]]; then
      manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
        msg_error "Failed to setup PHP repository"
        return 1
      }
    fi

    ensure_apt_working || return 1

    # Perform upgrade with retry logic (non-fatal if fails)
    upgrade_packages_with_retry "php${PHP_VERSION}" || true

    cache_installed_version "php" "$PHP_VERSION"
    msg_ok "Update PHP $PHP_VERSION"
  else
    # Scenario 2: Different version installed - clean upgrade
    if [[ -n "$CURRENT_PHP" && "$CURRENT_PHP" != "$PHP_VERSION" ]]; then
      msg_info "Upgrade PHP from $CURRENT_PHP to $PHP_VERSION"
      # Stop and disable ALL PHP-FPM versions
      stop_all_services "php.*-fpm"
      remove_old_tool_version "php"
    else
      msg_info "Setup PHP $PHP_VERSION"
    fi

    # Prepare repository (cleanup + validation)
    prepare_repository_setup "php" "deb.sury.org-php" || {
      msg_error "Failed to prepare PHP repository"
      return 1
    }

    # Setup Sury repository
    manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
      msg_error "Failed to setup PHP repository"
      return 1
    }

    ensure_apt_working || return 1
  fi

  # Build module list
  local MODULE_LIST="php${PHP_VERSION}"
  IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
  for mod in "${MODULES[@]}"; do
    if apt-cache show "php${PHP_VERSION}-${mod}" >/dev/null 2>&1; then
      MODULE_LIST+=" php${PHP_VERSION}-${mod}"
    fi
  done
  if [[ "$PHP_FPM" == "YES" ]]; then
    MODULE_LIST+=" php${PHP_VERSION}-fpm"
  fi

  # install apache2 with PHP support if requested
  if [[ "$PHP_APACHE" == "YES" ]]; then
    if ! dpkg -l 2>/dev/null | grep -q "libapache2-mod-php${PHP_VERSION}"; then
      install_packages_with_retry "apache2" "libapache2-mod-php${PHP_VERSION}" || {
        msg_error "Failed to install Apache with PHP module"
        return 1
      }
    fi
  fi

  # Install PHP packages with retry logic
  install_packages_with_retry $MODULE_LIST || {
    msg_error "Failed to install PHP packages"
    return 1
  }
  cache_installed_version "php" "$PHP_VERSION"

  # Patch all relevant php.ini files
  local PHP_INI_PATHS=("/etc/php/${PHP_VERSION}/cli/php.ini")
  [[ "$PHP_FPM" == "YES" ]] && PHP_INI_PATHS+=("/etc/php/${PHP_VERSION}/fpm/php.ini")
  [[ "$PHP_APACHE" == "YES" ]] && PHP_INI_PATHS+=("/etc/php/${PHP_VERSION}/apache2/php.ini")
  for ini in "${PHP_INI_PATHS[@]}"; do
    if [[ -f "$ini" ]]; then
      $STD sed -i "s|^memory_limit = .*|memory_limit = ${PHP_MEMORY_LIMIT}|" "$ini"
      $STD sed -i "s|^upload_max_filesize = .*|upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}|" "$ini"
      $STD sed -i "s|^post_max_size = .*|post_max_size = ${PHP_POST_MAX_SIZE}|" "$ini"
      $STD sed -i "s|^max_execution_time = .*|max_execution_time = ${PHP_MAX_EXECUTION_TIME}|" "$ini"
    fi
  done

  # Patch Apache configuration if needed
  if [[ "$PHP_APACHE" == "YES" ]]; then
    for mod in $(ls /etc/apache2/mods-enabled/ 2>/dev/null | grep -E '^php[0-9]\.[0-9]\.conf$' | sed 's/\.conf//'); do
      if [[ "$mod" != "php${PHP_VERSION}" ]]; then
        $STD a2dismod "$mod" || true
      fi
    done
    $STD a2enmod mpm_prefork
    $STD a2enmod "php${PHP_VERSION}"
    safe_service_restart apache2 || true
  fi

  # Enable and restart PHP-FPM if requested
  if [[ "$PHP_FPM" == "YES" ]]; then
    if systemctl list-unit-files | grep -q "php${PHP_VERSION}-fpm.service"; then
      $STD systemctl enable php${PHP_VERSION}-fpm
      safe_service_restart php${PHP_VERSION}-fpm
    fi
  fi

  msg_ok "Setup PHP $PHP_VERSION"
}

# ------------------------------------------------------------------------------
# Installs or upgrades PostgreSQL and optional extensions/modules.
#
# Description:
# - Detects existing PostgreSQL version
# - Dumps all databases before upgrade
# - Adds PGDG repo and installs specified version
# - Installs optional PG_MODULES (e.g. postgis, contrib)
# - Restores dumped data post-upgrade
#
# Variables:
#   PG_VERSION - Major PostgreSQL version (e.g. 15, 16) (default: 16)
#   PG_MODULES - Comma-separated list of extension packages (e.g. "postgis,contrib")
# ------------------------------------------------------------------------------
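# Example usage (illustrative; the module list is an assumption):
#   PG_VERSION="16" PG_MODULES="postgis,contrib" setup_postgresql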
function setup_postgresql() {
  local PG_VERSION="${PG_VERSION:-16}"
  local PG_MODULES="${PG_MODULES:-}"
  local DISTRO_ID DISTRO_CODENAME
  DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
  DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

  # Get currently installed version
  local CURRENT_PG_VERSION=""
  if command -v psql >/dev/null; then
    CURRENT_PG_VERSION="$(psql -V 2>/dev/null | awk '{print $3}' | cut -d. -f1)"
  fi

  # Scenario 1: Already at correct version
  if [[ "$CURRENT_PG_VERSION" == "$PG_VERSION" ]]; then
    msg_info "Update PostgreSQL $PG_VERSION"
    ensure_apt_working || return 1

    # Perform upgrade with retry logic (non-fatal if fails)
    upgrade_packages_with_retry "postgresql-${PG_VERSION}" "postgresql-client-${PG_VERSION}" 2>/dev/null || true
    cache_installed_version "postgresql" "$PG_VERSION"
    msg_ok "Update PostgreSQL $PG_VERSION"

    # Still install modules if specified
    if [[ -n "$PG_MODULES" ]]; then
      IFS=',' read -ra MODULES <<<"$PG_MODULES"
      for module in "${MODULES[@]}"; do
        $STD apt install -y "postgresql-${PG_VERSION}-${module}" 2>/dev/null || true
      done
    fi
    return 0
  fi

  # Scenario 2: Different version - backup, remove old, install new
  if [[ -n "$CURRENT_PG_VERSION" ]]; then
    msg_info "Upgrade PostgreSQL from $CURRENT_PG_VERSION to $PG_VERSION"
    msg_info "Creating backup of PostgreSQL $CURRENT_PG_VERSION databases..."
    $STD runuser -u postgres -- pg_dumpall >/var/lib/postgresql/backup_$(date +%F)_v${CURRENT_PG_VERSION}.sql || {
      msg_error "Failed to backup PostgreSQL databases"
      return 1
    }
    $STD systemctl stop postgresql || true
    $STD apt purge -y "postgresql-${CURRENT_PG_VERSION}" "postgresql-client-${CURRENT_PG_VERSION}" 2>/dev/null || true
  else
    msg_info "Setup PostgreSQL $PG_VERSION"
  fi

  # Scenario 3: Fresh install or after removal - setup repo and install
  prepare_repository_setup "pgdg" "postgresql" || {
    msg_error "Failed to prepare PostgreSQL repository"
    return 1
  }

  local SUITE
  case "$DISTRO_CODENAME" in
  trixie | forky | sid)
    if verify_repo_available "https://apt.postgresql.org/pub/repos/apt" "trixie-pgdg"; then
      SUITE="trixie-pgdg"
    else
      SUITE="bookworm-pgdg"
    fi
    ;;
  *)
    SUITE=$(get_fallback_suite "$DISTRO_ID" "$DISTRO_CODENAME" "https://apt.postgresql.org/pub/repos/apt")
    SUITE="${SUITE}-pgdg"
    ;;
  esac

  setup_deb822_repo \
    "pgdg" \
    "https://www.postgresql.org/media/keys/ACCC4CF8.asc" \
    "https://apt.postgresql.org/pub/repos/apt" \
    "$SUITE" \
    "main" \
    "amd64 arm64"

  if ! $STD apt update; then
    msg_error "APT update failed for PostgreSQL repository"
    return 1
  fi

  # Install ssl-cert dependency if available
  if apt-cache search "^ssl-cert$" 2>/dev/null | grep -q .; then
    $STD apt install -y ssl-cert 2>/dev/null || true
  fi

  # Try multiple PostgreSQL package patterns with retry logic
  local pg_install_success=false

  if apt-cache search "^postgresql-${PG_VERSION}$" 2>/dev/null | grep -q . &&
    install_packages_with_retry "postgresql-${PG_VERSION}" "postgresql-client-${PG_VERSION}"; then
    pg_install_success=true
  fi

  if [[ "$pg_install_success" == false ]] &&
    apt-cache search "^postgresql-server-${PG_VERSION}$" 2>/dev/null | grep -q . &&
    $STD apt install -y "postgresql-server-${PG_VERSION}" "postgresql-client-${PG_VERSION}" 2>/dev/null; then
    pg_install_success=true
  fi

  if [[ "$pg_install_success" == false ]] &&
    apt-cache search "^postgresql$" 2>/dev/null | grep -q . &&
    $STD apt install -y postgresql postgresql-client 2>/dev/null; then
    pg_install_success=true
  fi

  if [[ "$pg_install_success" == false ]]; then
    msg_error "PostgreSQL package not available for suite ${SUITE}"
    return 1
  fi

  if ! command -v psql >/dev/null 2>&1; then
    msg_error "PostgreSQL installed but psql command not found"
    return 1
  fi

  # Restore database backup if we upgraded from previous version
  if [[ -n "$CURRENT_PG_VERSION" ]]; then
    msg_info "Restoring PostgreSQL databases from backup..."
    $STD runuser -u postgres -- psql </var/lib/postgresql/backup_$(date +%F)_v${CURRENT_PG_VERSION}.sql 2>/dev/null || {
      msg_warn "Failed to restore database backup - this may be expected for major version upgrades"
    }
  fi

  $STD systemctl enable --now postgresql 2>/dev/null || true

  # Add PostgreSQL binaries to PATH
  if ! grep -q '/usr/lib/postgresql' /etc/environment 2>/dev/null; then
    echo 'PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/postgresql/'"${PG_VERSION}"'/bin"' >/etc/environment
  fi

  cache_installed_version "postgresql" "$PG_VERSION"
  msg_ok "Setup PostgreSQL $PG_VERSION"

  # Install optional modules
  if [[ -n "$PG_MODULES" ]]; then
    IFS=',' read -ra MODULES <<<"$PG_MODULES"
    for module in "${MODULES[@]}"; do
      $STD apt install -y "postgresql-${PG_VERSION}-${module}" 2>/dev/null || true
    done
  fi
}

# ------------------------------------------------------------------------------
# Installs rbenv and ruby-build, installs Ruby and optionally Rails.
#
# Description:
# - Downloads rbenv and ruby-build from GitHub
# - Compiles and installs target Ruby version
# - Optionally installs Rails via gem
#
# Variables:
#   RUBY_VERSION - Ruby version to install (default: 3.4.4)
#   RUBY_INSTALL_RAILS - true/false to install Rails (default: true)
# ------------------------------------------------------------------------------
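# Example usage (illustrative; skipping Rails here is just an example choice):
#   RUBY_VERSION="3.4.4" RUBY_INSTALL_RAILS="false" setup_ruby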

function setup_ruby() {
  local RUBY_VERSION="${RUBY_VERSION:-3.4.4}"
  local RUBY_INSTALL_RAILS="${RUBY_INSTALL_RAILS:-true}"
  local RBENV_DIR="$HOME/.rbenv"
  local RBENV_BIN="$RBENV_DIR/bin/rbenv"
  local PROFILE_FILE="$HOME/.profile"
  local TMP_DIR=$(mktemp -d)

  # Get currently installed Ruby version
  local CURRENT_RUBY_VERSION=""
  if [[ -x "$RBENV_BIN" ]]; then
    CURRENT_RUBY_VERSION=$("$RBENV_BIN" global 2>/dev/null || echo "")
  fi

  # Scenario 1: Already at correct Ruby version
  if [[ "$CURRENT_RUBY_VERSION" == "$RUBY_VERSION" ]]; then
    msg_info "Update Ruby $RUBY_VERSION"
    cache_installed_version "ruby" "$RUBY_VERSION"
    msg_ok "Update Ruby $RUBY_VERSION"
    return 0
  fi

  # Scenario 2: Different version - reinstall
  if [[ -n "$CURRENT_RUBY_VERSION" ]]; then
    msg_info "Upgrade Ruby from $CURRENT_RUBY_VERSION to $RUBY_VERSION"
  else
    msg_info "Setup Ruby $RUBY_VERSION"
  fi

  ensure_apt_working || return 1

  # Install build dependencies with fallbacks
  local ruby_deps=()
  local dep_variations=(
    "jq"
    "autoconf"
    "patch"
    "build-essential"
    "libssl-dev"
    "libyaml-dev"
    "libreadline-dev|libreadline6-dev"
    "zlib1g-dev"
    "libgmp-dev"
    "libncurses-dev|libncurses5-dev"
    "libffi-dev"
    "libgdbm-dev"
    "libdb-dev"
    "uuid-dev"
  )

  for dep_pattern in "${dep_variations[@]}"; do
    if [[ "$dep_pattern" == *"|"* ]]; then
      IFS='|' read -ra variations <<<"$dep_pattern"
      for var in "${variations[@]}"; do
        if apt-cache search "^${var}$" 2>/dev/null | grep -q .; then
          ruby_deps+=("$var")
          break
        fi
      done
    else
      if apt-cache search "^${dep_pattern}$" 2>/dev/null | grep -q .; then
        ruby_deps+=("$dep_pattern")
      fi
    fi
  done

  if [[ ${#ruby_deps[@]} -gt 0 ]]; then
    $STD apt install -y "${ruby_deps[@]}" 2>/dev/null || true
  else
    msg_error "No Ruby build dependencies available"
    rm -rf "$TMP_DIR"
    return 1
  fi

  # Download and build rbenv if needed
  if [[ ! -x "$RBENV_BIN" ]]; then
    local RBENV_RELEASE
    local rbenv_json
    rbenv_json=$(curl -fsSL --max-time 15 https://api.github.com/repos/rbenv/rbenv/releases/latest 2>/dev/null || echo "")

    if [[ -z "$rbenv_json" ]]; then
      msg_error "Failed to fetch latest rbenv version from GitHub"
      rm -rf "$TMP_DIR"
      return 1
    fi

    RBENV_RELEASE=$(echo "$rbenv_json" | jq -r '.tag_name' 2>/dev/null | sed 's/^v//' || echo "")

    if [[ -z "$RBENV_RELEASE" ]]; then
      msg_error "Could not parse rbenv version from GitHub response"
      rm -rf "$TMP_DIR"
      return 1
    fi

    curl -fsSL "https://github.com/rbenv/rbenv/archive/refs/tags/v${RBENV_RELEASE}.tar.gz" -o "$TMP_DIR/rbenv.tar.gz" || {
      msg_error "Failed to download rbenv"
      rm -rf "$TMP_DIR"
      return 1
    }

    tar -xzf "$TMP_DIR/rbenv.tar.gz" -C "$TMP_DIR" || {
      msg_error "Failed to extract rbenv"
      rm -rf "$TMP_DIR"
      return 1
    }

    mkdir -p "$RBENV_DIR"
    cp -r "$TMP_DIR/rbenv-${RBENV_RELEASE}/." "$RBENV_DIR/"
    (cd "$RBENV_DIR" && src/configure && $STD make -C src) || {
      msg_error "Failed to build rbenv"
      rm -rf "$TMP_DIR"
      return 1
    }

    # Setup profile
    if ! grep -q 'rbenv init' "$PROFILE_FILE" 2>/dev/null; then
      echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >>"$PROFILE_FILE"
      echo 'eval "$(rbenv init -)"' >>"$PROFILE_FILE"
    fi
  fi

  # Install ruby-build plugin
  if [[ ! -d "$RBENV_DIR/plugins/ruby-build" ]]; then
    local RUBY_BUILD_RELEASE
    local ruby_build_json
    ruby_build_json=$(curl -fsSL --max-time 15 https://api.github.com/repos/rbenv/ruby-build/releases/latest 2>/dev/null || echo "")

    if [[ -z "$ruby_build_json" ]]; then
      msg_error "Failed to fetch latest ruby-build version from GitHub"
      rm -rf "$TMP_DIR"
      return 1
    fi

    RUBY_BUILD_RELEASE=$(echo "$ruby_build_json" | jq -r '.tag_name' 2>/dev/null | sed 's/^v//' || echo "")

    if [[ -z "$RUBY_BUILD_RELEASE" ]]; then
      msg_error "Could not parse ruby-build version from GitHub response"
      rm -rf "$TMP_DIR"
      return 1
    fi

    curl -fsSL "https://github.com/rbenv/ruby-build/archive/refs/tags/v${RUBY_BUILD_RELEASE}.tar.gz" -o "$TMP_DIR/ruby-build.tar.gz" || {
      msg_error "Failed to download ruby-build"
      rm -rf "$TMP_DIR"
      return 1
    }

    tar -xzf "$TMP_DIR/ruby-build.tar.gz" -C "$TMP_DIR" || {
      msg_error "Failed to extract ruby-build"
      rm -rf "$TMP_DIR"
      return 1
    }

    mkdir -p "$RBENV_DIR/plugins/ruby-build"
    cp -r "$TMP_DIR/ruby-build-${RUBY_BUILD_RELEASE}/." "$RBENV_DIR/plugins/ruby-build/"
  fi

  # Setup PATH and install Ruby version
  export PATH="$RBENV_DIR/bin:$PATH"
  eval "$("$RBENV_BIN" init - bash)" 2>/dev/null || true

  if ! "$RBENV_BIN" versions --bare 2>/dev/null | grep -qx "$RUBY_VERSION"; then
    $STD "$RBENV_BIN" install "$RUBY_VERSION" || {
      msg_error "Failed to install Ruby $RUBY_VERSION"
      rm -rf "$TMP_DIR"
      return 1
    }
  fi

  "$RBENV_BIN" global "$RUBY_VERSION" || {
    msg_error "Failed to set Ruby $RUBY_VERSION as global version"
    rm -rf "$TMP_DIR"
    return 1
  }

  hash -r

  # Install Rails if requested
  if [[ "$RUBY_INSTALL_RAILS" == "true" ]]; then
    $STD gem install rails || {
      msg_warn "Failed to install Rails - Ruby installation successful"
    }
  fi

  rm -rf "$TMP_DIR"
  cache_installed_version "ruby" "$RUBY_VERSION"
  msg_ok "Setup Ruby $RUBY_VERSION"
}

# ------------------------------------------------------------------------------
# Installs or upgrades ClickHouse database server.
#
# Description:
# - Adds ClickHouse official repository
# - Installs specified version
# - Configures systemd service
# - Supports Debian/Ubuntu with fallback mechanism
#
# Variables:
#   CLICKHOUSE_VERSION - ClickHouse version to install (default: latest)
# ------------------------------------------------------------------------------
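# Example usage (illustrative; "latest" is also the default):
#   CLICKHOUSE_VERSION="latest" setup_clickhouse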

function setup_clickhouse() {
  local CLICKHOUSE_VERSION="${CLICKHOUSE_VERSION:-latest}"
  local DISTRO_ID DISTRO_CODENAME
  DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
  DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

  # Resolve "latest" version
  if [[ "$CLICKHOUSE_VERSION" == "latest" ]]; then
    CLICKHOUSE_VERSION=$(curl -fsSL --max-time 15 https://packages.clickhouse.com/tgz/stable/ 2>/dev/null |
      grep -oP 'clickhouse-common-static-\K[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' |
      sort -V | tail -n1 || echo "")

    # Fallback to GitHub API if package server failed
    if [[ -z "$CLICKHOUSE_VERSION" ]]; then
      CLICKHOUSE_VERSION=$(curl -fsSL --max-time 15 https://api.github.com/repos/ClickHouse/ClickHouse/releases/latest 2>/dev/null |
        grep -oP '"tag_name":\s*"v\K[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | head -n1 || echo "")
    fi

    [[ -z "$CLICKHOUSE_VERSION" ]] && {
      msg_error "Could not determine latest ClickHouse version from any source"
      return 1
    }
  fi

  # Get currently installed version
  local CURRENT_VERSION=""
  if command -v clickhouse-server >/dev/null 2>&1; then
    CURRENT_VERSION=$(clickhouse-server --version 2>/dev/null | grep -oP 'version \K[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | head -n1)
  fi

  # Scenario 1: Already at target version - just update packages
  if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" == "$CLICKHOUSE_VERSION" ]]; then
    msg_info "Update ClickHouse $CLICKHOUSE_VERSION"
    ensure_apt_working || return 1

    # Perform upgrade with retry logic (non-fatal if fails)
    upgrade_packages_with_retry "clickhouse-server" "clickhouse-client" || true
    cache_installed_version "clickhouse" "$CLICKHOUSE_VERSION"
    msg_ok "Update ClickHouse $CLICKHOUSE_VERSION"
    return 0
  fi

  # Scenario 2: Different version - clean upgrade
  if [[ -n "$CURRENT_VERSION" && "$CURRENT_VERSION" != "$CLICKHOUSE_VERSION" ]]; then
    msg_info "Upgrade ClickHouse from $CURRENT_VERSION to $CLICKHOUSE_VERSION"
    stop_all_services "clickhouse-server"
    remove_old_tool_version "clickhouse"
  else
    msg_info "Setup ClickHouse $CLICKHOUSE_VERSION"
  fi

  ensure_dependencies apt-transport-https ca-certificates dirmngr gnupg

  # Prepare repository (cleanup + validation)
  prepare_repository_setup "clickhouse" || {
    msg_error "Failed to prepare ClickHouse repository"
    return 1
  }

  # Setup repository (ClickHouse uses 'stable' suite)
  setup_deb822_repo \
    "clickhouse" \
    "https://packages.clickhouse.com/rpm/lts/repodata/repomd.xml.key" \
    "https://packages.clickhouse.com/deb" \
    "stable" \
    "main" \
    "amd64 arm64"

  # Install packages with retry logic
  export DEBIAN_FRONTEND=noninteractive
  $STD apt update || {
    msg_error "APT update failed for ClickHouse repository"
    return 1
  }

  install_packages_with_retry "clickhouse-server" "clickhouse-client" || {
    msg_error "Failed to install ClickHouse packages"
    return 1
  }

  # Verify installation
  if ! command -v clickhouse-server >/dev/null 2>&1; then
    msg_error "ClickHouse installation completed but clickhouse-server command not found"
    return 1
  fi

  # Setup data directory
  mkdir -p /var/lib/clickhouse
  if id clickhouse >/dev/null 2>&1; then
    chown -R clickhouse:clickhouse /var/lib/clickhouse
  fi

  # Enable and start service
  $STD systemctl enable clickhouse-server || {
    msg_warn "Failed to enable clickhouse-server service"
  }
  safe_service_restart clickhouse-server || true

  cache_installed_version "clickhouse" "$CLICKHOUSE_VERSION"
  msg_ok "Setup ClickHouse $CLICKHOUSE_VERSION"
}

# ------------------------------------------------------------------------------
# Installs Rust toolchain and optional global crates via cargo.
#
# Description:
# - Installs rustup (if missing)
# - Installs or updates desired Rust toolchain (stable, nightly, or versioned)
# - Installs or updates specified global crates using `cargo install`
#
# Notes:
# - Skips crate install if exact version is already present
# - Updates crate if newer version or different version is requested
#
# Variables:
#   RUST_TOOLCHAIN - Rust toolchain to install (default: stable)
#   RUST_CRATES - Comma-separated list of crates (e.g. "cargo-edit,wasm-pack@0.12.1")
# ------------------------------------------------------------------------------
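# Example usage (illustrative; crate list mirrors the example documented above):
#   RUST_TOOLCHAIN="stable" RUST_CRATES="cargo-edit,wasm-pack@0.12.1" setup_rust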

function setup_rust() {
  local RUST_TOOLCHAIN="${RUST_TOOLCHAIN:-stable}"
  local RUST_CRATES="${RUST_CRATES:-}"
  local CARGO_BIN="${HOME}/.cargo/bin"

  # Get currently installed version
  local CURRENT_VERSION=""
  if command -v rustc &>/dev/null; then
    CURRENT_VERSION=$(rustc --version 2>/dev/null | awk '{print $2}')
  fi

  # Scenario 1: Rustup not installed - fresh install
  if ! command -v rustup &>/dev/null; then
    msg_info "Setup Rust ($RUST_TOOLCHAIN)"
    curl -fsSL https://sh.rustup.rs | $STD sh -s -- -y --default-toolchain "$RUST_TOOLCHAIN" || {
      msg_error "Failed to install Rust"
      return 1
    }
    export PATH="$CARGO_BIN:$PATH"
    echo 'export PATH="$HOME/.cargo/bin:$PATH"' >>"$HOME/.profile"
    local RUST_VERSION=$(rustc --version 2>/dev/null | awk '{print $2}')
    cache_installed_version "rust" "$RUST_VERSION"
    msg_ok "Setup Rust $RUST_VERSION"
  else
    # Scenario 2: Rustup already installed - update/maintain
    msg_info "Update Rust ($RUST_TOOLCHAIN)"
    $STD rustup install "$RUST_TOOLCHAIN" || {
      msg_error "Failed to install Rust toolchain $RUST_TOOLCHAIN"
      return 1
    }
    $STD rustup default "$RUST_TOOLCHAIN" || {
      msg_error "Failed to set default Rust toolchain"
      return 1
    }
    $STD rustup update "$RUST_TOOLCHAIN" || true
    local RUST_VERSION=$(rustc --version 2>/dev/null | awk '{print $2}')
    cache_installed_version "rust" "$RUST_VERSION"
    msg_ok "Update Rust $RUST_VERSION"
  fi

  # Install global crates
  if [[ -n "$RUST_CRATES" ]]; then
    IFS=',' read -ra CRATES <<<"$RUST_CRATES"
    for crate in "${CRATES[@]}"; do
      local NAME VER INSTALLED_VER
      if [[ "$crate" == *"@"* ]]; then
        NAME="${crate%@*}"
        VER="${crate##*@}"
      else
        NAME="$crate"
        VER=""
      fi

      INSTALLED_VER=$(cargo install --list 2>/dev/null | awk "/^$NAME v[0-9]/ {print \$2}" | tr -d 'v')

      if [[ -n "$INSTALLED_VER" ]]; then
        if [[ -n "$VER" && "$VER" != "$INSTALLED_VER" ]]; then
          $STD cargo install "$NAME" --version "$VER" --force
        elif [[ -z "$VER" ]]; then
          $STD cargo install "$NAME" --force
        fi
      else
        $STD cargo install "$NAME" ${VER:+--version "$VER"}
      fi
    done
  fi
}

# ------------------------------------------------------------------------------
# Installs or upgrades uv (Python package manager) from GitHub releases.
# - Downloads platform-specific tarball (no install.sh!)
# - Extracts uv binary
# - Places it in /usr/local/bin
# - Optionally installs a specific Python version via uv
#
# Variables:
#   PYTHON_VERSION - Optional Python version to install via uv (unset = skip)
#   USE_UVX - Set YES to also install the uvx wrapper (default: NO)
# ------------------------------------------------------------------------------
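# Example usage (illustrative; both variables are optional and the values shown
# here are assumptions):
#   USE_UVX="YES" PYTHON_VERSION="3.12" setup_uv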

function setup_uv() {
  local UV_BIN="/usr/local/bin/uv"
  local UVX_BIN="/usr/local/bin/uvx"
  local TMP_DIR=$(mktemp -d)
  local CACHED_VERSION

  # trap for TMP Cleanup
  trap "rm -rf '$TMP_DIR'" EXIT

  CACHED_VERSION=$(get_cached_version "uv")

  # Architecture Detection
  local ARCH=$(uname -m)
  local OS_TYPE=""
  local UV_TAR=""

  if grep -qi "alpine" /etc/os-release; then
    OS_TYPE="musl"
  else
    OS_TYPE="gnu"
  fi

  case "$ARCH" in
  x86_64)
    UV_TAR="uv-x86_64-unknown-linux-${OS_TYPE}.tar.gz"
    ;;
  aarch64)
    UV_TAR="uv-aarch64-unknown-linux-${OS_TYPE}.tar.gz"
    ;;
  i686)
    UV_TAR="uv-i686-unknown-linux-${OS_TYPE}.tar.gz"
    ;;
  *)
    msg_error "Unsupported architecture: $ARCH (supported: x86_64, aarch64, i686)"
    return 1
    ;;
  esac

  ensure_dependencies jq

  # Fetch latest version
  local releases_json
  releases_json=$(curl -fsSL --max-time 15 \
    "https://api.github.com/repos/astral-sh/uv/releases/latest" 2>/dev/null || echo "")

  if [[ -z "$releases_json" ]]; then
    msg_error "Could not fetch latest uv version from GitHub API"
    return 1
  fi

  local LATEST_VERSION
  LATEST_VERSION=$(echo "$releases_json" | jq -r '.tag_name' 2>/dev/null | sed 's/^v//')

  if [[ -z "$LATEST_VERSION" ]]; then
    msg_error "Could not parse uv version from GitHub API response"
    return 1
  fi

  # Get currently installed version
  local INSTALLED_VERSION=""
  if [[ -x "$UV_BIN" ]]; then
    INSTALLED_VERSION=$("$UV_BIN" --version 2>/dev/null | awk '{print $2}')
  fi

  # Scenario 1: Already at latest version
  if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" == "$LATEST_VERSION" ]]; then
    cache_installed_version "uv" "$LATEST_VERSION"

    # Check if uvx is needed and missing
    if [[ "${USE_UVX:-NO}" == "YES" ]] && [[ ! -x "$UVX_BIN" ]]; then
      msg_info "Installing uvx wrapper"
      _install_uvx_wrapper || return 1
      msg_ok "uvx wrapper installed"
    fi

    return 0
  fi

  # Scenario 2: New install or upgrade
  if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
    msg_info "Upgrade uv from $INSTALLED_VERSION to $LATEST_VERSION"
  else
    msg_info "Setup uv $LATEST_VERSION"
  fi

  local UV_URL="https://github.com/astral-sh/uv/releases/download/${LATEST_VERSION}/${UV_TAR}"

  $STD curl -fsSL "$UV_URL" -o "$TMP_DIR/uv.tar.gz" || {
    msg_error "Failed to download uv from $UV_URL"
    return 1
  }

  # Extract
  $STD tar -xzf "$TMP_DIR/uv.tar.gz" -C "$TMP_DIR" || {
    msg_error "Failed to extract uv"
    return 1
  }

  # Find and install uv binary (tarball extracts to uv-VERSION-ARCH/ directory)
  local UV_BINARY=$(find "$TMP_DIR" -name "uv" -type f -executable | head -n1)
  if [[ ! -f "$UV_BINARY" ]]; then
    msg_error "Could not find uv binary in extracted tarball"
    return 1
  fi

  $STD install -m 755 "$UV_BINARY" "$UV_BIN" || {
    msg_error "Failed to install uv binary"
    return 1
  }

  ensure_usr_local_bin_persist
  export PATH="/usr/local/bin:$PATH"

  # Optional: Install uvx wrapper
  if [[ "${USE_UVX:-NO}" == "YES" ]]; then
    msg_info "Installing uvx wrapper"
    _install_uvx_wrapper || {
      msg_error "Failed to install uvx wrapper"
      return 1
    }
    msg_ok "uvx wrapper installed"
  fi

  # Optional: Generate shell completions
  $STD uv generate-shell-completion bash >/etc/bash_completion.d/uv 2>/dev/null || true
  $STD uv generate-shell-completion zsh >/usr/share/zsh/site-functions/_uv 2>/dev/null || true

  # Optional: Install specific Python version if requested
  if [[ -n "${PYTHON_VERSION:-}" ]]; then
    msg_info "Installing Python $PYTHON_VERSION via uv"
    $STD uv python install "$PYTHON_VERSION" || {
      msg_error "Failed to install Python $PYTHON_VERSION"
      return 1
    }
    msg_ok "Python $PYTHON_VERSION installed"
  fi

  cache_installed_version "uv" "$LATEST_VERSION"
  msg_ok "Setup uv $LATEST_VERSION"
}

# Helper function to install uvx wrapper
_install_uvx_wrapper() {
  local UVX_BIN="/usr/local/bin/uvx"

  cat >"$UVX_BIN" <<'EOF'
#!/bin/bash
# uvx - Run Python applications from PyPI as command-line tools
# Wrapper for: uv tool run
exec /usr/local/bin/uv tool run "$@"
EOF

  chmod +x "$UVX_BIN"
  return 0
}

# ------------------------------------------------------------------------------
# Installs or updates yq (mikefarah/yq - Go version).
#
# Description:
# - Checks if yq is installed and from correct source
# - Compares with latest release on GitHub
# - Updates if outdated or wrong implementation
# - Downloads the yq_linux_amd64 release binary to /usr/local/bin
# ------------------------------------------------------------------------------
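# Example usage (illustrative; setup_yq takes no variables):
#   setup_yq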

function setup_yq() {
  local TMP_DIR=$(mktemp -d)
  local BINARY_PATH="/usr/local/bin/yq"
  local GITHUB_REPO="mikefarah/yq"

  ensure_dependencies jq
  ensure_usr_local_bin_persist

  # Remove non-mikefarah implementations
  if command -v yq &>/dev/null; then
    if ! yq --version 2>&1 | grep -q 'mikefarah'; then
      rm -f "$(command -v yq)"
    fi
  fi

  local LATEST_VERSION
  local releases_json
  releases_json=$(curl -fsSL --max-time 15 "https://api.github.com/repos/${GITHUB_REPO}/releases/latest" 2>/dev/null || echo "")

  if [[ -z "$releases_json" ]]; then
    msg_error "Could not fetch latest yq version from GitHub API"
    rm -rf "$TMP_DIR"
    return 1
  fi

  LATEST_VERSION=$(echo "$releases_json" | jq -r '.tag_name' 2>/dev/null | sed 's/^v//' || echo "")

  if [[ -z "$LATEST_VERSION" ]]; then
    msg_error "Could not parse yq version from GitHub API response"
    rm -rf "$TMP_DIR"
    return 1
  fi

  # Get currently installed version
  local INSTALLED_VERSION=""
  if command -v yq &>/dev/null && yq --version 2>&1 | grep -q 'mikefarah'; then
    INSTALLED_VERSION=$(yq --version 2>/dev/null | awk '{print $NF}' | sed 's/^v//')
  fi

  # Scenario 1: Already at latest version
  if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" == "$LATEST_VERSION" ]]; then
    cache_installed_version "yq" "$LATEST_VERSION"
    rm -rf "$TMP_DIR"
    return 0
  fi

  # Scenario 2: New install or upgrade
  if [[ -n "$INSTALLED_VERSION" && "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
    msg_info "Upgrade yq from $INSTALLED_VERSION to $LATEST_VERSION"
  else
    msg_info "Setup yq $LATEST_VERSION"
  fi

  curl -fsSL "https://github.com/${GITHUB_REPO}/releases/download/v${LATEST_VERSION}/yq_linux_amd64" -o "$TMP_DIR/yq" || {
    msg_error "Failed to download yq"
    rm -rf "$TMP_DIR"
    return 1
  }

  chmod +x "$TMP_DIR/yq"
  mv "$TMP_DIR/yq" "$BINARY_PATH" || {
    msg_error "Failed to install yq"
    rm -rf "$TMP_DIR"
    return 1
  }

  rm -rf "$TMP_DIR"
  hash -r

  local FINAL_VERSION
  FINAL_VERSION=$("$BINARY_PATH" --version 2>/dev/null | awk '{print $NF}' | sed 's/^v//')
  cache_installed_version "yq" "$FINAL_VERSION"
  msg_ok "Setup yq $FINAL_VERSION"
}