Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2025-11-04 18:32:51 +00:00)

Compare commits: 38 commits, 2025-01-05 ... 2025-01-08
| Author | SHA1 | Date |
|---|---|---|
|  | f428146c47 |  |
|  | 0059adecf1 |  |
|  | e1daaa6409 |  |
|  | ff712bc36e |  |
|  | 9330e9a640 |  |
|  | 5fc783da67 |  |
|  | 670d64ec43 |  |
|  | 4da57bd76c |  |
|  | 29b98b450b |  |
|  | c88d3a3883 |  |
|  | f5a54bc3ad |  |
|  | 2078deca57 |  |
|  | bc702e2a6d |  |
|  | ab10013fbe |  |
|  | 9abd8bf9aa |  |
|  | 6a78564cc3 |  |
|  | 0ec532a4e7 |  |
|  | d712be955c |  |
|  | 701f7e9cba |  |
|  | 5196539d1b |  |
|  | 774cdcaf8d |  |
|  | 784e109012 |  |
|  | 5184f47eb6 |  |
|  | f7d37a8f3c |  |
|  | 4888c33e4b |  |
|  | f98d81f5bf |  |
|  | d7518d8644 |  |
|  | 599c462035 |  |
|  | 0909132d5c |  |
|  | 5123532729 |  |
|  | d814907f99 |  |
|  | 32a99a44d4 |  |
|  | e94280f1a1 |  |
|  | b9ca4b7634 |  |
|  | 8431931cc4 |  |
|  | 8bf7f7a460 |  |
|  | 6adf8e38b0 |  |
|  | 817455360e |  |
.github/check-script.yml (vendored, deleted, 54 lines)
@@ -1,54 +0,0 @@

name: Check Shell Scripts

on:
  pull_request:
    paths:
      - '**/*.sh' # Run the check only for shell scripts

jobs:
  check-scripts:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Check `source` Line in Scripts
        shell: bash
        run: |
          set -e
          ERROR_COUNT=0
          FILES=$(find . -name "*.sh")

          for FILE in $FILES; do
            # Check for exact match of the source line in line 2
            if [[ $(sed -n '2p' "$FILE") =~ ^source[[:space:]]+<(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func) ]]; then
              echo "Check passed for: $FILE"
            else
              echo "Error in $FILE: Line 2 must be exactly 'source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)' if a source line is used."
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi

            # Check for shebang line at the top
            if [[ $(head -n 1 "$FILE") != "#!/usr/bin/env bash" ]]; then
              echo "Error in $FILE: The first line must be '#!/usr/bin/env bash'."
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi

            # Check for executable permissions
            if [[ ! -x "$FILE" ]]; then
              echo "Warning in $FILE: This script is not executable. Consider running 'chmod +x $FILE'."
            fi

            # Check for empty lines at the beginning of the script
            if [[ $(head -n 10 "$FILE" | grep -c '^$') -gt 0 ]]; then
              echo "Warning in $FILE: There are empty lines at the beginning of the script. Consider removing them."
            fi
          done

          if [[ "$ERROR_COUNT" -gt 0 ]]; then
            echo "$ERROR_COUNT script(s) failed validation."
            exit 1
          else
            echo "All scripts passed."
          fi
.github/workflows/changelog-pr.yml (vendored, 10 lines changed)
@@ -157,3 +157,13 @@ jobs:

          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
          fi

      - name: Re-approve pull request after update
        if: steps.verify-diff.outputs.changed == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
          fi
.github/workflows/check-lowercase.yml (vendored, deleted, 92 lines)
@@ -1,92 +0,0 @@

name: Check Lowercase Filenames

on:
  pull_request:
    paths:
      - 'ct/*.sh'
      - 'install/*.sh'
      - 'json/*.json'

jobs:
  check_lowercase:
    runs-on: ubuntu-latest

    steps:
      # Step 1: Checkout the code
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing

      # Step 2: Fetch the base branch
      - name: Fetch base branch
        run: git fetch origin ${{ github.base_ref }}

      # Step 3a: Validate filenames in ct directory
      - name: "Validate filenames in ct directory"
        run: |
          changed_files=$(git diff --name-only origin/${{ github.base_ref }}...HEAD | grep -E '^ct/.*\.sh$')

          ERROR_COUNT=0

          for FILE in $changed_files; do
            BASENAME=$(basename "$FILE")
            if [[ "$BASENAME" =~ ^[a-z0-9._-]+$ ]]; then
              echo "$FILE: Check for lowercase in filename passed."
            else
              echo "Error in $FILE. Change filename to lowercase."
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi
          done

          if [ "$ERROR_COUNT" -ne 0 ]; then
            exit 1
          else
            echo "All filenames in ct directory passed the lowercase check."
          fi

      # Step 3b: Validate filenames in install directory
      - name: "Validate filenames in install directory"
        run: |
          changed_files=$(git diff --name-only origin/${{ github.base_ref }}...HEAD | grep -E '^install/.*\.sh$')

          ERROR_COUNT=0

          for FILE in $changed_files; do
            BASENAME=$(basename "$FILE")
            if [[ "$BASENAME" =~ ^[a-z0-9._-]+$ ]]; then
              echo "$FILE: Check for lowercase in filename passed."
            else
              echo "Error in $FILE. Change filename to lowercase."
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi
          done

          if [ "$ERROR_COUNT" -ne 0 ]; then
            exit 1
          else
            echo "All filenames in install directory passed the lowercase check."
          fi

      # Step 3c: Validate filenames in json directory
      - name: "Validate filenames in json directory."
        run: |
          changed_files=$(git diff --name-only origin/${{ github.base_ref }}...HEAD | grep -E '^json/.*\.json$')

          ERROR_COUNT=0

          for FILE in $changed_files; do
            BASENAME=$(basename "$FILE")
            if [[ "$BASENAME" =~ ^[a-z0-9._-]+$ ]]; then
              echo "$FILE: Check for lowercase in filename passed."
            else
              echo "Error in $FILE. Change filename to lowercase."
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi
          done

          if [ "$ERROR_COUNT" -ne 0 ]; then
            exit 1
          else
            echo "All filenames in json directory passed the lowercase check."
          fi
.github/workflows/check-metadata.yml (vendored, deleted, 55 lines)
@@ -1,55 +0,0 @@

name: Check Metadata
on:
  pull_request:
    paths:
      - '/ct/*.sh'
      - '/install/*.sh'
jobs:
  check-metadata:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
      - name: Check Metadata Lines in Scripts
        shell: bash
        run: |
          set -e
          ERROR_COUNT=0
          FILES=$(find . -name "*.sh")

          for FILE in $FILES; do
            if [[ "$(sed -n '3p' "$FILE")" == "# Copyright (c) 2021-2024 community-scripts ORG" ]]; then
              echo "Check for Copyright metadata passed for line 3 in: $FILE"
            else
              echo "Error in $FILE: Copyright metadata missing or not on line 3"
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi

            if sed -n '4p' "$FILE" | grep -qE "^# Author: .+"; then
              echo "Check for Author metadata passed for line 4 in: $FILE"
            else
              echo "Error in $FILE: Author metadata missing or invalid on line 4"
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi

            if [[ "$(sed -n '5p' "$FILE")" == "# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE" ]]; then
              echo "Check for License metadata passed for line 5 in: $FILE"
            else
              echo "Error in $FILE: License metadata missing or not on line 5"
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi

            if sed -n '6p' "$FILE" | grep -qE "^# Source: .+"; then
              echo "Check for Source metadata passed for line 6 in: $FILE"
            else
              echo "Error in $FILE: Source metadata missing or invalid on line 6"
              ERROR_COUNT=$((ERROR_COUNT + 1))
            fi
          done

          if [[ "$ERROR_COUNT" -gt 0 ]]; then
            echo "$ERROR_COUNT script(s) failed validation."
            exit 1
          else
            echo "All scripts passed."
          fi
.github/workflows/validate-filenames.yml (vendored, new file, 157 lines)
@@ -0,0 +1,157 @@

name: Validate filenames

on:
  pull_request_target:
    paths:
      - "ct/*.sh"
      - "install/*.sh"
      - "json/*.json"

jobs:
  check-files:
    name: Check changed files
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Get pull request information
        if: github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        id: pr
        with:
          script: |
            const { data: pullRequest } = await github.rest.pulls.get({
              ...context.repo,
              pull_number: context.payload.pull_request.number,
            });
            return pullRequest;

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
          ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}

      - name: Get changed files
        id: changed-files
        run: |
          if ${{ github.event_name == 'pull_request_target' }}; then
            echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
          else
            echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
          fi

      - name: "Validate filenames in ct and install directory"
        if: always() && steps.changed-files.outputs.files != ''
        id: check-scripts
        run: |
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^(ct|install)/.*\.sh$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            BASENAME=$(echo "$(basename "${FILE%.*}")")
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: "Validate filenames in json directory."
        if: always() && steps.changed-files.outputs.files != ''
        id: check-json
        run: |
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^json/.*\.json$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            BASENAME=$(echo "$(basename "${FILE%.*}")")
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Post results and comment
        if: always() && steps.check-scripts.outputs.files != '' && steps.check-json.outputs.files != '' && github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        with:
          script: |
            const result = "${{ job.status }}" === "success" ? "success" : "failure";
            const nonCompliantFiles = {
              script: "${{ steps.check-scripts.outputs.files }}",
              JSON: "${{ steps.check-json.outputs.files }}",
            };

            const issueNumber = context.payload.pull_request
              ? context.payload.pull_request.number
              : null;
            const commentIdentifier = "validate-filenames";
            let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Filename validation\n\n`;

            if (result === "failure") {
              newCommentBody += ":x: We found issues in the following changed files:\n\n";
              for (const [check, files] of Object.entries(nonCompliantFiles)) {
                if (files) {
                  newCommentBody += `**${check.charAt(0).toUpperCase() + check.slice(1)} filename invalid:**\n${files
                    .trim()
                    .split(" ")
                    .map((file) => `- ${file}`)
                    .join("\n")}\n\n`;
                }
              }
              newCommentBody +=
                "Please change the filenames to lowercase and use only alphanumeric characters and dashes.\n";
            } else {
              newCommentBody += `:rocket: All files passed filename validation!\n`;
            }

            newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;

            if (issueNumber) {
              const { data: comments } = await github.rest.issues.listComments({
                ...context.repo,
                issue_number: issueNumber,
              });

              const existingComment = comments.find(
                (comment) => comment.user.login === "github-actions[bot]",
              );

              if (existingComment) {
                if (existingComment.body.includes(commentIdentifier)) {
                  const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
                  newCommentBody = existingComment.body.replace(re, newCommentBody);
                } else {
                  newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
                }

                await github.rest.issues.updateComment({
                  ...context.repo,
                  comment_id: existingComment.id,
                  body: newCommentBody,
                });
              } else {
                await github.rest.issues.createComment({
                  ...context.repo,
                  issue_number: issueNumber,
                  body: newCommentBody,
                });
              }
            }
.github/workflows/validate-formatting.yaml (vendored, new file, 130 lines)
@@ -0,0 +1,130 @@

name: Validate script formatting

on:
  push:
    branches:
      - main
  pull_request_target:
    paths:
      - "**/*.sh"
      - "**/*.func"

jobs:
  shfmt:
    name: Check changed files
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Get pull request information
        if: github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        id: pr
        with:
          script: |
            const { data: pullRequest } = await github.rest.pulls.get({
              ...context.repo,
              pull_number: context.payload.pull_request.number,
            });
            return pullRequest;

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
          ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}

      - name: Get changed files
        id: changed-files
        run: |
          if ${{ github.event_name == 'pull_request_target' }}; then
            echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
          else
            echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
          fi

      - name: Set up Go
        if: steps.changed-files.outputs.files != ''
        uses: actions/setup-go@v5

      - name: Install shfmt
        if: steps.changed-files.outputs.files != ''
        run: |
          go install mvdan.cc/sh/v3/cmd/shfmt@latest
          echo "$GOPATH/bin" >> $GITHUB_PATH

      - name: Run shfmt
        if: steps.changed-files.outputs.files != ''
        id: shfmt
        run: |
          set +e

          shfmt_output=$(shfmt -d ${{ steps.changed-files.outputs.files }})
          if [[ $? -eq 0 ]]; then
            exit 0
          else
            echo "diff=\"$(echo -n "$shfmt_output" | base64 -w 0)\"" >> $GITHUB_OUTPUT
            printf "%s" "$shfmt_output"
            exit 1
          fi

      - name: Post comment with results
        if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        with:
          script: |
            const result = "${{ job.status }}" === "success" ? "success" : "failure";
            const diff = Buffer.from(
              ${{ steps.shfmt.outputs.diff }},
              "base64",
            ).toString();
            const issueNumber = context.payload.pull_request
              ? context.payload.pull_request.number
              : null;
            const commentIdentifier = "validate-formatting";
            let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script formatting\n\n`;

            if (result === "failure") {
              newCommentBody +=
                `:x: We found issues in the formatting of the following changed files:\n\n\`\`\`diff\n${diff}\n\`\`\`\n`;
            } else {
              newCommentBody += `:rocket: All changed shell scripts are formatted correctly!\n`;
            }

            newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;

            if (issueNumber) {
              const { data: comments } = await github.rest.issues.listComments({
                ...context.repo,
                issue_number: issueNumber,
              });

              const existingComment = comments.find(
                (comment) => comment.user.login === "github-actions[bot]",
              );

              if (existingComment) {
                if (existingComment.body.includes(commentIdentifier)) {
                  const re = new RegExp(
                    String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`,
                    "",
                  );
                  newCommentBody = existingComment.body.replace(re, newCommentBody);
                } else {
                  newCommentBody = existingComment.body + "\n\n---\n\n" + newCommentBody;
                }

                await github.rest.issues.updateComment({
                  ...context.repo,
                  comment_id: existingComment.id,
                  body: newCommentBody,
                });
              } else {
                await github.rest.issues.createComment({
                  ...context.repo,
                  issue_number: issueNumber,
                  body: newCommentBody,
                });
              }
            }
.github/workflows/validate-scripts.yml (vendored, new file, 226 lines)
@@ -0,0 +1,226 @@

name: Validate scripts
on:
  push:
    branches:
      - main
  pull_request_target:
    paths:
      - "ct/*.sh"
      - "install/*.sh"

jobs:
  check-scripts:
    name: Check changed files
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Get pull request information
        if: github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        id: pr
        with:
          script: |
            const { data: pullRequest } = await github.rest.pulls.get({
              ...context.repo,
              pull_number: context.payload.pull_request.number,
            });
            return pullRequest;

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
          ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}

      - name: Get changed files
        id: changed-files
        run: |
          if ${{ github.event_name == 'pull_request_target' }}; then
            echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
          else
            echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
          fi

      - name: Check build.func line
        if: always() && steps.changed-files.outputs.files != ''
        id: build-func
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if [[ "$FILE" == ct/* ]] && [[ $(sed -n '2p' "$FILE") != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)" ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Build.func line missing or incorrect in files:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Check executable permissions
        if: always() && steps.changed-files.outputs.files != ''
        id: check-executable
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if [[ ! -x "$FILE" ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Files not executable:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Check copyright
        if: always() && steps.changed-files.outputs.files != ''
        id: check-copyright
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if ! sed -n '3p' "$FILE" | grep -qE "^# Copyright \(c\) [0-9]{4}(-[0-9]{4})? (tteck \| community-scripts ORG|community-scripts ORG|tteck)$"; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Copyright header missing or not on line 3 in files:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Check author
        if: always() && steps.changed-files.outputs.files != ''
        id: check-author
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if ! sed -n '4p' "$FILE" | grep -qE "^# Author: .+"; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Author header missing or invalid on line 4 in files:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Check license
        if: always() && steps.changed-files.outputs.files != ''
        id: check-license
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if [[ "$(sed -n '5p' "$FILE")" != "# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE" ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "License header missing or not on line 5 in files:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Check source
        if: always() && steps.changed-files.outputs.files != ''
        id: check-source
        run: |
          NON_COMPLIANT_FILES=""
          for FILE in ${{ steps.changed-files.outputs.files }}; do
            if ! sed -n '6p' "$FILE" | grep -qE "^# Source: .+"; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Source header missing or not on line 6 in files:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Post results and comment
        if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        with:
          script: |
            const result = '${{ job.status }}' === 'success' ? 'success' : 'failure';
            const nonCompliantFiles = {
              'Invalid build.func source': "${{ steps.build-func.outputs.files }}",
              'Not executable': "${{ steps.check-executable.outputs.files }}",
              'Copyright header line missing or invalid': "${{ steps.check-copyright.outputs.files }}",
              'Author header line missing or invalid': "${{ steps.check-author.outputs.files }}",
              'License header line missing or invalid': "${{ steps.check-license.outputs.files }}",
              'Source header line missing or invalid': "${{ steps.check-source.outputs.files }}"
            };

            const issueNumber = context.payload.pull_request ? context.payload.pull_request.number : null;
            const commentIdentifier = 'validate-scripts';
            let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script validation\n\n`;

            if (result === 'failure') {
              newCommentBody += ':x: We found issues in the following changed files:\n\n';
              for (const [check, files] of Object.entries(nonCompliantFiles)) {
                if (files) {
                  newCommentBody += `**${check}:**\n${files.trim().split(' ').map(file => `- ${file}`).join('\n')}\n\n`;
                }
              }
            } else {
              newCommentBody += `:rocket: All changed shell scripts passed validation!\n`;
            }

            newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;

            if (issueNumber) {
              const { data: comments } = await github.rest.issues.listComments({
                ...context.repo,
                issue_number: issueNumber
              });

              const existingComment = comments.find(comment => comment.user.login === 'github-actions[bot]');

              if (existingComment) {
                if (existingComment.body.includes(commentIdentifier)) {
                  const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
                  newCommentBody = existingComment.body.replace(re, newCommentBody);
                } else {
                  newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
                }

                await github.rest.issues.updateComment({
                  ...context.repo,
                  comment_id: existingComment.id,
                  body: newCommentBody
                });
              } else {
                await github.rest.issues.createComment({
                  ...context.repo,
                  issue_number: issueNumber,
                  body: newCommentBody
                });
              }
            }
CHANGELOG.md (50 lines changed)
@@ -16,6 +16,56 @@ All LXC instances created using this repository come pre-installed with Midnight

> [!IMPORTANT]
Do not break established syntax in this file, as it is automatically updated by a Github Workflow

## 2025-01-08

### Changed

### 🌐 Website

- update postgresql json to add post install password setup [@rdiazlugo](https://github.com/rdiazlugo) ([#1318](https://github.com/community-scripts/ProxmoxVE/pull/1318))

### 🧰 Maintenance

- fix(ci): formatting event & chmod +x [@se-bastiaan](https://github.com/se-bastiaan) ([#1335](https://github.com/community-scripts/ProxmoxVE/pull/1335))
- fix: correctly handle pull_request_target event [@se-bastiaan](https://github.com/se-bastiaan) ([#1327](https://github.com/community-scripts/ProxmoxVE/pull/1327))

## 2025-01-07

### Changed

### 🚀 Updated Scripts

- Fix: Folder-Check for Updatescript Zammad [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#1309](https://github.com/community-scripts/ProxmoxVE/pull/1309))

### 🧰 Maintenance

- fix: permissions of validate pipelines [@se-bastiaan](https://github.com/se-bastiaan) ([#1316](https://github.com/community-scripts/ProxmoxVE/pull/1316))
- Set Execution Rights for GH-Action: Validate Scripts [@MickLesk](https://github.com/MickLesk) ([#1312](https://github.com/community-scripts/ProxmoxVE/pull/1312))

## 2025-01-06

### Changed

### ✨ New Scripts

- New Script: Typesense [@tlissak](https://github.com/tlissak) ([#1291](https://github.com/community-scripts/ProxmoxVE/pull/1291))
- New script: GLPI [@opastorello](https://github.com/opastorello) ([#1201](https://github.com/community-scripts/ProxmoxVE/pull/1201))

### 🚀 Updated Scripts

- Fix Tag in HyperHDR Script [@MickLesk](https://github.com/MickLesk) ([#1299](https://github.com/community-scripts/ProxmoxVE/pull/1299))
- [Fix]: Fixed rm Bug in pf2etools [@MickLesk](https://github.com/MickLesk) ([#1292](https://github.com/community-scripts/ProxmoxVE/pull/1292))
- Fix: Homebox Update Script [@MickLesk](https://github.com/MickLesk) ([#1284](https://github.com/community-scripts/ProxmoxVE/pull/1284))
- Add ca-certificates for Install (Frigate) [@MickLesk](https://github.com/MickLesk) ([#1282](https://github.com/community-scripts/ProxmoxVE/pull/1282))
- fix: buffer from base64 in formatting pipeline [@se-bastiaan](https://github.com/se-bastiaan) ([#1285](https://github.com/community-scripts/ProxmoxVE/pull/1285))

### 🧰 Maintenance

- Add reapproval of Changelog-PR [@MickLesk](https://github.com/MickLesk) ([#1279](https://github.com/community-scripts/ProxmoxVE/pull/1279))
- ci: combine header checks into workflow with PR comment [@se-bastiaan](https://github.com/se-bastiaan) ([#1257](https://github.com/community-scripts/ProxmoxVE/pull/1257))
- ci: change filename checks into steps with PR comment [@se-bastiaan](https://github.com/se-bastiaan) ([#1255](https://github.com/community-scripts/ProxmoxVE/pull/1255))
- ci: add pipeline for code formatting checks [@se-bastiaan](https://github.com/se-bastiaan) ([#1239](https://github.com/community-scripts/ProxmoxVE/pull/1239))

## 2025-01-05

### Changed
ct/glpi.sh (new file, 52 lines)
@@ -0,0 +1,52 @@

#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)

# Copyright (c) 2021-2025 community-scripts ORG
# Author: Nícolas Pastorello (opastorello)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

# App Default Values
APP="GLPI"
var_tags="asset-management;foss"
var_cpu="2"
var_ram="2048"
var_disk="10"
var_os="debian"
var_version="12"
var_unprivileged="1"

# App Output & Base Settings
header_info "$APP"
base_settings

# Core
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources

  if [[ ! -d /opt/glpi ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  RELEASE=$(curl -s https://api.github.com/repos/glpi-project/glpi/releases/latest | grep '"tag_name"' | sed -E 's/.*"tag_name": "([^"]+)".*/\1/')
  if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
    msg_error "There is currently no automatic update function for ${APP}."
  else
    msg_ok "No update required. ${APP} is already at v${RELEASE}."
  fi
  exit
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:80${CL}"
@@ -40,8 +40,10 @@ function update_script() {
  msg_info "Updating ${APP} to ${RELEASE}"
  cd /opt
  rm -rf homebox_bak
+ rm -rf /tmp/homebox.tar.gz
  mv homebox homebox_bak
- wget -qO- https://github.com/sysadminsmedia/homebox/releases/download/${RELEASE}/homebox_Linux_x86_64.tar.gz | tar -xzf - -C /opt
+ wget -qO /tmp/homebox.tar.gz https://github.com/sysadminsmedia/homebox/releases/download/${RELEASE}/homebox_Linux_x86_64.tar.gz
+ tar -xzf /tmp/homebox.tar.gz -C /opt
  chmod +x /opt/homebox
  echo "${RELEASE}" >/opt/${APP}_version.txt
  msg_ok "Updated Homebox"
@@ -7,7 +7,7 @@ source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/m

  # App Default Values
  APP="HyperHDR"
- var_tags="ambient lightning"
+ var_tags="ambient-lightning"
  var_cpu="2"
  var_ram="2048"
  var_disk="4"
@@ -45,15 +45,14 @@ function update_script() {

  # Execute Update
  msg_info "Updating ${APP}"
- cd "/opt/${APP}"
+ cd /opt
  wget -q "https://github.com/Pf2eToolsOrg/Pf2eTools/archive/refs/tags/${RELEASE}.zip"
- unzip -q "${RELEASE}.zip"
+ unzip -q ${RELEASE}.zip
  rm -rf "/opt/${APP}"
- mv "${APP}-${RELEASE:1}" "/opt/${APP}"
+ mv ${APP}-${RELEASE:1} /opt/${APP}
  cd /opt/Pf2eTools
  $STD npm install
  $STD npm run build
  cd ~
  echo "${RELEASE}" >"/opt/${APP}_version.txt"
  msg_ok "Updated ${APP}"

@@ -62,7 +61,7 @@ function update_script() {

  # Cleaning up
  msg_info "Cleaning Up"
- rm -rf /opt/${APP}/${RELEASE}.zip
+ rm -rf /opt/${RELEASE}.zip
  $STD apt-get -y autoremove
  $STD apt-get -y autoclean
  msg_ok "Cleanup Completed"
ct/typesense.sh (new file, 54 lines)
@@ -0,0 +1,54 @@

#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: tlissak | Co-Author MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://typesense.org/

# App Default Values
APP="TypeSense"
var_tags="database"
var_cpu="1"
var_ram="1024"
var_disk="4"
var_os="debian"
var_version="12"
var_unprivileged="1"

# App Output & Base Settings
header_info "$APP"
base_settings

# Core
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -f /etc/typesense/typesense-server.ini ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  RELEASE=$(curl -s https://api.github.com/repos/typesense/typesense/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
  if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
    msg_info "Updating ${APP} LXC"
    apt-get update &>/dev/null
    apt-get -y upgrade &>/dev/null
    msg_ok "Updated Successfully"
  else
    msg_ok "No update required. ${APP} is already at ${RELEASE}"
  fi
  exit
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following IP:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}${IP}:8108${CL}"
@@ -28,7 +28,7 @@ function update_script() {
  header_info
  check_container_storage
  check_container_resources
- if [[ ! -d /opt/zamad ]]; then
+ if [[ ! -d /opt/zammad ]]; then
  msg_error "No ${APP} Installation Found!"
  exit
  fi
@@ -15,7 +15,7 @@ network_check
  update_os

  msg_info "Installing Dependencies (Patience)"
- $STD apt-get install -y {curl,sudo,mc,git,gpg,automake,build-essential,xz-utils,libtool,ccache,pkg-config,libgtk-3-dev,libavcodec-dev,libavformat-dev,libswscale-dev,libv4l-dev,libxvidcore-dev,libx264-dev,libjpeg-dev,libpng-dev,libtiff-dev,gfortran,openexr,libatlas-base-dev,libssl-dev,libtbb2,libtbb-dev,libdc1394-22-dev,libopenexr-dev,libgstreamer-plugins-base1.0-dev,libgstreamer1.0-dev,gcc,gfortran,libopenblas-dev,liblapack-dev,libusb-1.0-0-dev,jq,moreutils}
+ $STD apt-get install -y {curl,sudo,mc,git,gpg,ca-certificates,automake,build-essential,xz-utils,libtool,ccache,pkg-config,libgtk-3-dev,libavcodec-dev,libavformat-dev,libswscale-dev,libv4l-dev,libxvidcore-dev,libx264-dev,libjpeg-dev,libpng-dev,libtiff-dev,gfortran,openexr,libatlas-base-dev,libssl-dev,libtbb2,libtbb-dev,libdc1394-22-dev,libopenexr-dev,libgstreamer-plugins-base1.0-dev,libgstreamer1.0-dev,gcc,gfortran,libopenblas-dev,liblapack-dev,libusb-1.0-0-dev,jq,moreutils}
  msg_ok "Installed Dependencies"

  msg_info "Installing Python3 Dependencies"
install/glpi-install.sh (new file, 151 lines)
@@ -0,0 +1,151 @@

#!/usr/bin/env bash

# Copyright (c) 2021-2025 community-scripts ORG
# Author: Nícolas Pastorello (opastorello)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt-get install -y \
  curl \
  git \
  sudo \
  mc \
  apache2 \
  php8.2-{apcu,cli,common,curl,gd,imap,ldap,mysql,xmlrpc,xml,mbstring,bcmath,intl,zip,redis,bz2,soap} \
  php-cas \
  libapache2-mod-php \
  mariadb-server
msg_ok "Installed Dependencies"

msg_info "Setting up database"
DB_NAME=glpi_db
DB_USER=glpi
DB_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql mysql
mysql -u root -e "CREATE DATABASE $DB_NAME;"
mysql -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';"
mysql -u root -e "GRANT ALL PRIVILEGES ON $DB_NAME.* TO '$DB_USER'@'localhost';"
mysql -u root -e "GRANT SELECT ON \`mysql\`.\`time_zone_name\` TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;"
{
  echo "GLPI Database Credentials"
  echo "Database: $DB_NAME"
  echo "Username: $DB_USER"
  echo "Password: $DB_PASS"
} >> ~/glpi_db.creds
msg_ok "Set up database"

msg_info "Installing GLPi"
cd /opt
RELEASE=$(curl -s https://api.github.com/repos/glpi-project/glpi/releases/latest | grep '"tag_name"' | sed -E 's/.*"tag_name": "([^"]+)".*/\1/')
wget -q "https://github.com/glpi-project/glpi/releases/download/${RELEASE}/glpi-${RELEASE}.tgz"
$STD tar -xzvf glpi-${RELEASE}.tgz
cd /opt/glpi
$STD php bin/console db:install --db-name=$DB_NAME --db-user=$DB_USER --db-password=$DB_PASS --no-interaction
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed GLPi"

msg_info "Setting Downstream file"
cat <<EOF > /opt/glpi/inc/downstream.php
<?php
define('GLPI_CONFIG_DIR', '/etc/glpi/');
if (file_exists(GLPI_CONFIG_DIR . '/local_define.php')) {
    require_once GLPI_CONFIG_DIR . '/local_define.php';
}
EOF

mv /opt/glpi/config /etc/glpi
mv /opt/glpi/files /var/lib/glpi
mv /var/lib/glpi/_log /var/log/glpi

cat <<EOF > /etc/glpi/local_define.php
<?php
define('GLPI_VAR_DIR', '/var/lib/glpi');
define('GLPI_DOC_DIR', GLPI_VAR_DIR);
define('GLPI_CRON_DIR', GLPI_VAR_DIR . '/_cron');
define('GLPI_DUMP_DIR', GLPI_VAR_DIR . '/_dumps');
define('GLPI_GRAPH_DIR', GLPI_VAR_DIR . '/_graphs');
define('GLPI_LOCK_DIR', GLPI_VAR_DIR . '/_lock');
define('GLPI_PICTURE_DIR', GLPI_VAR_DIR . '/_pictures');
define('GLPI_PLUGIN_DOC_DIR', GLPI_VAR_DIR . '/_plugins');
define('GLPI_RSS_DIR', GLPI_VAR_DIR . '/_rss');
define('GLPI_SESSION_DIR', GLPI_VAR_DIR . '/_sessions');
define('GLPI_TMP_DIR', GLPI_VAR_DIR . '/_tmp');
define('GLPI_UPLOAD_DIR', GLPI_VAR_DIR . '/_uploads');
define('GLPI_CACHE_DIR', GLPI_VAR_DIR . '/_cache');
define('GLPI_LOG_DIR', '/var/log/glpi');
EOF
msg_ok "Configured Downstream file"

msg_info "Setting Folder and File Permissions"
chown root:root /opt/glpi/ -R
chown www-data:www-data /etc/glpi -R
chown www-data:www-data /var/lib/glpi -R
chown www-data:www-data /var/log/glpi -R
chown www-data:www-data /opt/glpi/marketplace -Rf
find /opt/glpi/ -type f -exec chmod 0644 {} \;
find /opt/glpi/ -type d -exec chmod 0755 {} \;
find /etc/glpi -type f -exec chmod 0644 {} \;
find /etc/glpi -type d -exec chmod 0755 {} \;
find /var/lib/glpi -type f -exec chmod 0644 {} \;
find /var/lib/glpi -type d -exec chmod 0755 {} \;
find /var/log/glpi -type f -exec chmod 0644 {} \;
find /var/log/glpi -type d -exec chmod 0755 {} \;
msg_ok "Configured Folder and File Permissions"

msg_info "Setup Service"
cat <<EOF >/etc/apache2/sites-available/glpi.conf
<VirtualHost *:80>
    ServerName localhost
    DocumentRoot /opt/glpi/public

    <Directory /opt/glpi/public>
        Require all granted
        RewriteEngine On
        RewriteCond %{HTTP:Authorization} ^(.+)$
        RewriteRule .* - [E=HTTP_AUTHORIZATION:%{HTTP:Authorization}]
        RewriteCond %{REQUEST_FILENAME} !-f
        RewriteRule ^(.*)$ index.php [QSA,L]
    </Directory>

    ErrorLog \${APACHE_LOG_DIR}/glpi_error.log
    CustomLog \${APACHE_LOG_DIR}/glpi_access.log combined
</VirtualHost>
EOF
$STD a2dissite 000-default.conf
$STD a2enmod rewrite
$STD a2ensite glpi.conf
msg_ok "Setup Service"

msg_info "Setup Cronjob"
echo "* * * * * php /opt/glpi/front/cron.php" | crontab -
msg_ok "Setup Cronjob"

msg_info "Update PHP Params"
PHP_VERSION=$(ls /etc/php/ | grep -E '^[0-9]+\.[0-9]+$' | head -n 1)
PHP_INI="/etc/php/$PHP_VERSION/apache2/php.ini"
sed -i 's/^upload_max_filesize = .*/upload_max_filesize = 20M/' $PHP_INI
sed -i 's/^post_max_size = .*/post_max_size = 20M/' $PHP_INI
sed -i 's/^max_execution_time = .*/max_execution_time = 60/' $PHP_INI
sed -i 's/^max_input_vars = .*/max_input_vars = 5000/' $PHP_INI
sed -i 's/^memory_limit = .*/memory_limit = 256M/' $PHP_INI
sed -i 's/^;\?\s*session.cookie_httponly\s*=.*/session.cookie_httponly = On/' $PHP_INI
systemctl restart apache2
msg_ok "Update PHP Params"

motd_ssh
customize

msg_info "Cleaning up"
rm -rf /opt/glpi/install
rm -rf /opt/glpi-${RELEASE}.tgz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
@@ -56,11 +56,11 @@ $STD composer install --no-dev --optimize-autoloader --no-interaction
  cp .env.dist .env
  sed -i "/^DATABASE_URL=/c\DATABASE_URL=mysql://$DB_USER:$DB_PASS@127.0.0.1:3306/$DB_NAME?charset=utf8mb4&serverVersion=$MYSQL_VERSION" /opt/kimai/.env
  $STD bin/console kimai:install -n
- chown -R :www-data /opt/kimai
- chmod -R g+r /opt/kimai
- chmod -R g+rw /opt/kimai
- sudo chown -R www-data:www-data /opt/kimai
- sudo chmod -R 755 /opt/kimai
+ chown -R :www-data /opt/*
+ chmod -R g+r /opt/*
+ chmod -R g+rw /opt/*
+ sudo chown -R www-data:www-data /opt/*
+ sudo chmod -R 755 /opt/*
  $STD expect <<EOF
  set timeout -1
  log_user 0
@@ -38,6 +38,7 @@ msg_ok "Installed Node.js"

  # Setup App
  msg_info "Setup Pf2eTools"
+ cd /opt
  RELEASE=$(curl -s https://api.github.com/repos/Pf2eToolsOrg/Pf2eTools/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
  wget -q "https://github.com/Pf2eToolsOrg/Pf2eTools/archive/refs/tags/${RELEASE}.zip"
  unzip -q "${RELEASE}.zip"

@@ -65,7 +66,7 @@ msg_ok "Created Service"

  # Cleanup
  msg_info "Cleaning up"
- rm "${RELEASE}.zip"
+ rm -rf /opt/${RELEASE}.zip
  $STD apt-get -y autoremove
  $STD apt-get -y autoclean
  msg_ok "Cleaned"
install/typesense-install.sh (new file, 39 lines)
@@ -0,0 +1,39 @@

#!/usr/bin/env bash

# Copyright (c) 2021-2025 community-scripts ORG
# Author: tlissak
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://typesense.org/

source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt-get install -y \
  curl \
  mc \
  sudo
msg_ok "Installed Dependencies"

msg_info "Installing TypeSense"
RELEASE=$(curl -s https://api.github.com/repos/typesense/typesense/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
cd /opt
wget -q https://dl.typesense.org/releases/${RELEASE}/typesense-server-${RELEASE}-amd64.deb
$STD apt install -y /opt/typesense-server-${RELEASE}-amd64.deb
echo 'enable-cors = true' >> /etc/typesense/typesense-server.ini
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed TypeSense"

motd_ssh
customize

msg_info "Cleaning up"
rm -rf /opt/typesense-server-${RELEASE}-amd64.deb
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
json/glpi.json (new file, 34 lines)
@@ -0,0 +1,34 @@

{
  "name": "GLPI",
  "slug": "glpi",
  "categories": [
    0
  ],
  "date_created": "2025-01-06",
  "type": "ct",
  "updateable": false,
  "privileged": false,
  "interface_port": 80,
  "documentation": "https://glpi-project.org/documentation/",
  "website": "https://glpi-project.org/",
  "logo": "https://raw.githubusercontent.com/glpi-project/glpi/refs/heads/main/public/pics/login_logo_glpi.png",
  "description": "GLPI is a Free Asset and IT Management Software package, Data center management, ITIL Service Desk, licenses tracking and software auditing.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/glpi.sh",
      "resources": {
        "cpu": 2,
        "ram": 2048,
        "hdd": 10,
        "os": "Debian",
        "version": "12"
      }
    }
  ],
  "default_credentials": {
    "username": "glpi",
    "password": "glpi"
  },
  "notes": []
}
@@ -30,5 +30,10 @@
    "username": null,
    "password": null
  },
- "notes": []
+ "notes": [
+   {
+     "text": "Set a password after installation for postgres user by running `echo \"ALTER USER postgres with encrypted password 'your_password';\" | sudo -u postgres psql`",
+     "type": "info"
+   }
+ ]
  }
json/typesense.json (new file, 39 lines)
@@ -0,0 +1,39 @@

{
  "name": "TypeSense",
  "slug": "typesense",
  "categories": [
    5
  ],
  "date_created": "2025-01-06",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": null,
  "documentation": "https://typesense.org/docs/",
  "website": "https://typesense.org/",
  "logo": "https://typesense.org/_nuxt/img/typesense_logo_white.0f9fb0a.svg",
  "description": "Typesense is an open-source, fast, and lightweight search engine optimized for delivering instant, relevant, and typo-tolerant search results. Designed for ease of use and high performance, it offers features like real-time indexing, fuzzy matching, customizable relevance ranking, and a simple API for integration. Typesense is particularly well-suited for applications requiring instant search capabilities, such as e-commerce, documentation, or any content-rich websites. It is often compared to tools like Elasticsearch but is more developer-friendly and less resource-intensive.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/typesense.sh",
      "resources": {
        "cpu": 1,
        "ram": 1024,
        "hdd": 4,
        "os": "debian",
        "version": "12"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": [
    {
      "text": "This script requires some extra steps after the installation, Please checkout the 'documentation' Button",
      "type": "info"
    }
  ]
}