Merge branch 'staging' into beta

This commit is contained in:
florian 2023-05-07 17:31:15 +02:00
commit 9a1c09c564
No known key found for this signature in database
GPG Key ID: 3D80806F12602A7C
100 changed files with 1231 additions and 1137 deletions

View File

@ -7,119 +7,43 @@ on:
branches: [beta]
jobs:
# Build BW amd64 + i386 images
build-bw-amd64:
# Build amd64 + 386 containers images
build-containers:
strategy:
matrix:
image: [bunkerweb, scheduler, autoconf, ui]
arch: [linux/amd64, linux/386]
include:
- release: beta
cache: false
push: false
- image: bunkerweb
dockerfile: src/bw/Dockerfile
- image: scheduler
dockerfile: src/scheduler/Dockerfile
- image: autoconf
dockerfile: src/autoconf/Dockerfile
- image: ui
dockerfile: src/ui/Dockerfile
- arch: linux/amd64
cache_suffix: amd64
- arch: linux/386
cache_suffix: "386"
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/amd64
IMAGE: bunkerweb
DOCKERFILE: src/bw/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "amd64"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
build-bw-386:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/386
IMAGE: bunkerweb
DOCKERFILE: src/bw/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "386"
RELEASE: ${{ matrix.release }}
ARCH: ${{ matrix.arch }}
IMAGE: ${{ matrix.image }}
DOCKERFILE: ${{ matrix.dockerfile }}
CACHE: ${{ matrix.cache }}
PUSH: ${{ matrix.push }}
CACHE_SUFFIX: ${{ matrix.cache_suffix }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# Build SC amd64 + i386 images
build-sc-amd64:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/amd64
IMAGE: scheduler
DOCKERFILE: src/scheduler/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "amd64"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
build-sc-386:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/386
IMAGE: scheduler
DOCKERFILE: src/scheduler/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "386"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# Build AU amd64 + i386 images
build-au-amd64:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/amd64
IMAGE: autoconf
DOCKERFILE: src/autoconf/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "amd64"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
build-au-386:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/386
IMAGE: autoconf
DOCKERFILE: src/autoconf/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "386"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# Build UI amd64 + i386 images
build-ui-amd64:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/amd64
IMAGE: ui
DOCKERFILE: src/ui/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "amd64"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
build-ui-386:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/386
IMAGE: ui
DOCKERFILE: src/ui/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "386"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# Build arm64 + arm/v7 images
# Create ARM environment
create-arm:
uses: ./.github/workflows/create-arm.yml
secrets:
@ -129,68 +53,36 @@ jobs:
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_DEFAULT_ORGANIZATION_ID }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
build-bw-arm:
# Build arm64 + arm/v7 images
build-containers-arm:
needs: [create-arm]
strategy:
matrix:
image: [bunkerweb, scheduler, autoconf, ui]
arch: ["linux/arm64,linux/arm/v7"]
include:
- release: beta
cache: false
push: false
cache_suffix: arm
- image: bunkerweb
dockerfile: src/bw/Dockerfile
- image: scheduler
dockerfile: src/scheduler/Dockerfile
- image: autoconf
dockerfile: src/autoconf/Dockerfile
- image: ui
dockerfile: src/ui/Dockerfile
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/arm64,linux/arm/v7
IMAGE: bunkerweb
DOCKERFILE: src/bw/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "arm"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
build-sc-arm:
needs: [create-arm]
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/arm64,linux/arm/v7
IMAGE: scheduler
DOCKERFILE: src/scheduler/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "arm"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
build-au-arm:
needs: [create-arm]
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/arm64,linux/arm/v7
IMAGE: autoconf
DOCKERFILE: src/autoconf/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "arm"
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
build-ui-arm:
needs: [create-arm]
uses: ./.github/workflows/container-build.yml
with:
RELEASE: beta
ARCH: linux/arm64,linux/arm/v7
IMAGE: ui
DOCKERFILE: src/ui/Dockerfile
CACHE: false
PUSH: false
CACHE_SUFFIX: "arm"
RELEASE: ${{ matrix.release }}
ARCH: ${{ matrix.arch }}
IMAGE: ${{ matrix.image }}
DOCKERFILE: ${{ matrix.dockerfile }}
CACHE: ${{ matrix.cache }}
PUSH: ${{ matrix.push }}
CACHE_SUFFIX: ${{ matrix.cache_suffix }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
@ -198,77 +90,48 @@ jobs:
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
# Linux packages
build-ubuntu:
# Build Linux packages
build-packages:
needs: [create-arm]
strategy:
matrix:
linux: [ubuntu, debian, fedora, rhel]
platforms: [linux/amd64, linux/arm64, linux/arm/v7]
include:
- release: beta
- linux: ubuntu
package: deb
- linux: debian
package: deb
- linux: fedora
package: rpm
- linux: rhel
package: rpm
exclude:
- linux: fedora
platforms: linux/arm/v7
- linux: rhel
platforms: linux/arm/v7
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: beta
LINUX: ubuntu
PACKAGE: deb
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-debian:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: beta
LINUX: debian
PACKAGE: deb
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# build-centos:
# uses: ./.github/workflows/linux-build.yml
# with:
# RELEASE: beta
# LINUX: centos
# PACKAGE: rpm
# secrets:
# DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
# DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
# PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-fedora:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: beta
LINUX: fedora
PACKAGE: rpm
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-rhel:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: beta
LINUX: rhel
PACKAGE: rpm
RELEASE: ${{ matrix.release }}
LINUX: ${{ matrix.linux }}
PACKAGE: ${{ matrix.package }}
TEST: false
PLATFORMS: ${{ matrix.platforms }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
# Wait for all builds and extract VERSION
wait-builds:
runs-on: ubuntu-latest
needs: [
build-bw-amd64, build-bw-386,
build-sc-amd64, build-sc-386,
build-au-amd64, build-au-386,
build-ui-amd64, build-ui-386,
build-bw-arm, build-sc-arm, build-au-arm, build-ui-arm,
build-ubuntu,
build-debian,
build-fedora,
build-rhel
]
needs: [build-containers, build-containers-arm, build-packages]
outputs:
version: ${{ steps.getversion.outputs.version }}
versionrpm: ${{ steps.getversionrpm.outputs.versionrpm }}
@ -283,52 +146,30 @@ jobs:
run: echo "versionrpm=$(cat src/VERSION | tr -d '\n' | sed 's/-/_/g')" >> "$GITHUB_OUTPUT"
# Push Docker images
push-bunkerweb:
push-images:
needs: [create-arm, wait-builds]
strategy:
matrix:
image: [bunkerweb, bunkerweb-scheduler, bunkerweb-autoconf, bunkerweb-ui]
include:
- release: beta
- image: bunkerweb
cache_from: bunkerweb
dockerfile: src/bw/Dockerfile
- image: bunkerweb-scheduler
cache_from: scheduler
dockerfile: src/scheduler/Dockerfile
- image: bunkerweb-autoconf
cache_from: autoconf
dockerfile: src/autoconf/Dockerfile
- image: bunkerweb-ui
cache_from: ui
dockerfile: src/ui/Dockerfile
uses: ./.github/workflows/push-docker.yml
with:
IMAGE: bunkerity/bunkerweb:beta,bunkerity/bunkerweb:${{ needs.wait-builds.outputs.version }}
CACHE_FROM: bunkerweb-beta
DOCKERFILE: src/bw/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
push-scheduler:
needs: [create-arm, wait-builds]
uses: ./.github/workflows/push-docker.yml
with:
IMAGE: bunkerity/bunkerweb-scheduler:beta,bunkerity/bunkerweb-scheduler:${{ needs.wait-builds.outputs.version }}
CACHE_FROM: scheduler-beta
DOCKERFILE: src/scheduler/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
push-autoconf:
needs: [create-arm, wait-builds]
uses: ./.github/workflows/push-docker.yml
with:
IMAGE: bunkerity/bunkerweb-autoconf:beta,bunkerity/bunkerweb-autoconf:${{ needs.wait-builds.outputs.version }}
CACHE_FROM: autoconf-beta
DOCKERFILE: src/autoconf/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
ARM_SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
ARM_SSH_IP: ${{ needs.create-arm.outputs.ip }}
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
push-ui:
needs: [create-arm, wait-builds]
uses: ./.github/workflows/push-docker.yml
with:
IMAGE: bunkerity/bunkerweb-ui:beta,bunkerity/bunkerweb-ui:${{ needs.wait-builds.outputs.version }}
CACHE_FROM: ui-beta
DOCKERFILE: src/ui/Dockerfile
IMAGE: bunkerity/${{ matrix.image }}:${{ matrix.release }},bunkerity/${{ matrix.image }}:${{ needs.wait-builds.outputs.version }}
CACHE_FROM: ${{ matrix.cache_from }}-${{ matrix.release }}
DOCKERFILE: ${{ matrix.dockerfile }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
@ -337,76 +178,86 @@ jobs:
ARM_SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
# Push Linux packages
push-ubuntu:
push-packages:
needs: [wait-builds]
strategy:
matrix:
linux: [ubuntu, debian, fedora, el]
arch: [amd64, arm64, armv7]
include:
- release: beta
repo: bunkerweb
- linux: ubuntu
separator: _
suffix: ""
version: jammy
package: deb
- linux: debian
separator: _
suffix: ""
version: bullseye
package: deb
- linux: fedora
separator: "-"
suffix: "1."
version: 38
package: rpm
- linux: el
separator: "-"
suffix: "1."
version: 8
package: rpm
- linux: ubuntu
arch: amd64
package_arch: amd64
- linux: debian
arch: amd64
package_arch: amd64
- linux: fedora
arch: amd64
package_arch: x86_64
- linux: el
arch: amd64
package_arch: x86_64
- linux: ubuntu
arch: arm64
package_arch: arm64
- linux: debian
arch: arm64
package_arch: arm64
- linux: fedora
arch: arm64
package_arch: aarch64
- linux: el
arch: arm64
package_arch: aarch64
- linux: ubuntu
arch: armv7
package_arch: armhf
- linux: debian
arch: armv7
package_arch: armhf
exclude:
- linux: fedora
arch: armv7
- linux: el
arch: armv7
uses: ./.github/workflows/push-packagecloud.yml
with:
SEPARATOR: _
SUFFIX: amd64
SUFFIX2: -1_amd64
REPO: bunkerweb
LINUX: ubuntu
VERSION: jammy
PACKAGE: deb
BW_VERSION: ${{ needs.wait-builds.outputs.version }}
secrets:
PACKAGECLOUD_TOKEN: ${{ secrets.PACKAGECLOUD_TOKEN }}
push-debian:
needs: [wait-builds]
uses: ./.github/workflows/push-packagecloud.yml
with:
SEPARATOR: _
SUFFIX: amd64
SUFFIX2: -1_amd64
REPO: bunkerweb
LINUX: debian
VERSION: bullseye
PACKAGE: deb
BW_VERSION: ${{ needs.wait-builds.outputs.version }}
secrets:
PACKAGECLOUD_TOKEN: ${{ secrets.PACKAGECLOUD_TOKEN }}
push-rhel:
needs: [wait-builds]
uses: ./.github/workflows/push-packagecloud.yml
with:
SEPARATOR: "-"
SUFFIX: 1.x86_64
SUFFIX2: -1.x86_64
REPO: bunkerweb
LINUX: el
VERSION: 8
PACKAGE: rpm
BW_VERSION: ${{ needs.wait-builds.outputs.versionrpm }}
secrets:
PACKAGECLOUD_TOKEN: ${{ secrets.PACKAGECLOUD_TOKEN }}
push-fedora:
needs: [wait-builds]
uses: ./.github/workflows/push-packagecloud.yml
with:
SEPARATOR: "-"
SUFFIX: 1.x86_64
SUFFIX2: -1.x86_64
REPO: bunkerweb
LINUX: fedora
VERSION: 37
PACKAGE: rpm
BW_VERSION: ${{ needs.wait-builds.outputs.versionrpm }}
SEPARATOR: ${{ matrix.separator }}
SUFFIX: ${{ matrix.suffix }}
REPO: ${{ matrix.repo }}
LINUX: ${{ matrix.linux }}
VERSION: ${{ matrix.version }}
PACKAGE: ${{ matrix.package }}
BW_VERSION: ${{ matrix.package == 'rpm' && needs.wait-builds.outputs.versionrpm || needs.wait-builds.outputs.version }}
ARCH: ${{ matrix.arch }}
PACKAGE_ARCH: ${{ matrix.package_arch }}
secrets:
PACKAGECLOUD_TOKEN: ${{ secrets.PACKAGECLOUD_TOKEN }}
# Create doc PDF
doc-pdf:
needs: [
wait-builds,
push-bunkerweb,
push-scheduler,
push-autoconf,
push-ui,
push-ubuntu,
push-debian,
push-rhel,
push-fedora
]
needs: [wait-builds, push-images, push-packages]
uses: ./.github/workflows/doc-to-pdf.yml
with:
VERSION: ${{ needs.wait-builds.outputs.version }}
@ -437,7 +288,7 @@ jobs:
# Remove ARM VM
rm-arm:
if: ${{ always() }}
needs: [create-arm, push-bunkerweb, push-scheduler, push-autoconf, push-ui]
needs: [create-arm, push-images, build-packages]
uses: ./.github/workflows/rm-arm.yml
secrets:
ARM_ID: ${{ needs.create-arm.outputs.id }}

61
.github/workflows/dev-update-mmdb.yml vendored Normal file
View File

@ -0,0 +1,61 @@
name: Update cached mmdb files
permissions:
contents: write
on:
schedule:
- cron: "0 1 5 * *"
jobs:
mmdb-update:
runs-on: ubuntu-latest
steps:
- name: Checkout source code
uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.BUNKERBOT_TOKEN }}
ref: dev
- name: Download mmdb files
run: |
mkdir -p src/bw/misc/
cd src/bw/misc/
CURL_RETURN_CODE=0
CURL_OUTPUT=`curl -w httpcode=%{http_code} -s -o asn.mmdb.gz https://download.db-ip.com/free/dbip-asn-lite-$(date +%Y-%m).mmdb.gz 2> /dev/null` || CURL_RETURN_CODE=$?
if [ ${CURL_RETURN_CODE} -ne 0 ]; then
echo "Curl connection failed when downloading asn-lite mmdb file with return code - ${CURL_RETURN_CODE}"
exit 1
else
echo "Curl connection success"
# Check http code for curl operation/response in CURL_OUTPUT
httpCode=$(echo "${CURL_OUTPUT}" | sed -e 's/.*httpcode=//')
if [ ${httpCode} -ne 200 ]; then
echo "Curl operation/command failed due to server return code - ${httpCode}"
exit 1
fi
fi
CURL_RETURN_CODE=0
CURL_OUTPUT=`curl -w httpcode=%{http_code} -s -o country.mmdb.gz https://download.db-ip.com/free/dbip-country-lite-$(date +%Y-%m).mmdb.gz 2> /dev/null` || CURL_RETURN_CODE=$?
if [ ${CURL_RETURN_CODE} -ne 0 ]; then
echo "Curl connection failed when downloading country-lite mmdb file with return code - ${CURL_RETURN_CODE}"
exit 1
else
echo "Curl connection success"
# Check http code for curl operation/response in CURL_OUTPUT
httpCode=$(echo "${CURL_OUTPUT}" | sed -e 's/.*httpcode=//')
if [ ${httpCode} -ne 200 ]; then
echo "Curl operation/command failed due to server return code - ${httpCode}"
exit 1
fi
fi
rm -f asn.mmdb country.mmdb
gunzip asn.mmdb.gz country.mmdb.gz
- name: Commit and push changes
uses: stefanzweifel/git-auto-commit-action@v4
with:
branch: dev
commit_message: "Monthly mmdb update"
commit_options: "--no-verify"
commit_user_name: "BunkerBot"
commit_user_email: "bunkerbot@bunkerity.com"

View File

@ -9,49 +9,25 @@ on:
jobs:
# Containers
build-bw:
build-containers:
strategy:
matrix:
image: [bunkerweb, scheduler, autoconf, ui]
include:
- image: bunkerweb
dockerfile: src/bw/Dockerfile
- image: scheduler
dockerfile: src/scheduler/Dockerfile
- image: autoconf
dockerfile: src/autoconf/Dockerfile
- image: ui
dockerfile: src/ui/Dockerfile
uses: ./.github/workflows/container-build.yml
with:
RELEASE: dev
ARCH: linux/amd64
IMAGE: bunkerweb
DOCKERFILE: src/bw/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-sc:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: dev
ARCH: linux/amd64
IMAGE: scheduler
DOCKERFILE: src/scheduler/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-au:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: dev
ARCH: linux/amd64
IMAGE: autoconf
DOCKERFILE: src/autoconf/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-ui:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: dev
ARCH: linux/amd64
IMAGE: ui
DOCKERFILE: src/ui/Dockerfile
IMAGE: ${{ matrix.image }}
DOCKERFILE: ${{ matrix.dockerfile }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
@ -84,7 +60,7 @@ jobs:
# UI tests
tests-ui:
needs: [code-security, build-bw, build-sc, build-ui]
needs: [code-security, build-containers]
uses: ./.github/workflows/tests-ui.yml
with:
RELEASE: dev

View File

@ -12,6 +12,13 @@ on:
PACKAGE:
required: true
type: string
PLATFORMS:
required: true
type: string
TEST:
required: false
type: boolean
default: false
secrets:
DOCKER_USERNAME:
required: true
@ -21,6 +28,13 @@ on:
required: true
PRIVATE_REGISTRY_TOKEN:
required: true
ARM_SSH_KEY:
required: false
ARM_SSH_IP:
required: false
ARM_SSH_CONFIG:
required: false
jobs:
build:
@ -29,8 +43,31 @@ jobs:
# Prepare
- name: Checkout source code
uses: actions/checkout@v3
- name: Extract arch
run: |
echo "ARCH=${{ env.PLATFORMS }}" | sed 's/linux//g' | sed 's@/@@g' >> "$GITHUB_ENV"
env:
PLATFORMS: ${{ inputs.PLATFORMS }}
- name: Setup SSH for ARM node
if: startsWith(env.ARCH, 'arm') == true
run: |
mkdir -p ~/.ssh
echo "$SSH_KEY" > ~/.ssh/id_rsa_arm
chmod 600 ~/.ssh/id_rsa_arm
echo "$SSH_CONFIG" | sed "s/SSH_IP/$SSH_IP/g" > ~/.ssh/config
env:
SSH_KEY: ${{ secrets.ARM_SSH_KEY }}
SSH_IP: ${{ secrets.ARM_SSH_IP }}
SSH_CONFIG: ${{ secrets.ARM_SSH_CONFIG }}
- name: Setup Buildx
uses: docker/setup-buildx-action@v2
if: startsWith(env.ARCH, 'arm') == false
- name: Setup Buildx (ARM)
uses: docker/setup-buildx-action@v2
if: startsWith(env.ARCH, 'arm') == true
with:
endpoint: ssh://root@arm
platforms: linux/arm64,linux/arm/v7,linux/arm/v6
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
@ -50,7 +87,7 @@ jobs:
context: .
load: true
file: src/linux/Dockerfile-${{ inputs.LINUX }}
platforms: linux/amd64
platforms: ${{ inputs.PLATFORMS }}
tags: local/bunkerweb-${{ inputs.LINUX }}:latest
cache-from: type=registry,ref=bunkerity/cache:${{ inputs.LINUX }}-staging
cache-to: type=registry,ref=bunkerity/cache:${{ inputs.LINUX }}-staging,mode=min
@ -62,21 +99,22 @@ jobs:
context: .
load: true
file: src/linux/Dockerfile-${{ inputs.LINUX }}
platforms: linux/amd64
platforms: ${{ inputs.PLATFORMS }}
tags: local/bunkerweb-${{ inputs.LINUX }}:latest
# Generate package
- name: Generate package
run: ./src/linux/package.sh ${{ inputs.LINUX }}
- uses: actions/upload-artifact@v3
with:
name: package-${{ inputs.LINUX }}
name: package-${{ inputs.LINUX }}-${{ env.ARCH }}
path: package-${{ inputs.LINUX }}/*.${{ inputs.PACKAGE }}
# Build test image
- name: Build test image
if: inputs.TEST == true
uses: docker/build-push-action@v3
with:
context: .
file: tests/linux/Dockerfile-${{ inputs.LINUX }}
platforms: linux/amd64
platforms: ${{ inputs.PLATFORMS }}
push: true
tags: ${{ secrets.PRIVATE_REGISTRY }}/infra/${{ inputs.LINUX }}-tests:${{ inputs.RELEASE }}

View File

@ -9,9 +9,6 @@ on:
SUFFIX:
required: true
type: string
SUFFIX2:
required: true
type: string
REPO:
required: true
type: string
@ -27,6 +24,12 @@ on:
BW_VERSION:
required: true
type: string
ARCH:
required: true
type: string
PACKAGE_ARCH:
required: true
type: string
secrets:
PACKAGECLOUD_TOKEN:
required: true
@ -48,16 +51,16 @@ jobs:
- uses: actions/download-artifact@v3
if: inputs.LINUX != 'el'
with:
name: package-${{ inputs.LINUX }}
name: package-${{ inputs.LINUX }}-${{ inputs.ARCH }}
path: /tmp/${{ inputs.LINUX }}
- uses: actions/download-artifact@v3
if: inputs.LINUX == 'el'
with:
name: package-rhel-${{ inputs.ARCH }}
path: /tmp/${{ inputs.LINUX }}
path: /tmp/${{ inputs.LINUX }}-${{ inputs.ARCH }}
# Remove existing packages
- name: Remove existing package
run: package_cloud yank bunkerity/${{ inputs.REPO }}/${{ inputs.LINUX }}/${{ inputs.VERSION }} bunkerweb${{ inputs.SEPARATOR }}${{ inputs.BW_VERSION }}${{ inputs.SEPARATOR }}${{ inputs.SUFFIX }}.${{ inputs.PACKAGE }}
run: package_cloud yank bunkerity/${{ inputs.REPO }}/${{ inputs.LINUX }}/${{ inputs.VERSION }} bunkerweb${{ inputs.SEPARATOR }}${{ inputs.BW_VERSION }}${{ inputs.SEPARATOR }}${{ inputs.SUFFIX }}${{ inputs.PACKAGE_ARCH }}.${{ inputs.PACKAGE }}
continue-on-error: true
env:
PACKAGECLOUD_TOKEN: ${{ secrets.PACKAGECLOUD_TOKEN }}

View File

@ -7,107 +7,56 @@ on:
branches: [staging]
jobs:
# Containers
build-bw:
# Build Docker images
build-containers:
strategy:
matrix:
image: [bunkerweb, scheduler, autoconf, ui]
include:
- image: bunkerweb
dockerfile: src/bw/Dockerfile
- image: scheduler
dockerfile: src/scheduler/Dockerfile
- image: autoconf
dockerfile: src/autoconf/Dockerfile
- image: ui
dockerfile: src/ui/Dockerfile
uses: ./.github/workflows/container-build.yml
with:
RELEASE: staging
ARCH: linux/amd64
IMAGE: bunkerweb
DOCKERFILE: src/bw/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-scheduler:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: staging
ARCH: linux/amd64
IMAGE: scheduler
DOCKERFILE: src/scheduler/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-autoconf:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: staging
ARCH: linux/amd64
IMAGE: autoconf
DOCKERFILE: src/autoconf/Dockerfile
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-ui:
uses: ./.github/workflows/container-build.yml
with:
RELEASE: staging
ARCH: linux/amd64
IMAGE: ui
DOCKERFILE: src/ui/Dockerfile
CACHE: true
PUSH: true
IMAGE: ${{ matrix.image }}
DOCKERFILE: ${{ matrix.dockerfile }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# Linux
build-ubuntu:
# Build Linux packages
build-packages:
strategy:
matrix:
linux: [ubuntu, debian, fedora, rhel]
include:
- linux: ubuntu
package: deb
- linux: debian
package: deb
- linux: fedora
package: rpm
- linux: rhel
package: rpm
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: staging
LINUX: ubuntu
PACKAGE: deb
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-debian:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: staging
LINUX: debian
PACKAGE: deb
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# build-centos:
# uses: ./.github/workflows/linux-build.yml
# with:
# RELEASE: staging
# LINUX: centos
# PACKAGE: rpm
# secrets:
# DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
# DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
# PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
# PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-fedora:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: staging
LINUX: fedora
PACKAGE: rpm
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
build-rhel:
uses: ./.github/workflows/linux-build.yml
with:
RELEASE: staging
LINUX: rhel
PACKAGE: rpm
LINUX: ${{ matrix.linux }}
PACKAGE: ${{ matrix.package }}
TEST: true
PLATFORMS: linux/amd64
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
@ -139,132 +88,57 @@ jobs:
category: "/language:${{matrix.language}}"
# Create infrastructures
create-infra-docker:
needs: [code-security, build-bw, build-scheduler]
create-infras:
needs: [code-security, build-containers, build-packages]
strategy:
matrix:
type: [docker, autoconf, swarm, k8s, linux]
uses: ./.github/workflows/staging-create-infra.yml
with:
TYPE: docker
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
create-infra-autoconf:
needs: [code-security, build-bw, build-scheduler, build-autoconf]
uses: ./.github/workflows/staging-create-infra.yml
with:
TYPE: autoconf
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
create-infra-swarm:
needs: [code-security, build-bw, build-scheduler, build-autoconf]
uses: ./.github/workflows/staging-create-infra.yml
with:
TYPE: swarm
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
create-infra-k8s:
needs: [code-security, build-bw, build-scheduler, build-autoconf]
uses: ./.github/workflows/staging-create-infra.yml
with:
TYPE: k8s
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
create-infra-linux:
needs:
[
code-security,
build-ubuntu,
build-debian,
build-fedora,
build-rhel,
]
uses: ./.github/workflows/staging-create-infra.yml
with:
TYPE: linux
TYPE: ${{ matrix.type }}
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
# Perform tests
tests-docker:
needs: [create-infra-docker]
staging-tests:
needs: [create-infras]
strategy:
matrix:
type: [docker, autoconf, swarm, k8s, linux]
include:
- type: docker
runs_on: "['self-hosted', 'bw-docker']"
- type: autoconf
runs_on: "['self-hosted', 'bw-autoconf']"
- type: swarm
runs_on: "['self-hosted', 'bw-swarm']"
- type: k8s
runs_on: "['ubuntu-latest']"
- type: linux
runs_on: "['self-hosted', 'bw-linux']"
uses: ./.github/workflows/staging-tests.yml
with:
TYPE: docker
RUNS_ON: "['self-hosted', 'bw-docker']"
TYPE: ${{ matrix.type }}
RUNS_ON: ${{ matrix.runs_on }}
secrets: inherit
tests-autoconf:
needs: [create-infra-autoconf]
uses: ./.github/workflows/staging-tests.yml
tests-ui:
needs: [create-infras]
uses: ./.github/workflows/tests-ui.yml
with:
TYPE: autoconf
RUNS_ON: "['self-hosted', 'bw-autoconf']"
secrets: inherit
tests-swarm:
needs: [create-infra-swarm]
uses: ./.github/workflows/staging-tests.yml
with:
TYPE: swarm
RUNS_ON: "['self-hosted', 'bw-swarm']"
secrets: inherit
tests-k8s:
needs: [create-infra-k8s]
uses: ./.github/workflows/staging-tests.yml
with:
TYPE: k8s
RUNS_ON: "['ubuntu-latest']"
secrets: inherit
tests-linux:
needs: [create-infra-linux]
uses: ./.github/workflows/staging-tests.yml
with:
TYPE: linux
RUNS_ON: "['self-hosted', 'bw-linux']"
secrets: inherit
# tests-ui:
# needs: [code-security, build-bw, build-scheduler, build-ui]
# uses: ./.github/workflows/tests-ui.yml
# with:
# RELEASE: staging
# secrets:
# PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
# PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
RELEASE: staging
secrets:
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# Delete infrastructures
delete-infra-docker:
delete-infras:
if: ${{ always() }}
needs: [tests-docker]
needs: [staging-tests]
strategy:
matrix:
type: [docker, autoconf, swarm, k8s, linux]
uses: ./.github/workflows/staging-delete-infra.yml
with:
TYPE: docker
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
delete-infra-autoconf:
if: ${{ always() }}
needs: [tests-autoconf]
uses: ./.github/workflows/staging-delete-infra.yml
with:
TYPE: autoconf
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
delete-infra-swarm:
if: ${{ always() }}
needs: [tests-swarm]
uses: ./.github/workflows/staging-delete-infra.yml
with:
TYPE: swarm
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
delete-infra-k8s:
if: ${{ always() }}
needs: [tests-k8s]
uses: ./.github/workflows/staging-delete-infra.yml
with:
TYPE: k8s
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}
delete-infra-linux:
if: ${{ always() }}
needs: [tests-linux]
uses: ./.github/workflows/staging-delete-infra.yml
with:
TYPE: linux
TYPE: ${{ matrix.type }}
secrets:
CICD_SECRETS: ${{ secrets.CICD_SECRETS }}

3
.gitignore vendored
View File

@ -4,4 +4,5 @@ site/
__pycache__
env
node_modules
/src/ui/*.txt
/src/ui/*.txt
.mypy_cache

View File

@ -567,6 +567,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always
@ -816,7 +817,7 @@ List of supported Linux distros :
- Debian 11 "Bullseye"
- Ubuntu 22.04 "Jammy"
- Fedora 37
- Fedora 38
- RedHat Enterprise Linux (RHEL) 8.7
Please note that you will need to **install NGINX 1.24.0 before BunkerWeb**. For all distros, except Fedora, using prebuilt packages from [official NGINX repository](https://nginx.org/en/linux_packages.html) is mandatory. Compiling NGINX from source or using packages from different repositories won't work with the official prebuilt packages of BunkerWeb but you can build it from source.
@ -1082,8 +1083,7 @@ List of supported Linux distros :
- Debian 11 "Bullseye"
- Ubuntu 22.04 "Jammy"
- Fedora 37
- CentOS Stream 8
- Fedora 38
- RedHat Enterprise Linux (RHEL) 8.7
[Ansible](https://docs.ansible.com/ansible/latest/index.html) is an IT automation tool. It can configure systems, deploy software, and orchestrate more advanced IT tasks such as continuous deployments or zero downtime rolling updates.

View File

@ -22,6 +22,7 @@ def print_md_table(settings) -> MarkdownTableWriter:
)
return writer
def stream_support(support) -> str:
md = "STREAM support "
if support == "no":
@ -32,6 +33,7 @@ def stream_support(support) -> str:
md += ":warning:"
return md
doc = StringIO()
print("# Settings\n", file=doc)
@ -72,7 +74,7 @@ for name, data in dict(sorted(core_settings.items())).items():
print(f"### {data['name']}\n", file=doc)
print(f"{stream_support(data['stream'])}\n", file=doc)
print(f"{data['description']}\n", file=doc)
print(print_md_table(data['settings']), file=doc)
print(print_md_table(data["settings"]), file=doc)
doc.seek(0)
content = doc.read()

View File

@ -1,5 +1,5 @@
mkdocs==1.4.2
mkdocs-material==9.1.8
mkdocs==1.4.3
mkdocs-material==9.1.9
pytablewriter==0.64.2
mike==1.1.2
jinja2<3.1.0

View File

@ -522,6 +522,7 @@ Because the web UI is a web application, the recommended installation procedure
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -1,3 +1,6 @@
drupalUsername: "user"
drupalPassword: "changeme42"
drupalEmail: "contact@example.com"
mariadb:
auth:
password: "changeme1337"

View File

@ -3,7 +3,7 @@
"kinds": ["docker", "autoconf", "swarm", "kubernetes", "linux"],
"no_copy_container": true,
"timeout": 60,
"delay": 60,
"delay": 120,
"tests": [
{
"type": "string",

View File

@ -1,3 +1,6 @@
ghostUsername: "user"
ghostPassword: "changeme42"
ghostHost: "www.example.com"
mysql:
auth:
password: "changeme1337"

View File

@ -1,3 +1,6 @@
joomlaUsername: "user"
joomlaPassword: "changeme42"
joomlaEmail: "contact@example.com"
mariadb:
auth:
password: "changeme1337"

View File

@ -18,7 +18,7 @@ services:
- REVERSE_PROXY_URL=/
- REVERSE_PROXY_HOST=http://app
- |
CUSTOM_CONF_HTTP_upstream.conf=
CUSTOM_CONF_HTTP_upstream=
upstream app {
server app1:80;
server app2:80;

View File

@ -5,3 +5,6 @@ magentoEmail: "contact@example.com"
magentoFirstName: "John"
magentoLastName: "Doe"
magentoAdminUri: "admin"
mariadb:
auth:
password: "changeme1337"

View File

@ -2,3 +2,6 @@ moodleSiteName: "My Moodle"
moodleUsername: "admin"
moodlePassword: "changeme42"
moodleEmail: "admin@example.com"
mariadb:
auth:
password: "changeme1337"

View File

@ -8,3 +8,6 @@ prestashopCountry: "us"
prestashopLanguage: "en"
service:
type: ClusterIP
mariadb:
auth:
password: "changeme1337"

View File

@ -4,3 +4,6 @@ redmineEmail: "user@example.com"
redmineLanguage: "en"
service:
type: ClusterIP
mariadb:
auth:
password: "changeme1337"

View File

@ -6,3 +6,6 @@ wordpressLastName: "LastName"
wordpressBlogName: "User's Blog!"
wordpressTablePrefix: "changeme_"
wordpressScheme: "https"
mariadb:
auth:
password: "changeme1337"

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -46,6 +46,7 @@ spec:
bunkerweb.io/INSTANCE: "yes"
spec:
containers:
# using bunkerweb as name is mandatory
- name: bunkerweb
image: bunkerity/bunkerweb:1.5.0-beta
imagePullPolicy: Always

View File

@ -13,6 +13,7 @@ RUN apk add --no-cache --virtual .build-deps g++ gcc musl-dev jpeg-dev zlib-dev
pip install --no-cache-dir --upgrade pip && \
pip install wheel && \
mkdir -p /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
apk del .build-deps
@ -37,7 +38,6 @@ RUN apk add --no-cache bash && \
mkdir -p /etc/bunkerweb && \
mkdir -p /data/cache && ln -s /data/cache /var/cache/bunkerweb && \
mkdir -p /data/lib && ln -s /data/lib /var/lib/bunkerweb && \
mkdir -p /data/cache/letsencrypt && ln -s /data/cache/letsencrypt /etc/letsencrypt && \
mkdir -p /data/www && ln -s /data/www /var/www/html && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \

View File

@ -41,10 +41,17 @@ class IngressController(Controller, ConfigCaller):
health = True
break
instance["health"] = health
instance["env"] = {
env.name: env.value or ""
for env in controller_instance.spec.containers[0].env
}
instance["env"] = {}
pod = None
for container in controller_instance.spec.containers:
if container.name == "bunkerweb":
pod = container
break
if not pod:
self.__logger.warning(f"Missing container bunkerweb in pod {controller_instance.metadata.name}")
else:
for env in pod.env:
instance["env"][env.name] = env.value or ""
for controller_service in self._get_controller_services():
if controller_service.metadata.annotations:
for (
@ -156,8 +163,16 @@ class IngressController(Controller, ConfigCaller):
):
continue
pod = None
for container in instance.spec.containers:
if container.name == "bunkerweb":
pod = container
break
if not pod :
continue
variables = {
env.name: env.value or "" for env in instance.spec.containers[0].env
env.name: env.value or "" for env in pod.env
}
if "SERVER_NAME" in variables and variables["SERVER_NAME"].strip():

View File

@ -50,7 +50,6 @@ RUN apk add --no-cache pcre bash python3 && \
mkdir -p /var/www/html && \
mkdir -p /etc/bunkerweb && \
mkdir -p /data/cache && ln -s /data/cache /var/cache/bunkerweb && \
mkdir -p /data/cache/letsencrypt && ln -s /data/cache/letsencrypt /etc/letsencrypt && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \
chown -R root:nginx /data && \

View File

@ -64,7 +64,9 @@ utils.has_variable = function(var, value)
return true, "success"
end
end
return false, "success"
if servers ~= "" then
return false, "success"
end
end
return check_value == value, "success"
end
@ -93,7 +95,9 @@ utils.has_not_variable = function(var, value)
return true, "success"
end
end
return false, "success"
if servers ~= "" then
return false, "success"
end
end
return check_value ~= value, "success"
end
@ -168,7 +172,21 @@ utils.ip_is_global = function(ip)
"224.0.0.0/4",
"233.252.0.0/24",
"240.0.0.0/4",
"255.255.255.255/32"
"255.255.255.255/32",
"::/128",
"::1/128",
"::ffff:0:0/96",
"::ffff:0:0:0/96",
"64:ff9b::/96",
"64:ff9b:1::/48",
"100::/64",
"2001:0000::/32",
"2001:20::/28",
"2001:db8::/32",
"2002::/16",
"fc00::/7",
"fe80::/10",
"ff00::/8"
}
-- Instantiate ipmatcher
local ipm, err = ipmatcher.new(reserved_ips)

Binary file not shown.

Binary file not shown.

View File

@ -15,6 +15,9 @@ server {
include /etc/bunkerweb/configs/server-http/{{ SERVER_NAME.split(" ")[0] }}/*.conf;
{% endif %}
# reason variable
set $reason '';
# include LUA files
include {{ NGINX_PREFIX }}set-lua.conf;
include {{ NGINX_PREFIX }}access-lua.conf;

View File

@ -257,6 +257,12 @@
</p></footer>
</main>
<!-- end content -->
<script>
function send_challenge(token) {
document.getElementById("token").value = token;
document.getElementById("form").submit();
}
</script>
</body>
</html>
{-raw-}

View File

@ -162,7 +162,7 @@ function blacklist:kind_to_ele(kind)
end
function blacklist:is_in_cache(ele)
local ok, data = self.cachestore:get("plugin_blacklist_" .. ele)
local ok, data = self.cachestore:get("plugin_blacklist_" .. ngx.ctx.bw.server_name .. ele)
if not ok then
return false, data
end
@ -170,7 +170,7 @@ function blacklist:is_in_cache(ele)
end
function blacklist:add_to_cache(ele, value)
local ok, err = self.cachestore:set("plugin_blacklist_" .. ele, value, 86400)
local ok, err = self.cachestore:set("plugin_blacklist_" .. ngx.ctx.bw.server_name .. ele, value, 86400)
if not ok then
return false, err
end

View File

@ -192,6 +192,8 @@ try:
if not cached:
logger.error(f"Error while caching blacklist : {err}")
status = 2
else:
status = 1
except:
status = 2
logger.error(

View File

@ -1,9 +1,9 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local datastore = require "bunkerweb.datastore"
local cjson = require "cjson"
local http = require "resty.http"
local cjson = require "cjson"
local http = require "resty.http"
local bunkernet = class("bunkernet", plugin)
@ -15,6 +15,8 @@ function bunkernet:initialize()
local id, err = self.datastore:get("plugin_bunkernet_id")
if id then
self.bunkernet_id = id
self.version = ngx.ctx.bw.version
self.integration = ngx.ctx.bw.integration
else
self.logger:log(ngx.ERR, "can't get BunkerNet ID from datastore : " .. err)
end
@ -58,7 +60,7 @@ function bunkernet:init()
ret = false
else
for line in f:lines() do
if utils.is_ipv4(line) and utils.ip_is_global(line) then
if (utils.is_ipv4(line) or utils.is_ipv6(line)) and utils.ip_is_global(line) then
table.insert(db.ip, line)
i = i + 1
end
@ -72,11 +74,54 @@ function bunkernet:init()
if not ok then
return self:ret(false, "can't store bunkernet database into datastore : " .. err)
end
return self:ret(true, "successfully connected to the bunkernet service " .. self.server .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database")
return self:ret(true,
"successfully connected to the bunkernet service " ..
self.variables["BUNKERNET_SERVER"] .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database")
end
function bunkernet:access()
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
end
-- Check if enabled
if self.variables["USE_BUNKERNET"] ~= "yes" then
return self:ret(true, "bunkernet not activated")
end
-- Check if BunkerNet ID is generated
if not self.bunkernet_id then
return self:ret(false, "bunkernet ID is not generated")
end
-- Check if IP is global
if not ngx.ctx.bw.ip_is_global then
return self:ret(true, "IP is not global")
end
-- Check if whitelisted
if ngx.ctx.bw.is_whitelisted == "yes" then
return self:ret(true, "client is whitelisted")
end
-- Extract DB
local db, err = self.datastore:get("plugin_bunkernet_db")
if db then
db = cjson.decode(db)
-- Check if is IP is present
if #db.ip > 0 then
local present, err = utils.is_ip_in_networks(ngx.ctx.bw.remote_addr, db.ip)
if present == nil then
return self:ret(false, "can't check if ip is in db : " .. err)
end
if present then
return self:ret(true, "ip is in db", utils.get_deny_status())
end
end
else
return self:ret(false, "can't get bunkernet db " .. err)
end
return self:ret(true, "not in db")
end
function bunkernet:log(bypass_use_bunkernet)
-- Check if not loading is needed
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
end
@ -103,10 +148,8 @@ function bunkernet:log(bypass_use_bunkernet)
return self:ret(true, "IP is not global")
end
-- TODO : check if IP has been reported recently
self.integration = ngx.ctx.bw.integration
self.version = ngx.ctx.bw.version
local function report_callback(premature, obj, ip, reason, method, url, headers)
local ok, err, status, data = obj:report(ip, reason, method, url, headers, obj.ctx.integration, obj.ctx.version)
local ok, err, status, data = obj:report(ip, reason, method, url, headers)
if status == 429 then
obj.logger:log(ngx.WARN, "bunkernet API is rate limiting us")
elseif not ok then
@ -159,9 +202,9 @@ function bunkernet:request(method, url, data)
return false, "can't instantiate http object : " .. err, nil, nil
end
local all_data = {
id = self.id,
integration = self.integration,
version = self.version
id = self.bunkernet_id,
version = self.version,
integration = self.integration
}
for k, v in pairs(data) do
all_data[k] = v
@ -203,4 +246,4 @@ function bunkernet:report(ip, reason, method, url, headers)
return self:request("POST", "/report", data)
end
return bunkernet
return bunkernet

View File

@ -18,10 +18,10 @@ sys_path.extend(
from bunkernet import data
from Database import Database
from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash, is_cached_file
from jobs import cache_file, cache_hash, file_hash, is_cached_file, get_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
status = 0
exit_status = 0
try:
# Check if at least a server has BunkerNet activated
@ -43,17 +43,29 @@ try:
logger.info("BunkerNet is not activated, skipping download...")
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb").mkdir(parents=True, exist_ok=True)
# Create empty file in case it doesn't exist
if not Path("/var/cache/bunkerweb/bunkernet/ip.list").is_file():
Path("/var/cache/bunkerweb/bunkernet/ip.list").write_text("")
# Get ID from cache
bunkernet_id = None
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
# Create empty file in case it doesn't exist
if not Path("/var/tmp/bunkerweb/bunkernet-ip.list").is_file():
Path("/var/tmp/bunkerweb/bunkernet-ip.list").write_bytes(b"")
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Check if ID is present
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
@ -63,11 +75,14 @@ try:
_exit(2)
# Don't go further if the cache is fresh
if is_cached_file("/var/cache/bunkerweb/bunkernet/ip.list", "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
if db:
if is_cached_file("/var/cache/bunkerweb/bunkernet/ip.list", "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
exit_status = 1
# Download data
logger.info("Downloading BunkerNet data ...")
@ -94,13 +109,14 @@ try:
logger.error(
f"Received invalid data from BunkerNet API while sending db request : {data}",
)
_exit(1)
_exit(2)
if data["result"] != "ok":
logger.error(
f"Received error from BunkerNet API while sending db request : {data['data']}, removing instance ID",
)
_exit(2)
logger.info("Successfully downloaded data from BunkerNet API")
# Writing data to file
@ -130,10 +146,10 @@ try:
logger.info("Successfully saved BunkerNet data")
status = 1
exit_status = 1
except:
status = 2
exit_status = 2
logger.error(f"Exception while running bunkernet-data.py :\n{format_exc()}")
sys_exit(status)
sys_exit(exit_status)

View File

@ -19,9 +19,10 @@ sys_path.extend(
from bunkernet import register, ping, get_id
from Database import Database
from logger import setup_logger
from jobs import get_file_in_db, set_file_in_db, del_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
status = 0
exit_status = 0
try:
# Check if at least a server has BunkerNet activated
@ -51,8 +52,24 @@ try:
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
# Ask an ID if needed
# Get ID from cache
bunkernet_id = None
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Register instance
registered = False
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
logger.info("Registering instance on BunkerNet API ...")
ok, status, data = register()
@ -60,7 +77,7 @@ try:
logger.error(
f"Error while sending register request to BunkerNet API : {data}"
)
_exit(1)
_exit(2)
elif status == 429:
logger.warning(
"BunkerNet API is rate limiting us, trying again later...",
@ -78,19 +95,22 @@ try:
logger.error(
f"Received invalid data from BunkerNet API while sending db request : {data}, retrying later...",
)
_exit(1)
_exit(2)
if status != 200:
logger.error(
f"Error {status} from BunkerNet API : {data['data']}",
)
_exit(1)
_exit(2)
elif data.get("result", "ko") != "ok":
logger.error(
f"Received error from BunkerNet API while sending register request : {data.get('data', {})}"
)
_exit(1)
_exit(2)
bunkernet_id = data["data"]
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
registered = True
exit_status = 1
logger.info(
f"Successfully registered on BunkerNet API with instance id {data['data']}"
)
@ -100,6 +120,15 @@ try:
sleep(1)
# Update cache with new bunkernet ID
if db and registered:
with open("/var/cache/bunkerweb/bunkernet/instance.id", "rb") as f:
cached, err = set_file_in_db(f"bunkernet-register", f"instance.id", f, db)
if not cached:
logger.error(f"Error while saving BunkerNet data to db cache : {err}")
else:
logger.info("Successfully saved BunkerNet data to db cache")
# Ping
logger.info("Checking connectivity with BunkerNet API ...")
bunkernet_ping = False
@ -118,11 +147,14 @@ try:
logger.warning(
"BunkerNet has banned this instance, retrying a register later...",
)
_exit(2)
elif status == 401:
logger.warning(
"Instance ID is not registered, removing it and retrying a register later...",
)
Path("/var/cache/bunkerweb/bunkernet/instance.id").unlink()
if db:
del_file_in_db("bunkernet-register", "instance.id", db)
_exit(2)
try:
@ -131,11 +163,11 @@ try:
logger.error(
f"Received invalid data from BunkerNet API while sending db request : {data}, retrying later...",
)
_exit(1)
_exit(2)
if data.get("result", "ko") != "ok":
logger.error(
f"Received error from BunkerNet API while sending ping request : {data.get('data', {})}, removing instance ID",
f"Received error from BunkerNet API while sending ping request : {data.get('data', {})}",
)
retry = True
if not retry:
@ -144,35 +176,14 @@ try:
logger.warning("Waiting 1s and trying again ...")
sleep(1)
if bunkernet_ping and status != 403:
logger.info("Connectivity with BunkerWeb is successful !")
status = 1
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(
"bunkernet-register",
None,
"instance.id",
bunkernet_id.encode("utf-8"),
)
if err:
logger.warning(f"Couldn't update db cache: {err}")
if bunkernet_ping:
logger.info("Connectivity with BunkerNet is successful !")
else:
logger.error("Connectivity with BunkerWeb failed ...")
status = 2
logger.error("Connectivity with BunkerNet failed ...")
exit_status = 2
except:
status = 2
exit_status = 2
logger.error(f"Exception while running bunkernet-register.py :\n{format_exc()}")
sys_exit(status)
sys_exit(exit_status)

View File

@ -74,7 +74,7 @@ function country:access()
if not ok then
return self:ret(false, "error while adding item to cache : " .. err)
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")", true, utils.get_deny_status())
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")", utils.get_deny_status())
end
end
end
@ -92,7 +92,7 @@ function country:preread()
end
function country:is_in_cache(ip)
local ok, data = self.cachestore:get("plugin_country_cache_" .. ip)
local ok, data = self.cachestore:get("plugin_country_cache_" .. ngx.ctx.bw.server_name .. ip)
if not ok then
return false, data
end
@ -100,7 +100,7 @@ function country:is_in_cache(ip)
end
function country:add_to_cache(ip, country, result)
local ok, err = self.cachestore:set("plugin_country_cache_" .. ip, cjson.encode({country = country, result = result}), 86400)
local ok, err = self.cachestore:set("plugin_country_cache_" .. ngx.ctx.bw.server_name .. ip, cjson.encode({country = country, result = result}), 86400)
if not ok then
return false, err
end

View File

@ -1,6 +1,6 @@
#!/usr/bin/python3
from os import getenv, makedirs
from os import getenv
from pathlib import Path
from shutil import copy
from sys import exit as sys_exit, path as sys_path
@ -120,7 +120,7 @@ def check_cert(cert_path, key_path, first_server: Optional[str] = None) -> bool:
status = 0
try:
makedirs("/var/cache/bunkerweb/customcert/", exist_ok=True)
Path("/var/cache/bunkerweb/customcert/").mkdir(parents=True, exist_ok=True)
# Multisite case
if getenv("MULTISITE") == "yes":

View File

@ -231,7 +231,7 @@ function greylist:is_greylisted_ua()
end
function greylist:is_in_cache(ele)
local ok, data = self.cachestore:get("plugin_greylist_" .. ele)
local ok, data = self.cachestore:get("plugin_greylist_" .. ngx.ctx.bw.server_name .. ele)
if not ok then
return false, data
end
@ -239,7 +239,7 @@ function greylist:is_in_cache(ele)
end
function greylist:add_to_cache(ele, value)
local ok, err = self.cachestore:set("plugin_greylist_" .. ele, value, 86400)
local ok, err = self.cachestore:set("plugin_greylist_" .. ngx.ctx.bw.server_name .. ele, value, 86400)
if not ok then
return false, err
end

View File

@ -174,6 +174,8 @@ try:
if not cached:
logger.error(f"Error while caching greylist : {err}")
status = 2
else:
status = 1
except:
status = 2
logger.error(

View File

@ -2,7 +2,7 @@
from hashlib import sha256
from io import BytesIO
from os import getenv, listdir, chmod, stat, _exit
from os import getenv, listdir, chmod, _exit
from os.path import basename, dirname
from pathlib import Path
from stat import S_IEXEC
@ -49,7 +49,7 @@ def install_plugin(plugin_dir) -> bool:
copytree(plugin_dir, f"/etc/bunkerweb/plugins/{metadata['id']}")
# Add u+x permissions to jobs files
for job_file in glob(f"{plugin_dir}/jobs/*"):
st = stat(job_file)
st = Path(job_file).stat()
chmod(job_file, st.st_mode | S_IEXEC)
logger.info(f"Plugin {metadata['id']} installed")
return True
@ -161,6 +161,7 @@ try:
f"Couldn't update external plugins to database: {err}",
)
status = 1
logger.info("External plugins downloaded and installed")
except:

View File

@ -10,8 +10,8 @@ location ~ ^/.well-known/acme-challenge/ {
listen 0.0.0.0:{{ HTTPS_PORT }} ssl {% if HTTP2 == "yes" %}http2{% endif %} {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
# TLS config
ssl_certificate /etc/letsencrypt/live/{{ SERVER_NAME.split(" ")[0] }}/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/{{ SERVER_NAME.split(" ")[0] }}/privkey.pem;
ssl_certificate /var/cache/bunkerweb/letsencrypt/etc/live/{{ SERVER_NAME.split(" ")[0] }}/fullchain.pem;
ssl_certificate_key /var/cache/bunkerweb/letsencrypt/etc/live/{{ SERVER_NAME.split(" ")[0] }}/privkey.pem;
ssl_protocols {{ SSL_PROTOCOLS }};
ssl_prefer_server_ciphers on;
ssl_session_tickets off;
@ -22,4 +22,4 @@ ssl_dhparam /etc/nginx/dhparam;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
{% endif %}
{% endif %}
{% endif %}

View File

@ -4,8 +4,8 @@
listen 0.0.0.0:{{ LISTEN_STREAM_PORT_SSL }} ssl {% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
# TLS config
ssl_certificate /etc/letsencrypt/live/{{ SERVER_NAME.split(" ")[0] }}/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/{{ SERVER_NAME.split(" ")[0] }}/privkey.pem;
ssl_certificate /var/cache/bunkerweb/letsencrypt/etc/live/{{ SERVER_NAME.split(" ")[0] }}/fullchain.pem;
ssl_certificate_key /var/cache/bunkerweb/letsencrypt/etc/live/{{ SERVER_NAME.split(" ")[0] }}/privkey.pem;
ssl_protocols {{ SSL_PROTOCOLS }};
ssl_prefer_server_ciphers on;
ssl_session_tickets off;
@ -16,4 +16,4 @@ ssl_dhparam /etc/nginx/dhparam;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
{% endif %}
{% endif %}
{% endif %}

View File

@ -1,6 +1,6 @@
#!/usr/bin/python3
from os import getenv, makedirs
from os import getenv
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
@ -75,7 +75,7 @@ try:
# Linux case
else:
root_dir = "/var/tmp/bunkerweb/lets-encrypt/.well-known/acme-challenge/"
makedirs(root_dir, exist_ok=True)
Path(root_dir).mkdir(parents=True, exist_ok=True)
Path(f"{root_dir}{token}").write_text(validation)
except:
status = 1

View File

@ -1,7 +1,6 @@
#!/usr/bin/python3
from os import getenv
from os.path import isfile
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
@ -73,7 +72,7 @@ try:
challenge_path = (
f"/var/tmp/bunkerweb/lets-encrypt/.well-known/acme-challenge/{token}"
)
if isfile(challenge_path):
if Path(challenge_path).exists():
Path(challenge_path).unlink()
except:
status = 1

View File

@ -47,7 +47,7 @@ try:
tgz = BytesIO()
with tar_open(mode="w:gz", fileobj=tgz) as tf:
tf.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
tf.add("/var/cache/bunkerweb/letsencrypt/etc", arcname="etc")
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
@ -102,7 +102,7 @@ try:
# Linux case
else:
proc = run(
["/etc/init.d/nginx", "reload"],
["sudo", "/usr/sbin/nginx", "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
)

View File

@ -1,11 +1,14 @@
#!/usr/bin/python3
from os import environ, getenv
from os import environ, getenv, listdir
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
from tarfile import open as tfopen
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
@ -17,8 +20,9 @@ sys_path.extend(
from Database import Database
from logger import setup_logger
from jobs import get_file_in_db, set_file_in_db
logger = setup_logger("LETS-ENCRYPT", getenv("LOG_LEVEL", "INFO"))
logger = setup_logger("LETS-ENCRYPT.new", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
@ -33,6 +37,9 @@ def certbot_new(domains, email):
[
"/usr/share/bunkerweb/deps/python/bin/certbot",
"certonly",
"--config-dir=/var/cache/bunkerweb/letsencrypt/etc",
"--work-dir=/var/cache/bunkerweb/letsencrypt/lib",
"--logs-dir=/var/cache/bunkerweb/letsencrypt/log",
"--manual",
"--preferred-challenges=http",
"--manual-auth-hook",
@ -54,7 +61,30 @@ def certbot_new(domains, email):
return proc.returncode
status = 0
try:
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/letsencrypt").mkdir(parents=True, exist_ok=True)
# Extract letsencrypt folder if it exists in db
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
rmtree("/var/cache/bunkerweb/letsencrypt", ignore_errors=True)
# Extract it
with tfopen(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall("/var/cache/bunkerweb/letsencrypt")
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
@ -72,7 +102,9 @@ try:
" ", ","
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
if Path(
f"/var/cache/bunkerweb/letsencrypt/{first_server}/cert.pem"
).exists():
logger.info(
f"Certificates already exists for domain(s) {domains}",
)
@ -89,36 +121,24 @@ try:
f"Asking certificates for domains : {domains} (email = {real_email}) ...",
)
if certbot_new(domains, real_email) != 0:
status = 1
status = 2
logger.error(
f"Certificate generation failed for domain(s) {domains} ...",
)
else:
status = 1
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
# Update db
with lock:
err = db.update_job_cache(
"certbot-new",
first_server,
"cert.pem",
Path(
f"/etc/letsencrypt/live/{first_server}/cert.pem"
).read_bytes(),
)
if err:
logger.warning(f"Couldn't update db cache: {err}")
# Singlesite case
elif getenv("AUTO_LETS_ENCRYPT", "no") == "yes" and getenv("SERVER_NAME"):
first_server = getenv("SERVER_NAME", "").split(" ")[0]
domains = getenv("SERVER_NAME", "").replace(" ", ",")
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
if Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
).exists():
logger.info(f"Certificates already exists for domain(s) {domains}")
else:
real_email = getenv("EMAIL_LETS_ENCRYPT", f"contact@{first_server}")
@ -132,26 +152,31 @@ try:
status = 2
logger.error(f"Certificate generation failed for domain(s) : {domains}")
else:
status = 1
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
# Update db
with lock:
err = db.update_job_cache(
"certbot-new",
first_server,
"cert.pem",
Path(
f"/etc/letsencrypt/live/{first_server}/cert.pem"
).read_bytes(),
)
# Put new folder in cache
if db:
bio = BytesIO()
with tfopen("folder.tgz", mode="w:gz", fileobj=bio) as tgz:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db(f"certbot-new", f"folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
if Path("/var/cache/bunkerweb/letsencrypt/lib").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/lib", ignore_errors=True)
if Path("/var/cache/bunkerweb/letsencrypt/log").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/log", ignore_errors=True)
if err:
logger.warning(f"Couldn't update db cache: {err}")
except:
status = 1
status = 3
logger.error(f"Exception while running certbot-new.py :\n{format_exc()}")
sys_exit(status)

View File

@ -1,19 +1,25 @@
#!/usr/bin/python3
from os import environ, getenv
from os import environ, getenv, listdir
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from tarfile import open as tfopen
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
)
)
from logger import setup_logger
from Database import Database
from jobs import get_file_in_db, set_file_in_db
def renew(domain):
@ -22,6 +28,9 @@ def renew(domain):
[
"/usr/share/bunkerweb/deps/python/bin/certbot",
"renew",
"--config-dir=/var/cache/bunkerweb/letsencrypt/etc",
"--work-dir=/var/cache/bunkerweb/letsencrypt/lib",
"--logs-dir=/var/cache/bunkerweb/letsencrypt/log",
"--cert-name",
domain,
"--deploy-hook",
@ -34,10 +43,31 @@ def renew(domain):
return proc.returncode
logger = setup_logger("LETS-ENCRYPT", getenv("LOG_LEVEL", "INFO"))
logger = setup_logger("LETS-ENCRYPT.renew", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/letsencrypt").mkdir(parents=True, exist_ok=True)
# Extract letsencrypt folder if it exists in db
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
rmtree("/var/cache/bunkerweb/letsencrypt", ignore_errors=True)
# Extract it
with tfopen(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall("/var/cache/bunkerweb/letsencrypt")
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
if getenv("MULTISITE") == "yes":
servers = getenv("SERVER_NAME", [])
@ -52,7 +82,9 @@ try:
getenv("AUTO_LETS_ENCRYPT", "no"),
)
!= "yes"
or not Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists()
or not Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
).exists()
):
continue
@ -64,7 +96,9 @@ try:
)
elif getenv("AUTO_LETS_ENCRYPT", "no") == "yes" and not getenv("SERVER_NAME", ""):
first_server = getenv("SERVER_NAME", "").split(" ")[0]
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
if Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
).exists():
ret = renew(first_server)
if ret != 0:
status = 2
@ -72,6 +106,24 @@ try:
f"Certificates renewal for {first_server} failed",
)
# Put new folder in cache
if db:
bio = BytesIO()
with tfopen("folder.tgz", mode="w:gz", fileobj=bio) as tgz:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db("certbot-new", "folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
if Path("/var/cache/bunkerweb/letsencrypt/lib").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/lib", ignore_errors=True)
if Path("/var/cache/bunkerweb/letsencrypt/log").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/log", ignore_errors=True)
except:
status = 2
logger.error(f"Exception while running certbot-renew.py :\n{format_exc()}")

View File

@ -1,7 +1,7 @@
#!/usr/bin/python3
from os import getenv, makedirs
from os.path import isfile
from os import getenv
from pathlib import Path
from subprocess import DEVNULL, run
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
@ -48,8 +48,10 @@ try:
# Generate the self-signed certificate
if need_default_cert:
makedirs("/var/cache/bunkerweb/default-server-cert", exist_ok=True)
if not isfile("/var/cache/bunkerweb/default-server-cert/cert.pem"):
Path("/var/cache/bunkerweb/default-server-cert").mkdir(
parents=True, exist_ok=True
)
if not Path("/var/cache/bunkerweb/default-server-cert/cert.pem").is_file():
logger.info("Generating self-signed certificate for default server")
cmd = "openssl req -nodes -x509 -newkey rsa:4096 -keyout /var/cache/bunkerweb/default-server-cert/cert.key -out /var/cache/bunkerweb/default-server-cert/cert.pem -days 3650".split(
@ -63,6 +65,7 @@ try:
)
status = 2
else:
status = 1
logger.info(
"Successfully generated self-signed certificate for default server",
)

View File

@ -64,8 +64,9 @@ try:
logger.info("RealIP is not activated, skipping download...")
_exit(0)
# Create directory if it doesn't exist
# Create directories if they don't exist
Path("/var/cache/bunkerweb/realip").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/realip").mkdir(parents=True, exist_ok=True)
db = Database(
logger,
@ -107,10 +108,10 @@ try:
f"Exception while getting RealIP list from {url} :\n{format_exc()}"
)
Path("/var/tmp/bunkerweb/realip-combined.list").write_bytes(content)
Path("/var/tmp/bunkerweb/realip/combined.list").write_bytes(content)
# Check if file has changed
new_hash = file_hash("/var/tmp/bunkerweb/realip-combined.list")
new_hash = file_hash("/var/tmp/bunkerweb/realip/combined.list")
old_hash = cache_hash("/var/cache/bunkerweb/realip/combined.list", db)
if new_hash == old_hash:
logger.info("New file is identical to cache file, reload is not needed")

View File

@ -25,6 +25,8 @@ db = Database(
)
lock = Lock()
status = 0
def generate_cert(first_server, days, subj):
if Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem").is_file():
@ -41,6 +43,8 @@ def generate_cert(first_server, days, subj):
logger.error(f"Self-signed certificate generation failed for {first_server}")
return False, 2
return True, 1
# Update db
with lock:
err = db.update_job_cache(
@ -103,10 +107,7 @@ try:
getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
),
)
if not ret:
status = ret_status
elif ret_status == 1 and ret_status != 2:
status = 1
status = ret_status
# Singlesite case
elif getenv("GENERATE_SELF_SIGNED_SSL", "no") == "yes" and getenv("SERVER_NAME"):
@ -116,10 +117,7 @@ try:
getenv("SELF_SIGNED_SSL_EXPIRY", "365"),
getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
)
if not ret:
status = ret_status
elif ret_status == 1 and ret_status != 2:
status = 1
status = ret_status
except:
status = 2

View File

@ -176,6 +176,8 @@ try:
if not cached:
logger.error(f"Error while caching whitelist : {err}")
status = 2
else:
status = 1
except:
status = 2
logger.error(

View File

@ -189,7 +189,7 @@ function whitelist:check_cache()
end
function whitelist:is_in_cache(ele)
local ok, data = self.cachestore:get("plugin_whitelist_" .. ele)
local ok, data = self.cachestore:get("plugin_whitelist_" .. ngx.ctx.bw.server_name .. ele)
if not ok then
return false, data
end
@ -197,7 +197,7 @@ function whitelist:is_in_cache(ele)
end
function whitelist:add_to_cache(ele, value)
local ok, err = self.cachestore:set("plugin_whitelist_" .. ele, value, 86400)
local ok, err = self.cachestore:set("plugin_whitelist_" .. ngx.ctx.bw.server_name .. ele, value, 86400)
if not ok then
return false, err
end

View File

@ -877,6 +877,12 @@ class Database:
return ""
def delete_job_cache(self, job_name: str, file_name: str):
with self.__db_session() as session:
session.query(Jobs_cache).filter_by(
job_name=job_name, file_name=file_name
).delete()
def update_job_cache(
self,
job_name: str,

View File

@ -8,7 +8,6 @@ from sqlalchemy import (
Integer,
LargeBinary,
PrimaryKeyConstraint,
SmallInteger,
String,
)
from sqlalchemy.orm import declarative_base, relationship
@ -53,7 +52,7 @@ Base = declarative_base()
class Plugins(Base):
__tablename__ = "plugins"
__tablename__ = "bw_plugins"
id = Column(String(64), primary_key=True)
order = Column(Integer, nullable=False)
@ -74,7 +73,7 @@ class Plugins(Base):
class Settings(Base):
__tablename__ = "settings"
__tablename__ = "bw_settings"
__table_args__ = (
PrimaryKeyConstraint("id", "name"),
UniqueConstraint("id"),
@ -85,7 +84,7 @@ class Settings(Base):
name = Column(String(256), primary_key=True)
plugin_id = Column(
String(64),
ForeignKey("plugins.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_plugins.id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
context = Column(CONTEXTS_ENUM, nullable=False)
@ -107,22 +106,22 @@ class Settings(Base):
class Global_values(Base):
__tablename__ = "global_values"
__tablename__ = "bw_global_values"
setting_id = Column(
String(256),
ForeignKey("settings.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_settings.id", onupdate="cascade", ondelete="cascade"),
primary_key=True,
)
value = Column(String(4096), nullable=False)
suffix = Column(SmallInteger, primary_key=True, nullable=True, default=0)
suffix = Column(Integer, primary_key=True, nullable=True, default=0)
method = Column(METHODS_ENUM, nullable=False)
setting = relationship("Settings", back_populates="global_value")
class Services(Base):
__tablename__ = "services"
__tablename__ = "bw_services"
id = Column(String(64), primary_key=True)
method = Column(METHODS_ENUM, nullable=False)
@ -137,20 +136,20 @@ class Services(Base):
class Services_settings(Base):
__tablename__ = "services_settings"
__tablename__ = "bw_services_settings"
service_id = Column(
String(64),
ForeignKey("services.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_services.id", onupdate="cascade", ondelete="cascade"),
primary_key=True,
)
setting_id = Column(
String(256),
ForeignKey("settings.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_settings.id", onupdate="cascade", ondelete="cascade"),
primary_key=True,
)
value = Column(String(4096), nullable=False)
suffix = Column(SmallInteger, primary_key=True, nullable=True, default=0)
suffix = Column(Integer, primary_key=True, nullable=True, default=0)
method = Column(METHODS_ENUM, nullable=False)
service = relationship("Services", back_populates="settings")
@ -158,13 +157,13 @@ class Services_settings(Base):
class Jobs(Base):
__tablename__ = "jobs"
__tablename__ = "bw_jobs"
__table_args__ = (UniqueConstraint("name", "plugin_id"),)
name = Column(String(128), primary_key=True)
plugin_id = Column(
String(64),
ForeignKey("plugins.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_plugins.id", onupdate="cascade", ondelete="cascade"),
)
file_name = Column(String(256), nullable=False)
every = Column(SCHEDULES_ENUM, nullable=False)
@ -177,7 +176,7 @@ class Jobs(Base):
class Plugin_pages(Base):
__tablename__ = "plugin_pages"
__tablename__ = "bw_plugin_pages"
id = Column(
Integer,
@ -186,7 +185,7 @@ class Plugin_pages(Base):
)
plugin_id = Column(
String(64),
ForeignKey("plugins.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_plugins.id", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
template_file = Column(LargeBinary(length=(2**32) - 1), nullable=False)
@ -198,7 +197,7 @@ class Plugin_pages(Base):
class Jobs_cache(Base):
__tablename__ = "jobs_cache"
__tablename__ = "bw_jobs_cache"
__table_args__ = (UniqueConstraint("job_name", "service_id", "file_name"),)
id = Column(
@ -208,12 +207,12 @@ class Jobs_cache(Base):
)
job_name = Column(
String(128),
ForeignKey("jobs.name", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_jobs.name", onupdate="cascade", ondelete="cascade"),
nullable=False,
)
service_id = Column(
String(64),
ForeignKey("services.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_services.id", onupdate="cascade", ondelete="cascade"),
nullable=True,
)
file_name = Column(
@ -229,7 +228,7 @@ class Jobs_cache(Base):
class Custom_configs(Base):
__tablename__ = "custom_configs"
__tablename__ = "bw_custom_configs"
__table_args__ = (UniqueConstraint("service_id", "type", "name"),)
id = Column(
@ -239,7 +238,7 @@ class Custom_configs(Base):
)
service_id = Column(
String(64),
ForeignKey("services.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_services.id", onupdate="cascade", ondelete="cascade"),
nullable=True,
)
type = Column(CUSTOM_CONFIGS_TYPES_ENUM, nullable=False)
@ -252,11 +251,11 @@ class Custom_configs(Base):
class Selects(Base):
__tablename__ = "selects"
__tablename__ = "bw_selects"
setting_id = Column(
String(256),
ForeignKey("settings.id", onupdate="cascade", ondelete="cascade"),
ForeignKey("bw_settings.id", onupdate="cascade", ondelete="cascade"),
primary_key=True,
)
value = Column(String(256), primary_key=True)
@ -265,7 +264,7 @@ class Selects(Base):
class Instances(Base):
__tablename__ = "instances"
__tablename__ = "bw_instances"
hostname = Column(String(256), primary_key=True)
port = Column(Integer, nullable=False)
@ -273,7 +272,7 @@ class Instances(Base):
class Metadata(Base):
__tablename__ = "metadata"
__tablename__ = "bw_metadata"
id = Column(Integer, primary_key=True, default=1)
is_initialized = Column(Boolean, nullable=False)

View File

@ -1,4 +1,4 @@
sqlalchemy==2.0.10
sqlalchemy==2.0.12
psycopg2-binary==2.9.6
PyMySQL==1.0.3
cryptography==40.0.2

View File

@ -225,48 +225,48 @@ pymysql==1.0.3 \
--hash=sha256:3dda943ef3694068a75d69d071755dbecacee1adf9a1fc5b206830d2b67d25e8 \
--hash=sha256:89fc6ae41c0aeb6e1f7710cdd623702ea2c54d040565767a78b00a5ebb12f4e5
# via -r requirements.in
sqlalchemy==2.0.10 \
--hash=sha256:04020aba2c0266ec521095ddd5cb760fc0067b0088828ccbf6b323c900a62e59 \
--hash=sha256:06401013dad015e6f6f72c946f66d750fe4c5ef852ed2f15537d572cb92d7a75 \
--hash=sha256:096d9f72882035b4c6906172bf5c5afe4caefbfe0e028ab0c83dfdaa670cc193 \
--hash=sha256:1f5638aac94c8f3fe04ca030e2b3e84d52d70f15d67f35f794fd2057284abced \
--hash=sha256:1fa90ed075ebc5fefc504c0e35b84fde1880d7c095473c5aa0c01f63eb37beae \
--hash=sha256:207c2cc9b946f832fd45fbdd6276c28e3e80b206909a028cd163e87f4080a333 \
--hash=sha256:23e3e1cc3634a70bba2ab10c144d4f11cf0ddeca239bbdaf646770873030c600 \
--hash=sha256:28c79289b4bf21cf09fb770b124cfae2432bbafb2ffd6758ac280bc1cacabfac \
--hash=sha256:2bd944dc701be15a91ec965c6634ab90998ca2d14e4f1f568545547a3a3adc16 \
--hash=sha256:2fdccadc9359784ae12ae9199849b724c7165220ae93c6066e841b66c6823742 \
--hash=sha256:300e8165bc78a0a917b39617730caf2c08c399302137c562e5ce7a37780ad10f \
--hash=sha256:39869cf2cfe73c8ad9a6f15712a2ed8c13c1f87646611882efb6a8ec80d180e8 \
--hash=sha256:3e77ed2e6d911aafc931c92033262d2979a44317294328b071a53aa10e2a9614 \
--hash=sha256:4a1ec8fcbe7e6a6ec28e161c6030d8cf5077e31efc3d08708d8de5aa8314b345 \
--hash=sha256:5892afc393ecd5f20910ff5a6b90d56620ec2ef3e36e3358eaedbae2aa36816d \
--hash=sha256:5e8abd2ce0745a2819f3e41a17570c9d74b634a5b5ab5a04de5919e55d5d8601 \
--hash=sha256:61ea1af2d01e709dcd4edc0d994db42bac6b2673c093cc35df3875e54cad9cef \
--hash=sha256:631ea4d1a8d78b43126773fa2de5472d97eb54dc4b9fbae4d8bd910f72f31f25 \
--hash=sha256:6b15cadba33d77e6fcee4f4f7706913d143d20e48ce26e9b6578b5cd07d4a353 \
--hash=sha256:70aed8f508f6c2f4da63ee6fa853534bb97d47bc82e28d56442f62a0b6ad2660 \
--hash=sha256:736e92fa4d6e020fc780b915bcdd69749ad32c79bc6b031e85dcd2b8069f8de1 \
--hash=sha256:7a8ca39fbc2dfe357f03e398bf5c1421b9b6614a8cf69ccada9ab3ef7e036073 \
--hash=sha256:7da5bf86746ddbf8d68f1a3f9d1efee1d95e07d5ad63f47b839f4db799e12566 \
--hash=sha256:88df3327c32468716a52c10e7991268afb552a0a7ef36130925864f28873d2e0 \
--hash=sha256:89e7a05639b3ae4fd17062a37b0ee336ea50ac9751e98e3330a6ed95daa4880c \
--hash=sha256:8a3e3f34468a512b3886ac5584384aed8bef388297c710509a842fb1468476f3 \
--hash=sha256:8c3366be42bca5c066703af54b856e00f23b8fbef9ab0346a58d34245af695a5 \
--hash=sha256:9a77e29a96779f373eb144040e5fae1e3944916c13360715e74f73b186f0d8d2 \
--hash=sha256:a4cdac392547dec07d69c5e8b05374b0357359ebc58ab2bbcb9fa0370ecb715f \
--hash=sha256:a9aa445201754a49b7ddb0b99fbe5ccf98f6900548fc60a0a07dde2253dd541e \
--hash=sha256:af525e9fbcf7da7404fc4b91ca4ce6172457d3f4390b93941fb97bfe29afb7dc \
--hash=sha256:b608ad640ac70e2901d111a69ad975e6b0ca39947e08cc28691b0de00831a787 \
--hash=sha256:d46edd508123413595a17bb64655db7c4bfefa83e721a3064f66e046e9a6a103 \
--hash=sha256:d975ac2bc513f530fa2574eb58e0ca731357d4686de2fb644af3036fca4f3fd6 \
--hash=sha256:dcd5793b98eb043703895443cc399fb8e2ce21c9b09757e954e425c8415c541b \
--hash=sha256:dd40fbf4f916a41b4afe50665e2d029a1c9f74967fd3b7422475529641d31ef5 \
--hash=sha256:dddbe2c012d712873fb9f203512db57d3cbdd20803f0792aa01bc513da8a2380 \
--hash=sha256:e9d7e65c2c4f313524399f6b8ec14bfa8f4e9fccd999ff585e10e073cfd21429 \
--hash=sha256:ec910449c70b0359dbe08a5e8c63678c7ef0113ab61cd0bb2e80ed09ea8ce6ab \
--hash=sha256:ed368ee7b1c119d5f6321cc9a3ea806adacf522bb4c2e9e398cbfc2e2cc68a2a \
--hash=sha256:faa6d2e6d6d46d2d58c5a4713148300b44fcfc911341ec82d8731488d0757f96
sqlalchemy==2.0.12 \
--hash=sha256:03206576ca53f55b9de6e890273e498f4b2e6e687a9db9859bdcd21df5a63e53 \
--hash=sha256:09205893a84b6bedae0453d3f384f5d2a6499b6e45ad977549894cdcd85d8f1c \
--hash=sha256:0e5501c78b5ab917f0f0f75ce7f0018f683a0a76e95f30e6561bf61c9ff69d43 \
--hash=sha256:10f1ff0ebe21d2cea89ead231ba3ecf75678463ab85f19ce2ce91207620737f3 \
--hash=sha256:1fac17c866111283cbcdb7024d646abb71fdd95f3ce975cf3710258bc55742fd \
--hash=sha256:297b752d4f30350b64175bbbd57dc94c061a35f5d1dba088d0a367dbbebabc94 \
--hash=sha256:2a3101252f3de9a18561c1fb0a68b1ee465485990aba458d4510f214bd5a582c \
--hash=sha256:32762dba51b663609757f861584a722093487f53737e76474cc6e190904dc31b \
--hash=sha256:369f6564e68a9c60f0b9dde121def491e651a4ba8dcdd652a93f1cd5977cd85c \
--hash=sha256:3745dee26a7ee012598577ad3b8f6e6cd50a49b2afa0cde9db668da6bf2c2319 \
--hash=sha256:3c053c3f4c4e45d4c8b27977647566c140d6de3f61a4e2acb92ea24cf9911c7f \
--hash=sha256:4ad525b9dd17b478a2ed8580d7f2bc46b0f5889153c6b1c099729583e395b4b9 \
--hash=sha256:53b2c8adbcbb59732fb21a024aaa261983655845d86e3fc26a5676cec0ebaa09 \
--hash=sha256:5d709f43caee115b03b707b8cbbcb8b303045dd7cdc825b6d29857d71f3425ae \
--hash=sha256:5e9d390727c11b9a7e583bf6770de36895c0936bddb98ae93ae99282e6428d5f \
--hash=sha256:6b1fa0ffc378a7061c452cb4a1f804fad1b3b8aa8d0552725531d27941b2e3ed \
--hash=sha256:6e1d50592cb24d1947c374c666add65ded7c181ec98a89ed17abbe9b8b2e2ff4 \
--hash=sha256:77a06b0983faf9aa48ee6219d41ade39dee16ce90857cc181dbcf6918acd234d \
--hash=sha256:7eb25b981cbc9e7df9f56ad7ec4c6d77323090ca4b7147fcdc09d66535377759 \
--hash=sha256:85b0efe1c71459ba435a6593f54a0e39334b16ba383e8010fdb9d0127ca51ba8 \
--hash=sha256:87b2c2d13c3d1384859b60eabb3139e169ce68ada1d2963dbd0c7af797f16efe \
--hash=sha256:8aad66215a3817a7a1d535769773333250de2653c89b53f7e2d42b677d398027 \
--hash=sha256:91f4b1bdc987ef85fe3a0ce5d26ac72ff8f60207b08272aa2a65494836391d69 \
--hash=sha256:978bee4ecbcdadf087220618409fb9be9509458df479528b70308f0599c7c519 \
--hash=sha256:9fe98e9d26778d7711ceee2c671741b4f54c74677668481d733d6f70747d7690 \
--hash=sha256:a022c588c0f413f8cddf9fcc597dbf317efeac4186d8bff9aa7f3219258348b0 \
--hash=sha256:a4709457f1c317e347051498b91fa2b86c4bcdebf93c84e6d121a4fc8a397307 \
--hash=sha256:aec5fb36b53125554ecc2285526eb5cc31b21f6cb059993c1c5ca831959de052 \
--hash=sha256:b6ceca432ce88ad12aab5b5896c343a1993c90b325d9193dcd055e73e18a0439 \
--hash=sha256:b76c2fde827522e21922418325c1b95c2d795cdecfb4bc261e4d37965199ee7f \
--hash=sha256:bddfc5bd1dee5db0fddc9dab26f800c283f3243e7281bbf107200fed30125f9c \
--hash=sha256:bf83700faa9642388fbd3167db3f6cbb2e88cc8367b8c22204f3f408ee782d25 \
--hash=sha256:c5268ec05c21e2ecf5bca09314bcaadfec01f02163088cd602db4379862958dd \
--hash=sha256:d9796d5c13b2b7f05084d0ce52528cf919f9bde9e0f10672a6393a4490415695 \
--hash=sha256:dc67efd00ce7f428a446ce012673c03c63c5abb5dec3f33750087b8bdc173bf0 \
--hash=sha256:dfd6385b662aea83e63dd4db5fe116eb11914022deb1745f0b57fa8470c18ffe \
--hash=sha256:e495ad05a13171fbb5d72fe5993469c8bceac42bcf6b8f9f117a518ee7fbc353 \
--hash=sha256:e752c34f7a2057ebe82c856698b9f277c633d4aad006bddf7af74598567c8931 \
--hash=sha256:f0843132168b44ca33c5e5a2046c954775dde8c580ce27f5cf2e134d0d9919e4 \
--hash=sha256:f30c5608c64fc9c1fa9a16277eb4784f782362566fe40ff8d283358c8f2c5fe0 \
--hash=sha256:f6ebadefc4331dda83c22519e1ea1e61104df6eb38abbb80ab91b0a8527a5c19
# via -r requirements.in
typing-extensions==4.5.0 \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \

View File

@ -1,6 +1,6 @@
docker==6.0.1
docker==6.1.0
kubernetes==26.1.0
jinja2==3.1.2
python-dotenv==1.0.0
requests==2.28.2
requests==2.30.0
redis==4.5.4

View File

@ -12,9 +12,9 @@ cachetools==5.3.0 \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
# via google-auth
certifi==2022.12.7 \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
certifi==2023.5.7 \
--hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \
--hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
# via
# kubernetes
# requests
@ -95,9 +95,9 @@ charset-normalizer==3.1.0 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
# via requests
docker==6.0.1 \
--hash=sha256:896c4282e5c7af5c45e8b683b0b0c33932974fe6e50fc6906a0a83616ab3da97 \
--hash=sha256:dbcb3bd2fa80dca0788ed908218bf43972772009b881ed1e20dfc29a65e49782
docker==6.1.0 \
--hash=sha256:b65c999f87cb5c31700b6944dc17a631071170d1aab3ad6e23506068579f885d \
--hash=sha256:cb697eccfeff55d232f7a7f4f88cd3770d27327c38d6c266b8f55c9f14a8491e
# via -r requirements.in
google-auth==2.17.3 \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
@ -239,9 +239,9 @@ redis==4.5.4 \
--hash=sha256:2c19e6767c474f2e85167909061d525ed65bea9301c0770bb151e041b7ac89a2 \
--hash=sha256:73ec35da4da267d6847e47f68730fdd5f62e2ca69e3ef5885c6a78a9374c3893
# via -r requirements.in
requests==2.28.2 \
--hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \
--hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf
requests==2.30.0 \
--hash=sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294 \
--hash=sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4
# via
# -r requirements.in
# docker
@ -262,9 +262,9 @@ six==1.16.0 \
# google-auth
# kubernetes
# python-dateutil
urllib3==1.26.15 \
--hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \
--hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42
urllib3==2.0.2 \
--hash=sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc \
--hash=sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e
# via
# docker
# kubernetes

View File

@ -49,10 +49,15 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
):
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"exploded": custom_confs_rx.search(splitted[0]).groups(),
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
else:
@ -227,11 +232,20 @@ if __name__ == "__main__":
plugins_settings=plugins_settings,
)
config_files = config.get_config()
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()} # type: ignore
for k, v in environ.items()
if custom_confs_rx.match(k)
]
custom_confs = []
for k, v in environ.items():
if custom_confs_rx.match(k):
custom_conf = custom_confs_rx.search(k).groups()
custom_confs.append(
{
"value": v,
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
root_dirs = listdir("/etc/bunkerweb/configs")
for root, dirs, files in walk("/etc/bunkerweb/configs", topdown=True):
if (
@ -276,12 +290,15 @@ if __name__ == "__main__":
for var in instance.attrs["Config"]["Env"]:
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"exploded": custom_confs_rx.search(
splitted[0]
).groups(),
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
else:

View File

@ -138,14 +138,16 @@ class ApiCaller:
f"Successfully sent API request to {api.get_endpoint()}{url}",
)
if response:
instance = api.get_endpoint().replace("http://", "").split(":")[0]
if isinstance(resp, dict):
responses[instance] = resp
else:
responses[instance] = resp.json()
if response:
instance = (
api.get_endpoint().replace("http://", "").split(":")[0]
)
if isinstance(resp, dict):
responses[instance] = resp
else:
responses[instance] = resp.json()
if response:
if response and responses:
return ret, responses
return ret
@ -153,7 +155,7 @@ class ApiCaller:
ret = True
with BytesIO() as tgz:
with taropen(
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=5
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3
) as tf:
tf.add(path, arcname=".")
tgz.seek(0, 0)

View File

@ -56,7 +56,44 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
if is_cached and cached_file:
Path(file).write_bytes(cached_file.data)
return is_cached
return is_cached and cached_file
def get_file_in_db(job: str, file: str, db) -> bytes:
cached_file = db.get_job_cache_file(job, file)
if not cached_file:
return False
return cached_file.data
def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
content = bio.read()
bio.seek(0)
with lock:
err = db.update_job_cache(
basename(getsourcefile(_getframe(1))).replace(".py", ""),
None,
name,
content,
checksum=bytes_hash(bio),
)
if err:
ret = False
except:
return False, f"exception :\n{format_exc()}"
return ret, err
def del_file_in_db(job: str, name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
db.delete_job_cache(job, name)
except:
return False, f"exception :\n{format_exc()}"
return ret, err
def file_hash(file: str) -> str:
@ -70,6 +107,17 @@ def file_hash(file: str) -> str:
return _sha512.hexdigest()
def bytes_hash(bio: bytes) -> str:
_sha512 = sha512()
while True:
data = bio.read(1024)
if not data:
break
_sha512.update(data)
bio.seek(0)
return _sha512.hexdigest()
def cache_hash(cache: str, db=None) -> Optional[str]:
with suppress(BaseException):
return loads(Path(f"{cache}.md").read_text()).get("checksum", None)

View File

@ -33,6 +33,7 @@ RUN dnf install -y python39-pip brotli brotli-devel wget gperftools-devel perl l
chmod +x /tmp/bunkerweb/deps/install.sh && \
bash /tmp/bunkerweb/deps/install.sh && \
mkdir /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip3.9 install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt
#RUN pip3.9 install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /tmp/bunkerweb/deps/requirements.txt
@ -64,7 +65,6 @@ RUN cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb/ && \
mkdir -p /var/www/html && \
mkdir -p /var/lib/bunkerweb && \
mkdir -p /etc/letsencrypt && \
#mkdir /var/www/html && \
echo "Linux" > /usr/share/bunkerweb/INTEGRATION && \
#It's a find command that will find all files in the bunkerweb directory, excluding the ui/deps directory, and then chmod them to 0740.

View File

@ -37,6 +37,7 @@ RUN apt update && \
chmod +x /tmp/bunkerweb/deps/install.sh && \
bash /tmp/bunkerweb/deps/install.sh && \
mkdir /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt
# Dirty fix to avoid errors with --target and packages same namespace
@ -69,8 +70,7 @@ RUN cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb/ && \
mkdir -p /var/www/ && \
mkdir -p /var/lib/bunkerweb && \
mkdir -p /etc/letsencrypt && \
#mkdir /var/www/html && \
mkdir /var/www/html && \
echo "Linux" > /usr/share/bunkerweb/INTEGRATION && \
#It's a find command that will find all files in the bunkerweb directory, excluding the ui/deps directory, and then chmod them to 0740.
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type f -exec chmod 0740 {} \; && \

View File

@ -1,4 +1,4 @@
FROM fedora:37
FROM fedora:38
ENV OS=fedora
ENV NGINX_VERSION 1.24.0
@ -31,6 +31,7 @@ RUN dnf install -y python3-pip brotli brotli-devel gperftools-devel perl libxslt
chmod +x /tmp/bunkerweb/deps/install.sh && \
bash /tmp/bunkerweb/deps/install.sh && \
mkdir /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt
# Dirty fix to avoid errors with --target and packages same namespace
#cp -r /usr/lib64/python3.10/* /usr/lib/python3.10/ && \
@ -63,7 +64,6 @@ RUN cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb/ && \
mkdir -p /var/www/html && \
mkdir -p /var/lib/bunkerweb && \
mkdir -p /etc/letsencrypt && \
echo "Linux" > /usr/share/bunkerweb/INTEGRATION && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type f -exec chmod 0740 {} \; && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type d -exec chmod 0750 {} \; && \

View File

@ -5,6 +5,7 @@ ENV NGINX_VERSION 1.24.0
# Copy centos repo
COPY src/linux/centos.repo /etc/yum.repos.d/centos.repo
RUN sed -i "s/%ARCH%/$(uname -m)/g" /etc/yum.repos.d/centos.repo
# Copy RPM-GPG-KEY-CentOS-Official
COPY src/linux/RPM-GPG-KEY-centosofficial /etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial
@ -45,6 +46,7 @@ RUN dnf install -y readline-devel python39-pip brotli brotli-devel gperftools-de
chmod +x /tmp/bunkerweb/deps/install.sh && \
bash /tmp/bunkerweb/deps/install.sh && \
mkdir /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip3.9 install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt
#RUN pip3.10 install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /tmp/bunkerweb/deps/requirements.txt
@ -76,7 +78,6 @@ RUN cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb/ && \
mkdir -p /var/www/html && \
mkdir -p /var/lib/bunkerweb && \
mkdir -p /etc/letsencrypt && \
echo "Linux" > /usr/share/bunkerweb/INTEGRATION && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type f -exec chmod 0740 {} \; && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type d -exec chmod 0750 {} \; && \

View File

@ -37,6 +37,7 @@ RUN apt update && \
chmod +x /tmp/bunkerweb/deps/install.sh && \
bash /tmp/bunkerweb/deps/install.sh && \
mkdir /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt
# Copy files
@ -66,7 +67,6 @@ RUN cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb/ && \
mkdir -p /var/www/html && \
mkdir -p /var/lib/bunkerweb && \
mkdir -p /etc/letsencrypt && \
echo "Linux" > /usr/share/bunkerweb/INTEGRATION && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type f -exec chmod 0740 {} \; && \
find /usr/share/bunkerweb -path /usr/share/bunkerweb/ui/deps -prune -o -type d -exec chmod 0750 {} \; && \

View File

@ -1,6 +1,6 @@
[centos8-base]
name = CentOS 8 Base OS
baseurl = http://mirror.centos.org/centos/8-stream/BaseOS/x86_64/os/
baseurl = http://mirror.centos.org/centos/8-stream/BaseOS/%ARCH%/os/
gpgcheck = 1
enabled = 1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial

View File

@ -10,4 +10,4 @@
--before-install /usr/share/bunkerweb/scripts/beforeInstall.sh
--after-install /usr/share/bunkerweb/scripts/postinstall.sh
--after-remove /usr/share/bunkerweb/scripts/afterRemoveRPM.sh
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb /etc/letsencrypt=/etc/letsencrypt
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb

View File

@ -2,7 +2,7 @@
--name bunkerweb
--license agpl3
--version %VERSION%
--architecture amd64
--architecture %ARCH%
--depends bash --depends python3 --depends procps --depends python3-pip --depends 'nginx = 1.24.0-1~bullseye' --depends libcurl4 --depends libgeoip-dev --depends libxml2 --depends libyajl2 --depends libmagic1 --depends net-tools --depends sudo --depends lsof
--description "BunkerWeb %VERSION% for Debian 11"
--url "https://www.bunkerweb.io"
@ -10,4 +10,4 @@
--before-install /usr/share/bunkerweb/scripts/beforeInstall.sh
--after-install /usr/share/bunkerweb/scripts/postinstall.sh
--after-remove /usr/share/bunkerweb/scripts/afterRemoveDEB.sh
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb /etc/letsencrypt=/etc/letsencrypt
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb

View File

@ -2,12 +2,12 @@
--name bunkerweb
--license agpl3
--version %VERSION%
--architecture x86_64
--depends bash --depends python3 --depends 'nginx = 1:1.24.0-1.fc37' --depends libcurl-devel --depends libxml2 --depends lmdb-libs --depends geoip-devel --depends gd --depends sudo --depends procps --depends lsof --depends nginx-mod-stream
--description "BunkerWeb %VERSION% for Fedora 37"
--architecture %ARCH%
--depends bash --depends python3 --depends 'nginx = 1:1.24.0-1.fc38' --depends libcurl-devel --depends libxml2 --depends lmdb-libs --depends geoip-devel --depends gd --depends sudo --depends procps --depends lsof --depends nginx-mod-stream --depends pcre
--description "BunkerWeb %VERSION% for Fedora 38"
--url "https://www.bunkerweb.io"
--maintainer "Bunkerity <contact at bunkerity dot com>"
--before-install /usr/share/bunkerweb/scripts/beforeInstall.sh
--after-install /usr/share/bunkerweb/scripts/postinstall.sh
--after-remove /usr/share/bunkerweb/scripts/afterRemoveRPM.sh
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb /etc/letsencrypt=/etc/letsencrypt
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb

View File

@ -2,12 +2,12 @@
--name bunkerweb
--license agpl3
--version %VERSION%
--architecture x86_64
--architecture %ARCH%
--depends bash --depends python39 --depends 'nginx = 1:1.24.0-1.el8.ngx' --depends libcurl-devel --depends libxml2 --depends file-libs --depends net-tools --depends gd --depends sudo --depends procps --depends lsof --depends geoip
--description "BunkerWeb %VERSION% for Rhel 8"
--description "BunkerWeb %VERSION% for RHEL 8"
--url "https://www.bunkerweb.io"
--maintainer "Bunkerity <contact at bunkerity dot com>"
--before-install /usr/share/bunkerweb/scripts/beforeInstall.sh
--after-install /usr/share/bunkerweb/scripts/postinstall.sh
--after-remove /usr/share/bunkerweb/scripts/afterRemoveRPM.sh
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb /etc/letsencrypt=/etc/letsencrypt
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb

View File

@ -2,6 +2,7 @@
--name bunkerweb
--license agpl3
--version %VERSION%
--architecture %ARCH%
--depends bash --depends python3 --depends python3-pip --depends 'nginx = 1.24.0-1~jammy' --depends libcurl4 --depends libgeoip-dev --depends libxml2 --depends libyajl2 --depends libmagic1 --depends net-tools --depends sudo --depends procps --depends lsof
--description "BunkerWeb %VERSION% for Ubuntu 22.04"
--url "https://www.bunkerweb.io"
@ -10,4 +11,4 @@
--after-install /usr/share/bunkerweb/scripts/postinstall.sh
--after-remove /usr/share/bunkerweb/scripts/afterRemoveDEB.sh
--deb-no-default-config-files
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb /etc/letsencrypt=/etc/letsencrypt
/usr/share/bunkerweb/=/usr/share/bunkerweb/ /usr/bin/bwcli=/usr/bin/bwcli /etc/bunkerweb/=/etc/bunkerweb /var/tmp/bunkerweb/=/var/tmp/bunkerweb /var/cache/bunkerweb/=/var/cache/bunkerweb /lib/systemd/system/bunkerweb.service=/lib/systemd/system/bunkerweb.service /lib/systemd/system/bunkerweb-ui.service=/lib/systemd/system/bunkerweb-ui.service /var/lib/bunkerweb=/var/lib/bunkerweb

View File

@ -1,6 +1,8 @@
#!/bin/bash
VERSION="$(cat /usr/share/bunkerweb/VERSION | tr -d '\n')"
ARCH="$(uname -m)"
sed -i "s/%VERSION%/${VERSION}/g" .fpm
sed -i "s/%ARCH%/${ARCH}/g" .fpm
fpm -t "$1" -p "/data/bunkerweb.$1"

View File

@ -1,124 +0,0 @@
#!/bin/bash
function do_and_check_cmd() {
if [ "$CHANGE_DIR" != "" ] ; then
cd "$CHANGE_DIR"
fi
output=$("$@" 2>&1)
ret="$?"
if [ $ret -ne 0 ] ; then
echo "❌ Error from command : $*"
echo "$output"
exit $ret
fi
#echo $output
return 0
}
# Check if we are root
if [ $(id -u) -ne 0 ] ; then
echo "❌ Run me as root"
exit 1
fi
# Detect OS
OS=""
if [ "$(grep Debian /etc/os-release)" != "" ] ; then
OS="debian"
elif [ "$(grep Ubuntu /etc/os-release)" != "" ] ; then
OS="ubuntu"
elif [ "$(grep CentOS /etc/os-release)" != "" ] ; then
OS="centos"
elif [ "$(grep Fedora /etc/os-release)" != "" ] ; then
OS="fedora"
fi
if [ "$OS" = "" ] ; then
echo "❌ Unsupported Operating System"
exit 1
fi
# Stop nginx
systemctl status nginx > /dev/null 2>&1
if [ $? -eq 0 ] ; then
echo " Stop nginx service"
do_and_check_cmd systemctl stop nginx
fi
# Reload old nginx.service file
# echo " Restore old nginx service"
# do_and_check_cmd mv /lib/systemd/system/nginx.service.bak /lib/systemd/system/nginx.service
# do_and_check_cmd systemctl daemon-reload
# echo " Remove bunkerweb-ui service"
if [ -f "/lib/systemd/system/bunkerweb-ui.service" ] ; then
echo " Remove bunkerweb-ui service"
do_and_check_cmd systemctl stop bunkerweb-ui
do_and_check_cmd systemctl disable bunkerweb-ui
do_and_check_cmd rm -f /lib/systemd/system/bunkerweb-ui.service
do_and_check_cmd systemctl daemon-reload
do_and_check_cmd systemctl reset-failed
fi
# do_and_check_cmd systemctl disable bunkerweb-ui
# do_and_check_cmd rm -f /etc/systemd/system/bunkerweb-ui.service
# do_and_check_cmd systemctl daemon-reload
# do_and_check_cmd systemctl reset-failed
# do_and_check_cmd sed -i "s@nginx ALL=(root:root) NOPASSWD: /usr/share/bunkerweb/ui/linux.sh@@" /etc/sudoers
# Remove /usr/share/bunkerweb
if [ -e "/usr/share/bunkerweb" ] ; then
echo " Remove /usr/share/bunkerweb"
do_and_check_cmd rm -rf /usr/share/bunkerweb
fi
# Remove /etc/bunkerweb
if [ -e "/etc/bunkerweb" ] ; then
echo " Remove /etc/bunkerweb"
do_and_check_cmd rm -rf /etc/bunkerweb
fi
# # Remove /var/tmp/bunkerweb
# if [ -e "/var/tmp/bunkerweb" ] ; then
# echo " Remove /var/tmp/bunkerweb"
# do_and_check_cmd rm -rf /var/tmp/bunkerweb
# fi
# Remove /var/lib/bunkerweb
if [ -e "/var/lib/bunkerweb" ] ; then
echo " Remove /var/lib/bunkerweb"
do_and_check_cmd rm -rf /var/lib/bunkerweb
fi
# Remove /usr/bin/bwcli
if [ -f "/usr/bin/bwcli" ] ; then
echo " Remove /usr/bin/bwcli"
do_and_check_cmd rm -f /usr/bin/bwcli
fi
# Remove systemd service
if [ -f "/lib/systemd/system/bunkerweb.service" ] ; then
echo " Remove bunkerweb service"
do_and_check_cmd systemctl stop bunkerweb
do_and_check_cmd systemctl disable bunkerweb
do_and_check_cmd rm -f /lib/systemd/system/bunkerweb.service
do_and_check_cmd systemctl daemon-reload
do_and_check_cmd systemctl reset-failed
fi
# Uninstall nginx
# if [ "$OS" = "debian" ] || [ "$OS" = "ubuntu" ] ; then
# echo " Uninstall nginx"
# do_and_check_cmd systemctl stop nginx
# do_and_check_cmd apt remove nginx -y
# echo " If you want to reinstall nginx, run the following command:"
# echo "apt-get install nginx"
# elif [ "$OS" = "centos" ] || [ "$OS" = "fedora" ] ; then
# echo " Uninstall nginx"
# do_and_check_cmd systemctl stop nginx
# do_and_check_cmd yum remove nginx -y
# echo " If you want to reinstall nginx, run the following command:"
# echo "apt-get install nginx"
# fi
# We're done
echo " BunkerWeb successfully uninstalled"

View File

@ -95,22 +95,4 @@ else
echo "/var/www/html directory already exists, skipping copy..."
fi
# Create letsencrypt folders if needed
if [ ! -d /etc/letsencrypt ] ; then
mkdir /etc/letsencrypt
fi
chown nginx:nginx /etc/letsencrypt
chmod 770 /etc/letsencrypt
if [ ! -d /var/lib/letsencrypt ] ; then
mkdir /var/lib/letsencrypt
fi
chown nginx:nginx /var/lib/letsencrypt
chmod 770 /var/lib/letsencrypt
if [ ! -d /var/log/letsencrypt ] ; then
mkdir /var/log/letsencrypt
fi
chown nginx:nginx /var/log/letsencrypt
chmod 770 /var/log/letsencrypt
echo "Postinstall successful !"
echo "Postinstall successful !"

View File

@ -14,6 +14,7 @@ RUN apk add --no-cache --virtual .build-deps g++ gcc musl-dev jpeg-dev zlib-dev
pip install --no-cache-dir --upgrade pip && \
pip install wheel && \
mkdir -p /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
pip install --no-cache-dir gunicorn && \
apk del .build-deps
@ -44,7 +45,6 @@ RUN apk add --no-cache bash libgcc libstdc++ openssl && \
mkdir -p /etc/bunkerweb && \
mkdir -p /data/cache && ln -s /data/cache /var/cache/bunkerweb && \
mkdir -p /data/lib && ln -s /data/lib /var/lib/bunkerweb && \
mkdir -p /data/cache/letsencrypt && ln -s /data/cache/letsencrypt /etc/letsencrypt && \
mkdir -p /data/www && ln -s /data/www /var/www/html && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \

View File

@ -105,10 +105,12 @@ class JobScheduler(ApiCaller):
f"Executing job {name} from plugin {plugin} ...",
)
success = True
ret = -1
try:
proc = run(
f"{path}jobs/{file}", stdin=DEVNULL, stderr=STDOUT, env=self.__env
)
ret = proc.returncode
except BaseException:
success = False
self.__logger.error(
@ -136,6 +138,7 @@ class JobScheduler(ApiCaller):
self.__logger.warning(
f"Failed to update database for the job {name} from plugin {plugin}: {err}",
)
return ret
def setup(self):
for plugin, jobs in self.__jobs.items():
@ -164,7 +167,7 @@ class JobScheduler(ApiCaller):
ret = job.run()
if ret == 1:
reload = True
elif (ret or 2) >= 2:
elif ret < 0 or ret >= 2:
success = False
if reload:
try:
@ -172,9 +175,13 @@ class JobScheduler(ApiCaller):
self.__logger.info("Sending /var/cache/bunkerweb folder ...")
if not self._send_files("/var/cache/bunkerweb", "/cache"):
success = False
self.__logger.error("Error while sending /var/cache/bunkerweb folder")
self.__logger.error(
"Error while sending /var/cache/bunkerweb folder"
)
else:
self.__logger.info("Successfully sent /var/cache/bunkerweb folder")
self.__logger.info(
"Successfully sent /var/cache/bunkerweb folder"
)
if not self.__reload():
success = False
except:

View File

@ -10,7 +10,6 @@ from os import (
getenv,
getpid,
listdir,
stat,
walk,
)
from os.path import dirname, join
@ -134,7 +133,7 @@ def generate_external_plugins(
Path(tmp_path).unlink()
for job_file in glob(f"{plugin_dir}/jobs/*"):
st = stat(job_file)
st = Path(job_file).stat()
chmod(job_file, st.st_mode | S_IEXEC)
if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):

View File

@ -12,9 +12,9 @@ certbot==2.5.0 \
--hash=sha256:76e6e5305021d3ee54c42fc471f8f0ed5dba790e6fd7fef6713060b0e42b97d7 \
--hash=sha256:a2d730753124508effe79f648264f5cab4d1e9120acfd695a4a0c2b7bab4a966
# via -r requirements.in
certifi==2022.12.7 \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
certifi==2023.5.7 \
--hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \
--hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
# via requests
cffi==1.15.1 \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
@ -236,9 +236,9 @@ pytz==2023.3 \
# acme
# certbot
# pyrfc3339
requests==2.28.2 \
--hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \
--hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf
requests==2.30.0 \
--hash=sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294 \
--hash=sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4
# via acme
schedule==1.2.0 \
--hash=sha256:415908febaba0bc9a7c727a32efb407d646fe994367ef9157d123aabbe539ea8 \
@ -248,9 +248,9 @@ six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
# via configobj
urllib3==1.26.15 \
--hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \
--hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42
urllib3==2.0.2 \
--hash=sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc \
--hash=sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e
# via requests
# The following packages are considered to be unsafe in a requirements file:

View File

@ -1,66 +1,67 @@
FROM python:3.11.3-alpine
# Copy python requirements
COPY src/ui/requirements.txt /tmp/req/requirements.txt
COPY src/common/gen/requirements.txt /tmp/req/requirements.txt.1
COPY src/common/db/requirements.txt /tmp/req/requirements.txt.2
RUN mkdir -p /usr/share/bunkerweb/deps && \
cat /tmp/req/requirements.txt /tmp/req/requirements.txt.1 /tmp/req/requirements.txt.2 > /usr/share/bunkerweb/deps/requirements.txt && \
rm -rf /tmp/req
# Install python requirements
RUN apk add --no-cache --virtual .build-deps g++ gcc musl-dev jpeg-dev zlib-dev libffi-dev cairo-dev pango-dev gdk-pixbuf-dev openssl-dev cargo file make postgresql-dev && \
pip install --no-cache-dir --upgrade pip && \
pip install wheel && \
mkdir -p /usr/share/bunkerweb/deps/python && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
apk del .build-deps
# Copy files
# can't exclude specific files/dir from . so we are copying everything by hand
COPY src/common/api /usr/share/bunkerweb/api
COPY src/common/db /usr/share/bunkerweb/db
COPY src/common/core /usr/share/bunkerweb/core
COPY src/common/gen /usr/share/bunkerweb/gen
COPY src/common/settings.json /usr/share/bunkerweb/settings.json
COPY src/common/utils /usr/share/bunkerweb/utils
COPY src/ui /usr/share/bunkerweb/ui
COPY src/VERSION /usr/share/bunkerweb/VERSION
# Add ui user
RUN apk add --no-cache bash && \
addgroup -g 101 ui && \
adduser -h /var/cache/nginx -g ui -s /bin/sh -G ui -D -H -u 101 ui && \
echo "Docker" > /usr/share/bunkerweb/INTEGRATION && \
mkdir -p /var/tmp/bunkerweb && \
mkdir -p /etc/bunkerweb && \
mkdir -p /data/cache && ln -s /data/cache /var/cache/bunkerweb && \
mkdir -p /data/lib && ln -s /data/lib /var/lib/bunkerweb && \
mkdir -p /var/log/nginx/ && touch /var/log/nginx/ui.log && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \
chown -R root:ui /data && \
chmod -R 770 /data && \
chown -R root:ui /usr/share/bunkerweb /var/cache/bunkerweb /var/lib/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /var/log/nginx && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type f -exec chmod 0740 {} \; ; done && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type d -exec chmod 0750 {} \; ; done && \
chmod 770 /var/cache/bunkerweb /var/lib/bunkerweb /var/tmp/bunkerweb /var/log/nginx/ui.log && \
chmod 750 /usr/share/bunkerweb/gen/*.py /usr/share/bunkerweb/ui/*.py /usr/share/bunkerweb/ui/src/*.py /usr/share/bunkerweb/deps/python/bin/* && \
chmod 660 /usr/share/bunkerweb/INTEGRATION
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
VOLUME /data /etc/nginx
EXPOSE 7000
WORKDIR /usr/share/bunkerweb/ui
USER ui:ui
HEALTHCHECK --interval=10s --timeout=10s --start-period=30s --retries=6 CMD /usr/share/bunkerweb/helpers/healthcheck-ui.sh
ENV PYTHONPATH /usr/share/bunkerweb/deps/python
FROM python:3.11.3-alpine
# Copy python requirements
COPY src/ui/requirements.txt /tmp/req/requirements.txt
COPY src/common/gen/requirements.txt /tmp/req/requirements.txt.1
COPY src/common/db/requirements.txt /tmp/req/requirements.txt.2
RUN mkdir -p /usr/share/bunkerweb/deps && \
cat /tmp/req/requirements.txt /tmp/req/requirements.txt.1 /tmp/req/requirements.txt.2 > /usr/share/bunkerweb/deps/requirements.txt && \
rm -rf /tmp/req
# Install python requirements
RUN apk add --no-cache --virtual .build-deps g++ gcc musl-dev jpeg-dev zlib-dev libffi-dev cairo-dev pango-dev gdk-pixbuf-dev openssl-dev cargo file make postgresql-dev && \
pip install --no-cache-dir --upgrade pip && \
pip install wheel && \
mkdir -p /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
apk del .build-deps
# Copy files
# can't exclude specific files/dir from . so we are copying everything by hand
COPY src/common/api /usr/share/bunkerweb/api
COPY src/common/db /usr/share/bunkerweb/db
COPY src/common/core /usr/share/bunkerweb/core
COPY src/common/gen /usr/share/bunkerweb/gen
COPY src/common/settings.json /usr/share/bunkerweb/settings.json
COPY src/common/utils /usr/share/bunkerweb/utils
COPY src/ui /usr/share/bunkerweb/ui
COPY src/VERSION /usr/share/bunkerweb/VERSION
# Add ui user
RUN apk add --no-cache bash && \
addgroup -g 101 ui && \
adduser -h /var/cache/nginx -g ui -s /bin/sh -G ui -D -H -u 101 ui && \
echo "Docker" > /usr/share/bunkerweb/INTEGRATION && \
mkdir -p /var/tmp/bunkerweb && \
mkdir -p /etc/bunkerweb && \
mkdir -p /data/cache && ln -s /data/cache /var/cache/bunkerweb && \
mkdir -p /data/lib && ln -s /data/lib /var/lib/bunkerweb && \
mkdir -p /var/log/nginx/ && touch /var/log/nginx/ui.log && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \
chown -R root:ui /data && \
chmod -R 770 /data && \
chown -R root:ui /usr/share/bunkerweb /var/cache/bunkerweb /var/lib/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /var/log/nginx && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type f -exec chmod 0740 {} \; ; done && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type d -exec chmod 0750 {} \; ; done && \
chmod 770 /var/cache/bunkerweb /var/lib/bunkerweb /var/tmp/bunkerweb /var/log/nginx/ui.log && \
chmod 750 /usr/share/bunkerweb/gen/*.py /usr/share/bunkerweb/ui/*.py /usr/share/bunkerweb/ui/src/*.py /usr/share/bunkerweb/deps/python/bin/* && \
chmod 660 /usr/share/bunkerweb/INTEGRATION
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
VOLUME /data /etc/nginx
EXPOSE 7000
WORKDIR /usr/share/bunkerweb/ui
USER ui:ui
HEALTHCHECK --interval=10s --timeout=10s --start-period=30s --retries=6 CMD /usr/share/bunkerweb/helpers/healthcheck-ui.sh
ENV PYTHONPATH /usr/share/bunkerweb/deps/python
CMD ["python3", "-m", "gunicorn", "--user", "ui", "--group", "ui", "main:app", "--worker-class", "gevent", "--bind", "0.0.0.0:7000", "--graceful-timeout", "0", "--access-logfile", "-", "--error-logfile", "-"]

View File

@ -54,7 +54,8 @@ from kubernetes import client as kube_client
from kubernetes import config as kube_config
from kubernetes.client.exceptions import ApiException as kube_ApiException
from os import _exit, getenv, getpid, listdir
from re import match as re_match
from re import compile as re_compile
from regex import match as regex_match
from requests import get
from shutil import move, rmtree
from signal import SIGINT, signal, SIGTERM
@ -135,8 +136,13 @@ elif "ADMIN_PASSWORD" not in vars:
logger.error("ADMIN_PASSWORD is not set")
stop(1)
if not vars.get("FLASK_DEBUG", False) and vars["ADMIN_PASSWORD"] == "changeme":
logger.error("Please change the default admin password.")
if not vars.get("FLASK_DEBUG", False) and not regex_match(
r"^(?=.*?\p{Lowercase_Letter})(?=.*?\p{Uppercase_Letter})(?=.*?\d)(?=.*?[ !\"#$%&'()*+,\-./:;<=>?@[\\\]^_`{|}~]).{8,}$",
vars["ADMIN_PASSWORD"],
):
logger.error(
"The admin password is not strong enough. It must contain at least 8 characters, including at least 1 uppercase letter, 1 lowercase letter, 1 number and 1 special character (#@?!$%^&*-)."
)
stop(1)
if not vars["ABSOLUTE_URI"].endswith("/"):
@ -231,6 +237,8 @@ except FileNotFoundError as e:
logger.error(repr(e), e.filename)
stop(1)
plugin_id_rx = re_compile(r"^[\w_-]{1,64}$")
# Declare functions for jinja2
app.jinja_env.globals.update(check_settings=check_settings)
@ -1222,7 +1230,7 @@ def upload_plugin():
@app.route("/plugins/<plugin>", methods=["GET", "POST"])
@login_required
def custom_plugin(plugin):
if not re_match(r"^[a-zA-Z0-9_-]{1,64}$", plugin):
if not plugin_id_rx.match(plugin):
flash(
f"Invalid plugin id, <b>{plugin}</b> (must be between 1 and 64 characters, only letters, numbers, underscores and hyphens)",
"error",
@ -1487,7 +1495,7 @@ def logs_container(container_id):
tmp_logs = []
if docker_client:
try:
if integration == "Swarm":
if integration != "Swarm":
docker_logs = docker_client.containers.get(container_id).logs(
stdout=True,
stderr=True,

View File

@ -1,4 +1,4 @@
Flask==2.2.3
Flask==2.3.2
Flask_Login==0.6.2
Flask_WTF==1.1.1
beautifulsoup4==4.12.2
@ -6,3 +6,4 @@ python_dateutil==2.8.2
bcrypt==4.0.1
gunicorn==20.1.0
gevent==22.10.2
regex==2023.5.5

View File

@ -31,13 +31,17 @@ beautifulsoup4==4.12.2 \
--hash=sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da \
--hash=sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a
# via -r requirements.in
blinker==1.6.2 \
--hash=sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213 \
--hash=sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0
# via flask
click==8.1.3 \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
# via flask
flask==2.2.3 \
--hash=sha256:7eb373984bf1c770023fce9db164ed0c3353cd0b53f130f4693da0ca756a2e6d \
--hash=sha256:c0bec9477df1cb867e5a67c9e1ab758de9cb4a3e52dd70681f59fa40a62b3f2d
flask==2.3.2 \
--hash=sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0 \
--hash=sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef
# via
# -r requirements.in
# flask-login
@ -243,6 +247,96 @@ python-dateutil==2.8.2 \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
# via -r requirements.in
regex==2023.5.5 \
--hash=sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff \
--hash=sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea \
--hash=sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9 \
--hash=sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423 \
--hash=sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c \
--hash=sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60 \
--hash=sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764 \
--hash=sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2 \
--hash=sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f \
--hash=sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe \
--hash=sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2 \
--hash=sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a \
--hash=sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c \
--hash=sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17 \
--hash=sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426 \
--hash=sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e \
--hash=sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8 \
--hash=sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67 \
--hash=sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e \
--hash=sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32 \
--hash=sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559 \
--hash=sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66 \
--hash=sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e \
--hash=sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8 \
--hash=sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309 \
--hash=sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18 \
--hash=sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a \
--hash=sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94 \
--hash=sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0 \
--hash=sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d \
--hash=sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80 \
--hash=sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810 \
--hash=sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6 \
--hash=sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96 \
--hash=sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a \
--hash=sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636 \
--hash=sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d \
--hash=sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe \
--hash=sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b \
--hash=sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2 \
--hash=sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d \
--hash=sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393 \
--hash=sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac \
--hash=sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926 \
--hash=sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91 \
--hash=sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e \
--hash=sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a \
--hash=sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46 \
--hash=sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a \
--hash=sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd \
--hash=sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3 \
--hash=sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd \
--hash=sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657 \
--hash=sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3 \
--hash=sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2 \
--hash=sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a \
--hash=sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1 \
--hash=sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550 \
--hash=sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833 \
--hash=sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81 \
--hash=sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8 \
--hash=sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5 \
--hash=sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19 \
--hash=sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d \
--hash=sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86 \
--hash=sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e \
--hash=sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008 \
--hash=sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8 \
--hash=sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2 \
--hash=sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da \
--hash=sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053 \
--hash=sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa \
--hash=sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb \
--hash=sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22 \
--hash=sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc \
--hash=sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6 \
--hash=sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051 \
--hash=sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d \
--hash=sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468 \
--hash=sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df \
--hash=sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1 \
--hash=sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d \
--hash=sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479 \
--hash=sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b \
--hash=sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35 \
--hash=sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956 \
--hash=sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2 \
--hash=sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135
# via -r requirements.in
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
@ -251,9 +345,9 @@ soupsieve==2.4.1 \
--hash=sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8 \
--hash=sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea
# via beautifulsoup4
werkzeug==2.2.3 \
--hash=sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe \
--hash=sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612
werkzeug==2.3.3 \
--hash=sha256:4866679a0722de00796a74086238bb3b98d90f423f05de039abb09315487254a \
--hash=sha256:a987caf1092edc7523edb139edb20c70571c4a8d5eed02e0b547b4739174d091
# via
# flask
# flask-login

View File

@ -132,18 +132,11 @@ url_for(request.endpoint)[1:].split("/")[-1].strip().replace('_', '-') %}
name="live-update"
data-default-method="default"
data-default-value="no"
class="relative cursor-pointer disabled:cursor-default disabled:pointer-events-none dark:border-slate-600 dark:bg-slate-700 z-10 checked:z-0 w-5 h-5 ease text-base rounded-1.4 checked:bg-primary checked:border-primary dark:checked:bg-primary dark:checked:border-primary duration-250 float-left mt-1 appearance-none border border-gray-300 bg-white bg-contain bg-center bg-no-repeat align-top transition-all disabled:bg-gray-400 disabled:border-gray-400 dark:disabled:bg-gray-800 dark:disabled:border-gray-800 disabled:text-gray-700 dark:disabled:text-gray-300"
class="checkbox"
type="checkbox"
data-pattern="^(yes|no)$"
value="no"
/>
<input
type="hidden"
name="live-update"
data-default-method="default"
data-default-value="no"
value="no"
/>
<svg
data-checkbox-handler="live-update"
class="pointer-events-none absolute fill-white dark:fill-gray-300 left-0 top-0 translate-x-1 translate-y-2 h-3 w-3"

View File

@ -19,9 +19,15 @@ class DockerTest(Test):
self._domains = {
r"www\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"auth\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"app1\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_3")
r"app1\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_3"),
}
self._check_domains()

View File

@ -21,6 +21,13 @@
state: forcereinstall
executable: pip3
- name: Pin version for urllib
pip:
name: urllib3<2
state: forcereinstall
executable: pip3
extra_args:
- name: Init Docker Swarm
community.general.docker_swarm:
advertise_addr: "{{ local_ip }}"

View File

@ -1,4 +1,4 @@
FROM fedora:37
FROM fedora:38
ENV container docker
ENV NGINX_VERSION 1.24.0

View File

@ -4,6 +4,7 @@ ENV NGINX_VERSION 1.24.0
# Copy centos repo
COPY src/linux/centos.repo /etc/yum.repos.d/centos.repo
RUN sed -i "s/%ARCH%/$(uname -m)/g" /etc/yum.repos.d/centos.repo
# Copy RPM-GPG-KEY-CentOS-Official
COPY src/linux/RPM-GPG-KEY-centosofficial /etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial

View File

@ -11,25 +11,34 @@ RUN GECKODRIVER_VERSION=`curl -i https://github.com/mozilla/geckodriver/releases
chmod +x /usr/local/bin/geckodriver && \
rm geckodriver.tar.gz
RUN wget https://github.com/bunkerity/bunkerweb-plugins/archive/refs/heads/main.zip && \
unzip main.zip && \
mv bunkerweb-plugins-main/discord . && \
zip -r discord.zip discord && \
rm -rf bunkerweb-plugins-main main.zip discord
WORKDIR /opt/tests_ui
COPY requirements.txt .
RUN pip install --no-cache -r requirements.txt
RUN touch test.txt && \
zip -r test.zip test.txt && \
rm test.txt
RUN echo '{ \
"id": "discord", \
"order": 999, \
"name": "Discord", \
"description": "Send alerts to a Discord channel (using webhooks).", \
"version": "0.1", \
"stream": "no", \
"settings": {} \
}' > plugin.json && \
zip -r discord.zip plugin.json && \
rm plugin.json
# Clean up
RUN apk del .build-deps && \
rm -rf /var/cache/apk/*
COPY main.py .
ENV PYTHONUNBUFFERED=1
WORKDIR /$APP_HOME
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY main.py .
COPY test.zip .
CMD ["python3", "main.py"]

View File

@ -55,7 +55,7 @@ services:
environment:
ABSOLUTE_URI: "http://www.example.com/admin/"
ADMIN_USERNAME: "admin"
ADMIN_PASSWORD: "admin"
ADMIN_PASSWORD: "S$$cr3tP@ssw0rd"
DOCKER_HOST: "tcp://bw-docker-proxy:2375"
volumes:
- bw-data:/data
@ -81,6 +81,7 @@ services:
volumes:
bw-data:
networks:
bw-universe:
name: bw-universe

View File

@ -245,7 +245,7 @@ try:
username_input = safe_get_element(driver, By.ID, "username")
password_input = safe_get_element(driver, By.ID, "password")
username_input.send_keys("admin")
password_input.send_keys("admin")
password_input.send_keys("S$cr3tP@ssw0rd")
access_page(
driver,
@ -254,7 +254,7 @@ try:
"home",
)
## HOME PAGE
### HOME PAGE
print("Trying instances page ...", flush=True)
@ -909,7 +909,7 @@ try:
False,
)
assert_alert_message(driver, "is not a valid plugin")
sleep(2)
print(
"The bad plugin has been rejected, trying to add a good plugin ...",
@ -928,12 +928,10 @@ try:
False,
)
assert_alert_message(driver, "Successfully created plugin")
external_plugins = safe_get_element(
driver,
By.XPATH,
"//div[@data-plugins-external='external']",
"//div[@data-plugins-external=' external ']",
multiple=True,
)
@ -1055,7 +1053,9 @@ try:
print("Logs found, trying auto refresh ...", flush=True)
assert_button_click(driver, safe_get_element(driver, By.ID, "live-update"))
assert_button_click(driver, safe_get_element(driver, By.ID, "submit-settings"))
assert_button_click(
driver, "//button[@id='submit-settings' and contains(text(), 'Go Live')]"
)
sleep(3)

View File

@ -1,2 +1,2 @@
selenium==4.9.0
requests==2.28.2
requests==2.30.0

Binary file not shown.

View File

@ -44,12 +44,12 @@ if [ $i -ge 120 ] ; then
fi
# Start tests
docker-compose -f docker-compose.tests.yml build
docker-compose -f docker-compose.test.yml build
if [ $? -ne 0 ] ; then
echo "❌ Build failed"
exit 1
fi
docker-compose -f docker-compose.tests.yml up --abort-on-container-exit --exit-code-from ui-tests
docker-compose -f docker-compose.test.yml up --abort-on-container-exit --exit-code-from ui-tests
# Exit
exit $?