Merge pull request #475 from bunkerity/dev

Merge branch "dev" into branch "ui"
This commit is contained in:
Théophile Diot 2023-05-19 17:03:51 -04:00 committed by GitHub
commit e1883a04be
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
387 changed files with 14305 additions and 1887 deletions

View File

@ -67,3 +67,30 @@ jobs:
secrets:
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# Core tests
prepare-tests-core:
needs: [code-security, build-containers]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- id: set-matrix
run: |
tests=$(find ./tests/core/ -maxdepth 1 -mindepth 1 -type d -printf "%f\n" | jq -c --raw-input --slurp 'split("\n")| .[0:-1]')
echo "::set-output name=tests::$tests"
outputs:
tests: ${{ steps.set-matrix.outputs.tests }}
tests-core:
needs: prepare-tests-core
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.prepare-tests-core.outputs.tests) }}
uses: ./.github/workflows/test-core.yml
with:
TEST: ${{ matrix.test }}
RELEASE: dev
secrets:
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}

View File

@ -129,6 +129,31 @@ jobs:
secrets:
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
prepare-tests-core:
needs: [create-infras]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- id: set-matrix
run: |
tests=$(find ./tests/core/ -maxdepth 1 -mindepth 1 -type d -printf "%f\n" | jq -c --raw-input --slurp 'split("\n")| .[0:-1]')
echo "::set-output name=tests::$tests"
outputs:
tests: ${{ steps.set-matrix.outputs.tests }}
tests-core:
needs: prepare-tests-core
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.prepare-tests-core.outputs.tests) }}
uses: ./.github/workflows/test-core.yml
with:
TEST: ${{ matrix.test }}
RELEASE: staging
secrets:
PRIVATE_REGISTRY: ${{ secrets.PRIVATE_REGISTRY }}
PRIVATE_REGISTRY_TOKEN: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
# Delete infrastructures
delete-infras:

40
.github/workflows/test-core.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: Core test (REUSABLE)
on:
workflow_call:
inputs:
TEST:
required: true
type: string
RELEASE:
required: true
type: string
secrets:
PRIVATE_REGISTRY:
required: true
PRIVATE_REGISTRY_TOKEN:
required: true
jobs:
test:
runs-on: ubuntu-latest
steps:
# Prepare
- name: Checkout source code
uses: actions/checkout@v3
- name: Login to private repository
uses: docker/login-action@v2
with:
registry: ${{ secrets.PRIVATE_REGISTRY }}
username: registry
password: ${{ secrets.PRIVATE_REGISTRY_TOKEN }}
- name: Pull BW image
run: docker pull ${{ secrets.PRIVATE_REGISTRY }}/infra/bunkerweb-tests:${{ inputs.RELEASE }} && docker tag ${{ secrets.PRIVATE_REGISTRY }}/infra/bunkerweb-tests:${{ inputs.RELEASE }} bunkerweb-tests
- name: Pull Scheduler image
run: docker pull ${{ secrets.PRIVATE_REGISTRY }}/infra/scheduler-tests:${{ inputs.RELEASE }} && docker tag ${{ secrets.PRIVATE_REGISTRY }}/infra/scheduler-tests:${{ inputs.RELEASE }} scheduler-tests
# Run test
- name: Run test
run: |
cd ./tests/core/${{ inputs.TEST }}
find . -type f -name 'docker-compose.*' -exec sed -i "s@bunkerity/bunkerweb:.*@bunkerweb-tests@" {} \;
find . -type f -name 'docker-compose.*' -exec sed -i "s@bunkerity/bunkerweb-scheduler:.*@scheduler-tests@" {} \;
./test.sh

View File

@ -1,16 +1,23 @@
# Changelog
## v1.5.0-beta -
## v1.5.0-beta - 2023/05/02
- Refactoring of almost all the components of the project
- Dedicated scheduler service to manage jobs and configuration
- Store configuration in a database backend
- Improved web UI and made it work with all integrations
- Improved internal LUA code
- Improved internal cache of BW
- Add Redis support when using clustered integrations
- Add RHEL integration
- Add Vagrant integration
- Improved CI/CD
- Init support of generic TCP/UDP (stream)
- Init support of IPv6
- Improved CI/CD : UI tests, core tests and release automation
- Reduce Docker images size
- Fixed and improved core plugins : antibot, cors, dnsbl, ...
- Use PCRE regex instead of LUA patterns
- Connectivity tests at startup/reload with logging
## v1.4.8 - 2023/04/05

6
TODO
View File

@ -1,6 +1,6 @@
- YT video demo web UI
- README
- Ansible
- Vagrant
- Plugins
- prepare new antibot challenge when not resolved and "refresh page"
- sessions helpers in utils
- sessions security : check IP address, check UA, ...
- fix db warnings (Got an error reading communication packets)

View File

@ -88,6 +88,7 @@ volumes:
The scheduler runs as an **unprivileged user with UID 101 and GID 101** inside the container. The reason behind this is security : in case a vulnerability is exploited, the attacker won't have full root (UID/GID 0) privileges.
But there is a downside : if you use a **local folder for the persistent data**, you will need to **set the correct permissions** so the unprivileged user can write data to it. Something like this should do the trick :
```shell
mkdir bw-data && \
chown root:101 bw-data && \
@ -95,26 +96,30 @@ volumes:
```
Alternatively, if the folder already exists :
```shell
chown -R root:101 bw-data && \
chmod -R 770 bw-data
```
If you are using [Docker in rootless mode](https://docs.docker.com/engine/security/rootless) or [podman](https://podman.io/), UIDs and GIDs in the container will be mapped to different ones on the host. You will first need to check your initial subuid and subgid :
```shell
grep ^$(whoami): /etc/subuid && \
grep ^$(whoami): /etc/subgid
```
For example, if you have a value of **100000**, the mapped UID/GID will be **100100** (100000 + 100) :
```shell
mkdir bw-data && \
sudo chgrp 100100 bw-data && \
chmod 770 bw-data
```
Or if the folder already exists :
```shell
sudo chgrp -R 100100 bw-data && \
chmod -R 770 bw-data
```
@ -174,13 +179,16 @@ For defense in depth purposes, we strongly recommend to create at least three di
- `bw-universe` : for BunkerWeb and scheduler
- `bw-docker` : for scheduler and the Docker API proxy
The scheduler needs to contact the API of BunkerWeb and for obvious security reason BunkerWeb needs to check if the caller is authorized to make API calls. The `API_WHITELIST_IP` setting lets you choose allowed IP addresses and subnets : usage of a static subnet for the `bw-universe` is strongly advised :
The scheduler needs to contact the API of BunkerWeb and, for obvious security reasons, BunkerWeb needs to check whether the caller is authorized to make API calls. The `API_WHITELIST_IP` setting lets you choose the allowed IP addresses and subnets ; using a static subnet for the `bw-universe` network is strongly advised :
```yaml
...
services:
mybunker:
image: bunkerity/bunkerweb:1.5.0-beta
ports:
- 80:8080
- 443:8443
networks:
- bw-services
- bw-universe
@ -209,6 +217,64 @@ networks:
name: bw-docker
```
### Full compose file
```yaml
version: "3.5"
services:
bunkerweb:
image: bunkerity/bunkerweb:1.5.0-beta
ports:
- 80:8080
- 443:8443
labels:
- "bunkerweb.INSTANCE"
environment:
- SERVER_NAME=www.example.com
- API_WHITELIST_IP=127.0.0.0/8 10.20.30.0/24
networks:
- bw-universe
- bw-services
bw-scheduler:
image: bunkerity/bunkerweb-scheduler:1.5.0-beta
depends_on:
- bunkerweb
- bw-docker
volumes:
- bw-data:/data
environment:
- DOCKER_HOST=tcp://bw-docker:2375
networks:
- bw-universe
- bw-docker
bw-docker:
image: tecnativa/docker-socket-proxy
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
environment:
- CONTAINERS=1
networks:
- bw-docker
volumes:
bw-data:
networks:
bw-universe:
name: bw-universe
ipam:
driver: default
config:
- subnet: 10.20.30.0/24
bw-services:
name: bw-services
bw-docker:
name: bw-docker
```
## Docker autoconf
<figure markdown>
@ -227,37 +293,57 @@ Instead of defining environment variables for the BunkerWeb container, you simpl
The Docker autoconf integration implies the use of **multisite mode**. Please refer to the [multisite section](concepts.md#multisite-mode) of the documentation for more information.
!!! info "Database backend"
Please note that we assume you are using MariaDB as database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see docker-compose files in the [misc/integrations folder](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repostiory for more information.
Please note that we assume you are using MariaDB as the database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see the docker-compose files in the [misc/integrations](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repository for more information.
Another container, named `bw-autoconf` for example, containing the autoconf service must be added to the stack. Since two services will generate the configuration for BunkerWeb, a "real" database backend (in other words, not SQLite) also needs to be added :
```yaml
...
version: "3.5"
services:
bunkerweb:
image: bunkerity/bunkerweb:1.5.0-beta
ports:
- 80:8080
- 443:8443
labels:
- "bunkerweb.INSTANCE"
environment:
- SERVER_NAME=
- DATABASE_URI=mariadb+pymysql://bunkerweb:changeme@bw-db:3306/db
- AUTOCONF_MODE=yes
- MULTISITE=yes
- DATABASE_URI=mariadb+pymysql://bunkerweb:changeme@bw-db:3306/db # Remember to set a stronger password for the database
- API_WHITELIST_IP=127.0.0.0/8 10.20.30.0/24
networks:
- bw-universe
- bw-services
...
bw-autoconf:
image: bunkerity/bunkerweb-autoconf:1.5.0-beta
depends_on:
- bunkerweb
- bw-docker
environment:
- DATABASE_URI=mariadb+pymysql://bunkerweb:changeme@bw-db:3306/db
- AUTOCONF_MODE=yes
- DOCKER_HOST=tcp://bw-docker:2375
networks:
- bw-universe
- bw-docker
bw-scheduler:
image: bunkerity/bunkerweb-scheduler:1.5.0-beta
depends_on:
- bunkerweb
- bw-docker
environment:
- DATABASE_URI=mariadb+pymysql://bunkerweb:changeme@bw-db:3306/db # Remember to set a stronger password for the database
- DATABASE_URI=mariadb+pymysql://bunkerweb:changeme@bw-db:3306/db
- DOCKER_HOST=tcp://bw-docker:2375
- AUTOCONF_MODE=yes
networks:
- bw-universe
- bw-docker
...
bw-docker:
image: tecnativa/docker-socket-proxy
volumes:
@ -266,14 +352,14 @@ services:
- CONTAINERS=1
networks:
- bw-docker
...
bw-db:
image: mariadb:10.10
environment:
- MYSQL_RANDOM_ROOT_PASSWORD=yes
- MYSQL_DATABASE=db
- MYSQL_USER=bunkerweb
- MYSQL_PASSWORD=changeme # Remember to set a stronger password for the database
- MYSQL_PASSWORD=changeme
volumes:
- bw-data:/var/lib/mysql
networks:
@ -301,23 +387,23 @@ networks:
Once the stack is set up, you will be able to create the web application container and add the settings as labels using the "bunkerweb." prefix in order to automatically set up BunkerWeb :
```yaml
...
version: "3.5"
services:
myapp:
image: mywebapp:4.2
networks:
bw-services:
aliases:
- myapp
labels:
- "bunkerweb.MY_SETTING_1=value1"
- "bunkerweb.MY_SETTING_2=value2"
...
networks:
bw-services:
aliases:
- myapp
labels:
- "bunkerweb.MY_SETTING_1=value1"
- "bunkerweb.MY_SETTING_2=value2"
networks:
bw-services:
external:
name: bw-services
...
external: true
name: bw-services
```
## Swarm
@ -328,9 +414,9 @@ networks:
</figure>
!!! info "Docker autoconf"
The Docker autoconf integration is similar to the Docker autoconf one (but with services instead of containers). Please read the [Docker autoconf integration section](#docker-autoconf) first if needed.
The Swarm integration is similar to the Docker autoconf one (but with services instead of containers). Please read the [Docker autoconf integration section](#docker-autoconf) first if needed.
To automatically configure BunkerWeb instances, a special service called **autoconf**, will be scheduled on a manager node. That service will listen for Docker Swarm events like service creation or deletion and automatically configure the **BunkerWeb instances** in real-time without downtime. It also monitors other Swarm objects like [configs](https://docs.docker.com/engine/swarm/configs/) for custom configurations.
To automatically configure BunkerWeb instances, a special service called **autoconf** needs to have access to the Docker API. That service will listen for Docker Swarm events like service creation or deletion and automatically configure the **BunkerWeb instances** in real-time without downtime. It also monitors other Swarm objects like [configs](https://docs.docker.com/engine/swarm/configs/) for custom configurations.
Like the [Docker autoconf integration](#docker-autoconf), configuration for web services is defined by using labels starting with the special **bunkerweb.** prefix.
@ -341,7 +427,7 @@ Since we have multiple instances of BunkerWeb running, a shared data store imple
Using a shared folder or a specific driver for the database volume is left as an exercise for the reader (and depends on your own use-case).
!!! info "Database backend"
Please note that we assume you are using MariaDB as database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see docker-compose files in the [misc/integrations folder](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repostiory for more information. Clustered database backends setup are out-of-the-scope of this documentation.
Please note that we assume you are using MariaDB as the database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see the docker-compose files in the [misc/integrations](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repository for more information. Clustered database backend setups are out of the scope of this documentation.
Here is the stack boilerplate that you can deploy using `docker stack deploy` :
@ -492,8 +578,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
## Kubernetes
@ -510,7 +596,7 @@ The recommended setup is to define **BunkerWeb** as a **[DaemonSet](https://kube
Since we have multiple instances of BunkerWeb running, a shared data store implemented as a [Redis](https://redis.io/) service must be created : the instances will use it to cache and share data. You will find more information about the Redis settings [here](settings.md#redis)
!!! info "Database backend"
Please note that we assume you are using MariaDB as database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see yaml files in the [misc/integrations folder](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repostiory for more information. Clustered database backends setup are out-of-the-scope of this documentation.
Please note that we assume you are using MariaDB as the database backend (which is defined using the `DATABASE_URI` setting). Other backends for this integration are still possible if you want to : see the YAML files in the [misc/integrations](https://github.com/bunkerity/bunkerweb/tree/v1.5.0-beta/misc/integrations) folder of the repository for more information. Clustered database backend setups are out of the scope of this documentation.
Please note that both the scheduler and autoconf services need to access the Kubernetes API. The recommended way of doing this is to use [RBAC authorization](https://kubernetes.io/docs/reference/access-authn-authz/rbac/).
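A minimal sketch of such an RBAC setup is shown below. The resource and verb lists are assumptions for illustration only ; refer to the official Kubernetes integration manifests for the exact permissions required :
```yaml
# Hypothetical ServiceAccount referenced by the scheduler/autoconf Deployments via serviceAccountName
apiVersion: v1
kind: ServiceAccount
metadata:
  name: sa-bunkerweb
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: cr-bunkerweb
rules:
  # Read-only access to the objects the controller watches (illustrative list)
  - apiGroups: [""]
    resources: ["services", "pods", "configmaps"]
    verbs: ["get", "watch", "list"]
  - apiGroups: ["networking.k8s.io"]
    resources: ["ingresses"]
    verbs: ["get", "watch", "list"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: crb-bunkerweb
subjects:
  - kind: ServiceAccount
    name: sa-bunkerweb
    namespace: default
roleRef:
  kind: ClusterRole
  name: cr-bunkerweb
  apiGroup: rbac.authorization.k8s.io
```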

View File

@ -112,8 +112,8 @@ You will find more settings about reverse proxy in the [settings section](settin
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Swarm"
@ -142,8 +142,8 @@ You will find more settings about reverse proxy in the [settings section](settin
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Kubernetes"
@ -488,8 +488,8 @@ You will find more settings about reverse proxy in the [settings section](settin
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Swarm"
@ -550,8 +550,8 @@ You will find more settings about reverse proxy in the [settings section](settin
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Kubernetes"
@ -1109,7 +1109,7 @@ REAL_IP_HEADER=proxy_protocol
## Protect UDP/TCP applications
!!! warning "Feature is in beta"
This feature is not production-ready. Feel free to test it and report us any bug using [issues]() in the GitHub repository.
This feature is not production-ready. Feel free to test it and report any bugs using [issues](https://github.com/bunkerity/bunkerweb/issues) in the GitHub repository.
BunkerWeb can also act as a **generic UDP/TCP reverse proxy** : you can protect any network-based application working at least on layer 4 of the OSI model. Under the hood, it leverages the [stream module](https://nginx.org/en/docs/stream/ngx_stream_core_module.html) of NGINX instead of the "classical" http one.
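As a rough sketch, a stream service in a compose file could look like the following. Only `USE_UDP` and `LISTEN_STREAM_PORT` appear in the settings documented in this changeset ; the other stream-related names (`SERVER_TYPE`, `REVERSE_PROXY_*`) and all the values are assumptions made for illustration :
```yaml
version: "3.5"
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    ports:
      # Expose the stream listening port chosen below (assumed mapping)
      - 10000:10000
    environment:
      - SERVER_NAME=myapp.example.com
      - SERVER_TYPE=stream          # assumption : switch the service to stream (layer 4) mode
      - USE_UDP=no                  # TCP listener, set to yes for UDP
      - LISTEN_STREAM_PORT=10000    # port BunkerWeb listens on for the stream
      - USE_REVERSE_PROXY=yes       # assumption : forward the stream to an upstream service
      - REVERSE_PROXY_HOST=myapp    # assumption : upstream host
      - REVERSE_PROXY_PORT=9000     # assumption : upstream port
    networks:
      - bw-services
networks:
  bw-services:
    name: bw-services
```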
@ -1263,7 +1263,7 @@ For complete list of settings regarding `stream` mode, please refer to the [sett
networks:
bw-services:
external:
external: true
name: bw-services
```
@ -1337,7 +1337,7 @@ For complete list of settings regarding `stream` mode, please refer to the [sett
networks:
bw-services:
external:
external: true
name: bw-services
```
@ -2015,8 +2015,8 @@ BunkerWeb supports PHP using external or remote [PHP-FPM](https://www.php.net/ma
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Swarm"
@ -2128,8 +2128,8 @@ BunkerWeb supports PHP using external or remote [PHP-FPM](https://www.php.net/ma
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
```
=== "Kubernetes"
@ -2307,4 +2307,101 @@ BunkerWeb supports PHP using external or remote [PHP-FPM](https://www.php.net/ma
```shell
systemctl start bunkerweb
```
## IPv6
!!! warning "Feature is in beta"
This feature is not production-ready. Feel free to test it and report any bugs using [issues](https://github.com/bunkerity/bunkerweb/issues) in the GitHub repository.
By default, BunkerWeb will only listen on IPv4 addresses and won't use IPv6 for network communications. If you want to enable IPv6 support, you need to set `USE_IPV6=yes`. Please note that the IPv6 configuration of your network and environment is out of the scope of this documentation.
=== "Docker"
First of all, you will need to configure your Docker daemon to enable IPv6 support for containers and use ip6tables if needed. Here is a sample configuration for your `/etc/docker/daemon.json` file :
```json
{
"experimental": true,
"ipv6": true,
"ip6tables": true,
"fixed-cidr-v6": "fd00:dead:beef::/48"
}
```
You can now restart the Docker service to apply the changes :
```shell
systemctl restart docker
```
Once Docker is set up to support IPv6, you can add the `USE_IPV6` setting and configure the `bw-services` network for IPv6 :
```yaml
version: '3.5'
services:
bunkerweb:
image: bunkerity/bunkerweb:1.5.0-beta
environment:
- USE_IPV6=yes
...
networks:
bw-services:
name: bw-services
enable_ipv6: true
ipam:
config:
- subnet: fd00:13:37::/48
gateway: fd00:13:37::1
...
```
=== "Docker autoconf"
First of all, you will need to configure your Docker daemon to enable IPv6 support for containers and use ip6tables if needed. Here is a sample configuration for your `/etc/docker/daemon.json` file :
```json
{
"experimental": true,
"ipv6": true,
"ip6tables": true,
"fixed-cidr-v6": "fd00:dead:beef::/48"
}
```
You can now restart the Docker service to apply the changes :
```shell
systemctl restart docker
```
Once Docker is set up to support IPv6, you can add the `USE_IPV6` setting and configure IPv6 for the `bw-services` network :
```yaml
version: '3.5'
services:
bunkerweb:
image: bunkerity/bunkerweb:1.5.0-beta
environment:
- USE_IPV6=yes
...
networks:
bw-services:
name: bw-services
enable_ipv6: true
ipam:
config:
- subnet: fd00:13:37::/48
gateway: fd00:13:37::1
...
```

View File

@ -1,5 +1,5 @@
mkdocs==1.4.3
mkdocs-material==9.1.11
mkdocs-material==9.1.13
pytablewriter==0.64.2
mike==1.1.2
jinja2<3.1.0

View File

@ -100,15 +100,24 @@ STREAM support :x:
[Cross-Origin Resource Sharing](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) lets you manage how your service can be contacted from different origins. Please note that you will have to allow the `OPTIONS` HTTP method using the `ALLOWED_METHODS` setting if you want to enable it (more info [here](#allowed-methods)). Here is the list of settings related to CORS :
| Setting | Default | Context |Multiple| Description |
|------------------------|------------------------------------------------------------------------------------|---------|--------|--------------------------------------------------|
|`USE_CORS` |`no` |multisite|no |Use CORS |
|`CORS_ALLOW_ORIGIN` |`*` |multisite|no |Value of the Access-Control-Allow-Origin header. |
|`CORS_EXPOSE_HEADERS` |`Content-Length,Content-Range` |multisite|no |Value of the Access-Control-Expose-Headers header.|
|`CORS_MAX_AGE` |`86400` |multisite|no |Value of the Access-Control-Max-Age header. |
|`CORS_ALLOW_CREDENTIALS`|`no` |multisite|no |Send the Access-Control-Allow-Credentials header. |
|`CORS_ALLOW_METHODS` |`GET, POST, OPTIONS` |multisite|no |Value of the Access-Control-Allow-Methods header. |
|`CORS_ALLOW_HEADERS` |`DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range`|multisite|no |Value of the Access-Control-Allow-Headers header. |
| Setting | Default | Context |Multiple| Description |
|------------------------|------------------------------------------------------------------------------------|---------|--------|-------------------------------------------------------------------|
|`USE_CORS` |`no` |multisite|no |Use CORS |
|`CORS_ALLOW_ORIGIN` |`*` |multisite|no |Allowed origins to make CORS requests : PCRE regex or *. |
|`CORS_EXPOSE_HEADERS` |`Content-Length,Content-Range` |multisite|no |Value of the Access-Control-Expose-Headers header. |
|`CORS_MAX_AGE` |`86400` |multisite|no |Value of the Access-Control-Max-Age header. |
|`CORS_ALLOW_CREDENTIALS`|`no` |multisite|no |Send the Access-Control-Allow-Credentials header. |
|`CORS_ALLOW_METHODS` |`GET, POST, OPTIONS` |multisite|no |Value of the Access-Control-Allow-Methods header. |
|`CORS_ALLOW_HEADERS` |`DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range`|multisite|no |Value of the Access-Control-Allow-Headers header. |
|`CORS_DENY_REQUEST` |`yes` |multisite|no |Deny request and don't send it to backend if Origin is not allowed.|
Here are some examples of possible values for the `CORS_ALLOW_ORIGIN` setting (see the compose sketch after this list) :
- `*` will allow all origins
- `^https://www\.example\.com$` will allow `https://www.example.com`
- `^https://.+\.example\.com$` will allow any origin whose domain ends with `.example.com`
- `^https://(www\.example1\.com|www\.example2\.com)$` will allow both `https://www.example1.com` and `https://www.example2.com`
- `^https?://www\.example\.com$` will allow both `https://www.example.com` and `http://www.example.com`
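As a minimal sketch (hostnames are placeholders), the CORS settings can be combined in a compose environment block like this :
```yaml
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    environment:
      - SERVER_NAME=app1.example.com
      - USE_CORS=yes
      # Don't forget to allow the OPTIONS method for preflight requests
      - ALLOWED_METHODS=GET|POST|HEAD|OPTIONS
      # PCRE regex : only https://app2.example.com may perform cross-origin requests
      # ($$ escapes the literal $ for compose variable interpolation)
      - CORS_ALLOW_ORIGIN=^https://app2\.example\.com$$
      - CORS_ALLOW_METHODS=GET, POST, OPTIONS
```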
## HTTPS / SSL/TLS
@ -265,30 +274,30 @@ STREAM support :warning:
You can use the following settings to set up blacklisting :
| Setting | Default | Description |
| :-------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------- |
|`USE_BLACKLIST` |`yes` |Activate blacklist feature. |
|`BLACKLIST_IP` | |List of IP/network, separated with spaces, to block. |
|`BLACKLIST_IP_URLS` |`https://www.dan.me.uk/torlist/?exit` |List of URLs, separated with spaces, containing bad IP/network to block. |
|`BLACKLIST_RDNS_GLOBAL` |`yes` |Only perform RDNS blacklist checks on global IP addresses. |
|`BLACKLIST_RDNS` |`.shodan.io .censys.io` |List of reverse DNS suffixes, separated with spaces, to block. |
|`BLACKLIST_RDNS_URLS` | |List of URLs, separated with spaces, containing reverse DNS suffixes to block. |
|`BLACKLIST_ASN` | |List of ASN numbers, separated with spaces, to block. |
|`BLACKLIST_ASN_URLS` | |List of URLs, separated with spaces, containing ASN to block. |
|`BLACKLIST_USER_AGENT` | |List of User-Agent, separated with spaces, to block. |
|`BLACKLIST_USER_AGENT_URLS` |`https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/_generator_lists/bad-user-agents.list`|List of URLs, separated with spaces, containing bad User-Agent to block. |
|`BLACKLIST_URI` | |List of URI, separated with spaces, to block. |
|`BLACKLIST_URI_URLS` | |List of URLs, separated with spaces, containing bad URI to block. |
|`BLACKLIST_IGNORE_IP` | |List of IP/network, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_IP_URLS` | |List of URLs, separated with spaces, containing IP/network to ignore in the blacklist. |
|`BLACKLIST_IGNORE_RDNS` | |List of reverse DNS suffixes, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_RDNS_URLS` | |List of URLs, separated with spaces, containing reverse DNS suffixes to ignore in the blacklist.|
|`BLACKLIST_IGNORE_ASN` | |List of ASN numbers, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_ASN_URLS` | |List of URLs, separated with spaces, containing ASN to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT` | |List of User-Agent, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT_URLS`| |List of URLs, separated with spaces, containing User-Agent to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI` | |List of URI, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI_URLS` | |List of URLs, separated with spaces, containing URI to ignore in the blacklist. |
| Setting | Default | Context |Multiple| Description |
|----------------------------------|------------------------------------------------------------------------------------------------------------------------------|---------|--------|------------------------------------------------------------------------------------------------|
|`USE_BLACKLIST` |`yes` |multisite|no |Activate blacklist feature. |
|`BLACKLIST_IP` | |multisite|no |List of IP/network, separated with spaces, to block. |
|`BLACKLIST_IP_URLS` |`https://www.dan.me.uk/torlist/?exit` |global |no |List of URLs, separated with spaces, containing bad IP/network to block. |
|`BLACKLIST_RDNS_GLOBAL` |`yes` |multisite|no |Only perform RDNS blacklist checks on global IP addresses. |
|`BLACKLIST_RDNS` |`.shodan.io .censys.io` |multisite|no |List of reverse DNS suffixes, separated with spaces, to block. |
|`BLACKLIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to block. |
|`BLACKLIST_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to block. |
|`BLACKLIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to block. |
|`BLACKLIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to block. |
|`BLACKLIST_USER_AGENT_URLS` |`https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/_generator_lists/bad-user-agents.list`|global |no |List of URLs, separated with spaces, containing bad User-Agent to block. |
|`BLACKLIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to block. |
|`BLACKLIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to block. |
|`BLACKLIST_IGNORE_IP` | |multisite|no |List of IP/network, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_IP_URLS` | |global |no |List of URLs, separated with spaces, containing IP/network to ignore in the blacklist. |
|`BLACKLIST_IGNORE_RDNS` | |multisite|no |List of reverse DNS suffixes, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to ignore in the blacklist.|
|`BLACKLIST_IGNORE_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing User-Agent to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI_URLS` | |global |no |List of URLs, separated with spaces, containing URI to ignore in the blacklist. |
When using stream mode, only IP, RDNS and ASN checks will be done.
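As a quick illustration, the blacklist settings above can be combined in a compose environment block like the following (IP addresses, suffixes and regexes are example values only) :
```yaml
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    environment:
      - USE_BLACKLIST=yes
      # Block a specific network and some scanner reverse DNS suffixes
      - BLACKLIST_IP=192.0.2.0/24
      - BLACKLIST_RDNS=.shodan.io .censys.io
      # Block requests whose URI matches a PCRE regex ($$ escapes $ for compose)
      - BLACKLIST_URI=^/\.env$$
      # Never blacklist the monitoring host
      - BLACKLIST_IGNORE_IP=203.0.113.42
```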
@ -298,19 +307,20 @@ STREAM support :warning:
You can use the following settings to set up greylisting :
| Setting | Default | Description |
| :-------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------- |
| `USE_GREYLIST` | `no` | When set to `yes`, will enable greylisting based on various criteria. |
| `GREYLIST_IP` | | List of IPs and networks to greylist. |
| `GREYLIST_IP_URLS` | | List of URL containing IP and network to greylist. |
| `GREYLIST_RDNS` | | List of reverse DNS to greylist. |
| `GREYLIST_RDNS_URLS` | | List of URLs containing reverse DNS to greylist. |
| `GREYLIST_ASN` | | List of ASN to greylist. |
| `GREYLIST_ASN_URLS` | | List of URLs containing ASN to greylist. |
| `GREYLIST_USER_AGENT` | | List of User-Agents to greylist. |
| `GREYLIST_USER_AGENT_URLS` | | List of URLs containing User-Agent(s) to greylist. |
| `GREYLIST_URI` | | List of requests URI to greylist. |
| `GREYLIST_URI_URLS` | | List of URLs containing request URI to greylist. |
| Setting |Default| Context |Multiple| Description |
|--------------------------|-------|---------|--------|----------------------------------------------------------------------------------------------|
|`USE_GREYLIST` |`no` |multisite|no |Activate greylist feature. |
|`GREYLIST_IP` | |multisite|no |List of IP/network, separated with spaces, to put into the greylist. |
|`GREYLIST_IP_URLS` | |global |no |List of URLs, separated with spaces, containing good IP/network to put into the greylist. |
|`GREYLIST_RDNS_GLOBAL` |`yes` |multisite|no |Only perform RDNS greylist checks on global IP addresses. |
|`GREYLIST_RDNS` | |multisite|no |List of reverse DNS suffixes, separated with spaces, to put into the greylist. |
|`GREYLIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to put into the greylist.|
|`GREYLIST_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to put into the greylist. |
|`GREYLIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to put into the greylist. |
|`GREYLIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to put into the greylist. |
|`GREYLIST_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing good User-Agent to put into the greylist. |
|`GREYLIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to put into the greylist. |
|`GREYLIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to put into the greylist. |
When using stream mode, only IP, RDNS and ASN checks will be done.
@ -320,19 +330,20 @@ STREAM support :warning:
You can use the following settings to set up whitelisting :
| Setting | Default | Description |
| :-------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------- |
| `USE_WHITELIST` | `yes` | When set to `yes`, will enable whitelisting based on various criteria. |
| `WHITELIST_IP` | `20.191.45.212 40.88.21.235 40.76.173.151 40.76.163.7 20.185.79.47 52.142.26.175 20.185.79.15 52.142.24.149 40.76.162.208 40.76.163.23 40.76.162.191 40.76.162.247 54.208.102.37 107.21.1.8` | List of IP and network to whitelist. The default list contains IP from DuckDuckGo crawler. |
| `WHITELIST_IP_URLS` | `` | List of URLs containing IP and network to whitelist. |
| `WHITELIST_RDNS` | `.google.com .googlebot.com .yandex.ru .yandex.net .yandex.com .search.msn.com .baidu.com .baidu.jp .crawl.yahoo.net .fwd.linkedin.com .twitter.com .twttr.com .discord.com` | List of reverse DNS to whitelist. Default list contains various reverse DNS of search engines and social media crawlers. |
| `WHITELIST_RDNS_URLS` | | List of URLs containing reverse DNS to whitelist. |
| `WHITELIST_ASN` | `32934` | List of ASN to whitelist. The default list contains the ASN of Facebook. |
| `WHITELIST_ASN_URLS` | | List of URL containing ASN to whitelist. |
| `WHITELIST_USER_AGENT` | | List of User-Agent to whitelist. |
| `WHITELIST_USER_AGENT_URLS` | | List of URLs containing User-Agent to whitelist. |
| `WHITELIST_URI` | | List of requests URI to whitelist. |
| `WHITELIST_URI_URLS` | | List of URLs containing request(s) URI to whitelist. |
| Setting | Default | Context |Multiple| Description |
|---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|--------|----------------------------------------------------------------------------------|
|`USE_WHITELIST` |`yes` |multisite|no |Activate whitelist feature. |
|`WHITELIST_IP` |`20.191.45.212 40.88.21.235 40.76.173.151 40.76.163.7 20.185.79.47 52.142.26.175 20.185.79.15 52.142.24.149 40.76.162.208 40.76.163.23 40.76.162.191 40.76.162.247 54.208.102.37 107.21.1.8`|multisite|no |List of IP/network, separated with spaces, to put into the whitelist. |
|`WHITELIST_IP_URLS` | |global |no |List of URLs, separated with spaces, containing good IP/network to whitelist. |
|`WHITELIST_RDNS_GLOBAL` |`yes` |multisite|no |Only perform RDNS whitelist checks on global IP addresses. |
|`WHITELIST_RDNS` |`.google.com .googlebot.com .yandex.ru .yandex.net .yandex.com .search.msn.com .baidu.com .baidu.jp .crawl.yahoo.net .fwd.linkedin.com .twitter.com .twttr.com .discord.com` |multisite|no |List of reverse DNS suffixes, separated with spaces, to whitelist. |
|`WHITELIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to whitelist.|
|`WHITELIST_ASN` |`32934` |multisite|no |List of ASN numbers, separated with spaces, to whitelist. |
|`WHITELIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to whitelist. |
|`WHITELIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to whitelist. |
|`WHITELIST_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing good User-Agent to whitelist. |
|`WHITELIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to whitelist. |
|`WHITELIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to whitelist. |
When using stream mode, only IP, RDNS and ASN checks will be done.
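As an illustrative sketch, here is a compose environment block whitelisting a monitoring IP and a health-check URI (example values ; keep in mind that setting a variable overrides its default value shown above) :
```yaml
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    environment:
      - USE_WHITELIST=yes
      # Trusted source that should never be blocked
      - WHITELIST_IP=203.0.113.10
      # Reverse DNS suffixes of trusted crawlers
      - WHITELIST_RDNS=.google.com .googlebot.com
      # PCRE regex for a health-check endpoint ($$ escapes $ for compose)
      - WHITELIST_URI=^/health$$
```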
@ -410,15 +421,19 @@ STREAM support :x:
The following settings are related to the Limiting requests feature :
| Setting | Default | Description |
| :--------------: | :-----: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `USE_LIMIT_REQ` | `yes` | When set to `yes`, will limit the number of requests for a given IP on each URL within a period of time. |
| `LIMIT_REQ_URL` | `/` | The URL that will be limited. The special URL `/` will define a default limit for all URLs. |
| `LIMIT_REQ_RATE` | `2r/s` | The limit to apply to the corresponding URL. Syntax is `Xr/Y` where **X** is the number of request(s) and **Y** the period of time (s for second, m for minute, h for hour and d for day). |
| Setting |Default| Context |Multiple| Description |
|-----------------------|-------|---------|--------|---------------------------------------------------------------------------------------------|
|`USE_LIMIT_REQ` |`yes` |multisite|no |Activate limit requests feature. |
|`LIMIT_REQ_URL` |`/` |multisite|yes |URL (PCRE regex) where the limit request will be applied or special value / for all requests.|
|`LIMIT_REQ_RATE` |`2r/s` |multisite|yes |Rate to apply to the URL (s for second, m for minute, h for hour and d for day). |
|`USE_LIMIT_CONN` |`yes` |multisite|no |Activate limit connections feature. |
|`LIMIT_CONN_MAX_HTTP1` |`10` |multisite|no |Maximum number of connections per IP when using HTTP/1.X protocol. |
|`LIMIT_CONN_MAX_HTTP2` |`100` |multisite|no |Maximum number of streams per IP when using HTTP/2 protocol. |
|`LIMIT_CONN_MAX_STREAM`|`10` |multisite|no |Maximum number of connections per IP when using stream. |
Please note that you can add different rates for different URLs by adding a number as a suffix to the settings for example : `LIMIT_REQ_URL_1=/url1`, `LIMIT_REQ_RATE_1=5r/d`, `LIMIT_REQ_URL_2=/url2`, `LIMIT_REQ_RATE_2=1r/m`, ...
Please note that you can add different rates for different URLs by adding a number as a suffix to the settings, for example : `LIMIT_REQ_URL_1=^/url1$`, `LIMIT_REQ_RATE_1=5r/d`, `LIMIT_REQ_URL_2=^/url2/subdir/.*$`, `LIMIT_REQ_RATE_2=1r/m`, ...
Another important thing to note is that `LIMIT_REQ_URL` accepts LUA patterns.
Another important thing to note is that `LIMIT_REQ_URL` values are PCRE regex.
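Here is a small compose sketch of that numbered-suffix syntax (URLs and rates are example values) :
```yaml
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    environment:
      - USE_LIMIT_REQ=yes
      # Default rate applied to all URLs (special value /)
      - LIMIT_REQ_URL=/
      - LIMIT_REQ_RATE=2r/s
      # Stricter rate for the login endpoint (PCRE regex, $$ escapes $ for compose)
      - LIMIT_REQ_URL_1=^/login$$
      - LIMIT_REQ_RATE_1=4r/m
```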
## Country

View File

@ -47,6 +47,7 @@ STREAM support :warning:
|`LISTEN_STREAM_PORT` |`1337` |multisite|no |Listening port for non-ssl (passthrough). |
|`LISTEN_STREAM_PORT_SSL` |`4242` |multisite|no |Listening port for ssl (passthrough). |
|`USE_UDP` |`no` |multisite|no |UDP listen instead of TCP (stream). |
|`USE_IPV6` |`no` |global |no |Enable IPv6 connectivity. |
## Core settings
@ -57,15 +58,17 @@ STREAM support :x:
Bot detection by using a challenge.
| Setting | Default | Context |Multiple| Description |
|---------------------------|------------|---------|--------|---------------------------------------------------------------------|
|`USE_ANTIBOT` |`no` |multisite|no |Activate antibot feature. |
|`ANTIBOT_URI` |`/challenge`|multisite|no |Unused URI that clients will be redirected to to solve the challenge.|
|`ANTIBOT_RECAPTCHA_SCORE` |`0.7` |multisite|no |Minimum score required for reCAPTCHA challenge. |
|`ANTIBOT_RECAPTCHA_SITEKEY`| |multisite|no |Sitekey for reCAPTCHA challenge. |
|`ANTIBOT_RECAPTCHA_SECRET` | |multisite|no |Secret for reCAPTCHA challenge. |
|`ANTIBOT_HCAPTCHA_SITEKEY` | |multisite|no |Sitekey for hCaptcha challenge. |
|`ANTIBOT_HCAPTCHA_SECRET` | |multisite|no |Secret for hCaptcha challenge. |
| Setting | Default | Context |Multiple| Description |
|---------------------------|------------|---------|--------|------------------------------------------------------------------------------------------------------------------------------|
|`USE_ANTIBOT` |`no` |multisite|no |Activate antibot feature. |
|`ANTIBOT_URI` |`/challenge`|multisite|no |Unused URI that clients will be redirected to in order to solve the challenge. |
|`ANTIBOT_RECAPTCHA_SCORE` |`0.7` |multisite|no |Minimum score required for reCAPTCHA challenge. |
|`ANTIBOT_RECAPTCHA_SITEKEY`| |multisite|no |Sitekey for reCAPTCHA challenge. |
|`ANTIBOT_RECAPTCHA_SECRET` | |multisite|no |Secret for reCAPTCHA challenge. |
|`ANTIBOT_HCAPTCHA_SITEKEY` | |multisite|no |Sitekey for hCaptcha challenge. |
|`ANTIBOT_HCAPTCHA_SECRET` | |multisite|no |Secret for hCaptcha challenge. |
|`ANTIBOT_TIME_RESOLVE` |`60` |multisite|no |Maximum time (in seconds) clients have to resolve the challenge. Once this time has passed, a new challenge will be generated.|
|`ANTIBOT_TIME_VALID` |`86400` |multisite|no |Maximum validity time of solved challenges. Once this time has passed, clients will need to resolve a new one. |
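As an illustration, enabling the challenge could look like the following compose environment block. The `captcha` value for `USE_ANTIBOT` is an assumption used for illustration ; check the antibot plugin documentation for the accepted challenge types :
```yaml
services:
  bunkerweb:
    image: bunkerity/bunkerweb:1.5.0-beta
    environment:
      # Assumption : "captcha" is one of the accepted challenge types
      - USE_ANTIBOT=captcha
      # Unused URI clients are redirected to while solving the challenge
      - ANTIBOT_URI=/challenge
      # New challenge after 60s, a solved challenge stays valid for 24h
      - ANTIBOT_TIME_RESOLVE=60
      - ANTIBOT_TIME_VALID=86400
```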
### Auth basic
@ -111,9 +114,9 @@ Deny access based on internal and external IP/network/rDNS/ASN blacklists.
|`BLACKLIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to block. |
|`BLACKLIST_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to block. |
|`BLACKLIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to block. |
|`BLACKLIST_USER_AGENT` | |multisite|no |List of User-Agent, separated with spaces, to block. |
|`BLACKLIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to block. |
|`BLACKLIST_USER_AGENT_URLS` |`https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/_generator_lists/bad-user-agents.list`|global |no |List of URLs, separated with spaces, containing bad User-Agent to block. |
|`BLACKLIST_URI` | |multisite|no |List of URI, separated with spaces, to block. |
|`BLACKLIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to block. |
|`BLACKLIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to block. |
|`BLACKLIST_IGNORE_IP` | |multisite|no |List of IP/network, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_IP_URLS` | |global |no |List of URLs, separated with spaces, containing IP/network to ignore in the blacklist. |
@ -121,9 +124,9 @@ Deny access based on internal and external IP/network/rDNS/ASN blacklists.
|`BLACKLIST_IGNORE_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to ignore in the blacklist.|
|`BLACKLIST_IGNORE_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT` | |multisite|no |List of User-Agent, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing User-Agent to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI` | |multisite|no |List of URI, separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to ignore in the blacklist. |
|`BLACKLIST_IGNORE_URI_URLS` | |global |no |List of URLs, separated with spaces, containing URI to ignore in the blacklist. |
### Brotli
@ -156,15 +159,16 @@ STREAM support :x:
Cross-Origin Resource Sharing.
| Setting | Default | Context |Multiple| Description |
|------------------------|------------------------------------------------------------------------------------|---------|--------|--------------------------------------------------|
|`USE_CORS` |`no` |multisite|no |Use CORS |
|`CORS_ALLOW_ORIGIN` |`*` |multisite|no |Value of the Access-Control-Allow-Origin header. |
|`CORS_EXPOSE_HEADERS` |`Content-Length,Content-Range` |multisite|no |Value of the Access-Control-Expose-Headers header.|
|`CORS_MAX_AGE` |`86400` |multisite|no |Value of the Access-Control-Max-Age header. |
|`CORS_ALLOW_CREDENTIALS`|`no` |multisite|no |Send the Access-Control-Allow-Credentials header. |
|`CORS_ALLOW_METHODS` |`GET, POST, OPTIONS` |multisite|no |Value of the Access-Control-Allow-Methods header. |
|`CORS_ALLOW_HEADERS` |`DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range`|multisite|no |Value of the Access-Control-Allow-Headers header. |
| Setting | Default | Context |Multiple| Description |
|------------------------|------------------------------------------------------------------------------------|---------|--------|-------------------------------------------------------------------|
|`USE_CORS` |`no` |multisite|no |Use CORS |
|`CORS_ALLOW_ORIGIN` |`*` |multisite|no |Allowed origins to make CORS requests : PCRE regex or *. |
|`CORS_EXPOSE_HEADERS` |`Content-Length,Content-Range` |multisite|no |Value of the Access-Control-Expose-Headers header. |
|`CORS_MAX_AGE` |`86400` |multisite|no |Value of the Access-Control-Max-Age header. |
|`CORS_ALLOW_CREDENTIALS`|`no` |multisite|no |Send the Access-Control-Allow-Credentials header. |
|`CORS_ALLOW_METHODS` |`GET, POST, OPTIONS` |multisite|no |Value of the Access-Control-Allow-Methods header. |
|`CORS_ALLOW_HEADERS` |`DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range`|multisite|no |Value of the Access-Control-Allow-Headers header. |
|`CORS_DENY_REQUEST` |`yes` |multisite|no |Deny request and don't send it to backend if Origin is not allowed.|
### Client cache
@ -250,9 +254,9 @@ Allow access while keeping security features based on internal and external IP/n
|`GREYLIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to put into the greylist.|
|`GREYLIST_ASN` | |multisite|no |List of ASN numbers, separated with spaces, to put into the greylist. |
|`GREYLIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to put into the greylist. |
|`GREYLIST_USER_AGENT` | |multisite|no |List of User-Agent, separated with spaces, to put into the greylist. |
|`GREYLIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to put into the greylist. |
|`GREYLIST_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing good User-Agent to put into the greylist. |
|`GREYLIST_URI` | |multisite|no |List of URI, separated with spaces, to put into the greylist. |
|`GREYLIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to put into the greylist. |
|`GREYLIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to put into the greylist. |
### Gzip
@ -317,15 +321,15 @@ STREAM support :warning:
Limit maximum number of requests and connections.
| Setting |Default| Context |Multiple| Description |
|-----------------------|-------|---------|--------|--------------------------------------------------------------------------------|
|`USE_LIMIT_REQ` |`yes` |multisite|no |Activate limit requests feature. |
|`LIMIT_REQ_URL` |`/` |multisite|yes |URL where the limit request will be applied. |
|`LIMIT_REQ_RATE` |`2r/s` |multisite|yes |Rate to apply to the URL (s for second, m for minute, h for hour and d for day).|
|`USE_LIMIT_CONN` |`yes` |multisite|no |Activate limit connections feature. |
|`LIMIT_CONN_MAX_HTTP1` |`10` |multisite|no |Maximum number of connections per IP when using HTTP/1.X protocol. |
|`LIMIT_CONN_MAX_HTTP2` |`100` |multisite|no |Maximum number of streams per IP when using HTTP/2 protocol. |
|`LIMIT_CONN_MAX_STREAM`|`10` |multisite|no |Maximum number of connections per IP when using stream. |
| Setting |Default| Context |Multiple| Description |
|-----------------------|-------|---------|--------|---------------------------------------------------------------------------------------------|
|`USE_LIMIT_REQ` |`yes` |multisite|no |Activate limit requests feature. |
|`LIMIT_REQ_URL` |`/` |multisite|yes |URL (PCRE regex) where the limit request will be applied or special value / for all requests.|
|`LIMIT_REQ_RATE` |`2r/s` |multisite|yes |Rate to apply to the URL (s for second, m for minute, h for hour and d for day). |
|`USE_LIMIT_CONN` |`yes` |multisite|no |Activate limit connections feature. |
|`LIMIT_CONN_MAX_HTTP1` |`10` |multisite|no |Maximum number of connections per IP when using HTTP/1.X protocol. |
|`LIMIT_CONN_MAX_HTTP2` |`100` |multisite|no |Maximum number of streams per IP when using HTTP/2 protocol. |
|`LIMIT_CONN_MAX_STREAM`|`10` |multisite|no |Maximum number of connections per IP when using stream. |
### Miscellaneous
@ -521,8 +525,8 @@ Allow access based on internal and external IP/network/rDNS/ASN whitelists.
|`WHITELIST_RDNS_URLS` | |global |no |List of URLs, separated with spaces, containing reverse DNS suffixes to whitelist.|
|`WHITELIST_ASN` |`32934` |multisite|no |List of ASN numbers, separated with spaces, to whitelist. |
|`WHITELIST_ASN_URLS` | |global |no |List of URLs, separated with spaces, containing ASN to whitelist. |
|`WHITELIST_USER_AGENT` | |multisite|no |List of User-Agent, separated with spaces, to whitelist. |
|`WHITELIST_USER_AGENT` | |multisite|no |List of User-Agent (PCRE regex), separated with spaces, to whitelist. |
|`WHITELIST_USER_AGENT_URLS`| |global |no |List of URLs, separated with spaces, containing good User-Agent to whitelist. |
|`WHITELIST_URI` | |multisite|no |List of URI, separated with spaces, to whitelist. |
|`WHITELIST_URI` | |multisite|no |List of URI (PCRE regex), separated with spaces, to whitelist. |
|`WHITELIST_URI_URLS` | |global |no |List of URLs, separated with spaces, containing bad URI to whitelist. |

View File

@ -77,5 +77,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -90,8 +90,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
redis:

View File

@ -63,5 +63,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -16,7 +16,7 @@ services:
labels:
- bunkerweb.SERVER_NAME=app1.example.com
- bunkerweb.USE_CORS=yes
- bunkerweb.CORS_ALLOW_ORIGIN=https://app2.example.com
- bunkerweb.CORS_ALLOW_ORIGIN=^https://app2\.example\.com$$
- bunkerweb.REMOTE_PHP=myapp1
- bunkerweb.REMOTE_PHP_PATH=/app
@ -56,5 +56,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -23,7 +23,7 @@ services:
- USE_CLIENT_CACHE=yes
- USE_GZIP=yes
- app1.example.com_USE_CORS=yes
- app1.example.com_CORS_ALLOW_ORIGIN=https://app2.example.com
- app1.example.com_CORS_ALLOW_ORIGIN=^https://app2\.example\.com$$
- app1.example.com_ALLOWED_METHODS=GET|POST|HEAD|OPTIONS
- app1.example.com_REMOTE_PHP=myapp1
- app1.example.com_REMOTE_PHP_PATH=/app

View File

@ -9,7 +9,7 @@ DISABLE_DEFAULT_SERVER=yes
USE_CLIENT_CACHE=yes
USE_GZIP=yes
app1.example.com_USE_CORS=yes
app1.example.com_CORS_ALLOW_ORIGIN=https://app2.example.com
app1.example.com_CORS_ALLOW_ORIGIN=^https://app2\.example\.com$
app1.example.com_ALLOWED_METHODS=GET|POST|HEAD|OPTIONS
app1.example.com_LOCAL_PHP=/run/php/php-fpm.sock
app1.example.com_LOCAL_PHP_PATH=/var/www/html/app1.example.com

View File

@ -42,5 +42,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -41,8 +41,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
drupal-modules:

View File

@ -23,5 +23,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -22,8 +22,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
ghost_data:

View File

@ -21,5 +21,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -27,8 +27,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
gogs_data:

View File

@ -44,5 +44,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -45,8 +45,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
joomla-files:

View File

@ -64,5 +64,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -58,8 +58,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
db-data:

View File

@ -89,5 +89,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -43,5 +43,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -44,8 +44,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
db-data:

View File

@ -45,8 +45,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
db-data:

View File

@ -49,8 +49,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
db-data:

View File

@ -78,5 +78,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -56,8 +56,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
nc-files:

View File

@ -57,5 +57,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -56,8 +56,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
db-data:

View File

@ -44,8 +44,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
ps-data:

View File

@ -48,8 +48,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
ps-data:

View File

@ -33,5 +33,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -35,8 +35,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
rr-config:

View File

@ -41,5 +41,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -41,8 +41,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
redmine-data:

View File

@ -27,5 +27,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -31,5 +31,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -39,5 +39,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -31,5 +31,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -45,5 +45,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -17,5 +17,5 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -23,8 +23,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
configs:
tomcat_app_war:

View File

@ -31,7 +31,6 @@ services:
t:none,\
setvar:tx.crs_exclusions_wordpress=1"
mydb:
image: mariadb
volumes:
@ -52,5 +51,5 @@ volumes:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services

View File

@ -42,8 +42,8 @@ services:
networks:
bw-services:
external:
name: bw-services
external: true
name: bw-services
volumes:
wp-data:

View File

@ -1,4 +1,4 @@
FROM python:3.11.3-alpine
FROM python:3.11.3-alpine AS builder
# Copy python requirements
COPY src/common/gen/requirements.txt /tmp/req/requirements.txt
@ -28,10 +28,18 @@ COPY src/common/helpers /usr/share/bunkerweb/helpers
COPY src/common/settings.json /usr/share/bunkerweb/settings.json
COPY src/common/utils /usr/share/bunkerweb/utils
# Add nginx user, drop bwcli, setup data folders, permissions and logging
FROM python:3.11.3-alpine
# Set default umask to prevent huge recursive chmod increasing the final image size
RUN umask 027
# Copy dependencies
COPY --from=builder --chown=0:101 /usr/share/bunkerweb /usr/share/bunkerweb
# Add autoconf user, drop bwcli, install runtime dependencies, create data folders and set permissions
RUN apk add --no-cache bash && \
addgroup -g 101 nginx && \
adduser -h /var/cache/nginx -g nginx -s /bin/sh -G nginx -D -H -u 101 nginx && \
addgroup -g 101 autoconf && \
adduser -h /var/cache/autoconf -g autoconf -s /bin/sh -G autoconf -D -H -u 101 autoconf && \
cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
mkdir -p /var/tmp/bunkerweb && \
mkdir -p /var/www && \
@ -41,16 +49,11 @@ RUN apk add --no-cache bash && \
mkdir -p /data/www && ln -s /data/www /var/www/html && \
for dir in $(echo "configs plugins") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/etc/bunkerweb/${dir}" ; done && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \
chown -R root:nginx /data && \
chown -R root:autoconf /data && \
chmod -R 770 /data && \
chown -R root:nginx /usr/share/bunkerweb /var/cache/bunkerweb /var/lib/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /usr/bin/bwcli && \
find /usr/share/bunkerweb -type f -exec chmod 0740 {} \; && \
find /usr/share/bunkerweb -type d -exec chmod 0750 {} \; && \
chown -R root:autoconf /var/cache/bunkerweb /var/lib/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /usr/bin/bwcli && \
chmod -R 770 /var/cache/bunkerweb /var/lib/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb && \
chmod 750 /usr/share/bunkerweb/cli/main.py /usr/share/bunkerweb/helpers/*.sh /usr/bin/bwcli /usr/share/bunkerweb/autoconf/main.py /usr/share/bunkerweb/deps/python/bin/* && \
mkdir /var/log/letsencrypt /var/lib/letsencrypt && \
chown root:nginx /var/log/letsencrypt /var/lib/letsencrypt && \
chmod 770 /var/log/letsencrypt /var/lib/letsencrypt
chmod 750 /usr/share/bunkerweb/cli/main.py /usr/share/bunkerweb/helpers/*.sh /usr/bin/bwcli /usr/share/bunkerweb/autoconf/main.py /usr/share/bunkerweb/deps/python/bin/*
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
@ -59,7 +62,7 @@ VOLUME /data /etc/nginx
WORKDIR /usr/share/bunkerweb/autoconf
USER nginx:nginx
USER autoconf:autoconf
HEALTHCHECK --interval=10s --timeout=10s --start-period=30s --retries=6 CMD /usr/share/bunkerweb/helpers/healthcheck-autoconf.sh

View File

@ -18,14 +18,10 @@ RUN apk add --no-cache --virtual .build-deps py3-pip && \
pip install --no-cache-dir --upgrade pip && \
pip install wheel && \
mkdir -p /usr/share/bunkerweb/deps/python && \
export MAKEFLAGS="-j$(nproc)" && \
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
apk del .build-deps
FROM nginx:1.24.0-alpine
# Copy dependencies
COPY --from=builder /usr/share/bunkerweb /usr/share/bunkerweb
# Copy files
# can't exclude deps from . so we are copying everything by hand
COPY src/bw/entrypoint.sh /usr/share/bunkerweb/entrypoint.sh
@ -43,6 +39,14 @@ COPY src/common/utils /usr/share/bunkerweb/utils
COPY src/VERSION /usr/share/bunkerweb/VERSION
COPY misc/*.ascii /usr/share/bunkerweb/misc/
FROM nginx:1.24.0-alpine
# Set default umask to prevent huge recursive chmod increasing the final image size
RUN umask 027
# Copy dependencies
COPY --from=builder --chown=0:101 /usr/share/bunkerweb /usr/share/bunkerweb
# Install runtime dependencies, pypi packages, move bwcli, create data folders and set permissions
RUN apk add --no-cache pcre bash python3 && \
cp /usr/share/bunkerweb/helpers/bwcli /usr/bin/ && \
@ -54,24 +58,18 @@ RUN apk add --no-cache pcre bash python3 && \
for dir in $(echo "configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs") ; do mkdir "/data/${dir}" ; done && \
chown -R root:nginx /data && \
chmod -R 770 /data && \
chown -R root:nginx /usr/share/bunkerweb /var/cache/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /usr/bin/bwcli && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type f -exec chmod 0740 {} \; ; done && \
for dir in $(echo "/usr/share/bunkerweb /etc/bunkerweb") ; do find ${dir} -type d -exec chmod 0750 {} \; ; done && \
chown -R root:nginx /var/cache/bunkerweb /etc/bunkerweb /var/tmp/bunkerweb /usr/bin/bwcli && \
chmod 770 /var/cache/bunkerweb /var/tmp/bunkerweb && \
chmod 750 /usr/share/bunkerweb/cli/main.py /usr/share/bunkerweb/gen/main.py /usr/share/bunkerweb/helpers/*.sh /usr/share/bunkerweb/entrypoint.sh /usr/bin/bwcli /usr/share/bunkerweb/deps/python/bin/* && \
chown -R root:nginx /etc/nginx && \
chmod -R 770 /etc/nginx && \
rm -f /var/log/nginx/* && \
mkdir /var/log/letsencrypt /var/lib/letsencrypt && \
chown root:nginx /var/log/letsencrypt /var/lib/letsencrypt && \
chmod 770 /var/log/letsencrypt /var/lib/letsencrypt && \
ln -s /proc/1/fd/2 /var/log/nginx/error.log && \
ln -s /proc/1/fd/2 /var/log/nginx/modsec_audit.log && \
ln -s /proc/1/fd/1 /var/log/nginx/access.log && \
ln -s /proc/1/fd/1 /var/log/nginx/jobs.log
ln -s /proc/1/fd/1 /var/log/nginx/access.log
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4" "curl>=8.1.0-r0" "libcurl>=8.1.0-r0"
VOLUME /data /etc/nginx

View File

@ -1,13 +1,12 @@
local class = require "middleclass"
local datastore = require "bunkerweb.datastore"
local utils = require "bunkerweb.utils"
local cjson = require "cjson"
local upload = require "resty.upload"
local datastore = require "bunkerweb.datastore"
local utils = require "bunkerweb.utils"
local cjson = require "cjson"
local upload = require "resty.upload"
local api = class("api")
local api = class("api")
api.global = { GET = {}, POST = {}, PUT = {}, DELETE = {} }
api.global = { GET = {}, POST = {}, PUT = {}, DELETE = {} }
function api:initialize()
self.datastore = datastore:new()
@ -141,12 +140,12 @@ api.global.GET["^/bans$"] = function(self)
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
"can't access " .. k .. " from datastore : " .. reason)
end
local ttl, err = self.datastore:ttl(k)
if not ttl then
local ok, ttl = self.datastore:ttl(k)
if not ok then
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
"can't access ttl " .. k .. " from datastore : " .. err)
"can't access ttl " .. k .. " from datastore : " .. ttl)
end
local ban = { ip = k:sub(9, #k), reason = reason, exp = ttl }
local ban = { ip = k:sub(9, #k), reason = reason, exp = math.floor(ttl) }
table.insert(data, ban)
end
end
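For reference, a small hypothetical helper mirroring the ok/ttl convention the bans endpoint now relies on (datastore:ttl() is assumed to return a success flag first, then the TTL or an error message, as used above):

-- hypothetical helper, not part of the diff
local function remaining_ttl(datastore, key)
    local ok, ttl = datastore:ttl(key)
    if not ok then
        -- on failure the second return value carries the error message
        return nil, ttl
    end
    -- the API response floors the TTL before exposing it
    return math.floor(ttl)
end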

View File

@ -1,19 +1,20 @@
local mlcache = require "resty.mlcache"
local logger = require "bunkerweb.logger"
local class = require "middleclass"
local cachestore = class("cachestore")
local mlcache = require "resty.mlcache"
local logger = require "bunkerweb.logger"
local utils = require "bunkerweb.utils"
local class = require "middleclass"
local cachestore = class("cachestore")
-- Instantiate mlcache object at module level (which will be cached when running init phase)
-- TODO : custom settings
local shm = "cachestore"
local ipc_shm = "cachestore_ipc"
local shm_miss = "cachestore_miss"
local shm_locks = "cachestore_locks"
local shm = "cachestore"
local ipc_shm = "cachestore_ipc"
local shm_miss = "cachestore_miss"
local shm_locks = "cachestore_locks"
if not ngx.shared.cachestore then
shm = "cachestore_stream"
ipc_shm = "cachestore_ipc_stream"
shm_miss = "cachestore_miss_stream"
shm_locks = "cachestore_locks_stream"
shm = "cachestore_stream"
ipc_shm = "cachestore_ipc_stream"
shm_miss = "cachestore_miss_stream"
shm_locks = "cachestore_locks_stream"
end
local cache, err = mlcache.new(
"cachestore",
@ -42,7 +43,7 @@ end
function cachestore:initialize(use_redis)
self.cache = cache
self.use_redis = use_redis or false
self.use_redis = (use_redis and utils.is_cosocket_available()) or false
self.logger = module_logger
end
@ -77,17 +78,20 @@ function cachestore:get(key)
clusterstore:close()
if ret[1] == ngx.null then
ret[1] = nil
end
if ret[2] < 0 then
ret[2] = -1
elseif ret[2] < 0 then
ret[2] = ret[2] + 1
end
return ret[1], nil, ret[2]
end
local callback_no_miss = function()
return nil, nil, -1
end
local value, err, hit_level
if self.use_redis then
value, err, hit_level = self.cache:get(key, nil, callback, key)
else
value, err, hit_level = self.cache:get(key)
value, err, hit_level = self.cache:get(key, nil, callback_no_miss)
end
if value == nil and err ~= nil then
return false, err
@ -98,14 +102,14 @@ end
function cachestore:set(key, value, ex)
if self.use_redis then
local ok, err = self.set_redis(key, value, ex)
local ok, err = self:set_redis(key, value, ex)
if not ok then
self.logger:log(ngx.ERR, err)
end
end
local ok, err
if ex then
ok, err = self.cache:set(key, {ttl = ex}, value)
ok, err = self.cache:set(key, { ttl = ex }, value)
else
ok, err = self.cache:set(key, nil, value)
end
@ -164,4 +168,8 @@ function cachestore:del_redis(key)
return true
end
function cachestore:purge()
return self.cache:purge(true)
end
return cachestore
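A minimal usage sketch of the updated cachestore, assuming the constructor and get/set signatures shown in this file (Redis is only used when USE_REDIS=yes and the current phase allows cosockets; key names and values are illustrative):

local utils = require "bunkerweb.utils"
-- build a cachestore; utils.new_cachestore() wires USE_REDIS and cosocket availability
local cachestore = utils.new_cachestore()
-- cache a value for one hour
local ok, err = cachestore:set("dns_www.example.com", '["192.0.2.1"]', 3600)
if not ok then
    ngx.log(ngx.ERR, "cachestore:set() failed : " .. err)
end
-- read it back : first value is a success flag, second the cached value (or an error message)
local ok, value = cachestore:get("dns_www.example.com")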

View File

@ -1,7 +1,7 @@
local class = require "middleclass"
local utils = require "bunkerweb.utils"
local logger = require "bunkerweb.logger"
local redis = require "resty.redis"
local class = require "middleclass"
local utils = require "bunkerweb.utils"
local logger = require "bunkerweb.logger"
local redis = require "resty.redis"
local clusterstore = class("clusterstore")
@ -42,7 +42,8 @@ function clusterstore:connect()
return false, err
end
-- Set timeouts
redis_client:set_timeouts(tonumber(self.variables["REDIS_TIMEOUT"]), tonumber(self.variables["REDIS_TIMEOUT"]), tonumber(self.variables["REDIS_TIMEOUT"]))
redis_client:set_timeouts(tonumber(self.variables["REDIS_TIMEOUT"]), tonumber(self.variables["REDIS_TIMEOUT"]),
tonumber(self.variables["REDIS_TIMEOUT"]))
-- Connect
local options = {
ssl = self.variables["REDIS_SSL"] == "yes",
@ -74,7 +75,8 @@ end
function clusterstore:close()
if self.redis_client then
-- Equivalent to close but keep a pool of connections
local ok, err = self.redis_client:set_keepalive(tonumber(self.variables["REDIS_KEEPALIVE_IDLE"]), tonumber(self.variables["REDIS_KEEPALIVE_POOL"]))
local ok, err = self.redis_client:set_keepalive(tonumber(self.variables["REDIS_KEEPALIVE_IDLE"]),
tonumber(self.variables["REDIS_KEEPALIVE_POOL"]))
self.redis_client = nil
return ok, err
end
@ -120,4 +122,4 @@ function clusterstore:multi(calls)
return true, "success", exec
end
return clusterstore
return clusterstore

View File

@ -1,5 +1,5 @@
local class = require "middleclass"
local datastore = class("datastore")
local datastore = class("datastore")
function datastore:initialize()
self.dict = ngx.shared.datastore
@ -48,4 +48,4 @@ function datastore:delete_all(pattern)
return true, "success"
end
return datastore
return datastore

View File

@ -7,7 +7,7 @@ helpers.load_plugin = function(json)
-- Open file
local file, err, nb = io.open(json, "r")
if not file then
return false, "can't load JSON at " .. json .. " : " .. err .. "(nb = " .. tostring(nb) .. ")"
return false, "can't load JSON at " .. json .. " : " .. err .. " (nb = " .. tostring(nb) .. ")"
end
-- Decode JSON
local ok, plugin = pcall(cjson.decode, file:read("*a"))
@ -17,20 +17,80 @@ helpers.load_plugin = function(json)
end
-- Check fields
local missing_fields = {}
local required_fields = {"id", "order", "name", "description", "version", "settings"}
local required_fields = { "id", "name", "description", "version", "settings", "stream" }
for i, field in ipairs(required_fields) do
if plugin[field] == nil then
valid_json = false
table.insert(missing_fields, field)
end
end
if #missing_fields > 0 then
return false, "missing field(s) " .. cjson.encode(missing_fields) .. " for JSON at " .. json
end
-- Try require
local plugin_lua, err = helpers.require_plugin(plugin.id)
if plugin_lua == false then
return false, err
end
-- Fill phases
local phases = utils.get_phases()
plugin.phases = {}
if plugin_lua then
for i, phase in ipairs(phases) do
if plugin_lua[phase] ~= nil then
table.insert(plugin.phases, phase)
end
end
end
-- Return plugin
return true, plugin
end
helpers.order_plugins = function(plugins)
-- Extract orders
local file, err, nb = io.open("/usr/share/bunkerweb/core/order.json", "r")
if not file then
return false, err .. " (nb = " .. tostring(nb) .. ")"
end
local ok, orders = pcall(cjson.decode, file:read("*a"))
file:close()
if not ok then
return false, "invalid order.json : " .. orders
end
-- Compute plugins/id/phases table
local plugins_phases = {}
for i, plugin in ipairs(plugins) do
plugins_phases[plugin.id] = {}
for j, phase in ipairs(plugin.phases) do
plugins_phases[plugin.id][phase] = true
end
end
-- Order result
local result_orders = {}
for i, phase in ipairs(utils.get_phases()) do
result_orders[phase] = {}
end
-- Fill order first
for phase, order in pairs(orders) do
for i, id in ipairs(order) do
local plugin = plugins_phases[id]
if plugin and plugin[phase] then
table.insert(result_orders[phase], id)
plugin[phase] = nil
end
end
end
-- Then append missing plugins to the end
for i, phase in ipairs(utils.get_phases()) do
for id, plugin in pairs(plugins_phases) do
if plugin[phase] then
table.insert(result_orders[phase], id)
plugin[phase] = nil
end
end
end
return true, result_orders
end
helpers.require_plugin = function(id)
-- Require call
local ok, plugin_lua = pcall(require, id .. "/" .. id)
@ -45,7 +105,7 @@ helpers.require_plugin = function(id)
return false, "missing new() method for plugin " .. id
end
-- Return plugin
return plugin_lua, "new() call successful for plugin " .. id
return plugin_lua, "require() call successful for plugin " .. id
end
helpers.new_plugin = function(plugin_lua)
@ -72,7 +132,7 @@ helpers.call_plugin = function(plugin, method)
end
-- Check values
local missing_values = {}
local required_values = {"ret", "msg"}
local required_values = { "ret", "msg" }
for i, value in ipairs(required_values) do
if ret[value] == nil then
table.insert(missing_values, value)
@ -106,6 +166,8 @@ helpers.fill_ctx = function()
data.http_user_agent = ngx.var.http_user_agent
data.http_host = ngx.var.http_host
data.server_name = ngx.var.server_name
data.http_content_type = ngx.var.http_content_type
data.http_origin = ngx.var.http_origin
-- IP data : global
local ip_is_global, err = utils.ip_is_global(data.remote_addr)
if ip_is_global == nil then
@ -126,4 +188,4 @@ helpers.fill_ctx = function()
return true, "ctx filled", errors
end
return helpers
return helpers
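A hedged sketch of what order_plugins() produces for a hypothetical order.json and plugin set (the ids, phases and JSON content below are invented for illustration):

local helpers = require "bunkerweb.helpers"
-- plugins as returned by load_plugin(), with their computed phases (ids are invented)
local plugins = {
    { id = "pluginA", phases = { "access" } },
    { id = "pluginB", phases = { "access", "log" } },
    { id = "pluginC", phases = { "access" } },
}
-- assuming a hypothetical /usr/share/bunkerweb/core/order.json of { "access": ["pluginA", "pluginB"] } :
local ok, order = helpers.order_plugins(plugins)
-- order.access -> { "pluginA", "pluginB", "pluginC" }  (explicit order first, then stragglers appended)
-- order.log    -> { "pluginB" }                        (phases absent from order.json are still filled)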

View File

@ -1,13 +1,13 @@
local errlog = require "ngx.errlog"
local class = require "middleclass"
local logger = class("logger")
local errlog = require "ngx.errlog"
local class = require "middleclass"
local logger = class("logger")
function logger:initialize(prefix)
self.prefix = string.upper(prefix)
end
function logger:log(level, msg)
errlog.raw_log(level, "[" .. self.prefix .. "] " .. msg)
errlog.raw_log(level, "[" .. self.prefix .. "] " .. msg)
end
return logger
return logger

View File

@ -2,7 +2,7 @@ local class = require "middleclass"
local logger = require "bunkerweb.logger"
local datastore = require "bunkerweb.datastore"
local utils = require "bunkerweb.utils"
local cjson = require "cjson"
local cjson = require "cjson"
local plugin = class("plugin")
function plugin:initialize(id)
@ -20,8 +20,17 @@ function plugin:initialize(id)
end
-- Store variables
local metadata = cjson.decode(encoded_metadata)
local multisite = false
local current_phase = ngx.get_phase()
for i, check_phase in ipairs({ "set", "access", "log", "preread" }) do
if current_phase == check_phase then
multisite = true
break
end
end
self.is_request = multisite
for k, v in pairs(metadata.settings) do
local value, err = utils.get_variable(k, v.context == "multisite" and ngx.get_phase() ~= "init")
local value, err = utils.get_variable(k, v.context == "multisite" and multisite)
if value == nil then
self.logger:log(ngx.ERR, "can't get " .. k .. " variable : " .. err)
end
@ -45,7 +54,7 @@ function plugin:get_id()
end
function plugin:ret(ret, msg, status, redirect)
return {ret = ret, msg = msg, status = status, redirect = redirect}
return { ret = ret, msg = msg, status = status, redirect = redirect }
end
return plugin
return plugin
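A minimal, hypothetical plugin skeleton built on top of the class above, showing how the parent constructor and plugin:ret() are meant to be used (the plugin id and messages are placeholders):

local class  = require "middleclass"
local plugin = require "bunkerweb.plugin"

local example = class("example", plugin)

function example:initialize()
    -- the parent constructor loads settings and computes self.is_request as shown above
    plugin.initialize(self, "example")
end

function example:access()
    -- helpers.call_plugin() expects at least the ret and msg fields built by plugin:ret()
    return self:ret(true, "example plugin did nothing")
end

return example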

View File

@ -1,18 +1,18 @@
local cdatastore = require "bunkerweb.datastore"
local mmdb = require "bunkerweb.mmdb"
local clogger = require "bunkerweb.logger"
local cdatastore = require "bunkerweb.datastore"
local mmdb = require "bunkerweb.mmdb"
local clogger = require "bunkerweb.logger"
local ipmatcher = require "resty.ipmatcher"
local resolver = require "resty.dns.resolver"
local session = require "resty.session"
local cjson = require "cjson"
local ipmatcher = require "resty.ipmatcher"
local resolver = require "resty.dns.resolver"
local session = require "resty.session"
local cjson = require "cjson"
local logger = clogger:new("UTILS")
local datastore = cdatastore:new()
local logger = clogger:new("UTILS")
local datastore = cdatastore:new()
local utils = {}
local utils = {}
utils.get_variable = function(var, site_search)
utils.get_variable = function(var, site_search)
-- Default site search to true
if site_search == nil then
site_search = true
@ -40,7 +40,7 @@ utils.get_variable = function(var, site_search)
return value, "success"
end
utils.has_variable = function(var, value)
utils.has_variable = function(var, value)
-- Get global variable
local check_value, err = datastore:get("variable_" .. var)
if not value then
@ -71,7 +71,7 @@ utils.has_variable = function(var, value)
return check_value == value, "success"
end
utils.has_not_variable = function(var, value)
utils.has_not_variable = function(var, value)
-- Get global variable
local check_value, err = datastore:get("variable_" .. var)
if not value then
@ -132,7 +132,7 @@ utils.get_multiple_variables = function(vars)
return result
end
utils.is_ip_in_networks = function(ip, networks)
utils.is_ip_in_networks = function(ip, networks)
-- Instantiate ipmatcher
local ipm, err = ipmatcher.new(networks)
if not ipm then
@ -146,15 +146,15 @@ utils.is_ip_in_networks = function(ip, networks)
return matched
end
utils.is_ipv4 = function(ip)
utils.is_ipv4 = function(ip)
return ipmatcher.parse_ipv4(ip)
end
utils.is_ipv6 = function(ip)
utils.is_ipv6 = function(ip)
return ipmatcher.parse_ipv6(ip)
end
utils.ip_is_global = function(ip)
utils.ip_is_global = function(ip)
-- Reserved, non public IPs
local reserved_ips = {
"0.0.0.0/8",
@ -201,7 +201,7 @@ utils.ip_is_global = function(ip)
return not matched, "success"
end
utils.get_integration = function()
utils.get_integration = function()
-- Check if already in datastore
local integration, err = datastore:get("misc_integration")
if integration then
@ -236,7 +236,7 @@ utils.get_integration = function()
if data:find("Alpine") then
integration = "docker"
end
-- Strange case ...
-- Strange case ...
else
integration = "unknown"
end
@ -252,7 +252,7 @@ utils.get_integration = function()
return integration
end
utils.get_version = function()
utils.get_version = function()
-- Check if already in datastore
local version, err = datastore:get("misc_version")
if version then
@ -274,7 +274,7 @@ utils.get_version = function()
return version
end
utils.get_reason = function()
utils.get_reason = function()
-- ngx.ctx
if ngx.ctx.reason then
return ngx.ctx.reason
@ -299,7 +299,7 @@ utils.get_reason = function()
return nil
end
utils.get_resolvers = function()
utils.get_resolvers = function()
-- Get resolvers from datastore if existing
local str_resolvers, err = datastore:get("misc_resolvers")
if str_resolvers then
@ -325,6 +325,14 @@ utils.get_resolvers = function()
end
utils.get_rdns = function(ip)
-- Check cache
local cachestore = utils.new_cachestore()
local ok, value = cachestore:get("rdns_" .. ip)
if not ok then
logger:log(ngx.ERR, "can't get rdns from cachestore : " .. value)
elseif value then
return cjson.decode(value), "success"
end
-- Get resolvers
local resolvers, err = utils.get_resolvers()
if not resolvers then
@ -339,57 +347,109 @@ utils.get_rdns = function(ip)
if not rdns then
return false, err
end
-- Our results
local ptrs = {}
local ret_err = "success"
-- Do rDNS query
local answers, err = rdns:reverse_query(ip)
if not answers then
return false, err
end
if answers.errcode then
return false, answers.errstr
end
-- Return first element
for i, answer in ipairs(answers) do
if answer.ptrdname then
return answer.ptrdname, "success"
logger:log(ngx.ERR, "error while doing reverse DNS query for " .. ip .. " : " .. err)
ret_err = err
else
if answers.errcode then
ret_err = answers.errstr
end
-- Extract all PTR
for i, answer in ipairs(answers) do
if answer.ptrdname then
table.insert(ptrs, answer.ptrdname)
logger:log(ngx.ERR, answer.ptrdname)
end
end
end
return false, nil
-- Save to cache
local ok, err = cachestore:set("rdns_" .. ip, cjson.encode(ptrs), 3600)
if not ok then
logger:log(ngx.ERR, "can't set rdns into cachestore : " .. err)
end
return ptrs, ret_err
end
utils.get_ips = function(fqdn)
utils.get_ips = function(fqdn, ipv6)
-- Check cache
local cachestore = utils.new_cachestore()
local ok, value = cachestore:get("dns_" .. fqdn)
if not ok then
logger:log(ngx.ERR, "can't get dns from cachestore : " .. value)
elseif value then
return cjson.decode(value), "success"
end
-- By default perform ipv6 lookups (only if USE_IPV6=yes)
if ipv6 == nil then
ipv6 = true
end
-- Get resolvers
local resolvers, err = utils.get_resolvers()
if not resolvers then
return false, err
end
-- Instantiante resolver
local rdns, err = resolver:new {
local res, err = resolver:new {
nameservers = resolvers,
retrans = 1,
timeout = 1000
}
if not rdns then
if not res then
return false, err
end
-- Query FQDN
local answers, err = rdns:query(fqdn, nil, {})
if not answers then
return false, err
end
if answers.errcode then
return {}, answers.errstr
end
-- Return all IPs
local ips = {}
for i, answer in ipairs(answers) do
if answer.address then
table.insert(ips, answer.addres)
-- Get query types : AAAA and A if using IPv6 / only A if not using IPv6
local qtypes = {}
if ipv6 then
local use_ipv6, err = utils.get_variable("USE_IPV6", false)
if not use_ipv6 then
logger:log(ngx.ERR, "can't get USE_IPV6 variable : " .. err)
elseif use_ipv6 == "yes" then
table.insert(qtypes, res.TYPE_AAAA)
end
end
return ips, "success"
table.insert(qtypes, res.TYPE_A)
-- Loop on qtypes
local res_answers = {}
local res_errors = {}
local ans_errors = {}
for i, qtype in ipairs(qtypes) do
-- Query FQDN
local answers, err = res:query(fqdn, { qtype = qtype }, {})
local qtype_str = qtype == res.TYPE_AAAA and "AAAA" or "A"
if not answers then
res_errors[qtype_str] = err
elseif answers.errcode then
ans_errors[qtype_str] = answers.errstr
else
table.insert(res_answers, answers)
end
end
for qtype, error in pairs(res_errors) do
logger:log(ngx.ERR, "error while doing " .. qtype .. " DNS query for " .. fqdn .. " : " .. error)
end
-- Extract all IPs
local ips = {}
for i, answers in ipairs(res_answers) do
for j, answer in ipairs(answers) do
if answer.address then
table.insert(ips, answer.address)
end
end
end
-- Save to cache
local ok, err = cachestore:set("dns_" .. fqdn, cjson.encode(ips), 3600)
if not ok then
logger:log(ngx.ERR, "can't set dns into cachestore : " .. err)
end
return ips, cjson.encode(res_errors) .. " " .. cjson.encode(ans_errors)
end
utils.get_country = function(ip)
utils.get_country = function(ip)
-- Check if mmdb is loaded
if not mmdb.country_db then
return false, "mmdb country not loaded"
@ -405,7 +465,7 @@ utils.get_country = function(ip)
return result.country.iso_code, "success"
end
utils.get_asn = function(ip)
utils.get_asn = function(ip)
-- Check if mmdp is loaded
if not mmdb.asn_db then
return false, "mmdb asn not loaded"
@ -421,10 +481,12 @@ utils.get_asn = function(ip)
return result.autonomous_system_number, "success"
end
utils.rand = function(nb)
utils.rand = function(nb, no_numbers)
local charset = {}
-- lowers, uppers and numbers
for i = 48, 57 do table.insert(charset, string.char(i)) end
if not no_numbers then
for i = 48, 57 do table.insert(charset, string.char(i)) end
end
for i = 65, 90 do table.insert(charset, string.char(i)) end
for i = 97, 122 do table.insert(charset, string.char(i)) end
local result = ""
@ -434,7 +496,7 @@ utils.rand = function(nb)
return result
end
utils.get_deny_status = function()
utils.get_deny_status = function()
-- Stream case
if ngx.ctx.bw and ngx.ctx.bw.kind == "stream" then
return 444
@ -448,67 +510,23 @@ utils.get_deny_status = function()
return tonumber(status)
end
utils.get_session = function()
utils.get_session = function(audience)
-- Session already in context
if ngx.ctx.bw.session then
return ngx.ctx.bw.session, ngx.ctx.bw.session_err, ngx.ctx.bw.session_exists, ngx.ctx.bw.session_refreshed
ngx.ctx.bw.session:set_audience(audience)
return ngx.ctx.bw.session
end
-- Open session and fill ctx
local _session, err, exists, refreshed = session.start()
ngx.ctx.bw.session_err = nil
local _session, err, exists, refreshed = session.start({ audience = audience })
if err and err ~= "missing session cookie" and err ~= "no session" then
logger:log(ngx.WARN, "can't start session : " .. err)
ngx.ctx.bw.session_err = err
logger:log(ngx.ERR, "session:start() error : " .. err)
end
_session:set_audience(audience)
ngx.ctx.bw.session = _session
ngx.ctx.bw.session_exists = exists
ngx.ctx.bw.session_refreshed = refreshed
ngx.ctx.bw.session_saved = false
ngx.ctx.bw.session_data = _session:get_data()
if not ngx.ctx.bw.session_data then
ngx.ctx.bw.session_data = {}
end
return _session, ngx.ctx.bw.session_err, exists, refreshed
return _session
end
utils.save_session = function()
-- Check if save is needed
if ngx.ctx.bw.session and not ngx.ctx.bw.session_saved then
ngx.ctx.bw.session:set_data(ngx.ctx.bw.session_data)
local ok, err = ngx.ctx.bw.session:save()
if err then
logger:log(ngx.ERR, "can't save session : " .. err)
return false, "can't save session : " .. err
end
ngx.ctx.bw.session_saved = true
return true, "session saved"
elseif ngx.ctx.bw.session_saved then
return true, "session already saved"
end
return true, "no session"
end
utils.set_session_var = function(key, value)
-- Set new data
if ngx.ctx.bw.session then
ngx.ctx.bw.session_data[key] = value
return true, "value set"
end
return false, "no session"
end
utils.get_session_var = function(key)
-- Get data
if ngx.ctx.bw.session then
if ngx.ctx.bw.session_data[key] then
return true, "data present", ngx.ctx.bw.session_data[key]
end
return true, "no data"
end
return false, "no session"
end
utils.is_banned = function(ip)
utils.is_banned = function(ip)
-- Check on local datastore
local reason, err = datastore:get("bans_ip_" .. ip)
if not reason and err ~= "not found" then
@ -571,7 +589,7 @@ utils.is_banned = function(ip)
return false, "not banned"
end
utils.add_ban = function(ip, reason, ttl)
utils.add_ban = function(ip, reason, ttl)
-- Set on local datastore
local ok, err = datastore:set("bans_ip_" .. ip, reason, ttl)
if not ok then
@ -600,4 +618,58 @@ utils.add_ban = function(ip, reason, ttl)
return true, "success"
end
return utils
utils.new_cachestore = function()
-- Check if redis is used
local use_redis, err = utils.get_variable("USE_REDIS", false)
if not use_redis then
logger:log(ngx.ERR, "can't get USE_REDIS variable : " .. err)
else
use_redis = use_redis == "yes"
end
-- Instantiate
return require "bunkerweb.cachestore":new(use_redis)
end
utils.regex_match = function(str, regex, options)
local all_options = "o"
if options then
all_options = all_options .. options
end
local match, err = ngx.re.match(str, regex, all_options)
if err then
logger:log(ngx.ERR, "error while matching regex " .. regex .. " with string " .. str)
return nil
end
return match
end
utils.get_phases = function()
return {
"init",
"init_worker",
"set",
"access",
"header",
"log",
"preread",
"log_stream",
"log_default"
}
end
utils.is_cosocket_available = function()
local phases = {
"timer",
"access",
"preread"
}
local current_phase = ngx.get_phase()
for i, phase in ipairs(phases) do
if current_phase == phase then
return true
end
end
return false
end
return utils
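A hedged usage sketch of the helpers added in this file (host names, IPs and patterns are examples only):

local utils = require "bunkerweb.utils"

-- A/AAAA lookup : the second return value aggregates resolver and answer errors
local ips, err = utils.get_ips("www.example.com")
for i, ip in ipairs(ips) do
    ngx.log(ngx.INFO, "resolved ip " .. ip)
end

-- reverse DNS : returns every PTR found, cached for one hour
local ptrs, err = utils.get_rdns("192.0.2.1")

-- case-insensitive match using the new regex helper (options are appended to the default "o")
if utils.regex_match("BunkerWeb", "^bunker", "i") then
    ngx.log(ngx.INFO, "matched")
end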

View File

@ -1,12 +1,12 @@
local middleclass = {
_VERSION = 'middleclass v4.1.1',
_DESCRIPTION = 'Object Orientation for Lua',
_URL = 'https://github.com/kikito/middleclass',
_LICENSE = [[
_VERSION = 'middleclass v4.1.1',
_DESCRIPTION = 'Object Orientation for Lua',
_URL = 'https://github.com/kikito/middleclass',
_LICENSE = [[
MIT LICENSE
Copyright (c) 2011 Enrique García Cota
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
@ -14,10 +14,10 @@ local middleclass = {
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
@ -26,169 +26,179 @@ local middleclass = {
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
]]
}
local function _createIndexWrapper(aClass, f)
if f == nil then
return aClass.__instanceDict
elseif type(f) == "function" then
return function(self, name)
local value = aClass.__instanceDict[name]
if value ~= nil then
return value
else
return (f(self, name))
end
end
else -- if type(f) == "table" then
return function(self, name)
local value = aClass.__instanceDict[name]
if value ~= nil then
return value
else
return f[name]
end
end
end
end
local function _propagateInstanceMethod(aClass, name, f)
f = name == "__index" and _createIndexWrapper(aClass, f) or f
aClass.__instanceDict[name] = f
for subclass in pairs(aClass.subclasses) do
if rawget(subclass.__declaredMethods, name) == nil then
_propagateInstanceMethod(subclass, name, f)
end
end
end
local function _declareInstanceMethod(aClass, name, f)
aClass.__declaredMethods[name] = f
if f == nil and aClass.super then
f = aClass.super.__instanceDict[name]
end
_propagateInstanceMethod(aClass, name, f)
end
local function _tostring(self) return "class " .. self.name end
local function _call(self, ...) return self:new(...) end
local function _createClass(name, super)
local dict = {}
dict.__index = dict
local aClass = {
name = name,
super = super,
static = {},
__instanceDict = dict,
__declaredMethods = {},
subclasses = setmetatable({}, { __mode = 'k' })
}
local function _createIndexWrapper(aClass, f)
if f == nil then
return aClass.__instanceDict
elseif type(f) == "function" then
return function(self, name)
local value = aClass.__instanceDict[name]
if value ~= nil then
return value
else
return (f(self, name))
if super then
setmetatable(aClass.static, {
__index = function(_, k)
local result = rawget(dict, k)
if result == nil then
return super.static[k]
end
return result
end
else -- if type(f) == "table" then
return function(self, name)
local value = aClass.__instanceDict[name]
if value ~= nil then
return value
else
return f[name]
end
end
end
})
else
setmetatable(aClass.static, { __index = function(_, k) return rawget(dict, k) end })
end
local function _propagateInstanceMethod(aClass, name, f)
f = name == "__index" and _createIndexWrapper(aClass, f) or f
aClass.__instanceDict[name] = f
for subclass in pairs(aClass.subclasses) do
if rawget(subclass.__declaredMethods, name) == nil then
_propagateInstanceMethod(subclass, name, f)
end
end
setmetatable(aClass, {
__index = aClass.static,
__tostring = _tostring,
__call = _call,
__newindex = _declareInstanceMethod
})
return aClass
end
local function _includeMixin(aClass, mixin)
assert(type(mixin) == 'table', "mixin must be a table")
for name, method in pairs(mixin) do
if name ~= "included" and name ~= "static" then aClass[name] = method end
end
local function _declareInstanceMethod(aClass, name, f)
aClass.__declaredMethods[name] = f
if f == nil and aClass.super then
f = aClass.super.__instanceDict[name]
end
_propagateInstanceMethod(aClass, name, f)
for name, method in pairs(mixin.static or {}) do
aClass.static[name] = method
end
local function _tostring(self) return "class " .. self.name end
local function _call(self, ...) return self:new(...) end
local function _createClass(name, super)
local dict = {}
dict.__index = dict
local aClass = { name = name, super = super, static = {},
__instanceDict = dict, __declaredMethods = {},
subclasses = setmetatable({}, {__mode='k'}) }
if super then
setmetatable(aClass.static, {
__index = function(_,k)
local result = rawget(dict,k)
if result == nil then
return super.static[k]
end
return result
end
})
else
setmetatable(aClass.static, { __index = function(_,k) return rawget(dict,k) end })
end
setmetatable(aClass, { __index = aClass.static, __tostring = _tostring,
__call = _call, __newindex = _declareInstanceMethod })
return aClass
end
local function _includeMixin(aClass, mixin)
assert(type(mixin) == 'table', "mixin must be a table")
for name,method in pairs(mixin) do
if name ~= "included" and name ~= "static" then aClass[name] = method end
end
for name,method in pairs(mixin.static or {}) do
aClass.static[name] = method
end
if type(mixin.included)=="function" then mixin:included(aClass) end
return aClass
end
local DefaultMixin = {
__tostring = function(self) return "instance of " .. tostring(self.class) end,
initialize = function(self, ...) end,
isInstanceOf = function(self, aClass)
return type(aClass) == 'table'
and type(self) == 'table'
and (self.class == aClass
or type(self.class) == 'table'
and type(self.class.isSubclassOf) == 'function'
and self.class:isSubclassOf(aClass))
if type(mixin.included) == "function" then mixin:included(aClass) end
return aClass
end
local DefaultMixin = {
__tostring = function(self) return "instance of " .. tostring(self.class) end,
initialize = function(self, ...)
end,
isInstanceOf = function(self, aClass)
return type(aClass) == 'table'
and type(self) == 'table'
and (self.class == aClass
or type(self.class) == 'table'
and type(self.class.isSubclassOf) == 'function'
and self.class:isSubclassOf(aClass))
end,
static = {
allocate = function(self)
assert(type(self) == 'table', "Make sure that you are using 'Class:allocate' instead of 'Class.allocate'")
return setmetatable({ class = self }, self.__instanceDict)
end,
static = {
allocate = function(self)
assert(type(self) == 'table', "Make sure that you are using 'Class:allocate' instead of 'Class.allocate'")
return setmetatable({ class = self }, self.__instanceDict)
end,
new = function(self, ...)
assert(type(self) == 'table', "Make sure that you are using 'Class:new' instead of 'Class.new'")
local instance = self:allocate()
instance:initialize(...)
return instance
end,
subclass = function(self, name)
assert(type(self) == 'table', "Make sure that you are using 'Class:subclass' instead of 'Class.subclass'")
assert(type(name) == "string", "You must provide a name(string) for your class")
local subclass = _createClass(name, self)
for methodName, f in pairs(self.__instanceDict) do
if not (methodName == "__index" and type(f) == "table") then
_propagateInstanceMethod(subclass, methodName, f)
end
new = function(self, ...)
assert(type(self) == 'table', "Make sure that you are using 'Class:new' instead of 'Class.new'")
local instance = self:allocate()
instance:initialize(...)
return instance
end,
subclass = function(self, name)
assert(type(self) == 'table', "Make sure that you are using 'Class:subclass' instead of 'Class.subclass'")
assert(type(name) == "string", "You must provide a name(string) for your class")
local subclass = _createClass(name, self)
for methodName, f in pairs(self.__instanceDict) do
if not (methodName == "__index" and type(f) == "table") then
_propagateInstanceMethod(subclass, methodName, f)
end
subclass.initialize = function(instance, ...) return self.initialize(instance, ...) end
self.subclasses[subclass] = true
self:subclassed(subclass)
return subclass
end,
subclassed = function(self, other) end,
isSubclassOf = function(self, other)
return type(other) == 'table' and
type(self.super) == 'table' and
( self.super == other or self.super:isSubclassOf(other) )
end,
include = function(self, ...)
assert(type(self) == 'table', "Make sure you that you are using 'Class:include' instead of 'Class.include'")
for _,mixin in ipairs({...}) do _includeMixin(self, mixin) end
return self
end
}
subclass.initialize = function(instance, ...) return self.initialize(instance, ...) end
self.subclasses[subclass] = true
self:subclassed(subclass)
return subclass
end,
subclassed = function(self, other)
end,
isSubclassOf = function(self, other)
return type(other) == 'table' and
type(self.super) == 'table' and
(self.super == other or self.super:isSubclassOf(other))
end,
include = function(self, ...)
assert(type(self) == 'table', "Make sure you that you are using 'Class:include' instead of 'Class.include'")
for _, mixin in ipairs({ ... }) do _includeMixin(self, mixin) end
return self
end
}
function middleclass.class(name, super)
assert(type(name) == 'string', "A name (string) is needed for the new class")
return super and super:subclass(name) or _includeMixin(_createClass(name), DefaultMixin)
end
setmetatable(middleclass, { __call = function(_, ...) return middleclass.class(...) end })
return middleclass
}
function middleclass.class(name, super)
assert(type(name) == 'string', "A name (string) is needed for the new class")
return super and super:subclass(name) or _includeMixin(_createClass(name), DefaultMixin)
end
setmetatable(middleclass, { __call = function(_, ...) return middleclass.class(...) end })
return middleclass

View File

@ -26,7 +26,7 @@ def format_remaining_time(seconds):
if minutes > 0:
time_parts.append(f"{int(minutes)} minute{'' if minutes == 1 else 's'}")
if seconds > 0:
time_parts.append(f"{seconds:.2f} second{'' if seconds == 1 else 's'}")
time_parts.append(f"{seconds} second{'' if seconds == 1 else 's'}")
if len(time_parts) > 1:
time_parts[-1] = f"and {time_parts[-1]}"

View File

@ -9,12 +9,18 @@ server {
{% if LISTEN_HTTP == "yes" +%}
listen 0.0.0.0:{{ HTTP_PORT }} default_server {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% endif %}
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ HTTP_PORT }} default_server {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% endif %}
# HTTPS listen
{% set os = import("os") %}
{% if os.path.isfile("/var/cache/bunkerweb/default-server-cert/cert.pem") +%}
{% if has_variable(all, "USE_CUSTOM_SSL", "yes") or has_variable(all, "AUTO_LETS_ENCRYPT", "yes") or has_variable(all, "GENERATE_SELF_SIGNED_SSL", "yes") +%}
listen 0.0.0.0:{{ HTTPS_PORT }} ssl {% if HTTP2 == "yes" %}http2{% endif %} default_server {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ HTTPS_PORT }} ssl {% if HTTP2 == "yes" %}http2{% endif %} default_server {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% endif %}
ssl_certificate /var/cache/bunkerweb/default-server-cert/cert.pem;
ssl_certificate_key /var/cache/bunkerweb/default-server-cert/cert.key;
{% endif %}
@ -56,19 +62,23 @@ server {
end
logger:log(ngx.INFO, "ngx.ctx filled (ret = " .. ret .. ")")
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call log_default() methods
logger:log(ngx.INFO, "calling log_default() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.log_default) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -81,15 +91,15 @@ server {
if not ok then
logger:log(ngx.ERR, plugin_obj)
else
local ok, ret = helpers.call_plugin(plugin_obj, "log")
local ok, ret = helpers.call_plugin(plugin_obj, "log_default")
if not ok then
logger:log(ngx.ERR, ret)
else
logger:log(ngx.INFO, plugin.id .. ":log_default() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":log_default() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method log_default() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method log_default() is not defined")
end
end
end

View File

@ -34,12 +34,12 @@ keepalive_timeout 15;
send_timeout 10;
# resolvers to use
resolver {{ DNS_RESOLVERS }} ipv6=off;
resolver {{ DNS_RESOLVERS }} {% if USE_IPV6 == "no" %}ipv6=off{% endif %};
# remove ports when sending redirects
port_in_redirect off;
# lua path and dicts
# lua configs
lua_package_path "/usr/share/bunkerweb/lua/?.lua;/usr/share/bunkerweb/core/?.lua;/etc/bunkerweb/plugins/?.lua;/usr/share/bunkerweb/deps/lib/lua/?.lua;;";
lua_package_cpath "/usr/share/bunkerweb/deps/lib/?.so;/usr/share/bunkerweb/deps/lib/lua/?.so;;";
lua_ssl_trusted_certificate "/usr/share/bunkerweb/misc/root-ca.pem";

View File

@ -11,6 +11,13 @@ local logger = clogger:new("INIT")
local datastore = cdatastore:new()
logger:log(ngx.NOTICE, "init phase started")
-- Purge cache
local cachestore = require "bunkerweb.cachestore":new()
local ok, err = cachestore:purge()
if not ok then
logger:log(ngx.ERR, "can't purge cachestore : " .. err)
end
-- Remove previous data from the datastore
logger:log(ngx.NOTICE, "deleting old keys from datastore ...")
local data_keys = {"^plugin_", "^variable_", "^plugins$", "^api_", "^misc_"}
@ -33,7 +40,7 @@ if not file then
end
file:close()
for line in io.lines("/etc/nginx/variables.env") do
local variable, value = line:match("(.+)=(.*)")
local variable, value = line:match("^([^=]+)=(.*)$")
local ok, err = datastore:set("variable_" .. variable, value)
if not ok then
logger:log(ngx.ERR, "can't save variable " .. variable .. " into datastore : " .. err)
@ -85,9 +92,6 @@ for i, plugin_path in ipairs(plugin_paths) do
logger:log(ngx.ERR, "can't save " .. plugin.id .. " into datastore : " .. err)
else
table.insert(plugins, plugin)
table.sort(plugins, function (a, b)
return a.order < b.order
end)
logger:log(ngx.NOTICE, "loaded plugin " .. plugin.id .. " v" .. plugin.version)
end
end
@ -98,13 +102,28 @@ if not ok then
logger:log(ngx.ERR, "can't save plugins into datastore : " .. err)
return false
end
logger:log(ngx.NOTICE, "saved plugins into datastore")
-- Call init() methodatastore
logger:log(ngx.NOTICE, "saving plugins order into datastore ...")
local ok, order = helpers.order_plugins(plugins)
if not ok then
logger:log(ngx.ERR, "can't compute plugins order : " .. order)
return false
end
for phase, id_list in pairs(order) do
logger:log(ngx.NOTICE, "plugins order for phase " .. phase .. " : " .. cjson.encode(id_list))
end
local ok, err = datastore:set("plugins_order", cjson.encode(order))
if not ok then
logger:log(ngx.ERR, "can't save plugins order into datastore : " .. err)
return false
end
logger:log(ngx.NOTICE, "saved plugins order into datastore")
-- Call init() method
logger:log(ngx.NOTICE, "calling init() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order["init"]) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -121,9 +140,9 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":init() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":init() call failed : " .. ret.msg)
else
logger:log(ngx.NOTICE, plugin.id .. ":init() call successful : " .. ret.msg)
logger:log(ngx.NOTICE, plugin_id .. ":init() call successful : " .. ret.msg)
end
end
else

View File

@ -1,138 +1,158 @@
init_by_lua_block {
local class = require "middleclass"
local clogger = require "bunkerweb.logger"
local helpers = require "bunkerweb.helpers"
local cdatastore = require "bunkerweb.datastore"
local cjson = require "cjson"
-- Start init phase
local logger = clogger:new("INIT-STREAM")
local datastore = cdatastore:new()
logger:log(ngx.NOTICE, "init-stream phase started")
-- Remove previous data from the datastore
logger:log(ngx.NOTICE, "deleting old keys from datastore ...")
local data_keys = {"^plugin_", "^variable_", "^plugins$", "^api_", "^misc_"}
for i, key in pairs(data_keys) do
local ok, err = datastore:delete_all(key)
local class = require "middleclass"
local clogger = require "bunkerweb.logger"
local helpers = require "bunkerweb.helpers"
local cdatastore = require "bunkerweb.datastore"
local cjson = require "cjson"
-- Start init phase
local logger = clogger:new("INIT-STREAM")
local datastore = cdatastore:new()
logger:log(ngx.NOTICE, "init-stream phase started")
-- Purge cache
local cachestore = require "bunkerweb.cachestore":new()
local ok, err = cachestore:purge()
if not ok then
logger:log(ngx.ERR, "can't delete " .. key .. " from datastore : " .. err)
return false
logger:log(ngx.ERR, "can't purge cachestore : " .. err)
end
logger:log(ngx.INFO, "deleted " .. key .. " from datastore")
end
logger:log(ngx.NOTICE, "deleted old keys from datastore")
-- Load variables into the datastore
logger:log(ngx.NOTICE, "saving variables into datastore ...")
local file = io.open("/etc/nginx/variables.env")
if not file then
logger:log(ngx.ERR, "can't open /etc/nginx/variables.env file")
return false
end
file:close()
for line in io.lines("/etc/nginx/variables.env") do
local variable, value = line:match("(.+)=(.*)")
local ok, err = datastore:set("variable_" .. variable, value)
if not ok then
logger:log(ngx.ERR, "can't save variable " .. variable .. " into datastore : " .. err)
return false
end
logger:log(ngx.INFO, "saved variable " .. variable .. "=" .. value .. " into datastore")
end
logger:log(ngx.NOTICE, "saved variables into datastore")
-- Set API values into the datastore
logger:log(ngx.NOTICE, "saving API values into datastore ...")
local value, err = datastore:get("variable_USE_API")
if not value then
logger:log(ngx.ERR, "can't get variable USE_API from the datastore : " .. err)
return false
end
if value == "yes" then
local value, err = datastore:get("variable_API_WHITELIST_IP")
if not value then
logger:log(ngx.ERR, "can't get variable API_WHITELIST_IP from the datastore : " .. err)
return false
end
local whitelists = {}
for whitelist in value:gmatch("%S+") do
table.insert(whitelists, whitelist)
end
local ok, err = datastore:set("api_whitelist_ip", cjson.encode(whitelists))
if not ok then
logger:log(ngx.ERR, "can't save API whitelist_ip to datastore : " .. err)
return false
end
logger:log(ngx.INFO, "saved API whitelist_ip into datastore")
end
logger:log(ngx.NOTICE, "saved API values into datastore")
-- Load plugins into the datastore
logger:log(ngx.NOTICE, "saving plugins into datastore ...")
local plugins = {}
local plugin_paths = {"/usr/share/bunkerweb/core", "/etc/bunkerweb/plugins"}
for i, plugin_path in ipairs(plugin_paths) do
local paths = io.popen("find -L " .. plugin_path .. " -maxdepth 1 -type d ! -path " .. plugin_path)
for path in paths:lines() do
local ok, plugin = helpers.load_plugin(path .. "/plugin.json")
-- Remove previous data from the datastore
logger:log(ngx.NOTICE, "deleting old keys from datastore ...")
local data_keys = {"^plugin_", "^variable_", "^plugins$", "^api_", "^misc_"}
for i, key in pairs(data_keys) do
local ok, err = datastore:delete_all(key)
if not ok then
logger:log(ngx.ERR, plugin)
else
local ok, err = datastore:set("plugin_" .. plugin.id, cjson.encode(plugin))
if not ok then
logger:log(ngx.ERR, "can't save " .. plugin.id .. " into datastore : " .. err)
else
table.insert(plugins, plugin)
table.sort(plugins, function (a, b)
return a.order < b.order
end)
logger:log(ngx.NOTICE, "loaded plugin " .. plugin.id .. " v" .. plugin.version)
end
logger:log(ngx.ERR, "can't delete " .. key .. " from datastore : " .. err)
return false
end
logger:log(ngx.INFO, "deleted " .. key .. " from datastore")
end
end
local ok, err = datastore:set("plugins", cjson.encode(plugins))
if not ok then
logger:log(ngx.ERR, "can't save plugins into datastore : " .. err)
return false
end
logger:log(ngx.NOTICE, "saved plugins into datastore")
-- Call init() methodatastore
logger:log(ngx.NOTICE, "calling init() methods of plugins ...")
for i, plugin in ipairs(plugins) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
logger:log(ngx.NOTICE, err)
else
-- Check if plugin has init method
if plugin_lua.init ~= nil then
-- New call
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
logger:log(ngx.NOTICE, "deleted old keys from datastore")
-- Load variables into the datastore
logger:log(ngx.NOTICE, "saving variables into datastore ...")
local file = io.open("/etc/nginx/variables.env")
if not file then
logger:log(ngx.ERR, "can't open /etc/nginx/variables.env file")
return false
end
file:close()
for line in io.lines("/etc/nginx/variables.env") do
local variable, value = line:match("^([^=]+)=(.*)$")
local ok, err = datastore:set("variable_" .. variable, value)
if not ok then
logger:log(ngx.ERR, "can't save variable " .. variable .. " into datastore : " .. err)
return false
end
logger:log(ngx.INFO, "saved variable " .. variable .. "=" .. value .. " into datastore")
end
logger:log(ngx.NOTICE, "saved variables into datastore")
-- Set API values into the datastore
logger:log(ngx.NOTICE, "saving API values into datastore ...")
local value, err = datastore:get("variable_USE_API")
if not value then
logger:log(ngx.ERR, "can't get variable USE_API from the datastore : " .. err)
return false
end
if value == "yes" then
local value, err = datastore:get("variable_API_WHITELIST_IP")
if not value then
logger:log(ngx.ERR, "can't get variable API_WHITELIST_IP from the datastore : " .. err)
return false
end
local whitelists = {}
for whitelist in value:gmatch("%S+") do
table.insert(whitelists, whitelist)
end
local ok, err = datastore:set("api_whitelist_ip", cjson.encode(whitelists))
if not ok then
logger:log(ngx.ERR, "can't save API whitelist_ip to datastore : " .. err)
return false
end
logger:log(ngx.INFO, "saved API whitelist_ip into datastore")
end
logger:log(ngx.NOTICE, "saved API values into datastore")
-- Load plugins into the datastore
logger:log(ngx.NOTICE, "saving plugins into datastore ...")
local plugins = {}
local plugin_paths = {"/usr/share/bunkerweb/core", "/etc/bunkerweb/plugins"}
for i, plugin_path in ipairs(plugin_paths) do
local paths = io.popen("find -L " .. plugin_path .. " -maxdepth 1 -type d ! -path " .. plugin_path)
for path in paths:lines() do
local ok, plugin = helpers.load_plugin(path .. "/plugin.json")
if not ok then
logger:log(ngx.ERR, plugin_obj)
logger:log(ngx.ERR, plugin)
else
local ok, ret = helpers.call_plugin(plugin_obj, "init")
local ok, err = datastore:set("plugin_" .. plugin.id, cjson.encode(plugin))
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":init() call failed : " .. ret.msg)
logger:log(ngx.ERR, "can't save " .. plugin.id .. " into datastore : " .. err)
else
logger:log(ngx.NOTICE, plugin.id .. ":init() call successful : " .. ret.msg)
table.insert(plugins, plugin)
logger:log(ngx.NOTICE, "loaded plugin " .. plugin.id .. " v" .. plugin.version)
end
end
else
logger:log(ngx.NOTICE, "skipped execution of " .. plugin.id .. " because method init() is not defined")
end
end
end
logger:log(ngx.NOTICE, "called init() methods of plugins")
logger:log(ngx.NOTICE, "init-stream phase ended")
}
local ok, err = datastore:set("plugins", cjson.encode(plugins))
if not ok then
logger:log(ngx.ERR, "can't save plugins into datastore : " .. err)
return false
end
logger:log(ngx.NOTICE, "saving plugins order into datastore ...")
local ok, order = helpers.order_plugins(plugins)
if not ok then
logger:log(ngx.ERR, "can't compute plugins order : " .. order)
return false
end
for phase, id_list in pairs(order) do
logger:log(ngx.NOTICE, "plugins order for phase " .. phase .. " : " .. cjson.encode(id_list))
end
local ok, err = datastore:set("plugins_order", cjson.encode(order))
if not ok then
logger:log(ngx.ERR, "can't save plugins order into datastore : " .. err)
return false
end
logger:log(ngx.NOTICE, "saved plugins order into datastore")
-- Call init() method
logger:log(ngx.NOTICE, "calling init() methods of plugins ...")
for i, plugin_id in ipairs(order["init"]) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
logger:log(ngx.NOTICE, err)
else
-- Check if plugin has init method
if plugin_lua.init ~= nil then
-- New call
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
if not ok then
logger:log(ngx.ERR, plugin_obj)
else
local ok, ret = helpers.call_plugin(plugin_obj, "init")
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin_id .. ":init() call failed : " .. ret.msg)
else
logger:log(ngx.NOTICE, plugin_id .. ":init() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.NOTICE, "skipped execution of " .. plugin_id .. " because method init() is not defined")
end
end
end
logger:log(ngx.NOTICE, "called init() methods of plugins")
logger:log(ngx.NOTICE, "init-stream phase ended")
}

View File

@ -1,13 +1,19 @@
lua_shared_dict ready_lock 16k;
lua_shared_dict worker_lock 16k;
init_worker_by_lua_block {
-- Our timer function
local ready_log = function(premature)
local ready_work = function(premature)
-- Libs
local helpers = require "bunkerweb.helpers"
local cjson = require "cjson"
-- Instantiate objects
local logger = require "bunkerweb.logger":new("INIT")
local logger = require "bunkerweb.logger":new("INIT-WORKER")
local datastore = require "bunkerweb.datastore":new()
-- Don't print the ready log if we are in loading state
-- Don't go further we are in loading state
local is_loading, err = require "bunkerweb.utils".get_variable("IS_LOADING", false)
if not is_loading then
logger:log(ngx.ERR, "utils.get_variable() failed : " .. err)
@ -15,34 +21,101 @@ local ready_log = function(premature)
elseif is_loading == "yes" then
return
end
-- Instantiate lock
local lock = require "resty.lock":new("ready_lock")
local lock = require "resty.lock":new("worker_lock")
if not lock then
logger:log(ngx.ERR, "lock:new() failed : " .. err)
return
end
-- Acquire lock
local elapsed, err = lock:lock("ready")
if elapsed == nil then
logger:log(ngx.ERR, "lock:lock() failed : " .. err)
else
-- Display ready log
local ok, err = datastore:get("misc_ready")
if not ok and err ~= "not found" then
logger:log(ngx.ERR, "datastore:get() failed : " .. err)
elseif not ok and err == "not found" then
logger:log(ngx.NOTICE, "BunkerWeb is ready to fool hackers ! 🚀")
local ok, err = datastore:set("misc_ready", "ok")
if not ok then
logger:log(ngx.ERR, "datastore:set() failed : " .. err)
return
end
-- Check if work is done
local ok, err = datastore:get("misc_ready")
if not ok and err ~= "not found" then
logger:log(ngx.ERR, "datastore:get() failed : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
if ok then
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
logger:log(ngx.INFO, "init_worker phase started")
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
order = cjson.decode(order)
-- Call init_worker() methods
logger:log(ngx.INFO, "calling init_worker() methods of plugins ...")
for i, plugin_id in ipairs(order.init_worker) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
logger:log(ngx.INFO, err)
else
-- Check if plugin has init_worker method
if plugin_lua.init_worker ~= nil then
-- New call
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
if not ok then
logger:log(ngx.ERR, plugin_obj)
else
local ok, ret = helpers.call_plugin(plugin_obj, "init_worker")
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin_id .. ":init_worker() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin_id .. ":init_worker() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method init_worker() is not defined")
end
end
end
-- Release lock
lock:unlock()
logger:log(ngx.INFO, "called init_worker() methods of plugins")
-- End
local ok, err = datastore:set("misc_ready", "ok")
if not ok then
logger:log(ngx.ERR, "datastore:set() failed : " .. err)
end
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
logger:log(ngx.INFO, "init phase ended")
logger:log(ngx.NOTICE, "BunkerWeb is ready to fool hackers ! 🚀")
end
-- Start timer
ngx.timer.at(5, ready_log)
ngx.timer.at(5, ready_work)
}
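For clarity, this block and the phases below all consume a plugins_order table taken from the datastore. Its exact content is produced elsewhere (by the scheduler) and is not part of this diff; the following is only a minimal sketch of the assumed shape, with hypothetical plugin IDs, showing how order.init_worker is iterated.
-- Minimal sketch (assumption) : shape of plugins_order as stored in the
-- datastore, JSON-encoded, one list of plugin IDs per phase. IDs are examples.
local cjson = require "cjson"
local encoded = cjson.encode({
	init_worker = { "bunkernet", "blacklist" },
	access = { "antibot", "blacklist", "country" },
	header = { "cors" },
	log = { "badbehavior", "bunkernet" }
})
local order = cjson.decode(encoded)
for i, plugin_id in ipairs(order.init_worker) do
	print("would call " .. plugin_id .. ":init_worker()")
end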

View File

@ -42,21 +42,25 @@ else
logger:log(ngx.INFO, "IP " .. ngx.ctx.bw.remote_addr .. " is not banned")
end
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call access() methods
logger:log(ngx.INFO, "calling access() methods of plugins ...")
local status = nil
local redirect = nil
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.access) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -73,39 +77,39 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":access() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":access() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":access() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":access() call successful : " .. ret.msg)
end
if ret.status then
if ret.status == utils.get_deny_status() then
ngx.ctx.reason = plugin.id
logger:log(ngx.WARN, "denied access from " .. plugin.id .. " : " .. ret.msg)
ngx.ctx.reason = plugin_id
logger:log(ngx.WARN, "denied access from " .. plugin_id .. " : " .. ret.msg)
else
logger:log(ngx.NOTICE, plugin.id .. " returned status " .. tostring(ret.status) .. " : " .. ret.msg)
logger:log(ngx.NOTICE, plugin_id .. " returned status " .. tostring(ret.status) .. " : " .. ret.msg)
end
status = ret.status
break
elseif ret.redirect then
logger:log(ngx.NOTICE, plugin.id .. " redirect to " .. ret.redirect .. " : " .. ret.msg)
logger:log(ngx.NOTICE, plugin_id .. " redirect to " .. ret.redirect .. " : " .. ret.msg)
redirect = ret.redirect
break
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method access() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method access() is not defined")
end
end
end
logger:log(ngx.INFO, "called access() methods of plugins")
-- Save session if needed
local ok, err = utils.save_session()
if not ok then
logger:log(ngx.ERR, "can't save session : " .. err)
else
logger:log(ngx.INFO, "session save return : " .. err)
end
-- local ok, err = utils.save_session()
-- if not ok then
-- logger:log(ngx.ERR, "can't save session : " .. err)
-- else
-- logger:log(ngx.INFO, "session save return : " .. err)
-- end
logger:log(ngx.INFO, "access phase ended")

View File

@ -6,14 +6,8 @@ local helpers = require "bunkerweb.helpers"
local cdatastore = require "bunkerweb.datastore"
local cjson = require "cjson"
-- Don't process internal requests
local logger = clogger:new("HEADER")
if ngx.req.is_internal() then
logger:log(ngx.INFO, "skipped header phase because request is internal")
return true
end
-- Start set phase
local logger = clogger:new("HEADER")
local datastore = cdatastore:new()
logger:log(ngx.INFO, "header phase started")
@ -29,19 +23,23 @@ elseif errors then
end
logger:log(ngx.INFO, "ngx.ctx filled (ret = " .. ret .. ")")
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call header() methods
logger:log(ngx.INFO, "calling header() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.header) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -58,13 +56,13 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":header() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":header() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":header() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":header() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method header() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method header() is not defined")
end
end
end

View File

@ -23,19 +23,23 @@ elseif errors then
end
logger:log(ngx.INFO, "ngx.ctx filled (ret = " .. ret .. ")")
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call log() methods
logger:log(ngx.INFO, "calling log() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.log) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -52,13 +56,13 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":log() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":log() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":log() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":log() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method log() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method log() is not defined")
end
end
end

View File

@ -6,6 +6,9 @@ server {
{% if LISTEN_HTTP == "yes" +%}
listen 0.0.0.0:{{ HTTP_PORT }}{% if MULTISITE == "no" and DISABLE_DEFAULT_SERVER == "no" %} default_server{% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol{% endif %};
{% endif %}
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ HTTP_PORT }}{% if MULTISITE == "no" and DISABLE_DEFAULT_SERVER == "no" %} default_server{% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol{% endif %};
{% endif %}
index index.php index.html index.htm;

View File

@ -38,19 +38,23 @@ elseif errors then
end
logger:log(ngx.INFO, "ngx.ctx filled (ret = " .. ret .. ")")
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call set() methods
logger:log(ngx.INFO, "calling set() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.set) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -67,13 +71,13 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":set() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":set() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":set() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":set() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method set() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method set() is not defined")
end
end
end

View File

@ -23,19 +23,23 @@ elseif errors then
end
logger:log(ngx.INFO, "ngx.ctx filled (ret = " .. ret .. ")")
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call log_stream() methods
logger:log(ngx.INFO, "calling log_stream() methods of plugins ...")
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.log_stream) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -52,13 +56,13 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":log_stream() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":log_stream() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":log_stream() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":log_stream() call successful : " .. ret.msg)
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method log_stream() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method log_stream() is not defined")
end
end
end

View File

@ -36,20 +36,24 @@ else
logger:log(ngx.INFO, "IP " .. ngx.ctx.bw.remote_addr .. " is not banned")
end
-- Get plugins
local plugins, err = datastore:get("plugins")
if not plugins then
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
return false
-- Get plugins order
local order, err = datastore:get("plugins_order")
if not order then
logger:log(ngx.ERR, "can't get plugins order from datastore : " .. err)
local ok, err = lock:unlock()
if not ok then
logger:log(ngx.ERR, "lock:unlock() failed : " .. err)
end
return
end
plugins = cjson.decode(plugins)
order = cjson.decode(order)
-- Call preread() methods
logger:log(ngx.INFO, "calling preread() methods of plugins ...")
local status = nil
for i, plugin in ipairs(plugins) do
for i, plugin_id in ipairs(order.preread) do
-- Require call
local plugin_lua, err = helpers.require_plugin(plugin.id)
local plugin_lua, err = helpers.require_plugin(plugin_id)
if plugin_lua == false then
logger:log(ngx.ERR, err)
elseif plugin_lua == nil then
@ -66,23 +70,23 @@ for i, plugin in ipairs(plugins) do
if not ok then
logger:log(ngx.ERR, ret)
elseif not ret.ret then
logger:log(ngx.ERR, plugin.id .. ":preread() call failed : " .. ret.msg)
logger:log(ngx.ERR, plugin_id .. ":preread() call failed : " .. ret.msg)
else
logger:log(ngx.INFO, plugin.id .. ":preread() call successful : " .. ret.msg)
logger:log(ngx.INFO, plugin_id .. ":preread() call successful : " .. ret.msg)
end
if ret.status then
if ret.status == utils.get_deny_status() then
ngx.ctx.reason = plugin.id
logger:log(ngx.WARN, "denied access from " .. plugin.id .. " : " .. ret.msg)
ngx.ctx.reason = plugin_id
logger:log(ngx.WARN, "denied access from " .. plugin_id .. " : " .. ret.msg)
else
logger:log(ngx.NOTICE, plugin.id .. " returned status " .. tostring(ret.status) .. " : " .. ret.msg)
logger:log(ngx.NOTICE, plugin_id .. " returned status " .. tostring(ret.status) .. " : " .. ret.msg)
end
status = ret.status
break
end
end
else
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method preread() is not defined")
logger:log(ngx.INFO, "skipped execution of " .. plugin_id .. " because method preread() is not defined")
end
end
end

View File

@ -4,6 +4,9 @@ server {
{% if LISTEN_STREAM == "yes" +%}
listen 0.0.0.0:{{ LISTEN_STREAM_PORT }}{% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
{% endif %}
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ LISTEN_STREAM_PORT }}{% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
{% endif %}
# custom config
include /etc/bunkerweb/configs/server-stream/*.conf;

View File

@ -13,7 +13,7 @@ preread_timeout 30s;
proxy_protocol_timeout 30s;
# resolvers to use
resolver {{ DNS_RESOLVERS }} ipv6=off;
resolver {{ DNS_RESOLVERS }} {% if USE_IPV6 == "no" %}ipv6=off{% endif %};
# resolver timeout
# TODO : global setting STREAM_RESOLVER_TIMEOUT

View File

@ -1,5 +1,5 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local datastore = require "bunkerweb.datastore"
local cjson = require "cjson"
@ -8,12 +8,12 @@ local base64 = require "base64"
local sha256 = require "resty.sha256"
local str = require "resty.string"
local http = require "resty.http"
local template = nil
local template = nil
if ngx.shared.datastore then
template = require "resty.template"
template = require "resty.template"
end
local antibot = class("antibot", plugin)
local antibot = class("antibot", plugin)
function antibot:initialize()
-- Call parent initialize
@ -26,34 +26,28 @@ function antibot:access()
return self:ret(true, "antibot not activated")
end
-- Get session and data
self.session = utils.get_session("antibot")
self:get_session_data()
-- Don't go further if client resolved the challenge
local resolved, err, original_uri = self:challenge_resolved()
if resolved == nil then
return self:ret(false, "can't check if challenge is resolved : " .. err)
end
if resolved then
if self.session_data.resolved then
if ngx.ctx.bw.uri == self.variables["ANTIBOT_URI"] then
return self:ret(true, "client already resolved the challenge", nil, original_uri)
return self:ret(true, "client already resolved the challenge", nil, self.session_data.original_uri)
end
return self:ret(true, "client already resolved the challenge")
end
-- Redirect to challenge page
if ngx.ctx.bw.uri ~= self.variables["ANTIBOT_URI"] then
-- Prepare challenge
local ok, err = self:prepare_challenge()
if not ok then
return self:ret(false, "can't prepare challenge : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
return self:ret(true, "redirecting client to the challenge uri", nil, self.variables["ANTIBOT_URI"])
-- Prepare challenge if needed
self:prepare_challenge()
local ok, err = self:set_session_data()
if not ok then
return self:ret(false, "can't save session : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
-- Direct access without session => prepare challenge
if not self:challenge_prepared() then
local ok, err = self:prepare_challenge()
if not ok then
return self:ret(false, "can't prepare challenge : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
-- Redirect to challenge page
if ngx.ctx.bw.uri ~= self.variables["ANTIBOT_URI"] then
return self:ret(true, "redirecting client to the challenge uri", nil, self.variables["ANTIBOT_URI"])
end
-- Display challenge needed
@ -65,18 +59,23 @@ function antibot:access()
-- Check challenge
if ngx.ctx.bw.request_method == "POST" then
local ok, err, redirect = self:check_challenge()
local set_ok, set_err = self:set_session_data()
if not set_ok then
return self:ret(false, "can't save session : " .. set_err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
if ok == nil then
return self:ret(false, "check challenge error : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
elseif not ok then
self.logger:log(ngx.WARN, "client failed challenge : " .. err)
local ok, err = self:prepare_challenge()
if not ok then
return self:ret(false, "can't prepare challenge : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
end
if redirect then
return self:ret(true, "check challenge redirect : " .. redirect, nil, redirect)
end
self:prepare_challenge()
local ok, err = self:set_session_data()
if not ok then
return self:ret(false, "can't save session : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
end
ngx.ctx.bw.antibot_display_content = true
return self:ret(true, "displaying challenge to client", ngx.OK)
end
@ -87,13 +86,16 @@ end
function antibot:content()
-- Check if content is needed
if not self.variables["USE_ANTIBOT"] or self.variables["USE_ANTIBOT"] == "no" then
if self.variables["USE_ANTIBOT"] == "no" then
return self:ret(true, "antibot not activated")
end
-- Check if display content is needed
if not ngx.ctx.bw.antibot_display_content then
return self:ret(true, "display content not needed", nil, "/")
end
-- Get session and data
self.session = utils.get_session("antibot")
self:get_session_data(true)
-- Display content
local ok, err = self:display_challenge()
if not ok then
@ -102,94 +104,78 @@ function antibot:content()
return self:ret(true, "content displayed")
end
function antibot:challenge_resolved()
local session, err, exists, refreshed = utils.get_session()
if not exists then
return false, "no session set"
function antibot:get_session_data(no_check)
local session_data = self.session:get_data()
if session_data[ngx.ctx.bw.server_name] then
local data = cjson.decode(session_data[ngx.ctx.bw.server_name])
if no_check then
self.session_data = data
return
end
if not data.time_resolve and not data.time_valid then
self.session_data = {}
self.session_updated = true
return
end
local time = ngx.now()
self.session_data = data
-- Check valid time
if data.resolved and (data.time_valid > time or time - data.time_valid > tonumber(self.variables["ANTIBOT_TIME_VALID"])) then
self.session_data.resolved = false
self.session_data.prepared = false
self.session_updated = true
return
end
-- Check resolve time
if not data.resolved and (data.time_resolve > time or time - data.time_resolve > tonumber(self.variables["ANTIBOT_TIME_RESOLVE"])) then
self.session_data.prepared = false
self.session_updated = true
return
end
-- Session is valid
return
end
local ok, err, raw_data = utils.get_session_var("antibot")
if not raw_data then
return false, "session is set but no antibot data"
end
local data = raw_data
if data.resolved and self.variables["USE_ANTIBOT"] == data.type then
return true, "challenge resolved", data.original_uri
end
return false, "challenge not resolved", data.original_uri
self.session_data = {}
self.session_updated = true
return
end
function antibot:challenge_prepared()
local session, err, exists, refreshed = utils.get_session()
if not exists then
return false
function antibot:set_session_data()
if self.session_updated then
local session_data = self.session:get_data()
session_data[ngx.ctx.bw.server_name] = cjson.encode(self.session_data)
self.session:set_data(session_data)
return self.session:save()
end
local ok, err, raw_data = utils.get_session_var("antibot")
if not raw_data then
return false
end
return self.variables["USE_ANTIBOT"] == raw_data.type
return true, "no updates"
end
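The session layout assumed by get_session_data() and set_session_data() above is one JSON string per server name; the sketch below is illustrative only (host and values are made up).
-- Illustrative sketch (assumption) of the per-vhost antibot data kept in the session.
local cjson = require "cjson"
local session_data = {
	["www.example.com"] = cjson.encode({
		type = "captcha",          -- USE_ANTIBOT value when the challenge was prepared
		prepared = true,
		resolved = false,
		original_uri = "/",
		time_resolve = 1683000000, -- ngx.now() at preparation, bounded by ANTIBOT_TIME_RESOLVE
		captcha = "A1B2C3"         -- utils.rand(6, true) for the captcha type
		-- time_valid is added once resolved, bounded by ANTIBOT_TIME_VALID
	})
}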
function antibot:prepare_challenge()
local session, err, exists, refreshed = utils.get_session()
local set_needed = false
local data = nil
if exists then
local ok, err, raw_data = utils.get_session_var("antibot")
if raw_data then
data = raw_data
end
end
if not data or data.type ~= self.variables["USE_ANTIBOT"] then
data = {
type = self.variables["USE_ANTIBOT"],
resolved = self.variables["USE_ANTIBOT"] == "cookie",
original_uri = ngx.ctx.bw.request_uri
}
if not self.session_data.prepared then
self.session_updated = true
self.session_data.prepared = true
self.session_data.time_resolve = ngx.now()
self.session_data.type = self.variables["USE_ANTIBOT"]
self.session_data.resolved = false
self.session_data.original_uri = ngx.ctx.bw.request_uri
if ngx.ctx.bw.uri == self.variables["ANTIBOT_URI"] then
data.original_uri = "/"
self.session_data.original_uri = "/"
end
set_needed = true
end
if not data.resolved then
if self.variables["USE_ANTIBOT"] == "javascript" then
data.random = utils.rand(20)
set_needed = true
if self.variables["USE_ANTIBOT"] == "cookie" then
self.session_data.resolved = true
self.session_data.time_valid = ngx.now()
elseif self.variables["USE_ANTIBOT"] == "javascript" then
self.session_data.random = utils.rand(20)
elseif self.variables["USE_ANTIBOT"] == "captcha" then
local chall_captcha = captcha.new()
chall_captcha:font("/usr/share/bunkerweb/core/antibot/files/font.ttf")
chall_captcha:generate()
data.image = base64.encode(chall_captcha:jpegStr(70))
data.text = chall_captcha:getStr()
set_needed = true
self.session_data.captcha = utils.rand(6, true)
end
end
if set_needed then
local ok, err = utils.set_session_var("antibot", data)
if not ok then
return false, "error while setting session antibot : " .. err
end
end
return true, "prepared"
end
function antibot:display_challenge()
-- Open session
local session, err, exists, refreshed = utils.get_session()
if not exists then
return false, "no session set"
end
-- Get data
local ok, err, raw_data = utils.get_session_var("antibot")
if not raw_data then
return false, "session is set but no data"
end
local data = raw_data
-- Check if session type is equal to antibot type
if self.variables["USE_ANTIBOT"] ~= data.type then
return false, "session type is different from antibot type"
-- Check if prepared
if not self.session_data.prepared then
return false, "challenge not prepared"
end
-- Common variables for templates
@ -199,12 +185,16 @@ function antibot:display_challenge()
-- Javascript case
if self.variables["USE_ANTIBOT"] == "javascript" then
template_vars.random = data.random
template_vars.random = self.session_data.random
end
-- Captcha case
if self.variables["USE_ANTIBOT"] == "captcha" then
template_vars.captcha = data.image
local chall_captcha = captcha.new()
chall_captcha:font("/usr/share/bunkerweb/core/antibot/files/font.ttf")
chall_captcha:string(self.session_data.captcha)
chall_captcha:generate()
template_vars.captcha = base64.encode(chall_captcha:jpegStr(70))
end
-- reCAPTCHA case
@ -224,48 +214,35 @@ function antibot:display_challenge()
end
function antibot:check_challenge()
-- Open session
local session, err, exists, refreshed = utils.get_session()
if not exists then
return false, "no session set"
end
-- Get data
local ok, err, raw_data = utils.get_session_var("antibot")
if not raw_data then
return false, "session is set but no data", nil
end
local data = raw_data
-- Check if session type is equal to antibot type
if self.variables["USE_ANTIBOT"] ~= data.type then
return nil, "session type is different from antibot type", nil
-- Check if prepared
if not self.session_data.prepared then
return nil, "challenge not prepared"
end
local resolved = false
local err = ""
local redirect = nil
self.session_data.prepared = false
self.session_updated = true
-- Javascript case
if self.variables["USE_ANTIBOT"] == "javascript" then
ngx.req.read_body()
local args, err = ngx.req.get_post_args(1)
if err == "truncated" or not args or not args["challenge"] then
return nil, "missing challenge arg", nil
return nil, "missing challenge arg"
end
local hash = sha256:new()
hash:update(data.random .. args["challenge"])
hash:update(self.session_data.random .. args["challenge"])
local digest = hash:final()
resolved = str.to_hex(digest):find("^0000") ~= nil
if not resolved then
return false, "wrong value", nil
return false, "wrong value"
end
data.resolved = true
local ok, err = utils.set_session_var("antibot", data)
if not ok then
return nil, "error while setting session antibot : " .. err
end
return true, "resolved", data.original_uri
self.session_data.resolved = true
self.session_data.time_valid = ngx.now()
return true, "resolved", self.session_data.original_uri
end
-- Captcha case
@ -275,15 +252,12 @@ function antibot:check_challenge()
if err == "truncated" or not args or not args["captcha"] then
return nil, "missing challenge arg", nil
end
if data.text ~= args["captcha"] then
if self.session_data.captcha ~= args["captcha"] then
return false, "wrong value", nil
end
data.resolved = true
local ok, err = utils.set_session_var("antibot", data)
if not ok then
return nil, "error while setting session antibot : " .. err
end
return true, "resolved", data.original_uri
self.session_data.resolved = true
self.session_data.time_valid = ngx.now()
return true, "resolved", self.session_data.original_uri
end
-- reCAPTCHA case
@ -299,7 +273,9 @@ function antibot:check_challenge()
end
local res, err = httpc:request_uri("https://www.google.com/recaptcha/api/siteverify", {
method = "POST",
body = "secret=" .. self.variables["ANTIBOT_RECAPTCHA_SECRET"] .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.ctx.bw.remote_addr,
body = "secret=" ..
self.variables["ANTIBOT_RECAPTCHA_SECRET"] ..
"&response=" .. args["token"] .. "&remoteip=" .. ngx.ctx.bw.remote_addr,
headers = {
["Content-Type"] = "application/x-www-form-urlencoded"
}
@ -315,12 +291,9 @@ function antibot:check_challenge()
if not rdata.success or rdata.score < tonumber(self.variables["ANTIBOT_RECAPTCHA_SCORE"]) then
return false, "client failed challenge with score " .. tostring(rdata.score), nil
end
data.resolved = true
local ok, err = utils.set_session_var("antibot", data)
if not ok then
return nil, "error while setting session antibot : " .. err
end
return true, "resolved", data.original_uri
self.session_data.resolved = true
self.session_data.time_valid = ngx.now()
return true, "resolved", self.session_data.original_uri
end
-- hCaptcha case
@ -336,7 +309,9 @@ function antibot:check_challenge()
end
local res, err = httpc:request_uri("https://hcaptcha.com/siteverify", {
method = "POST",
body = "secret=" .. self.variables["ANTIBOT_HCAPTCHA_SECRET"] .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.ctx.bw.remote_addr,
body = "secret=" ..
self.variables["ANTIBOT_HCAPTCHA_SECRET"] ..
"&response=" .. args["token"] .. "&remoteip=" .. ngx.ctx.bw.remote_addr,
headers = {
["Content-Type"] = "application/x-www-form-urlencoded"
}
@ -352,12 +327,9 @@ function antibot:check_challenge()
if not hdata.success then
return false, "client failed challenge", nil
end
data.resolved = true
local ok, err = utils.set_session_var("antibot", data)
if not ok then
return nil, "error while setting session antibot : " .. err
end
return true, "resolved", data.original_uri
self.session_data.resolved = true
self.session_data.time_valid = ngx.now()
return true, "resolved", self.session_data.original_uri
end
return nil, "unknown", nil

View File

@ -11,19 +11,18 @@ local mt = { __index = {} }
function _M.new()
local cap = {}
local f = setmetatable({ cap = cap}, mt)
local f = setmetatable({ cap = cap }, mt)
return f
end
local function urandom()
local seed = 1
local devurandom = io.open("/dev/urandom", "r")
local urandom = devurandom:read(32)
devurandom:close()
for i=1,string.len(urandom) do
local s = string.byte(urandom,i)
for i = 1, string.len(urandom) do
local s = string.byte(urandom, i)
seed = seed + s
end
return seed
@ -37,10 +36,10 @@ local function random_char(length)
local captcha_t = {}
math.randomseed(urandom())
for c=1,length do
local i = math.random(1, string.len(set))
table.insert(captcha_t, string.sub(set,i,i))
for c = 1, length do
local i = math.random(1, string.len(set))
table.insert(captcha_t, string.sub(set, i, i))
end
return captcha_t
@ -49,11 +48,11 @@ end
local function random_angle()
math.randomseed(urandom())
return math.random(-20, 40)
return math.random(-20, 40)
end
local function scribble(w,h)
local function scribble(w, h)
math.randomseed(urandom())
local x1 = math.random(5, w - 5)
local x2 = math.random(5, w - 5)
@ -73,39 +72,36 @@ function mt.__index:length(l)
self.cap.length = l
end
function mt.__index:bgcolor(r,g,b)
self.cap.bgcolor = { r = r , g = g , b = b}
function mt.__index:bgcolor(r, g, b)
self.cap.bgcolor = { r = r, g = g, b = b }
end
function mt.__index:fgcolor(r,g,b)
self.cap.fgcolor = { r = r , g = g , b = b}
function mt.__index:fgcolor(r, g, b)
self.cap.fgcolor = { r = r, g = g, b = b }
end
function mt.__index:line(line)
self.cap.line = line
end
function mt.__index:font(font)
self.cap.font = font
self.cap.font = font
end
function mt.__index:generate()
--local self.captcha = {}
local captcha_t = {}
if not self.cap.string then
if not self.cap.length then
if not self.cap.length then
self.cap.length = 6
end
captcha_t = random_char(self.cap.length)
self:string(table.concat(captcha_t))
end
captcha_t = random_char(self.cap.length)
self:string(table.concat(captcha_t))
else
for i=1, #self.cap.string do
for i = 1, #self.cap.string do
table.insert(captcha_t, string.sub(self.cap.string, i, i))
end
end
end
@ -114,45 +110,45 @@ function mt.__index:generate()
local white = self.im:colorAllocate(255, 255, 255)
local bgcolor
if not self.cap.bgcolor then
bgcolor = white
bgcolor = white
else
bgcolor = self.im:colorAllocate(self.cap.bgcolor.r , self.cap.bgcolor.g, self.cap.bgcolor.b )
bgcolor = self.im:colorAllocate(self.cap.bgcolor.r, self.cap.bgcolor.g, self.cap.bgcolor.b)
end
local fgcolor
if not self.cap.fgcolor then
fgcolor = black
else
fgcolor = self.im:colorAllocate(self.cap.fgcolor.r , self.cap.fgcolor.g, self.cap.fgcolor.b )
fgcolor = self.im:colorAllocate(self.cap.fgcolor.r, self.cap.fgcolor.g, self.cap.fgcolor.b)
end
self.im:filledRectangle(0, 0, #captcha_t * 40, 45, bgcolor)
local offset_left = 10
for i=1, #captcha_t do
for i = 1, #captcha_t do
local angle = random_angle()
local llx, lly, lrx, lry, urx, ury, ulx, uly = self.im:stringFT(fgcolor, self.cap.font, 25, math.rad(angle), offset_left, 35, captcha_t[i])
self.im:polygon({ {llx, lly}, {lrx, lry}, {urx, ury}, {ulx, uly} }, bgcolor)
local llx, lly, lrx, lry, urx, ury, ulx, uly = self.im:stringFT(fgcolor, self.cap.font, 25, math.rad(angle),
offset_left, 35, captcha_t[i])
self.im:polygon({ { llx, lly }, { lrx, lry }, { urx, ury }, { ulx, uly } }, bgcolor)
offset_left = offset_left + 40
end
if self.cap.line then
self.im:line(10, 10, ( #captcha_t * 40 ) - 10 , 40, fgcolor)
self.im:line(11, 11, ( #captcha_t * 40 ) - 11 , 41, fgcolor)
self.im:line(12, 12, ( #captcha_t * 40 ) - 12 , 42, fgcolor)
self.im:line(10, 10, (#captcha_t * 40) - 10, 40, fgcolor)
self.im:line(11, 11, (#captcha_t * 40) - 11, 41, fgcolor)
self.im:line(12, 12, (#captcha_t * 40) - 12, 42, fgcolor)
end
if self.cap.scribble then
for i=1,self.cap.scribble do
local x1,x2 = scribble( #captcha_t * 40 , 45 )
for i = 1, self.cap.scribble do
local x1, x2 = scribble(#captcha_t * 40, 45)
self.im:line(x1, 5, x2, 40, fgcolor)
end
end
end
-- Perhaps it's not the best solution
-- Writes the generated image to a jpeg file
function mt.__index:jpeg(outfile, quality)
@ -189,4 +185,4 @@ function mt.__index:write(outfile, quality)
return self:getStr()
end
return _M
return _M
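A minimal usage sketch of this module, mirroring the calls made from the antibot plugin above; the require path is an assumption.
-- Usage sketch (require path assumed) of the captcha module.
local captcha = require "antibot.captcha"
local chall = captcha.new()
chall:font("/usr/share/bunkerweb/core/antibot/files/font.ttf")
chall:string("A1B2C3")         -- optional : generate() picks a random string otherwise
chall:generate()
local jpeg = chall:jpegStr(70) -- JPEG bytes, quality 70
local text = chall:getStr()    -- the expected answer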

View File

@ -1,6 +1,5 @@
{
"id": "antibot",
"order": 9,
"name": "Antibot",
"description": "Bot detection by using a challenge.",
"version": "1.0",
@ -76,6 +75,24 @@
"label": "hCaptcha secret",
"regex": "^(0x[a-zA-Z0-9]+)?$",
"type": "password"
},
"ANTIBOT_TIME_RESOLVE": {
"context": "multisite",
"default": "60",
"help": "Maximum time (in seconds) clients have to resolve the challenge. Once this time has passed, a new challenge will be generated.",
"id": "antibot-time-resolve",
"label": "Time to resolve",
"regex": "^[0-9]+$",
"type": "text"
},
"ANTIBOT_TIME_VALID": {
"context": "multisite",
"default": "86400",
"help": "Maximum validity time of solved challenges. Once this time has passed, clients will need to resolve a new one.",
"id": "antibot-time-valid",
"label": "Time valid",
"regex": "^[0-9]+$",
"type": "text"
}
}
}

View File

@ -1,6 +1,5 @@
{
"id": "authbasic",
"order": 999,
"name": "Auth basic",
"description": "Enforce login before accessing a resource or the whole site using HTTP basic auth method.",
"version": "1.0",

View File

@ -1,6 +1,6 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local badbehavior = class("badbehavior", plugin)
@ -34,7 +34,9 @@ function badbehavior:log()
return self:ret(true, "already banned")
end
-- Call increase function later and with cosocket enabled
local ok, err = ngx.timer.at(0, badbehavior.increase, ngx.ctx.bw.remote_addr, tonumber(self.variables["BAD_BEHAVIOR_COUNT_TIME"]), tonumber(self.variables["BAD_BEHAVIOR_BAN_TIME"]), tonumber(self.variables["BAD_BEHAVIOR_THRESHOLD"]), self.use_redis)
local ok, err = ngx.timer.at(0, badbehavior.increase, ngx.ctx.bw.remote_addr,
tonumber(self.variables["BAD_BEHAVIOR_COUNT_TIME"]), tonumber(self.variables["BAD_BEHAVIOR_BAN_TIME"]),
tonumber(self.variables["BAD_BEHAVIOR_THRESHOLD"]), self.use_redis)
if not ok then
return self:ret(false, "can't create increase timer : " .. err)
end
@ -93,9 +95,11 @@ function badbehavior.increase(premature, ip, count_time, ban_time, threshold, us
logger:log(ngx.ERR, "(increase) can't save ban : " .. err)
return
end
logger:log(ngx.WARN, "IP " .. ip .. " is banned for " .. ban_time .. "s (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
logger:log(ngx.WARN,
"IP " .. ip .. " is banned for " .. ban_time .. "s (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
end
logger:log(ngx.NOTICE, "increased counter for IP " .. ip .. " (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
logger:log(ngx.NOTICE,
"increased counter for IP " .. ip .. " (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
end
function badbehavior.decrease(premature, ip, count_time, threshold, use_redis)
@ -136,7 +140,8 @@ function badbehavior.decrease(premature, ip, count_time, threshold, use_redis)
return
end
end
logger:log(ngx.NOTICE, "decreased counter for IP " .. ip .. " (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
logger:log(ngx.NOTICE,
"decreased counter for IP " .. ip .. " (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
end
function badbehavior.redis_increase(ip, count_time, ban_time)
@ -169,7 +174,9 @@ function badbehavior.redis_increase(ip, count_time, ban_time)
return false, err
end
-- Execute LUA script
local counter, err = clusterstore:call("eval", redis_script, 2, "bad_behavior_" .. ip, "bans_ip" .. ip, count_time, ban_time)
local counter, err = clusterstore:call("eval", redis_script, 2, "plugin_bad_behavior_" .. ip, "bans_ip" .. ip,
count_time,
ban_time)
if not counter then
clusterstore:close()
return false, err
@ -208,7 +215,7 @@ function badbehavior.redis_decrease(ip, count_time)
if not ok then
return false, err
end
local counter, err = clusterstore:call("eval", redis_script, 1, "bad_behavior_" .. ip, count_time)
local counter, err = clusterstore:call("eval", redis_script, 1, "plugin_bad_behavior_" .. ip, count_time)
if not counter then
clusterstore:close()
return false, err
@ -217,4 +224,4 @@ function badbehavior.redis_decrease(ip, count_time)
return counter
end
return badbehavior
return badbehavior
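The increase/decrease functions above are scheduled through ngx.timer.at, which always passes a premature flag to the callback before the extra arguments; a stripped-down sketch of that calling pattern (argument values are examples):
-- Sketch of the ngx.timer.at calling convention used by badbehavior:log().
local function increase(premature, ip, count_time, ban_time, threshold, use_redis)
	if premature then
		return
	end
	-- counter / ban logic runs here, with cosockets available
end
local ok, err = ngx.timer.at(0, increase, "1.2.3.4", 60, 86400, 10, false)
if not ok then
	ngx.log(ngx.ERR, "can't create increase timer : ", err)
end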

View File

@ -1,6 +1,5 @@
{
"id": "badbehavior",
"order": 999,
"name": "Bad behavior",
"description": "Ban IP generating too much 'bad' HTTP status code in a period of time.",
"version": "1.0",

View File

@ -1,12 +1,12 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local datastore = require "bunkerweb.datastore"
local cachestore = require "bunkerweb.cachestore"
local cjson = require "cjson"
local ipmatcher = require "resty.ipmatcher"
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local datastore = require "bunkerweb.datastore"
local cachestore = require "bunkerweb.cachestore"
local cjson = require "cjson"
local ipmatcher = require "resty.ipmatcher"
local blacklist = class("blacklist", plugin)
local blacklist = class("blacklist", plugin)
function blacklist:initialize()
-- Call parent initialize
@ -18,10 +18,11 @@ function blacklist:initialize()
end
self.use_redis = use_redis == "yes"
-- Decode lists
if ngx.get_phase() ~= "init" and self.variables["USE_BLACKLIST"] == "yes" then
if ngx.get_phase() ~= "init" and self:is_needed() then
local lists, err = self.datastore:get("plugin_blacklist_lists")
if not lists then
self.logger:log(ngx.ERR, err)
self.lists = {}
else
self.lists = cjson.decode(lists)
end
@ -39,6 +40,9 @@ function blacklist:initialize()
}
for kind, _ in pairs(kinds) do
for data in self.variables["BLACKLIST_" .. kind]:gmatch("%S+") do
if not self.lists[kind] then
self.lists[kind] = {}
end
table.insert(self.lists[kind], data)
end
end
@ -47,13 +51,26 @@ function blacklist:initialize()
self.cachestore = cachestore:new(self.use_redis)
end
function blacklist:init()
-- Check if init is needed
local init_needed, err = utils.has_variable("USE_BLACKLIST", "yes")
if init_needed == nil then
return self:ret(false, "can't check USE_BLACKLIST variable : " .. err)
function blacklist:is_needed()
-- Loading case
if self.is_loading then
return false
end
if not init_needed or self.is_loading then
-- Request phases (no default)
if self.is_request and (ngx.ctx.bw.server_name ~= "_") then
return self.variables["USE_BLACKLIST"] == "yes"
end
-- Other cases : at least one service uses it
local is_needed, err = utils.has_variable("USE_BLACKLIST", "yes")
if is_needed == nil then
self.logger:log(ngx.ERR, "can't check USE_BLACKLIST variable : " .. err)
end
return is_needed
end
function blacklist:init()
-- Check if init needed
if not self:is_needed() then
return self:ret(true, "init not needed")
end
@ -91,8 +108,8 @@ end
function blacklist:access()
-- Check if access is needed
if self.variables["USE_BLACKLIST"] ~= "yes" then
return self:ret(true, "blacklist not activated")
if not self:is_needed() then
return self:ret(true, "access not needed")
end
-- Check the caches
local checks = {
@ -116,7 +133,7 @@ function blacklist:access()
elseif cached and cached ~= "ok" then
return self:ret(true, k .. " is in cached blacklist (info : " .. cached .. ")", utils.get_deny_status())
end
if cached then
if ok and cached then
already_cached[k] = true
end
end
@ -144,7 +161,6 @@ function blacklist:access()
-- Return
return self:ret(true, "not blacklisted")
end
function blacklist:preread()
@ -165,7 +181,7 @@ function blacklist:is_in_cache(ele)
local ok, data = self.cachestore:get("plugin_blacklist_" .. ngx.ctx.bw.server_name .. ele)
if not ok then
return false, data
end
end
return true, data
end
@ -173,7 +189,7 @@ function blacklist:add_to_cache(ele, value)
local ok, err = self.cachestore:set("plugin_blacklist_" .. ngx.ctx.bw.server_name .. ele, value, 86400)
if not ok then
return false, err
end
end
return true
end
@ -220,24 +236,30 @@ function blacklist:is_blacklisted_ip()
end
if check_rdns then
-- Get rDNS
local rdns, err = utils.get_rdns(ngx.ctx.bw.remote_addr)
if rdns then
local rdns_list, err = utils.get_rdns(ngx.ctx.bw.remote_addr)
if rdns_list then
-- Check if rDNS is in ignore list
local ignore = false
for i, ignore_suffix in ipairs(self.lists["IGNORE_RDNS"]) do
if rdns:sub(-#ignore_suffix) == ignore_suffix then
ignore = true
break
for i, rdns in ipairs(rdns_list) do
for j, suffix in ipairs(self.lists["IGNORE_RDNS"]) do
if rdns:sub(- #suffix) == suffix then
ignore = true
break
end
end
end
-- Check if rDNS is in blacklist
if not ignore then
for i, suffix in ipairs(self.lists["RDNS"]) do
if rdns:sub(-#suffix) == suffix then
return true, "rDNS " .. suffix
for i, rdns in ipairs(rdns_list) do
for j, suffix in ipairs(self.lists["RDNS"]) do
if rdns:sub(- #suffix) == suffix then
return true, "rDNS " .. suffix
end
end
end
end
else
self.logger:log(ngx.ERR, "error while getting rdns : " .. err)
end
end
@ -245,7 +267,7 @@ function blacklist:is_blacklisted_ip()
if ngx.ctx.bw.ip_is_global then
local asn, err = utils.get_asn(ngx.ctx.bw.remote_addr)
if not asn then
return nil, err
return nil, "ASN " .. err
end
local ignore = false
for i, ignore_asn in ipairs(self.lists["IGNORE_ASN"]) do
@ -272,7 +294,7 @@ function blacklist:is_blacklisted_uri()
-- Check if URI is in ignore list
local ignore = false
for i, ignore_uri in ipairs(self.lists["IGNORE_URI"]) do
if ngx.ctx.bw.uri:match(ignore_uri) then
if utils.regex_match(ngx.ctx.bw.uri, ignore_uri) then
ignore = true
break
end
@ -280,7 +302,7 @@ function blacklist:is_blacklisted_uri()
-- Check if URI is in blacklist
if not ignore then
for i, uri in ipairs(self.lists["URI"]) do
if ngx.ctx.bw.uri:match(uri) then
if utils.regex_match(ngx.ctx.bw.uri, uri) then
return true, "URI " .. uri
end
end
@ -293,7 +315,7 @@ function blacklist:is_blacklisted_ua()
-- Check if UA is in ignore list
local ignore = false
for i, ignore_ua in ipairs(self.lists["IGNORE_USER_AGENT"]) do
if ngx.ctx.bw.http_user_agent:match(ignore_ua) then
if utils.regex_match(ngx.ctx.bw.http_user_agent, ignore_ua) then
ignore = true
break
end
@ -301,7 +323,7 @@ function blacklist:is_blacklisted_ua()
-- Check if UA is in blacklist
if not ignore then
for i, ua in ipairs(self.lists["USER_AGENT"]) do
if ngx.ctx.bw.http_user_agent:match(ua) then
if utils.regex_match(ngx.ctx.bw.http_user_agent, ua) then
return true, "UA " .. ua
end
end
@ -310,4 +332,4 @@ function blacklist:is_blacklisted_ua()
return false, "ok"
end
return blacklist
return blacklist
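For readability, the self.lists table consumed above is assumed to look roughly like the following (entries are examples; USER_AGENT and URI values are PCRE, as produced by check_line() in the next file):
-- Illustrative sketch (assumption) of self.lists : one array per kind plus IGNORE_* arrays.
local lists = {
	IP = { "1.2.3.4", "10.0.0.0/8" },
	RDNS = { ".example-scanner.com" },
	ASN = { "1234" },
	USER_AGENT = { "(?:\\b)EvilBot(?:\\b)" },
	URI = { "^/wp-admin" },
	IGNORE_IP = {},
	IGNORE_RDNS = {},
	IGNORE_ASN = {},
	IGNORE_USER_AGENT = {},
	IGNORE_URI = {}
}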

View File

@ -46,9 +46,7 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
if asn_rx.match(real_line):
return True, real_line
elif kind in ("USER_AGENT", "IGNORE_USER_AGENT"):
return True, line.replace(b"\\ ", b" ").replace(b"\\.", b"%.").replace(
b"\\\\", b"\\"
).replace(b"-", b"%-")
return True, b"(?:\\b)" + line + b"(?:\\b)"
elif kind in ("URI", "IGNORE_URI"):
if uri_rx.match(line):
return True, line

View File

@ -1,6 +1,5 @@
{
"id": "blacklist",
"order": 2,
"name": "Blacklist",
"description": "Deny access based on internal and external IP/network/rDNS/ASN blacklists.",
"version": "1.0",
@ -81,7 +80,7 @@
"BLACKLIST_USER_AGENT": {
"context": "multisite",
"default": "",
"help": "List of User-Agent, separated with spaces, to block.",
"help": "List of User-Agent (PCRE regex), separated with spaces, to block.",
"id": "blacklist-user-agent",
"label": "Blacklist User-Agent",
"regex": "^.*$",
@ -99,7 +98,7 @@
"BLACKLIST_URI": {
"context": "multisite",
"default": "",
"help": "List of URI, separated with spaces, to block.",
"help": "List of URI (PCRE regex), separated with spaces, to block.",
"id": "blacklist-uri",
"label": "Blacklist URI",
"regex": "^( *(/[\\w\\].~:/?#[@!$&'()*+,;=-]*)(?!.*\\2(?!.)) *)*$",
@ -171,7 +170,7 @@
"BLACKLIST_IGNORE_USER_AGENT": {
"context": "multisite",
"default": "",
"help": "List of User-Agent, separated with spaces, to ignore in the blacklist.",
"help": "List of User-Agent (PCRE regex), separated with spaces, to ignore in the blacklist.",
"id": "blacklist-ignore-user-agent",
"label": "Blacklist ignore User-Agent",
"regex": "^.*$",
@ -189,7 +188,7 @@
"BLACKLIST_IGNORE_URI": {
"context": "multisite",
"default": "",
"help": "List of URI, separated with spaces, to ignore in the blacklist.",
"help": "List of URI (PCRE regex), separated with spaces, to ignore in the blacklist.",
"id": "blacklist-ignore-uri",
"label": "Blacklist ignore URI",
"regex": "^( *(/[\\w\\].~:/?#[@!$&'()*+,;=-]*)(?!.*\\2(?!.)) *)*$",

View File

@ -1,6 +1,5 @@
{
"id": "brotli",
"order": 999,
"name": "Brotli",
"description": "Compress HTTP requests with the brotli algorithm.",
"version": "1.0",

View File

@ -10,32 +10,62 @@ local bunkernet = class("bunkernet", plugin)
function bunkernet:initialize()
-- Call parent initialize
plugin.initialize(self, "bunkernet")
-- Get BunkerNet ID
if ngx.get_phase() ~= "init" and self.variables["USE_BUNKERNET"] == "yes" and not self.is_loading then
-- Get BunkerNet ID and save info
if ngx.get_phase() ~= "init" and self:is_needed() then
local id, err = self.datastore:get("plugin_bunkernet_id")
if id then
self.bunkernet_id = id
self.version = ngx.ctx.bw.version
self.integration = ngx.ctx.bw.integration
self.version = (ngx.ctx.bw and ngx.ctx.bw.version) or utils.get_version()
self.integration = (ngx.ctx.bw and ngx.ctx.bw.integration) or utils.get_integration()
else
self.logger:log(ngx.ERR, "can't get BunkerNet ID from datastore : " .. err)
end
end
end
function bunkernet:init()
-- Check if init is needed
function bunkernet:is_needed()
-- Loading case
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
return false
end
local init_needed, err = utils.has_variable("USE_BUNKERNET", "yes")
if init_needed == nil then
return self:ret(false, "can't check USE_BUNKERNET variable : " .. err)
-- Request phases (no default)
if self.is_request and (ngx.ctx.bw.server_name ~= "_") then
return self.variables["USE_BUNKERNET"] == "yes"
end
if not init_needed or self.is_loading then
return self:ret(true, "no service uses bunkernet, skipping init")
-- Other cases : at least one service uses it
local is_needed, err = utils.has_variable("USE_BUNKERNET", "yes")
if is_needed == nil then
self.logger:log(ngx.ERR, "can't check USE_BUNKERNET variable : " .. err)
end
return is_needed
end
function bunkernet:init_worker()
-- Check if needed
if not self:is_needed() then
return self:ret(true, "no service uses BunkerNet, skipping init_worker")
end
-- Check id
if not self.bunkernet_id then
return self:ret(false, "missing instance ID")
end
-- Send ping request
local ok, err, status, data = self:ping()
if not ok then
return self:ret(false, "error while sending request to API : " .. err)
end
if status ~= 200 then
return self:ret(false, "received status " .. tostring(status) .. " from API using instance ID " .. self.bunkernet_id)
end
self.logger:log(ngx.NOTICE, "connectivity with API using instance ID " .. self.bunkernet_id .. " is successful")
return self:ret(true, "connectivity with API using instance ID " .. self.bunkernet_id .. " is successful")
end
function bunkernet:init()
-- Check if needed
if not self:is_needed() then
return self:ret(true, "no service uses BunkerNet, skipping init")
end
-- Check if instance ID is present
local f, err = io.open("/var/cache/bunkerweb/bunkernet/instance.id", "r")
if not f then
@ -74,23 +104,17 @@ function bunkernet:init()
if not ok then
return self:ret(false, "can't store bunkernet database into datastore : " .. err)
end
return self:ret(true,
"successfully connected to the bunkernet service " ..
self.variables["BUNKERNET_SERVER"] .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database")
return self:ret(true, "successfully loaded " .. tostring(i) .. " bad IPs using instance ID " .. id)
end
function bunkernet:access()
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
-- Check if needed
if not self:is_needed() then
return self:ret(true, "service doesn't use BunkerNet, skipping access")
end
-- Check if enabled
if self.variables["USE_BUNKERNET"] ~= "yes" then
return self:ret(true, "bunkernet not activated")
end
-- Check if BunkerNet ID is generated
-- Check id
if not self.bunkernet_id then
return self:ret(false, "bunkernet ID is not generated")
return self:ret(false, "missing instance ID")
end
-- Check if IP is global
if not ngx.ctx.bw.ip_is_global then
@ -120,20 +144,16 @@ function bunkernet:access()
return self:ret(true, "not in db")
end
function bunkernet:log(bypass_use_bunkernet)
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
end
if not bypass_use_bunkernet then
-- Check if BunkerNet is enabled
if self.variables["USE_BUNKERNET"] ~= "yes" then
return self:ret(true, "bunkernet not activated")
function bunkernet:log(bypass_checks)
if not bypass_checks then
-- Check if needed
if not self:is_needed() then
return self:ret(true, "service doesn't use BunkerNet, skipping log")
end
-- Check id
if not self.bunkernet_id then
return self:ret(false, "missing instance ID")
end
end
-- Check if BunkerNet ID is generated
if not self.bunkernet_id then
return self:ret(false, "bunkernet ID is not generated")
end
-- Check if IP has been blocked
local reason = utils.get_reason()
@ -168,25 +188,21 @@ function bunkernet:log(bypass_use_bunkernet)
end
function bunkernet:log_default()
-- Check if not loading is needed
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
-- Check if needed
if not self:is_needed() then
return self:ret(true, "no service uses BunkerNet, skipping log_default")
end
-- Check if BunkerNet is activated
local check, err = utils.has_variable("USE_BUNKERNET", "yes")
if check == nil then
return false, "error while checking variable USE_BUNKERNET (" .. err .. ")"
end
if not check then
return true, "bunkernet not enabled"
-- Check id
if not self.bunkernet_id then
return self:ret(false, "missing instance ID")
end
-- Check if default server is disabled
local check, err = utils.get_variable("DISABLE_DEFAULT_SERVER", false)
if check == nil then
return false, "error while getting variable DISABLE_DEFAULT_SERVER (" .. err .. ")"
return self:ret(false, "error while getting variable DISABLE_DEFAULT_SERVER : " .. err)
end
if check ~= "yes" then
return true, "default server not disabled"
return self:ret(true, "default server is not disabled")
end
-- Call log method
return self:log(true)
@ -199,15 +215,17 @@ end
function bunkernet:request(method, url, data)
local httpc, err = http.new()
if not httpc then
return false, "can't instantiate http object : " .. err, nil, nil
return false, "can't instantiate http object : " .. err
end
local all_data = {
id = self.bunkernet_id,
version = self.version,
integration = self.integration
}
for k, v in pairs(data) do
all_data[k] = v
if data then
for k, v in pairs(data) do
all_data[k] = v
end
end
local res, err = httpc:request_uri(self.variables["BUNKERNET_SERVER"] .. url, {
method = method,
@ -219,14 +237,14 @@ function bunkernet:request(method, url, data)
})
httpc:close()
if not res then
return false, "error while sending request : " .. err, nil, nil
return false, "error while sending request : " .. err
end
if res.status ~= 200 then
return false, "status code != 200", res.status, nil
end
local ok, ret = pcall(cjson.decode, res.body)
if not ok then
return false, "error while decoding json : " .. ret, nil, nil
return false, "error while decoding json : " .. ret
end
return true, "success", res.status, ret
end
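The body built by request() above always carries the instance identity and is merged with any extra data; for the ping sent from init_worker() there is no extra data, so the payload is only the three identity fields (values below are made up):
-- Illustration (made-up values) of the JSON body sent by request() for a ping.
local cjson = require "cjson"
print(cjson.encode({
	id = "0123456789abcdef",   -- hypothetical instance ID
	version = "1.5.0-beta",
	integration = "docker"
}))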

View File

@ -1,6 +1,5 @@
{
"id": "bunkernet",
"order": 7,
"name": "BunkerNet",
"description": "Share threat data with other BunkerWeb instances via BunkerNet.",
"version": "1.0",

View File

@ -1,6 +1,5 @@
{
"id": "clientcache",
"order": 999,
"name": "Client cache",
"description": "Manage caching for clients.",
"version": "1.0",

View File

@ -8,7 +8,6 @@ function cors:initialize()
-- Call parent initialize
plugin.initialize(self, "cors")
self.all_headers = {
["CORS_ALLOW_ORIGIN"] = "Access-Control-Allow-Origin",
["CORS_EXPOSE_HEADERS"] = "Access-Control-Expose-Headers"
}
self.preflight_headers = {
@ -24,13 +23,38 @@ function cors:header()
if self.variables["USE_CORS"] ~= "yes" then
return self:ret(true, "service doesn't use CORS")
end
-- Standard headers
-- Skip if Origin header is not present
if not ngx.ctx.bw.http_origin then
return self:ret(true, "origin header not present")
end
-- Always include Vary header to prevent caching
local vary = ngx.header.Vary
if vary then
if type(vary) == "string" then
ngx.header.Vary = { vary, "Origin" }
else
table.insert(vary, "Origin")
ngx.header.Vary = vary
end
else
ngx.header.Vary = "Origin"
end
-- Check if Origin is allowed
if ngx.ctx.bw.http_origin and self.variables["CORS_DENY_REQUEST"] == "yes" and self.variables["CORS_ALLOW_ORIGIN"] ~= "*" and not utils.regex_match(ngx.ctx.bw.http_origin, self.variables["CORS_ALLOW_ORIGIN"]) then
self.logger:log(ngx.WARN, "origin " .. ngx.ctx.bw.http_origin .. " is not allowed")
return self:ret(true, "origin " .. ngx.ctx.bw.http_origin .. " is not allowed")
end
-- Set headers
if self.variables["CORS_ALLOW_ORIGIN"] == "*" then
ngx.header["Access-Control-Allow-Origin"] = "*"
else
ngx.header["Access-Control-Allow-Origin"] = ngx.ctx.bw.http_origin
end
for variable, header in pairs(self.all_headers) do
if self.variables[variable] ~= "" then
ngx.header[header] = self.variables[variable]
end
end
-- Preflight request
if ngx.ctx.bw.request_method == "OPTIONS" then
for variable, header in pairs(self.preflight_headers) do
if variable == "CORS_ALLOW_CREDENTIALS" then
@ -53,8 +77,12 @@ function cors:access()
if self.variables["USE_CORS"] ~= "yes" then
return self:ret(true, "service doesn't use CORS")
end
-- Deny as soon as possible if needed
if ngx.ctx.bw.http_origin and self.variables["CORS_DENY_REQUEST"] == "yes" and self.variables["CORS_ALLOW_ORIGIN"] ~= "*" and not utils.regex_match(ngx.ctx.bw.http_origin, self.variables["CORS_ALLOW_ORIGIN"]) then
return self:ret(true, "origin " .. ngx.ctx.bw.http_origin .. " is not allowed, denying access", utils.get_deny_status())
end
-- Send CORS policy with a 204 (no content) status
if ngx.ctx.bw.request_method == "OPTIONS" then
if ngx.ctx.bw.request_method == "OPTIONS" and ngx.ctx.bw.http_origin then
return self:ret(true, "preflight request", ngx.HTTP_NO_CONTENT)
end
return self:ret(true, "standard request")

View File

@ -1,6 +1,5 @@
{
"id": "cors",
"order": 999,
"name": "CORS",
"description": "Cross-Origin Resource Sharing.",
"version": "1.0",
@ -18,10 +17,10 @@
"CORS_ALLOW_ORIGIN": {
"context": "multisite",
"default": "*",
"help": "Value of the Access-Control-Allow-Origin header.",
"help": "Allowed origins to make CORS requests : PCRE regex or *.",
"id": "cors-allow-origin",
"label": "Access-Control-Allow-Origin value",
"regex": "^(\\*|https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*|null)$",
"label": "Allowed origins",
"regex": "^.*$",
"type": "text"
},
"CORS_EXPOSE_HEADERS": {
@ -68,6 +67,15 @@
"label": "Access-Control-Allow-Headers value",
"regex": "^(\\*|(?![, ])(,? ?([\\w-]+)(?!.*\\3(?!.)))*)?$",
"type": "text"
},
"CORS_DENY_REQUEST": {
"context": "multisite",
"default": "yes",
"help": "Deny request and don't send it to backend if Origin is not allowed.",
"id": "cors-deny-request",
"label": "Deny request",
"regex": "^(yes|no)$",
"type": "check"
}
}
}

View File

@ -1,10 +1,10 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local cachestore = require "bunkerweb.cachestore"
local cjson = require "cjson"
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local cachestore = require "bunkerweb.cachestore"
local cjson = require "cjson"
local country = class("country", plugin)
local country = class("country", plugin)
function country:initialize()
-- Call parent initialize
@ -28,9 +28,13 @@ function country:access()
if data then
data = cjson.decode(data)
if data.result == "ok" then
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is in country cache (not blacklisted, country = " .. data.country .. ")")
return self:ret(true,
"client IP " ..
ngx.ctx.bw.remote_addr .. " is in country cache (not blacklisted, country = " .. data.country .. ")")
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is in country cache (blacklisted, country = " .. data.country .. ")", utils.get_deny_status())
return self:ret(true,
"client IP " .. ngx.ctx.bw.remote_addr .. " is in country cache (blacklisted, country = " .. data.country .. ")",
utils.get_deny_status())
end
-- Don't go further if IP is not global
@ -47,7 +51,7 @@ function country:access()
if not country then
return self:ret(false, "can't get country of client IP " .. ngx.ctx.bw.remote_addr .. " : " .. err)
end
-- Process whitelist first
if self.variables["WHITELIST_COUNTRY"] ~= "" then
for wh_country in self.variables["WHITELIST_COUNTRY"]:gmatch("%S+") do
@ -63,9 +67,10 @@ function country:access()
if not ok then
return self:ret(false, "error while adding item to cache : " .. err)
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is not whitelisted (country = " .. country .. ")", utils.get_deny_status())
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is not whitelisted (country = " .. country .. ")",
utils.get_deny_status())
end
-- And then blacklist
if self.variables["BLACKLIST_COUNTRY"] ~= "" then
for bl_country in self.variables["BLACKLIST_COUNTRY"]:gmatch("%S+") do
@ -74,7 +79,8 @@ function country:access()
if not ok then
return self:ret(false, "error while adding item to cache : " .. err)
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")", utils.get_deny_status())
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")",
utils.get_deny_status())
end
end
end
@ -92,19 +98,20 @@ function country:preread()
end
function country:is_in_cache(ip)
local ok, data = self.cachestore:get("plugin_country_cache_" .. ngx.ctx.bw.server_name .. ip)
local ok, data = self.cachestore:get("plugin_country_" .. ngx.ctx.bw.server_name .. ip)
if not ok then
return false, data
end
return true, data
end
function country:add_to_cache(ip, country, result)
local ok, err = self.cachestore:set("plugin_country_cache_" .. ngx.ctx.bw.server_name .. ip, cjson.encode({country = country, result = result}), 86400)
local ok, err = self.cachestore:set("plugin_country_" .. ngx.ctx.bw.server_name .. ip,
cjson.encode({ country = country, result = result }), 86400)
if not ok then
return false, err
end
return true
end
return country
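Two details of the hunks above are easy to miss: the cache key is now scoped by server_name, and the country lists are plain space-separated codes. A small sketch with hypothetical values:

-- Cache key layout after the change above (hypothetical server name and client IP)
local key = "plugin_country_" .. "www.example.com" .. "198.51.100.7"
-- the same IP can therefore carry different cached verdicts on different services

-- WHITELIST_COUNTRY / BLACKLIST_COUNTRY are space-separated ISO codes
for code in ("FR DE"):gmatch("%S+") do
	-- "FR", then "DE": each is compared against the country resolved for the client IP
end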

View File

@ -1,6 +1,5 @@
{
"id": "country",
"order": 4,
"name": "Country",
"description": "Deny access based on the country of the client IP.",
"version": "1.0",

View File

@ -6,6 +6,9 @@
# listen on HTTPS PORT
listen 0.0.0.0:{{ HTTPS_PORT }} ssl {% if HTTP2 == "yes" %}http2{% endif %} {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ HTTPS_PORT }} ssl {% if HTTP2 == "yes" %}http2{% endif %} {% if USE_PROXY_PROTOCOL == "yes" %}proxy_protocol{% endif %};
{% endif %}
# TLS config
ssl_certificate {{ cert_file_path }};

View File

@ -6,6 +6,9 @@
# listen
listen 0.0.0.0:{{ LISTEN_STREAM_PORT_SSL }} ssl {% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
{% if USE_IPV6 == "yes" +%}
listen [::]:{{ LISTEN_STREAM_PORT_SSL }} ssl {% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
{% endif %}
# TLS config
ssl_certificate {{ cert_file_path }};

View File

@ -1,6 +1,5 @@
{
"id": "customcert",
"order": 999,
"name": "Custom HTTPS certificate",
"description": "Choose custom certificate for HTTPS.",
"version": "1.0",

View File

@ -1,6 +1,5 @@
{
"id": "db",
"order": 999,
"name": "DB",
"description": "Integrate easily the Database.",
"version": "1.0",

View File

@ -1,11 +1,11 @@
local class = require "middleclass"
local plugin = require "bunkerweb.plugin"
local utils = require "bunkerweb.utils"
local cachestore = require "bunkerweb.cachestore"
local cjson = require "cjson"
local resolver = require "resty.dns.resolver"
local dnsbl = class("dnsbl", plugin)
function dnsbl:initialize()
-- Call parent initialize
@ -19,6 +19,32 @@ function dnsbl:initialize()
self.cachestore = cachestore:new(self.use_redis)
end
function dnsbl:init_worker()
-- Check if loading
if self.is_loading then
return self:ret(false, "BW is loading")
end
-- Check if at least one service uses it
local is_needed, err = utils.has_variable("USE_DNSBL", "yes")
if is_needed == nil then
return self:ret(false, "can't check USE_DNSBL variable : " .. err)
elseif not is_needed then
return self:ret(true, "no service uses DNSBL, skipping init_worker")
end
-- Loop on DNSBL list
for server in self.variables["DNSBL_LIST"]:gmatch("%S+") do
local result, err = self:is_in_dnsbl("127.0.0.2", server)
if result == nil then
self.logger:log(ngx.ERR, "error while sending DNS request to " .. server .. " : " .. err)
elseif not result then
self.logger:log(ngx.ERR, "dnsbl check for " .. server .. " failed")
else
self.logger:log(ngx.NOTICE, "dnsbl check for " .. server .. " is successful")
end
end
return self:ret(true, "success")
end
function dnsbl:access()
-- Check if access is needed
if self.variables["USE_DNSBL"] ~= "yes" then
@ -27,6 +53,10 @@ function dnsbl:access()
if self.variables["DNSBL_LIST"] == "" then
return self:ret(true, "dnsbl list is empty")
end
-- Don't go further if IP is not global
if not ngx.ctx.bw.ip_is_global then
return self:ret(true, "client IP is not global, skipping DNSBL check")
end
-- Check if IP is in cache
local ok, cached = self:is_in_cache(ngx.ctx.bw.remote_addr)
if not ok then
@ -35,15 +65,8 @@ function dnsbl:access()
if cached == "ok" then
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is in DNSBL cache (not blacklisted)")
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is in DNSBL cache (server = " .. cached .. ")", utils.get_deny_status())
end
-- Don't go further if IP is not global
if not ngx.ctx.bw.ip_is_global then
local ok, err = self:add_to_cache(ngx.ctx.bw.remote_addr, "ok")
if not ok then
return self:ret(false, "error while adding element to cache : " .. err)
end
return self:ret(true, "client IP is not global, skipping DNSBL check")
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is in DNSBL cache (server = " .. cached .. ")",
utils.get_deny_status())
end
-- Loop on DNSBL list
for server in self.variables["DNSBL_LIST"]:gmatch("%S+") do
@ -52,7 +75,7 @@ function dnsbl:access()
self.logger:log(ngx.ERR, "error while sending DNS request to " .. server .. " : " .. err)
end
if result then
local ok, err self:add_to_cache(ngx.ctx.bw.remote_addr, server)
local ok, err = self:add_to_cache(ngx.ctx.bw.remote_addr, server)
if not ok then
return self:ret(false, "error while adding element to cache : " .. err)
end
@ -72,7 +95,7 @@ function dnsbl:preread()
end
function dnsbl:is_in_cache(ip)
local ok, data = self.cachestore:get("plugin_dnsbl_" .. ip)
local ok, data = self.cachestore:get("plugin_dnsbl_" .. ngx.ctx.bw.server_name .. ip)
if not ok then
return false, data
end
@ -80,26 +103,25 @@ function dnsbl:is_in_cache(ip)
end
function dnsbl:add_to_cache(ip, value)
local ok, err = self.cachestore:set("plugin_dnsbl_" .. ip, value, 86400)
local ok, err = self.cachestore:set("plugin_dnsbl_" .. ngx.ctx.bw.server_name .. ip, value, 86400)
if not ok then
return false, err
end
return true
end
function dnsbl:is_in_dnsbl(ip, server)
local request = resolver.arpa_str(ip) .. "." .. server
local ips, err = utils.get_ips(request)
local request = resolver.arpa_str(ip):gsub("%.in%-addr%.arpa", ""):gsub("%.ip6%.arpa", "") .. "." .. server
local ips, err = utils.get_ips(request, false)
if not ips then
return nil, err
end
for i, ip in ipairs(ips) do
local a, b, c, d = ip:match("([%d]+).([%d]+).([%d]+).([%d]+)")
if a == "127" then
if ip:find("^127%.0%.0%.") then
return true, "success"
end
end
return false, "success"
end
return dnsbl
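The rewritten is_in_dnsbl() strips the .in-addr.arpa / .ip6.arpa suffix before appending the DNSBL zone, and init_worker() probes each server with 127.0.0.2, the conventional always-listed test address. A sketch of the query name, assuming a hypothetical zone:

-- Query name construction matching the code above (zone name is hypothetical)
local resolver = require "resty.dns.resolver"
local ip, zone = "127.0.0.2", "dnsbl.example.net"
local name = resolver.arpa_str(ip):gsub("%.in%-addr%.arpa", ""):gsub("%.ip6%.arpa", "") .. "." .. zone
-- name == "2.0.0.127.dnsbl.example.net"; any A answer in 127.0.0.0/8 means "listed"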

View File

@ -1,6 +1,5 @@
{
"id": "dnsbl",
"order": 5,
"name": "DNSBL",
"description": "Deny access based on external DNSBL servers.",
"version": "1.0",

View File

@ -0,0 +1,22 @@
{% for intercepted_error_code in "400 401 403 404 405 413 429 500 501 502 503 504".split(" ") %}
{% if intercepted_error_code == "400" %}
error_page 400 /bwerror400;
{% else %}
error_page {{ intercepted_error_code }} @bwerror{{ intercepted_error_code }};
{% endif %}
location {% if intercepted_error_code == "400" %}= /{% else %} @{% endif %}bwerror{{ intercepted_error_code }} {
auth_basic off;
internal;
modsecurity off;
default_type 'text/html';
root /usr/share/bunkerweb/core/errors/files;
content_by_lua_block {
local logger = require "bunkerweb.logger"
local cerrors = require "errors.errors"
local errors = cerrors:new()
errors:render_template(tostring(ngx.status))
}
}
{% endfor %}
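The template special-cases 400 (a regular internal location) while every other intercepted code gets a named @ location; a standalone sketch of that mapping, outside of Jinja:

-- Reproduces the code list and the 400 special case from the template above
for code in ("400 401 403 404 405 413 429 500 501 502 503 504"):gmatch("%S+") do
	local location = (code == "400") and ("= /bwerror" .. code) or ("@bwerror" .. code)
	-- e.g. "400  = /bwerror400", "404  @bwerror404"
	print(code, location)
end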

Some files were not shown because too many files have changed in this diff.