Merge branch 'dev' into ui

BlasenhauerJ 2023-06-22 12:50:11 +02:00
commit 26f831cb4d
31 changed files with 521 additions and 342 deletions

View File

@ -13,12 +13,15 @@
- [BUGFIX] Fix wrong variables in header phase (fix CORS feature too)
- [BUGFIX] Fix UI not working in Ubuntu (python zope module)
- [BUGFIX] Patch ModSecurity to run it after LUA code (should fix whitelist problems)
- [PERFORMANCE] Reduce CPU usage of scheduler
- [BUGFIX] Custom configurations from env were not being deleted properly
- [PERFORMANCE] Reduce CPU and RAM usage of scheduler
- [PERFORMANCE] Cache ngx.ctx instead of loading it each time
- [PERFORMANCE] Use per-worker LRU cache for common RO LUA values
- [FEATURE] Add Turnstile antibot mode
- [FEATURE] Add more CORS headers
- [FEATURE] Add KEEP_UPSTREAM_HEADERS to preserve headers when using reverse proxy
- [FEATURE] Add support for downloading the various lists and plugins from a local file (e.g. the blacklist); see the sketch after this changelog
- [FEATURE] External plugins can now be downloaded as tar.gz and tar.xz archives as well as zip
- [MISC] Add LOG_LEVEL=warning for docker socket proxy in docs, examples and boilerplates
- [MISC] Temporarily remove VMware provider for Vagrant integration
- [MISC] Remove X-Script-Name header and ABSOLUTE_URI variable when using UI
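
For illustration, a minimal sketch of what the two download-related entries above enable; the paths are hypothetical, and any of the *_URLS settings shown later in this diff accept the new file:// scheme alongside http(s)://:

# Illustrative only: local files sit alongside remote URLs in the
# space-separated *_URLS settings (paths are hypothetical).
example_env = {
    "BLACKLIST_IP_URLS": "file:///etc/bunkerweb/lists/bad-ips.txt https://example.com/bad-ips.txt",
    "EXTERNAL_PLUGIN_URLS": "file:///opt/plugins/myplugin.tar.gz",
}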

View File

@ -1,7 +1,7 @@
{
"name": "authelia",
"kinds": ["docker", "autoconf", "swarm", "linux"],
"timeout": 60,
"timeout": 120,
"delay": 60,
"tests": [
{

View File

@ -3,7 +3,7 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv, sep
from os.path import join
from os.path import join, normpath
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
@ -145,14 +145,23 @@ try:
for url in urls_list:
try:
logger.info(f"Downloading blacklist data from {url} ...")
resp = get(url, stream=True, timeout=10)
if url.startswith("file://"):
with open(normpath(url[7:]), "rb") as f:
iterable = f.readlines()
else:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
continue
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
continue
iterable = resp.iter_lines()
i = 0
content = b""
for line in resp.iter_lines():
for line in iterable:
line = line.strip()
if not line or line.startswith(b"#") or line.startswith(b";"):
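
For context, a self-contained sketch of the branch introduced in this hunk; the helper name read_list is ours, not the commit's:

from os.path import normpath
from requests import get

def read_list(url: str) -> list:
    # Local file:// URLs are read directly; anything else is streamed
    # over HTTP, skipping non-200 responses.
    if url.startswith("file://"):
        with open(normpath(url[7:]), "rb") as f:
            iterable = f.readlines()
    else:
        resp = get(url, stream=True, timeout=10)
        if resp.status_code != 200:
            return []
        iterable = resp.iter_lines()
    lines = []
    for line in iterable:
        line = line.strip()
        # comments (# or ;) and empty lines are ignored, as above
        if not line or line.startswith(b"#") or line.startswith(b";"):
            continue
        lines.append(line)
    return lines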

View File

@ -29,7 +29,7 @@
"help": "List of URLs, separated with spaces, containing bad IP/network to block.",
"id": "blacklist-ip-urls",
"label": "Blacklist IP/network URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_RDNS_GLOBAL": {
@ -56,7 +56,7 @@
"help": "List of URLs, separated with spaces, containing reverse DNS suffixes to block.",
"id": "blacklist-rdns-urls",
"label": "Blacklist reverse DNS URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_ASN": {
@ -74,7 +74,7 @@
"help": "List of URLs, separated with spaces, containing ASN to block.",
"id": "blacklist-asn-urls",
"label": "Blacklist ASN URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_USER_AGENT": {
@ -92,7 +92,7 @@
"help": "List of URLs, separated with spaces, containing bad User-Agent to block.",
"id": "blacklist-user-agent-urls",
"label": "Blacklist User-Agent URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_URI": {
@ -110,7 +110,7 @@
"help": "List of URLs, separated with spaces, containing bad URI to block.",
"id": "blacklist-uri-urls",
"label": "Blacklist URI URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_IGNORE_IP": {
@ -128,7 +128,7 @@
"help": "List of URLs, separated with spaces, containing IP/network to ignore in the blacklist.",
"id": "blacklist-ignore-ip-urls",
"label": "Blacklist ignore IP/network URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_IGNORE_RDNS": {
@ -146,7 +146,7 @@
"help": "List of URLs, separated with spaces, containing reverse DNS suffixes to ignore in the blacklist.",
"id": "blacklist-ignore-rdns-urls",
"label": "Blacklist ignore reverse DNS URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_IGNORE_ASN": {
@ -164,7 +164,7 @@
"help": "List of URLs, separated with spaces, containing ASN to ignore in the blacklist.",
"id": "blacklist-ignore-asn-urls",
"label": "Blacklist ignore ASN URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_IGNORE_USER_AGENT": {
@ -182,7 +182,7 @@
"help": "List of URLs, separated with spaces, containing User-Agent to ignore in the blacklist.",
"id": "blacklist-ignore-user-agent-urls",
"label": "Blacklist ignore User-Agent URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"BLACKLIST_IGNORE_URI": {
@ -200,7 +200,7 @@
"help": "List of URLs, separated with spaces, containing URI to ignore in the blacklist.",
"id": "blacklist-ignore-uri-urls",
"label": "Blacklist ignore URI URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
}
},
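
A quick check, ours and not part of the commit, that the updated regex accepts file:/// URLs as well as http(s):// ones:

from re import compile as re_compile

url_rx = re_compile(
    r"^( *((https?:\/\/|file:\/\/\/)[-\w@:%.+~#=]+"
    r"[-\w()!@:%+.~#?&\/=$]*)(?!.*\2(?!.)) *)*$"
)
assert url_rx.match("https://example.com/list.txt")
assert url_rx.match("file:///etc/bunkerweb/lists/ip.txt")
assert not url_rx.match("ftp://example.com/list.txt")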

View File

@ -3,7 +3,7 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv, sep
from os.path import join
from os.path import join, normpath
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
@ -129,14 +129,23 @@ try:
for url in urls_list:
try:
logger.info(f"Downloading greylist data from {url} ...")
resp = get(url, stream=True, timeout=10)
if url.startswith("file://"):
with open(normpath(url[7:]), "rb") as f:
iterable = f.readlines()
else:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
continue
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
continue
iterable = resp.iter_lines()
i = 0
content = b""
for line in resp.iter_lines():
for line in iterable:
line = line.strip()
if not line or line.startswith(b"#") or line.startswith(b";"):

View File

@ -29,7 +29,7 @@
"help": "List of URLs, separated with spaces, containing good IP/network to put into the greylist.",
"id": "greylist-ip-urls",
"label": "Greylist IP/network URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"GREYLIST_RDNS_GLOBAL": {
@ -56,7 +56,7 @@
"help": "List of URLs, separated with spaces, containing reverse DNS suffixes to put into the greylist.",
"id": "greylist-rdns-urls",
"label": "Greylist reverse DNS URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"GREYLIST_ASN": {
@ -74,7 +74,7 @@
"help": "List of URLs, separated with spaces, containing ASN to put into the greylist.",
"id": "greylist-asn-urls",
"label": "Greylist ASN URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"GREYLIST_USER_AGENT": {
@ -92,7 +92,7 @@
"help": "List of URLs, separated with spaces, containing good User-Agent to put into the greylist.",
"id": "greylist-user-agent-urls",
"label": "Greylist User-Agent URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"GREYLIST_URI": {
@ -110,7 +110,7 @@
"help": "List of URLs, separated with spaces, containing bad URI to put into the greylist.",
"id": "greylist-uri-urls",
"label": "Greylist URI URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
}
},

View File

@ -3,7 +3,7 @@
from hashlib import sha256
from io import BytesIO
from os import getenv, listdir, chmod, _exit, sep
from os.path import basename, dirname, join
from os.path import basename, dirname, join, normpath
from pathlib import Path
from stat import S_IEXEC
from sys import exit as sys_exit, path as sys_path
@ -28,6 +28,7 @@ for deps_path in [
if deps_path not in sys_path:
sys_path.append(deps_path)
from magic import Magic
from requests import get
from Database import Database # type: ignore
@ -69,9 +70,22 @@ try:
# Loop on URLs
logger.info(f"Downloading external plugins from {plugin_urls}...")
for plugin_url in plugin_urls.split(" "):
# Download ZIP file
# Download Plugin file
try:
req = get(plugin_url, timeout=10)
if plugin_urls.startswith("file://"):
content = Path(normpath(plugin_urls[7:])).read_bytes()
else:
content = b""
resp = get(plugin_url, stream=True, timeout=10)
if resp.status_code != 200:
logger.warning(f"Got status code {resp.status_code}, skipping...")
continue
# Iterate over the response content in chunks
for chunk in resp.iter_content(chunk_size=8192):
if chunk:
content += chunk
except:
logger.error(
f"Exception while downloading plugin(s) from {plugin_url} :\n{format_exc()}",
@ -83,8 +97,22 @@ try:
temp_dir = join(sep, "var", "tmp", "bunkerweb", "plugins", str(uuid4()))
try:
Path(temp_dir).mkdir(parents=True, exist_ok=True)
with ZipFile(BytesIO(req.content)) as zf:
zf.extractall(path=temp_dir)
file_type = Magic(mime=True).from_buffer(content)
if file_type == "application/zip":
with ZipFile(BytesIO(content)) as zf:
zf.extractall(path=temp_dir)
elif file_type == "application/gzip":
with tar_open(fileobj=BytesIO(content), mode="r:gz") as tar:
tar.extractall(path=temp_dir)
elif file_type == "application/x-tar":
with tar_open(fileobj=BytesIO(content), mode="r") as tar:
tar.extractall(path=temp_dir)
else:
logger.error(
f"Unknown file type for {plugin_url}, either zip or tar are supported, skipping..."
)
continue
except:
logger.error(
f"Exception while decompressing plugin(s) from {plugin_url} :\n{format_exc()}",

View File

@ -143,10 +143,10 @@
"EXTERNAL_PLUGIN_URLS": {
"context": "global",
"default": "",
"help": "List of external plugins URLs (direct download to .zip file) to download and install (URLs are separated with space).",
"help": "List of external plugins URLs (direct download to .zip or .tar file) to download and install (URLs are separated with space).",
"id": "external-plugin-urls",
"label": "External plugin URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"DENY_HTTP_STATUS": {

View File

@ -3,7 +3,7 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv, sep
from os.path import join
from os.path import join, normpath
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
@ -92,12 +92,19 @@ try:
for url in urls:
try:
logger.info(f"Downloading RealIP list from {url} ...")
resp = get(url, stream=True, timeout=10)
if url.startswith("file://"):
with open(normpath(url[7:]), "rb") as f:
iterable = f.readlines()
else:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
continue
if resp.status_code != 200:
logger.warning(f"Got status code {resp.status_code}, skipping...")
continue
for line in resp.iter_lines():
iterable = resp.iter_lines()
for line in iterable:
line = line.strip().split(b" ")[0]
if not line or line.startswith(b"#") or line.startswith(b";"):

View File

@ -38,7 +38,7 @@
"help": "List of URLs containing trusted IPs / networks, separated with spaces, where proxied requests come from.",
"id": "real-ip-from-urls",
"label": "Real IP from URLs",
"regex": "^(?! )( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"REAL_IP_HEADER": {

View File

@ -3,7 +3,7 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv, sep
from os.path import join
from os.path import join, normpath
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
@ -129,14 +129,23 @@ try:
for url in urls_list:
try:
logger.info(f"Downloading whitelist data from {url} ...")
resp = get(url, stream=True, timeout=10)
if url.startswith("file://"):
with open(normpath(url[7:]), "rb") as f:
iterable = f.readlines()
else:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
continue
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
continue
iterable = resp.iter_lines()
i = 0
content = b""
for line in resp.iter_lines():
for line in iterable:
line = line.strip()
if not line or line.startswith(b"#") or line.startswith(b";"):

View File

@ -29,7 +29,7 @@
"help": "List of URLs, separated with spaces, containing good IP/network to whitelist.",
"id": "whitelist-ip-urls",
"label": "Whitelist IP/network URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"WHITELIST_RDNS_GLOBAL": {
@ -56,7 +56,7 @@
"help": "List of URLs, separated with spaces, containing reverse DNS suffixes to whitelist.",
"id": "whitelist-rdns-urls",
"label": "Whitelist reverse DNS URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"WHITELIST_ASN": {
@ -74,7 +74,7 @@
"help": "List of URLs, separated with spaces, containing ASN to whitelist.",
"id": "whitelist-asn-urls",
"label": "Whitelist ASN URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"WHITELIST_USER_AGENT": {
@ -92,7 +92,7 @@
"help": "List of URLs, separated with spaces, containing good User-Agent to whitelist.",
"id": "whitelist-user-agent-urls",
"label": "Whitelist User-Agent URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
},
"WHITELIST_URI": {
@ -110,7 +110,7 @@
"help": "List of URLs, separated with spaces, containing bad URI to whitelist.",
"id": "whitelist-uri-urls",
"label": "Whitelist URI URLs",
"regex": "^( *(https?:\\/\\/[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"regex": "^( *((https?:\\/\\/|file:\\/\\/\\/)[-\\w@:%.+~#=]+[-\\w()!@:%+.~#?&\\/=$]*)(?!.*\\2(?!.)) *)*$",
"type": "text"
}
},

View File

@ -11,7 +11,7 @@ from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile
from sys import _getframe, path as sys_path
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from typing import Any, Dict, List, Optional, Tuple, Union
from time import sleep
from traceback import format_exc
@ -218,6 +218,36 @@ class Database:
except (ProgrammingError, OperationalError):
return False
def set_scheduler_first_start(self, value: bool = False) -> str:
"""Set the scheduler_first_start value"""
with self.__db_session() as session:
try:
metadata = session.query(Metadata).get(1)
if not metadata:
return "The metadata are not set yet, try again"
metadata.scheduler_first_start = value
session.commit()
except BaseException:
return format_exc()
return ""
def is_scheduler_first_start(self) -> bool:
"""Check if it's the scheduler's first start"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.scheduler_first_start)
.filter_by(id=1)
.first()
)
return metadata is not None and metadata.scheduler_first_start
except (ProgrammingError, OperationalError):
return True
def is_first_config_saved(self) -> bool:
"""Check if the first configuration has been saved"""
with self.__db_session() as session:
@ -254,6 +284,7 @@ class Database:
Metadata(
is_initialized=True,
first_config_saved=False,
scheduler_first_start=True,
version=version,
integration=integration,
)
@ -361,6 +392,8 @@ class Database:
stream=plugin["stream"],
external=plugin.get("external", False),
method=plugin.get("method"),
data=plugin.get("data"),
checksum=plugin.get("checksum"),
)
)
@ -614,7 +647,7 @@ class Database:
)
)
config.pop("SERVER_NAME")
config.pop("SERVER_NAME", None)
for key, value in config.items():
suffix = 0
@ -672,7 +705,7 @@ class Database:
if metadata is not None:
if not metadata.first_config_saved:
metadata.first_config_saved = True
metadata.config_changed = bool(to_put)
metadata.config_changed = True
try:
session.add_all(to_put)
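
A sketch, ours, of the intended flow around the new scheduler_first_start flag, with a stand-in class in place of the real Database:

class FakeDB:
    # mimics the two methods added above
    scheduler_first_start = True

    def is_scheduler_first_start(self) -> bool:
        return self.scheduler_first_start

    def set_scheduler_first_start(self, value: bool = False) -> str:
        self.scheduler_first_start = value
        return ""  # empty string means success, like the real method

db = FakeDB()
if db.is_scheduler_first_start():
    # one-time startup work (regenerating configs/plugins) goes here
    err = db.set_scheduler_first_start()
    assert not err and not db.is_scheduler_first_start()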

View File

@ -279,6 +279,7 @@ class Metadata(Base):
is_initialized = Column(Boolean, nullable=False)
first_config_saved = Column(Boolean, nullable=False)
autoconf_loaded = Column(Boolean, default=False, nullable=True)
scheduler_first_start = Column(Boolean, nullable=True)
custom_configs_changed = Column(Boolean, default=False, nullable=True)
external_plugins_changed = Column(Boolean, default=False, nullable=True)
config_changed = Column(Boolean, default=False, nullable=True)

View File

@ -1,4 +1,4 @@
sqlalchemy==2.0.16
cryptography==41.0.1
psycopg2-binary==2.9.6
PyMySQL==1.0.3
cryptography==41.0.1
sqlalchemy==2.0.16

View File

@ -5,12 +5,13 @@ from hashlib import sha256
from io import BytesIO
from json import loads
from logging import Logger
from os import listdir, sep
from os import cpu_count, listdir, sep
from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile, search as re_search
from sys import path as sys_path
from tarfile import open as tar_open
from threading import Lock, Semaphore, Thread
from traceback import format_exc
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
@ -28,16 +29,20 @@ class Configurator:
logger: Logger,
):
self.__logger = logger
self.__thread_lock = Lock()
self.__semaphore = Semaphore(cpu_count() or 1)
self.__plugin_id_rx = re_compile(r"^[\w.-]{1,64}$")
self.__plugin_version_rx = re_compile(r"^\d+\.\d+(\.\d+)?$")
self.__setting_id_rx = re_compile(r"^[A-Z0-9_]{1,256}$")
self.__name_rx = re_compile(r"^[\w.-]{1,128}$")
self.__job_file_rx = re_compile(r"^[\w./-]{1,256}$")
self.__settings = self.__load_settings(settings)
self.__core_plugins = self.__load_plugins(core)
self.__core_plugins = []
self.__load_plugins(core)
if isinstance(external_plugins, str):
self.__external_plugins = self.__load_plugins(external_plugins, "external")
self.__external_plugins = []
self.__load_plugins(external_plugins, "external")
else:
self.__external_plugins = external_plugins
@ -103,50 +108,61 @@ class Configurator:
def __load_settings(self, path: str) -> Dict[str, Any]:
return loads(Path(path).read_text())
def __load_plugins(self, path: str, _type: str = "core") -> List[Dict[str, Any]]:
plugins = []
files = glob(join(path, "*", "plugin.json"))
for file in files:
try:
data = self.__load_settings(file)
def __load_plugins(self, path: str, _type: str = "core"):
threads = []
for file in glob(join(path, "*", "plugin.json")):
thread = Thread(target=self.__load_plugin, args=(file, _type))
thread.start()
threads.append(thread)
resp, msg = self.__validate_plugin(data)
if not resp:
self.__logger.warning(
f"Ignoring plugin {file} : {msg}",
for thread in threads:
thread.join()
def __load_plugin(self, file: str, _type: str = "core"):
self.__semaphore.acquire(timeout=60)
try:
data = self.__load_settings(file)
resp, msg = self.__validate_plugin(data)
if not resp:
self.__logger.warning(
f"Ignoring plugin {file} : {msg}",
)
return
if _type == "external":
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz", compresslevel=9
) as tar:
tar.add(
dirname(file),
arcname=basename(dirname(file)),
recursive=True,
)
continue
plugin_content.seek(0, 0)
value = plugin_content.getvalue()
if _type == "external":
plugin_content = BytesIO()
with tar_open(fileobj=plugin_content, mode="w:gz") as tar:
tar.add(
dirname(file),
arcname=basename(dirname(file)),
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
data.update(
{
"external": path.startswith(
join(sep, "etc", "bunkerweb", "plugins")
),
"page": "ui" in listdir(dirname(file)),
"method": "manual",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
plugins.append(data)
except:
self.__logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
data.update(
{
"external": True,
"page": "ui" in listdir(dirname(file)),
"method": "manual",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
return plugins
with self.__thread_lock:
self.__external_plugins.append(data)
else:
with self.__thread_lock:
self.__core_plugins.append(data)
except:
self.__logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
)
self.__semaphore.release()
def __load_variables(self, path: str) -> Dict[str, Any]:
variables = {}
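
A minimal sketch, ours, of the loading pattern above: one thread per plugin.json, bounded by a CPU-count semaphore, with results appended under a lock:

from os import cpu_count
from threading import Lock, Semaphore, Thread

results, lock = [], Lock()
semaphore = Semaphore(cpu_count() or 1)

def load_one(item: int):
    semaphore.acquire(timeout=60)
    try:
        with lock:
            results.append(item * 2)  # stand-in for parsing a plugin.json
    finally:
        semaphore.release()

threads = [Thread(target=load_one, args=(i,)) for i in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert len(results) == 8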

View File

@ -1,7 +1,7 @@
docker==6.1.3
kubernetes==26.1.0
jinja2==3.1.2
kubernetes==26.1.0
python-dotenv==1.0.0
requests==2.31.0
redis==4.5.5
requests==2.31.0
urllib3==2.0.3

View File

@ -278,7 +278,7 @@ websocket-client==1.6.0 \
# kubernetes
# The following packages are considered to be unsafe in a requirements file:
setuptools==67.8.0 \
--hash=sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f \
--hash=sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102
setuptools==68.0.0 \
--hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \
--hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235
# via kubernetes

View File

@ -46,7 +46,7 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"value": f"# CREATED BY ENV\n{splitted[1]}",
"exploded": (
custom_conf[0],
custom_conf[1],
@ -212,7 +212,7 @@ if __name__ == "__main__":
custom_conf = custom_confs_rx.search(k).groups()
custom_confs.append(
{
"value": v,
"value": f"# CREATED BY ENV\n{v}",
"exploded": (
custom_conf[0],
custom_conf[1],
@ -223,25 +223,6 @@ if __name__ == "__main__":
logger.info(
f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}"
)
configs_path = join(sep, "etc", "bunkerweb", "configs")
root_dirs = listdir(configs_path)
for root, dirs, files in walk(configs_path):
if files or (dirs and basename(root) not in root_dirs):
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r") as f:
custom_confs.append(
{
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
path_exploded[-1],
file.replace(".conf", ""),
),
}
)
else:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
@ -268,7 +249,7 @@ if __name__ == "__main__":
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"value": f"# CREATED BY ENV\n{splitted[1]}",
"exploded": (
custom_conf[0],
custom_conf[1],
@ -381,7 +362,9 @@ if __name__ == "__main__":
if apis:
for api in apis:
endpoint_data = api.endpoint.replace("http://", "").split(":")
err = db.add_instance(endpoint_data[0], endpoint_data[1].replace("/", ""), api.host)
err = db.add_instance(
endpoint_data[0], endpoint_data[1].replace("/", ""), api.host
)
if err:
logger.warning(err)
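
A short demonstration, ours, of the marker semantics introduced here: env-provided custom configs are stored with a comment header so they can be told apart from files a user created manually:

raw = "limit_req zone=one;"  # hypothetical custom config value
stored = f"# CREATED BY ENV\n{raw}"
assert stored.startswith("# CREATED BY ENV")
# stripping the marker recovers the original value for comparisons
assert stored.replace("# CREATED BY ENV\n", "") == raw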

View File

@ -70,9 +70,12 @@ def is_cached_file(
return is_cached and cached_file
def get_file_in_db(file: Union[str, Path], db, *, job_name: Optional[str] = None) -> Optional[bytes]:
def get_file_in_db(
file: Union[str, Path], db, *, job_name: Optional[str] = None
) -> Optional[bytes]:
cached_file = db.get_job_cache_file(
job_name or basename(getsourcefile(_getframe(1))).replace(".py", ""), normpath(file)
job_name or basename(getsourcefile(_getframe(1))).replace(".py", ""),
normpath(file),
)
if not cached_file:
return None

View File

@ -45,7 +45,7 @@ RUN umask 027
COPY --from=builder --chown=0:101 /usr/share/bunkerweb /usr/share/bunkerweb
# Add scheduler user, drop bwcli, install runtime dependencies, create data folders and set permissions
RUN apk add --no-cache bash libgcc libstdc++ openssl && \
RUN apk add --no-cache bash libgcc libstdc++ openssl libmagic && \
ln -s /usr/local/bin/python3 /usr/bin/python3 && \
addgroup -g 101 scheduler && \
adduser -h /var/cache/nginx -g scheduler -s /bin/sh -G scheduler -D -H -u 101 scheduler && \

View File

@ -68,6 +68,9 @@ class JobScheduler(ApiCaller):
def auto_setup(self):
super().auto_setup(bw_integration=self.__integration)
def update_jobs(self):
self.__jobs = self.__get_jobs()
def __get_jobs(self):
jobs = {}
for plugin_file in glob(
@ -87,12 +90,12 @@ class JobScheduler(ApiCaller):
for x, job in enumerate(deepcopy(plugin_jobs)):
if not all(
key in job.keys()
for key in [
for key in (
"name",
"file",
"every",
"reload",
]
)
):
self.__logger.warning(
f"missing keys for job {job['name']} in plugin {plugin_name}, must have name, file, every and reload, ignoring job"
@ -112,7 +115,7 @@ class JobScheduler(ApiCaller):
)
plugin_jobs.pop(x)
continue
elif job["every"] not in ["once", "minute", "hour", "day", "week"]:
elif job["every"] not in ("once", "minute", "hour", "day", "week"):
self.__logger.warning(
f"Invalid every for job {job['name']} in plugin {plugin_name} (Must be once, minute, hour, day or week), ignoring job"
)
@ -205,6 +208,11 @@ class JobScheduler(ApiCaller):
with self.__thread_lock:
self.__job_success = False
Thread(target=self.__update_job, args=(plugin, name, success)).start()
return ret
def __update_job(self, plugin: str, name: str, success: bool):
with self.__thread_lock:
err = self.__db.update_job(plugin, name, success)
@ -216,7 +224,6 @@ class JobScheduler(ApiCaller):
self.__logger.warning(
f"Failed to update database for the job {name} from plugin {plugin}: {err}",
)
return ret
def setup(self):
for plugin, jobs in self.__jobs.items():
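
A sketch, ours, of the pattern above: the job result is returned immediately while the database update happens in a background thread:

from threading import Lock, Thread

lock = Lock()

def update_job(name: str, success: bool):
    with lock:
        print(f"recorded {name}: {success}")  # stand-in for db.update_job

def run_job(name: str) -> bool:
    success = True  # stand-in for actually running the job
    Thread(target=update_job, args=(name, success)).start()
    return success

run_job("example-job")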

View File

@ -4,7 +4,7 @@ from argparse import ArgumentParser
from glob import glob
from hashlib import sha256
from io import BytesIO
from json import load as json_load
from json import dumps, load as json_load
from os import (
_exit,
chmod,
@ -23,6 +23,7 @@ from stat import S_IEXEC
from subprocess import run as subprocess_run, DEVNULL, STDOUT
from sys import path as sys_path
from tarfile import open as tar_open
from threading import Thread
from time import sleep
from traceback import format_exc
from typing import Any, Dict, List, Optional, Union
@ -169,6 +170,12 @@ def generate_external_plugins(
)
def dict_to_frozenset(d):
if isinstance(d, dict):
return frozenset((k, dict_to_frozenset(v)) for k, v in d.items())
return d
if __name__ == "__main__":
try:
# Don't execute if pid file exists
@ -287,6 +294,8 @@ if __name__ == "__main__":
# Automatically setup the scheduler apis
SCHEDULER.auto_setup()
scheduler_first_start = db.is_scheduler_first_start()
logger.info("Scheduler started ...")
# Checking if any custom config has been created by the user
@ -294,45 +303,65 @@ if __name__ == "__main__":
db_configs = db.get_custom_configs()
configs_path = Path(sep, "etc", "bunkerweb", "configs")
root_dirs = listdir(str(configs_path))
changes = False
for root, dirs, files in walk(str(configs_path)):
if files or (dirs and basename(root) not in root_dirs):
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r", encoding="utf-8") as f:
custom_conf = {
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
path_exploded[-1],
file.replace(".conf", ""),
),
}
content = Path(join(root, file)).read_text(encoding="utf-8")
custom_conf = {
"value": content,
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
path_exploded[-1],
file.replace(".conf", ""),
),
}
saving = True
in_db = False
for db_conf in db_configs:
if (
db_conf["method"] != "manual"
and db_conf["service_id"] == custom_conf["exploded"][0]
db_conf["service_id"] == custom_conf["exploded"][0]
and db_conf["name"] == custom_conf["exploded"][2]
):
saving = False
break
in_db = True
if db_conf["method"] != "manual":
saving = False
break
if not in_db and content.startswith("# CREATED BY ENV"):
saving = False
changes = True
if saving:
custom_configs.append(custom_conf)
err = db.save_custom_configs(custom_configs, "manual")
if err:
logger.error(
f"Couldn't save some manually created custom configs to database: {err}",
)
changes = changes or {hash(dict_to_frozenset(d)) for d in custom_configs} != {
hash(dict_to_frozenset(d)) for d in db_configs
}
generate_custom_configs(db.get_custom_configs(), original_path=configs_path)
if changes:
err = db.save_custom_configs(custom_configs, "manual")
if err:
logger.error(
f"Couldn't save some manually created custom configs to database: {err}",
)
if (scheduler_first_start and db_configs) or changes:
Thread(
target=generate_custom_configs,
args=(db.get_custom_configs(),),
kwargs={"original_path": configs_path},
).start()
del custom_configs, db_configs
# Check if any external plugin has been added by the user
external_plugins = []
db_plugins = db.get_plugins(external=True)
plugins_dir = Path(sep, "etc", "bunkerweb", "plugins")
for filename in glob(str(plugins_dir.joinpath("*", "plugin.json"))):
with open(filename, "r", encoding="utf-8") as f:
@ -342,7 +371,7 @@ if __name__ == "__main__":
fileobj=plugin_content, mode="w:gz", compresslevel=9
) as tar:
tar.add(_dir, arcname=basename(_dir), recursive=True)
plugin_content.seek(0)
plugin_content.seek(0, 0)
value = plugin_content.getvalue()
external_plugins.append(
@ -356,17 +385,32 @@ if __name__ == "__main__":
}
)
if external_plugins:
err = db.update_external_plugins(external_plugins, delete_missing=False)
tmp_external_plugins = []
for external_plugin in external_plugins.copy():
external_plugin.pop("data", None)
external_plugin.pop("checksum", None)
external_plugin.pop("jobs", None)
tmp_external_plugins.append(external_plugin)
changes = {hash(dict_to_frozenset(d)) for d in tmp_external_plugins} != {
hash(dict_to_frozenset(d)) for d in db_plugins
}
if changes:
err = db.update_external_plugins(external_plugins, delete_missing=True)
if err:
logger.error(
f"Couldn't save some manually added plugins to database: {err}",
)
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
original_path=plugins_dir,
)
if (scheduler_first_start and db_plugins) or changes:
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
original_path=plugins_dir,
)
SCHEDULER.update_jobs()
del tmp_external_plugins, external_plugins, db_plugins
logger.info("Executing scheduler ...")
@ -387,9 +431,36 @@ if __name__ == "__main__":
"Looks like BunkerWeb configuration is already generated, will not generate it again ..."
)
if scheduler_first_start:
ret = db.set_scheduler_first_start()
if ret:
logger.error(
f"An error occurred when setting the scheduler first start : {ret}"
)
stop(1)
FIRST_RUN = True
CHANGES = []
threads = []
def send_nginx_configs():
logger.info(f"Sending {join(sep, 'etc', 'nginx')} folder ...")
ret = SCHEDULER.send_files(join(sep, "etc", "nginx"), "/confs")
if not ret:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
)
def send_nginx_cache():
logger.info(f"Sending {CACHE_PATH} folder ...")
if not SCHEDULER.send_files(CACHE_PATH, "/cache"):
logger.error(f"Error while sending {CACHE_PATH} folder")
else:
logger.info(f"Successfully sent {CACHE_PATH} folder")
while True:
threads.clear()
ret = db.checked_changes(CHANGES)
if ret:
@ -400,6 +471,7 @@ if __name__ == "__main__":
# Update the environment variables of the scheduler
SCHEDULER.env = env.copy() | environ.copy()
SCHEDULER.setup()
# Only run jobs once
if not SCHEDULER.run_once():
@ -407,47 +479,6 @@ if __name__ == "__main__":
else:
logger.info("All jobs in run_once() were successful")
changes = db.check_changes()
if isinstance(changes, str):
logger.error(
f"An error occurred when checking for changes in the database : {changes}"
)
stop(1)
# check if the plugins have changed since last time
if changes["external_plugins_changed"]:
logger.info("External plugins changed, generating ...")
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
original_path=plugins_dir,
)
# run the config saver to save potential plugins settings
proc = subprocess_run(
[
"python",
join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
"--settings",
join(sep, "usr", "share", "bunkerweb", "settings.json"),
],
stdin=DEVNULL,
stderr=STDOUT,
check=False,
)
if proc.returncode != 0:
logger.error(
"Config saver failed, configuration will not work as expected...",
)
ret = db.checked_changes(["external_plugins"])
if ret:
logger.error(
f"An error occurred when setting the changes to checked in the database : {ret}"
)
stop(1)
if GENERATE:
# run the generator
proc = subprocess_run(
@ -480,21 +511,19 @@ if __name__ == "__main__":
if SCHEDULER.apis:
# send nginx configs
logger.info(f"Sending {join(sep, 'etc', 'nginx')} folder ...")
ret = SCHEDULER.send_files(join(sep, "etc", "nginx"), "/confs")
if not ret:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
)
thread = Thread(target=send_nginx_configs)
thread.start()
threads.append(thread)
try:
if SCHEDULER.apis:
# send cache
logger.info(f"Sending {CACHE_PATH} folder ...")
if not SCHEDULER.send_files(CACHE_PATH, "/cache"):
logger.error(f"Error while sending {CACHE_PATH} folder")
else:
logger.info(f"Successfully sent {CACHE_PATH} folder")
thread = Thread(target=send_nginx_cache)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
if SCHEDULER.send_to_apis("POST", "/reload"):
logger.info("Successfully reloaded nginx")
@ -551,12 +580,10 @@ if __name__ == "__main__":
)
GENERATE = True
SCHEDULER.setup()
NEED_RELOAD = False
CONFIG_NEED_GENERATION = False
CONFIGS_NEED_GENERATION = False
PLUGINS_NEED_GENERATION = False
FIRST_RUN = False
# infinite schedule for the jobs
logger.info("Executing job scheduler ...")
@ -575,32 +602,72 @@ if __name__ == "__main__":
)
stop(1)
# check if the plugins have changed since last time
if changes["external_plugins_changed"]:
logger.info("External plugins changed, generating ...")
if FIRST_RUN:
# run the config saver to save potential ignored external plugins settings
logger.info(
"Running config saver to save potential ignored external plugins settings ..."
)
proc = subprocess_run(
[
"python",
join(
sep,
"usr",
"share",
"bunkerweb",
"gen",
"save_config.py",
),
"--settings",
join(sep, "usr", "share", "bunkerweb", "settings.json"),
],
stdin=DEVNULL,
stderr=STDOUT,
check=False,
)
if proc.returncode != 0:
logger.error(
"Config saver failed, configuration will not work as expected...",
)
changes.update(
{
"custom_configs_changed": True,
"config_changed": True,
}
)
PLUGINS_NEED_GENERATION = True
NEED_RELOAD = True
# check if the custom configs have changed since last time
if changes["custom_configs_changed"]:
logger.info("Custom configs changed, generating ...")
CONFIGS_NEED_GENERATION = True
NEED_RELOAD = True
# check if the plugins have changed since last time
if changes["external_plugins_changed"]:
logger.info("External plugins changed, generating ...")
PLUGINS_NEED_GENERATION = True
NEED_RELOAD = True
# check if the config have changed since last time
if changes["config_changed"]:
logger.info("Config changed, generating ...")
CONFIG_NEED_GENERATION = True
NEED_RELOAD = True
FIRST_RUN = False
if NEED_RELOAD:
CHANGES.clear()
if CONFIGS_NEED_GENERATION:
CHANGES.append("custom_configs")
generate_custom_configs(
db.get_custom_configs(), original_path=configs_path
)
Thread(
target=generate_custom_configs,
args=(db.get_custom_configs(),),
kwargs={"original_path": configs_path},
).start()
if PLUGINS_NEED_GENERATION:
CHANGES.append("external_plugins")
@ -608,6 +675,7 @@ if __name__ == "__main__":
db.get_plugins(external=True, with_data=True),
original_path=plugins_dir,
)
SCHEDULER.update_jobs()
if CONFIG_NEED_GENERATION:
CHANGES.append("config")

View File

@ -1,3 +1,4 @@
schedule==1.2.0
certbot==2.6.0
maxminddb==2.3.0
python-magic==0.4.27
schedule==1.2.0

View File

@ -229,6 +229,10 @@ pyrfc3339==1.1 \
# via
# acme
# certbot
python-magic==0.4.27 \
--hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b \
--hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3
# via -r requirements.in
pytz==2023.3 \
--hash=sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588 \
--hash=sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb
@ -254,9 +258,9 @@ urllib3==2.0.3 \
# via requests
# The following packages are considered to be unsafe in a requirements file:
setuptools==67.8.0 \
--hash=sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f \
--hash=sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102
setuptools==68.0.0 \
--hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \
--hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235
# via
# acme
# certbot

View File

@ -1,8 +1,8 @@
bcrypt==4.0.1
beautifulsoup4==4.12.2
Flask==2.3.2
Flask_Login==0.6.2
Flask_WTF==1.1.1
beautifulsoup4==4.12.2
python_dateutil==2.8.2
bcrypt==4.0.1
gunicorn[gevent]==20.1.0
regex==2023.5.5
python_dateutil==2.8.2
regex==2023.6.3

View File

@ -174,9 +174,9 @@ gunicorn[gevent]==20.1.0 \
--hash=sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e \
--hash=sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8
# via -r requirements.in
importlib-metadata==6.6.0 \
--hash=sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed \
--hash=sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705
importlib-metadata==6.7.0 \
--hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \
--hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5
# via flask
itsdangerous==2.1.2 \
--hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
@ -247,95 +247,95 @@ python-dateutil==2.8.2 \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
# via -r requirements.in
regex==2023.5.5 \
--hash=sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff \
--hash=sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea \
--hash=sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9 \
--hash=sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423 \
--hash=sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c \
--hash=sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60 \
--hash=sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764 \
--hash=sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2 \
--hash=sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f \
--hash=sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe \
--hash=sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2 \
--hash=sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a \
--hash=sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c \
--hash=sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17 \
--hash=sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426 \
--hash=sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e \
--hash=sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8 \
--hash=sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67 \
--hash=sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e \
--hash=sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32 \
--hash=sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559 \
--hash=sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66 \
--hash=sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e \
--hash=sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8 \
--hash=sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309 \
--hash=sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18 \
--hash=sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a \
--hash=sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94 \
--hash=sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0 \
--hash=sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d \
--hash=sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80 \
--hash=sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810 \
--hash=sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6 \
--hash=sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96 \
--hash=sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a \
--hash=sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636 \
--hash=sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d \
--hash=sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe \
--hash=sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b \
--hash=sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2 \
--hash=sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d \
--hash=sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393 \
--hash=sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac \
--hash=sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926 \
--hash=sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91 \
--hash=sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e \
--hash=sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a \
--hash=sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46 \
--hash=sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a \
--hash=sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd \
--hash=sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3 \
--hash=sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd \
--hash=sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657 \
--hash=sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3 \
--hash=sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2 \
--hash=sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a \
--hash=sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1 \
--hash=sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550 \
--hash=sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833 \
--hash=sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81 \
--hash=sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8 \
--hash=sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5 \
--hash=sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19 \
--hash=sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d \
--hash=sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86 \
--hash=sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e \
--hash=sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008 \
--hash=sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8 \
--hash=sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2 \
--hash=sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da \
--hash=sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053 \
--hash=sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa \
--hash=sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb \
--hash=sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22 \
--hash=sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc \
--hash=sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6 \
--hash=sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051 \
--hash=sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d \
--hash=sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468 \
--hash=sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df \
--hash=sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1 \
--hash=sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d \
--hash=sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479 \
--hash=sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b \
--hash=sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35 \
--hash=sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956 \
--hash=sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2 \
--hash=sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135
regex==2023.6.3 \
--hash=sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938 \
--hash=sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6 \
--hash=sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef \
--hash=sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525 \
--hash=sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af \
--hash=sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc \
--hash=sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54 \
--hash=sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8 \
--hash=sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7 \
--hash=sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568 \
--hash=sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c \
--hash=sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f \
--hash=sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536 \
--hash=sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2 \
--hash=sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb \
--hash=sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df \
--hash=sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019 \
--hash=sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18 \
--hash=sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f \
--hash=sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac \
--hash=sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8 \
--hash=sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9 \
--hash=sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697 \
--hash=sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06 \
--hash=sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d \
--hash=sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036 \
--hash=sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1 \
--hash=sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787 \
--hash=sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9 \
--hash=sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0 \
--hash=sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7 \
--hash=sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461 \
--hash=sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee \
--hash=sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3 \
--hash=sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0 \
--hash=sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f \
--hash=sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14 \
--hash=sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477 \
--hash=sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd \
--hash=sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16 \
--hash=sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc \
--hash=sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff \
--hash=sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd \
--hash=sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2 \
--hash=sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e \
--hash=sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef \
--hash=sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07 \
--hash=sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3 \
--hash=sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3 \
--hash=sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a \
--hash=sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7 \
--hash=sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa \
--hash=sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82 \
--hash=sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77 \
--hash=sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222 \
--hash=sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c \
--hash=sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd \
--hash=sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d \
--hash=sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68 \
--hash=sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9 \
--hash=sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693 \
--hash=sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487 \
--hash=sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06 \
--hash=sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591 \
--hash=sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27 \
--hash=sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a \
--hash=sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969 \
--hash=sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3 \
--hash=sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0 \
--hash=sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7 \
--hash=sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751 \
--hash=sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747 \
--hash=sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289 \
--hash=sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72 \
--hash=sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777 \
--hash=sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9 \
--hash=sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1 \
--hash=sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1 \
--hash=sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e \
--hash=sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf \
--hash=sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f \
--hash=sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd \
--hash=sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2 \
--hash=sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c \
--hash=sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1 \
--hash=sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88 \
--hash=sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9 \
--hash=sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23
# via -r requirements.in
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
@ -397,9 +397,9 @@ zope-interface==6.0 \
# via gevent
# The following packages are considered to be unsafe in a requirements file:
setuptools==67.8.0 \
--hash=sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f \
--hash=sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102
setuptools==68.0.0 \
--hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \
--hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235
# via
# gevent
# gunicorn

View File

@ -7,7 +7,6 @@ git clone https://github.com/bunkerity/bunkerweb-plugins.git
echo " Checking out to dev branch ..."
cd bunkerweb-plugins
git checkout dev # TODO: remove this when the next release of bw-plugins is out
echo " Extracting ClamAV plugin ..."

View File

@ -1,6 +1,6 @@
from contextlib import contextmanager
from glob import iglob
from hashlib import sha512
from hashlib import sha256
from json import dumps, load
from os import environ, getenv
from os.path import dirname, join
@ -486,14 +486,14 @@ try:
print(" Checking if all plugin pages are in the database ...", flush=True)
def file_hash(file: str) -> str:
_sha512 = sha512()
_sha256 = sha256()
with open(file, "rb") as f:
while True:
data = f.read(1024)
if not data:
break
_sha512.update(data)
return _sha512.hexdigest()
_sha256.update(data)
return _sha256.hexdigest()
with db_session() as session:
plugin_pages = (
@ -649,7 +649,7 @@ try:
)
exit(1)
elif (
custom_config.data
custom_config.data.replace(b"# CREATED BY ENV\n", b"")
!= current_custom_configs[custom_config.name]["value"]
):
print(

View File

@ -15,7 +15,7 @@ WORKDIR /opt/tests_ui
COPY requirements.txt .
RUN pip install --no-cache -r requirements.txt
RUN MAKEFLAGS="-j $(nproc)" pip install --no-cache -r requirements.txt
RUN touch test.txt && \
zip test.zip test.txt && \

View File

@ -793,12 +793,12 @@ with webdriver.Firefox(
driver, By.XPATH, "//div[@data-configs-modal-editor='']/textarea"
).send_keys(
"""
location /hello {
default_type 'text/plain';
content_by_lua_block {
ngx.say('hello app1')
}
}
location /hello {
default_type 'text/plain';
content_by_lua_block {
ngx.say('hello app1')
}
}
"""
)
@ -994,8 +994,7 @@ with webdriver.Firefox(
assert_button_click(driver, "//button[@data-cache-modal-submit='']")
print(
"The cache file content is correct, trying logs page ...", flush=True)
print("The cache file content is correct, trying logs page ...", flush=True)
access_page(
driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[8]/a", "logs"