Tweak py files to respect flake8 rules

Théophile Diot 2023-10-03 12:01:24 +02:00
parent 508c728b65
commit 6b2df35858
GPG Key ID: 248FEA4BAE400D06
88 changed files with 778 additions and 2523 deletions
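The diffs below repeat two mechanical patterns: negated membership tests are rewritten from "not X in Y" to "X not in Y" (flake8 rule E713), and calls that the formatter had wrapped across several lines are collapsed back onto a single line, which only passes flake8 with a raised max-line-length. A minimal before/after sketch of both patterns, using hypothetical names:

# Hypothetical snippet illustrating the two recurring fixes in this commit.
data = {"context": "global"}

# Before (flake8 E713: test for membership should be "not in"):
#     multiple = "no" if not "multiple" in data else "yes"
# After:
multiple = "no" if "multiple" not in data else "yes"

# Before (call wrapped under the old line-length limit):
#     print(
#         f"multiple: {multiple}"
#     )
# After (collapsed onto one line, assuming a raised max-line-length):
print(f"multiple: {multiple}")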

View File

@@ -14,7 +14,7 @@ def print_md_table(settings) -> MarkdownTableWriter:
f"`{setting}`",
"" if data["default"] == "" else f"`{data['default']}`",
data["context"],
"no" if not "multiple" in data else "yes",
"no" if "multiple" not in data else "yes",
data["help"],
]
for setting, data in settings.items()
@@ -42,15 +42,19 @@ print(
file=doc,
)
print(
"This section contains the full list of settings supported by BunkerWeb. If you are not yet familiar with BunkerWeb, you should first read the [concepts](concepts.md) section of the documentation. Please follow the instructions for your own [integration](integrations.md) on how to apply the settings.\n",
"This section contains the full list of settings supported by BunkerWeb."
+ " If you are not yet familiar with BunkerWeb, you should first read the [concepts](concepts.md) section of the documentation."
+ " Please follow the instructions for your own [integration](integrations.md) on how to apply the settings.\n",
file=doc,
)
print(
"As a general rule when multisite mode is enabled, if you want to apply settings with multisite context to a specific server, you will need to add the primary (first) server name as a prefix like `www.example.com_USE_ANTIBOT=captcha` or `myapp.example.com_USE_GZIP=yes` for example.\n",
"As a general rule when multisite mode is enabled, if you want to apply settings with multisite context to a specific server, you will need to add the primary"
+ " (first) server name as a prefix like `www.example.com_USE_ANTIBOT=captcha` or `myapp.example.com_USE_GZIP=yes` for example.\n",
file=doc,
)
print(
'When settings are considered as "multiple", it means that you can have multiple groups of settings for the same feature by adding numbers as suffix like `REVERSE_PROXY_URL_1=/subdir`, `REVERSE_PROXY_HOST_1=http://myhost1`, `REVERSE_PROXY_URL_2=/anotherdir`, `REVERSE_PROXY_HOST_2=http://myhost2`, ... for example.\n',
'When settings are considered as "multiple", it means that you can have multiple groups of settings for the same feature by adding numbers as suffix like `REVERSE_PROXY_URL_1=/subdir`,'
+ " `REVERSE_PROXY_HOST_1=http://myhost1`, `REVERSE_PROXY_URL_2=/anotherdir`, `REVERSE_PROXY_HOST_2=http://myhost2`, ... for example.\n",
file=doc,
)
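The passage above is generated documentation describing BunkerWeb's settings conventions. As an illustration, the examples it cites would appear in a configuration roughly as follows (values taken from the text itself, purely hypothetical):

# Multisite: prefix a multisite-context setting with the primary (first) server name.
www.example.com_USE_ANTIBOT=captcha
# "Multiple" settings: numeric suffixes group several instances of the same feature.
REVERSE_PROXY_URL_1=/subdir
REVERSE_PROXY_HOST_1=http://myhost1
REVERSE_PROXY_URL_2=/anotherdir
REVERSE_PROXY_HOST_2=http://myhost2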

View File

@@ -2,7 +2,6 @@
from os import getenv
from time import sleep
from typing import Optional
from copy import deepcopy
from ConfigCaller import ConfigCaller # type: ignore
@@ -25,9 +24,7 @@ class Config(ConfigCaller):
"modsec",
"modsec-crs",
]
self.__configs = {
config_type: {} for config_type in self._supported_config_types
}
self.__configs = {config_type: {} for config_type in self._supported_config_types}
self.__config = {}
self._db = Database(self.__logger)
@@ -106,9 +103,7 @@ class Config(ConfigCaller):
while True:
curr_changes = self._db.check_changes()
if isinstance(curr_changes, str):
self.__logger.error(
f"An error occurred when checking for changes in the database : {curr_changes}"
)
self.__logger.error(f"An error occurred when checking for changes in the database : {curr_changes}")
elif not any(curr_changes.values()):
break
else:
@@ -134,9 +129,7 @@ class Config(ConfigCaller):
# save custom configs to database
if "custom_configs" in changes:
err = self._db.save_custom_configs(
custom_configs, "autoconf", changed=False
)
err = self._db.save_custom_configs(custom_configs, "autoconf", changed=False)
if err:
success = False
self.__logger.error(
@@ -146,8 +139,6 @@ class Config(ConfigCaller):
# update changes in db
ret = self._db.checked_changes(changes, value=True)
if ret:
self.__logger.error(
f"An error occurred when setting the changes to checked in the database : {ret}"
)
self.__logger.error(f"An error occurred when setting the changes to checked in the database : {ret}")
return success

View File

@@ -1,9 +1,9 @@
#!/usr/bin/python3
from abc import ABC, abstractmethod
from abc import abstractmethod
from os import getenv
from time import sleep
from typing import Literal, Optional, Union
from typing import Literal, Union
from Config import Config
@@ -20,12 +20,8 @@ class Controller(Config):
self._type = ctrl_type
self._instances = []
self._services = []
self._configs = {
config_type: {} for config_type in self._supported_config_types
}
self._logger = setup_logger(
f"{self._type}-controller", getenv("LOG_LEVEL", "INFO")
)
self._configs = {config_type: {} for config_type in self._supported_config_types}
self._logger = setup_logger(f"{self._type}-controller", getenv("LOG_LEVEL", "INFO"))
def wait(self, wait_time: int) -> list:
all_ready = False
@@ -105,7 +101,7 @@ class Controller(Config):
def _is_service_present(self, server_name):
for service in self._services:
if not "SERVER_NAME" in service or not service["SERVER_NAME"]:
if "SERVER_NAME" not in service or not service["SERVER_NAME"]:
continue
if server_name == service["SERVER_NAME"].strip().split(" ")[0]:
return True

View File

@@ -13,9 +13,7 @@ class DockerController(Controller):
def __init__(self, docker_host):
super().__init__("docker")
self.__client = DockerClient(base_url=docker_host)
self.__custom_confs_rx = re_compile(
r"^bunkerweb.CUSTOM_CONF_(SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
)
self.__custom_confs_rx = re_compile(r"^bunkerweb.CUSTOM_CONF_(SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$")
def _get_controller_instances(self) -> List[Container]:
return self.__client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
@@ -27,10 +25,7 @@ class DockerController(Controller):
instance = {}
instance["name"] = controller_instance.name
instance["hostname"] = controller_instance.name
instance["health"] = (
controller_instance.status == "running"
and controller_instance.attrs["State"]["Health"]["Status"] == "healthy"
)
instance["health"] = controller_instance.status == "running" and controller_instance.attrs["State"]["Health"]["Status"] == "healthy"
instance["env"] = {}
for env in controller_instance.attrs["Config"]["Env"]:
variable = env.split("=")[0]
@@ -53,9 +48,7 @@ class DockerController(Controller):
def _get_static_services(self) -> List[dict]:
services = []
variables = {}
for instance in self.__client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
for instance in self.__client.containers.list(filters={"label": "bunkerweb.INSTANCE"}):
if not instance.attrs or not instance.attrs.get("Config", {}).get("Env"):
continue
@@ -70,9 +63,7 @@ class DockerController(Controller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
if prefix == server_name and self._is_setting_context(real_variable, "multisite"):
service[real_variable] = value
services.append(service)
return services
@@ -80,9 +71,7 @@ class DockerController(Controller):
def get_configs(self) -> Dict[str, Dict[str, Any]]:
configs = {config_type: {} for config_type in self._supported_config_types}
# get site configs from labels
for container in self.__client.containers.list(
filters={"label": "bunkerweb.SERVER_NAME"}
):
for container in self.__client.containers.list(filters={"label": "bunkerweb.SERVER_NAME"}):
labels = container.labels # type: ignore (labels is inside a container)
if isinstance(labels, list):
labels = {label: "" for label in labels}
@@ -100,9 +89,7 @@ class DockerController(Controller):
result = self.__custom_confs_rx.search(variable)
if result is None:
continue
configs[result.group(1).lower().replace("_", "-")][
f"{server_name}/{result.group(2)}"
] = value
configs[result.group(1).lower().replace("_", "-")][f"{server_name}/{result.group(2)}"] = value
return configs
def apply_config(self) -> bool:
@@ -120,13 +107,9 @@ class DockerController(Controller):
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
if not self.update_needed(self._instances, self._services, configs=self._configs):
continue
self._logger.info(
"Caught Docker event, deploying new configuration ..."
)
self._logger.info("Caught Docker event, deploying new configuration ...")
if not self.apply_config():
self._logger.error("Error while deploying new configuration")
else:
@@ -136,6 +119,4 @@ class DockerController(Controller):
self._set_autoconf_load_db()
except:
self._logger.error(
f"Exception while processing events :\n{format_exc()}"
)
self._logger.error(f"Exception while processing events :\n{format_exc()}")

View File

@@ -19,21 +19,12 @@ class IngressController(Controller):
self.__networkingv1 = client.NetworkingV1Api()
def _get_controller_instances(self) -> list:
return [
pod
for pod in self.__corev1.list_pod_for_all_namespaces(watch=False).items
if (
pod.metadata.annotations
and "bunkerweb.io/INSTANCE" in pod.metadata.annotations
)
]
return [pod for pod in self.__corev1.list_pod_for_all_namespaces(watch=False).items if (pod.metadata.annotations and "bunkerweb.io/INSTANCE" in pod.metadata.annotations)]
def _to_instances(self, controller_instance) -> List[dict]:
instance = {}
instance["name"] = controller_instance.metadata.name
instance["hostname"] = (
controller_instance.status.pod_ip or controller_instance.metadata.name
)
instance["hostname"] = controller_instance.status.pod_ip or controller_instance.metadata.name
health = False
if controller_instance.status.conditions:
for condition in controller_instance.status.conditions:
@@ -48,9 +39,7 @@ class IngressController(Controller):
pod = container
break
if not pod:
self._logger.warning(
f"Missing container bunkerweb in pod {controller_instance.metadata.name}"
)
self._logger.warning(f"Missing container bunkerweb in pod {controller_instance.metadata.name}")
else:
for env in pod.env:
instance["env"][env.name] = env.value or ""
@@ -159,10 +148,7 @@ class IngressController(Controller):
services = []
variables = {}
for instance in self.__corev1.list_pod_for_all_namespaces(watch=False).items:
if (
not instance.metadata.annotations
or not "bunkerweb.io/INSTANCE" in instance.metadata.annotations
):
if not instance.metadata.annotations or "bunkerweb.io/INSTANCE" not in instance.metadata.annotations:
continue
pod = None
@@ -181,22 +167,15 @@ class IngressController(Controller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
if prefix == server_name and self._is_setting_context(real_variable, "multisite"):
service[real_variable] = value
services.append(service)
return services
def get_configs(self) -> dict:
configs = {config_type: {} for config_type in self._supported_config_types}
for configmap in self.__corev1.list_config_map_for_all_namespaces(
watch=False
).items:
if (
not configmap.metadata.annotations
or "bunkerweb.io/CONFIG_TYPE" not in configmap.metadata.annotations
):
for configmap in self.__corev1.list_config_map_for_all_namespaces(watch=False).items:
if not configmap.metadata.annotations or "bunkerweb.io/CONFIG_TYPE" not in configmap.metadata.annotations:
continue
config_type = configmap.metadata.annotations["bunkerweb.io/CONFIG_TYPE"]
@@ -212,16 +191,12 @@ class IngressController(Controller):
continue
config_site = ""
if "bunkerweb.io/CONFIG_SITE" in configmap.metadata.annotations:
if not self._is_service_present(
configmap.metadata.annotations["bunkerweb.io/CONFIG_SITE"]
):
if not self._is_service_present(configmap.metadata.annotations["bunkerweb.io/CONFIG_SITE"]):
self._logger.warning(
f"Ignoring config {configmap.metadata.name} because {configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']} doesn't exist",
)
continue
config_site = (
f"{configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']}/"
)
config_site = f"{configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']}/"
for config_name, config_data in configmap.data.items():
configs[config_type][f"{config_site}{config_name}"] = config_data
return configs
@@ -250,9 +225,7 @@ class IngressController(Controller):
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
if not self.update_needed(self._instances, self._services, configs=self._configs):
self.__internal_lock.release()
locked = False
continue
@@ -308,10 +281,7 @@ class IngressController(Controller):
def process_events(self):
self._set_autoconf_load_db()
watch_types = ("pod", "ingress", "configmap", "service")
threads = [
Thread(target=self.__watch, args=(watch_type,))
for watch_type in watch_types
]
threads = [Thread(target=self.__watch, args=(watch_type,)) for watch_type in watch_types]
for thread in threads:
thread.start()
for thread in threads:

View File

@@ -26,9 +26,7 @@ class SwarmController(Controller):
def _to_instances(self, controller_instance) -> List[dict]:
instances = []
instance_env = {}
for env in controller_instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"][
"Env"
]:
for env in controller_instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]:
variable = env.split("=")[0]
value = env.replace(f"{variable}=", "", 1)
if self._is_setting(variable):
@@ -61,12 +59,8 @@ class SwarmController(Controller):
def _get_static_services(self) -> List[dict]:
services = []
variables = {}
for instance in self.__client.services.list(
filters={"label": "bunkerweb.INSTANCE"}
):
if not instance.attrs or not instance.attrs.get("Spec", {}).get(
"TaskTemplate", {}
).get("ContainerSpec", {}).get("Env"):
for instance in self.__client.services.list(filters={"label": "bunkerweb.INSTANCE"}):
if not instance.attrs or not instance.attrs.get("Spec", {}).get("TaskTemplate", {}).get("ContainerSpec", {}).get("Env"):
continue
for env in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]:
@@ -80,9 +74,7 @@ class SwarmController(Controller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
if prefix == server_name and self._is_setting_context(real_variable, "multisite"):
service[real_variable] = value
services.append(service)
return services
@@ -91,15 +83,8 @@ class SwarmController(Controller):
configs = {}
for config_type in self._supported_config_types:
configs[config_type] = {}
for config in self.__client.configs.list(
filters={"label": "bunkerweb.CONFIG_TYPE"}
):
if (
not config.name
or not config.attrs
or not config.attrs.get("Spec", {}).get("Labels", {})
or not config.attrs.get("Spec", {}).get("Data", {})
):
for config in self.__client.configs.list(filters={"label": "bunkerweb.CONFIG_TYPE"}):
if not config.name or not config.attrs or not config.attrs.get("Spec", {}).get("Labels", {}) or not config.attrs.get("Spec", {}).get("Data", {}):
continue
config_type = config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_TYPE"]
@@ -111,19 +96,13 @@ class SwarmController(Controller):
continue
config_site = ""
if "bunkerweb.CONFIG_SITE" in config.attrs["Spec"]["Labels"]:
if not self._is_service_present(
config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_SITE"]
):
if not self._is_service_present(config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_SITE"]):
self._logger.warning(
f"Ignoring config {config_name} because {config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']} doesn't exist",
)
continue
config_site = (
f"{config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']}/"
)
configs[config_type][f"{config_site}{config_name}"] = b64decode(
config.attrs["Spec"]["Data"]
)
config_site = f"{config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']}/"
configs[config_type][f"{config_site}{config_name}"] = b64decode(config.attrs["Spec"]["Data"])
return configs
def apply_config(self) -> bool:
@@ -139,36 +118,26 @@ class SwarmController(Controller):
locked = False
error = False
try:
for _ in self.__client.events(
decode=True, filters={"type": event_type}
):
for _ in self.__client.events(decode=True, filters={"type": event_type}):
self.__internal_lock.acquire()
locked = True
try:
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
if not self.update_needed(self._instances, self._services, configs=self._configs):
self.__internal_lock.release()
locked = False
continue
self._logger.info(
f"Catched Swarm event ({event_type}), deploying new configuration ..."
)
self._logger.info(f"Catched Swarm event ({event_type}), deploying new configuration ...")
if not self.apply_config():
self._logger.error(
"Error while deploying new configuration"
)
self._logger.error("Error while deploying new configuration")
else:
self._logger.info(
"Successfully deployed new configuration 🚀",
)
except:
self._logger.error(
f"Exception while processing Swarm event ({event_type}) :\n{format_exc()}"
)
self._logger.error(f"Exception while processing Swarm event ({event_type}) :\n{format_exc()}")
self.__internal_lock.release()
locked = False
except:
@@ -187,10 +156,7 @@ class SwarmController(Controller):
def process_events(self):
self._set_autoconf_load_db()
event_types = ("service", "config")
threads = [
Thread(target=self.__event, args=(event_type,))
for event_type in event_types
]
threads = [Thread(target=self.__event, args=(event_type,)) for event_type in event_types]
for thread in threads:
thread.start()
for thread in threads:

View File

@@ -7,10 +7,7 @@ from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from pathlib import Path
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)

View File

@@ -64,17 +64,13 @@ class CLI(ApiCaller):
if redis_host:
redis_port = self.__variables.get("REDIS_PORT", "6379")
if not redis_port.isdigit():
self.__logger.error(
f"REDIS_PORT is not a valid port number: {redis_port}, defaulting to 6379"
)
self.__logger.error(f"REDIS_PORT is not a valid port number: {redis_port}, defaulting to 6379")
redis_port = "6379"
redis_port = int(redis_port)
redis_db = self.__variables.get("REDIS_DB", "0")
if not redis_db.isdigit():
self.__logger.error(
f"REDIS_DB is not a valid database number: {redis_db}, defaulting to 0"
)
self.__logger.error(f"REDIS_DB is not a valid database number: {redis_db}, defaulting to 0")
redis_db = "0"
redis_db = int(redis_db)
@@ -83,18 +79,12 @@ class CLI(ApiCaller):
try:
redis_timeout = float(redis_timeout)
except ValueError:
self.__logger.error(
f"REDIS_TIMEOUT is not a valid timeout: {redis_timeout}, defaulting to 1000 ms"
)
self.__logger.error(f"REDIS_TIMEOUT is not a valid timeout: {redis_timeout}, defaulting to 1000 ms")
redis_timeout = 1000.0
redis_keepalive_pool = self.__variables.get(
"REDIS_KEEPALIVE_POOL", "10"
)
redis_keepalive_pool = self.__variables.get("REDIS_KEEPALIVE_POOL", "10")
if not redis_keepalive_pool.isdigit():
self.__logger.error(
f"REDIS_KEEPALIVE_POOL is not a valid number of connections: {redis_keepalive_pool}, defaulting to 10"
)
self.__logger.error(f"REDIS_KEEPALIVE_POOL is not a valid number of connections: {redis_keepalive_pool}, defaulting to 10")
redis_keepalive_pool = "10"
redis_keepalive_pool = int(redis_keepalive_pool)
@@ -109,9 +99,7 @@ class CLI(ApiCaller):
ssl=self.__variables.get("REDIS_SSL", "no") == "yes",
)
else:
self.__logger.error(
"USE_REDIS is set to yes but REDIS_HOST is not set, disabling redis"
)
self.__logger.error("USE_REDIS is set to yes but REDIS_HOST is not set, disabling redis")
self.__use_redis = False
if not db_path.is_dir() or self.__integration not in (
@@ -143,9 +131,7 @@ class CLI(ApiCaller):
return "autoconf"
elif integration_path.is_file():
return integration_path.read_text(encoding="utf-8").strip().lower()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
return "docker"
return "linux"

View File

@@ -6,10 +6,7 @@ from os.path import join
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@@ -25,9 +22,7 @@ if __name__ == "__main__":
subparsers = parser.add_subparsers(help="command", dest="command")
# Unban subparser
parser_unban = subparsers.add_parser(
"unban", help="remove a ban from the cache"
)
parser_unban = subparsers.add_parser("unban", help="remove a ban from the cache")
parser_unban.add_argument("ip", type=str, help="IP address to unban")
# Ban subparser

View File

@@ -10,10 +10,7 @@ from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@@ -63,10 +60,7 @@ try:
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
getenv(f"{first_server}_USE_BLACKLIST", getenv("USE_BLACKLIST", "yes"))
== "yes"
):
if getenv(f"{first_server}_USE_BLACKLIST", getenv("USE_BLACKLIST", "yes")) == "yes":
blacklist_activated = True
break
# Singlesite case
@@ -152,9 +146,7 @@ try:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
logger.warning(f"Got status code {resp.status_code}, skipping...")
continue
iterable = resp.iter_lines()
@@ -203,9 +195,7 @@ try:
status = 1
except:
status = 2
logger.error(
f"Exception while getting blacklist from {url} :\n{format_exc()}"
)
logger.error(f"Exception while getting blacklist from {url} :\n{format_exc()}")
except:
status = 2

View File

@@ -32,10 +32,7 @@ try:
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes"))
== "yes"
):
if getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes")) == "yes":
bunkernet_activated = True
break
# Singlesite case

View File

@@ -19,10 +19,10 @@ for deps_path in [
if deps_path not in sys_path:
sys_path.append(deps_path)
from bunkernet import register, ping, get_id
from bunkernet import register, ping
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db, del_file_in_db
from jobs import get_file_in_db, set_file_in_db, del_file_in_db # type: ignore
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
exit_status = 0
@@ -38,10 +38,7 @@ try:
servers = servers.split(" ")
for first_server in servers:
if (
getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes"))
== "yes"
):
if getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes")) == "yes":
bunkernet_activated = True
break
# Singlesite case
@@ -73,9 +70,7 @@ try:
logger.info("Registering instance on BunkerNet API ...")
ok, status, data = register()
if not ok:
logger.error(
f"Error while sending register request to BunkerNet API : {data}"
)
logger.error(f"Error while sending register request to BunkerNet API : {data}")
_exit(2)
elif status == 429:
logger.warning(
@@ -102,23 +97,17 @@ try:
)
_exit(2)
elif data.get("result", "ko") != "ok":
logger.error(
f"Received error from BunkerNet API while sending register request : {data.get('data', {})}"
)
logger.error(f"Received error from BunkerNet API while sending register request : {data.get('data', {})}")
_exit(2)
bunkernet_id = data["data"]
instance_id_path.write_text(bunkernet_id, encoding="utf-8")
registered = True
exit_status = 1
logger.info(
f"Successfully registered on BunkerNet API with instance id {data['data']}"
)
logger.info(f"Successfully registered on BunkerNet API with instance id {data['data']}")
else:
bunkernet_id = bunkernet_id or instance_id_path.read_bytes()
bunkernet_id = bunkernet_id.decode()
logger.info(
f"Already registered on BunkerNet API with instance id {bunkernet_id}"
)
logger.info(f"Already registered on BunkerNet API with instance id {bunkernet_id}")
sleep(1)

View File

@@ -6,9 +6,7 @@ from requests import request as requests_request, ReadTimeout
from typing import Literal, Optional, Tuple, Union
def request(
method: Union[Literal["POST"], Literal["GET"]], url: str, _id: Optional[str] = None
) -> Tuple[bool, Optional[int], Union[str, dict]]:
def request(method: Union[Literal["POST"], Literal["GET"]], url: str, _id: Optional[str] = None) -> Tuple[bool, Optional[int], Union[str, dict]]:
data = {"integration": get_integration(), "version": get_version()}
headers = {"User-Agent": f"BunkerWeb/{get_version()}"}
if _id is not None:
@@ -51,19 +49,11 @@ def data() -> Tuple[bool, Optional[int], Union[str, dict]]:
def get_id() -> str:
return (
Path(sep, "var", "cache", "bunkerweb", "bunkernet", "instance.id")
.read_text(encoding="utf-8")
.strip()
)
return Path(sep, "var", "cache", "bunkerweb", "bunkernet", "instance.id").read_text(encoding="utf-8").strip()
def get_version() -> str:
return (
Path(sep, "usr", "share", "bunkerweb", "VERSION")
.read_text(encoding="utf-8")
.strip()
)
return Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text(encoding="utf-8").strip()
def get_integration() -> str:
@@ -78,9 +68,7 @@ def get_integration() -> str:
return "autoconf"
elif integration_path.is_file():
return integration_path.read_text(encoding="utf-8").strip().lower()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
return "docker"
return "linux"

View File

@@ -26,28 +26,20 @@ logger = setup_logger("CUSTOM-CERT", getenv("LOG_LEVEL", "INFO"))
db = None
def check_cert(
cert_path: str, key_path: str, first_server: Optional[str] = None
) -> bool:
def check_cert(cert_path: str, key_path: str, first_server: Optional[str] = None) -> bool:
try:
if not cert_path or not key_path:
logger.warning(
"Both variables CUSTOM_SSL_CERT and CUSTOM_SSL_KEY have to be set to use custom certificates"
)
logger.warning("Both variables CUSTOM_SSL_CERT and CUSTOM_SSL_KEY have to be set to use custom certificates")
return False
cert_path: Path = Path(normpath(cert_path))
key_path: Path = Path(normpath(key_path))
if not cert_path.is_file():
logger.warning(
f"Certificate file {cert_path} is not a valid file, ignoring the custom certificate"
)
logger.warning(f"Certificate file {cert_path} is not a valid file, ignoring the custom certificate")
return False
elif not key_path.is_file():
logger.warning(
f"Key file {key_path} is not a valid file, ignoring the custom certificate"
)
logger.warning(f"Key file {key_path} is not a valid file, ignoring the custom certificate")
return False
cert_cache_path = Path(
@@ -66,9 +58,7 @@ def check_cert(
if old_hash == cert_hash:
return False
cached, err = cache_file(
cert_path, cert_cache_path, cert_hash, db, delete_file=False
)
cached, err = cache_file(cert_path, cert_cache_path, cert_hash, db, delete_file=False)
if not cached:
logger.error(f"Error while caching custom-cert cert.pem file : {err}")
@@ -86,9 +76,7 @@ def check_cert(
key_hash = file_hash(key_path)
old_hash = cache_hash(key_cache_path, db)
if old_hash != key_hash:
cached, err = cache_file(
key_path, key_cache_path, key_hash, db, delete_file=False
)
cached, err = cache_file(key_path, key_cache_path, key_hash, db, delete_file=False)
if not cached:
logger.error(f"Error while caching custom-cert key.pem file : {err}")
@@ -103,14 +91,10 @@ def check_cert(
status = 0
try:
Path(sep, "var", "cache", "bunkerweb", "customcert").mkdir(
parents=True, exist_ok=True
)
Path(sep, "var", "cache", "bunkerweb", "customcert").mkdir(parents=True, exist_ok=True)
if getenv("USE_CUSTOM_SSL", "no") == "yes" and getenv("SERVER_NAME", "") != "":
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
cert_path = getenv("CUSTOM_SSL_CERT", "")
key_path = getenv("CUSTOM_SSL_KEY", "")
@@ -131,16 +115,11 @@ try:
servers = servers.split(" ")
for first_server in servers:
if not first_server or (
getenv(f"{first_server}_USE_CUSTOM_SSL", getenv("USE_CUSTOM_SSL", "no"))
!= "yes"
):
if not first_server or (getenv(f"{first_server}_USE_CUSTOM_SSL", getenv("USE_CUSTOM_SSL", "no")) != "yes"):
continue
if not db:
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
cert_path = getenv(f"{first_server}_CUSTOM_SSL_CERT", "")
key_path = getenv(f"{first_server}_CUSTOM_SSL_KEY", "")

View File

@@ -10,10 +10,7 @@ from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@@ -63,10 +60,7 @@ try:
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
getenv(f"{first_server}_USE_GREYLIST", getenv("USE_GREYLIST", "no"))
== "yes"
):
if getenv(f"{first_server}_USE_GREYLIST", getenv("USE_GREYLIST", "no")) == "yes":
greylist_activated = True
break
# Singlesite case
@@ -136,9 +130,7 @@ try:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
logger.warning(f"Got status code {resp.status_code}, skipping...")
continue
iterable = resp.iter_lines()
@@ -187,9 +179,7 @@ try:
status = 1
except:
status = 2
logger.error(
f"Exception while getting greylist from {url} :\n{format_exc()}"
)
logger.error(f"Exception while getting greylist from {url} :\n{format_exc()}")
except:
status = 2

View File

@@ -114,9 +114,7 @@ try:
with tar_open(fileobj=BytesIO(content), mode="r") as tar:
tar.extractall(path=temp_dir)
else:
logger.error(
f"Unknown file type for {plugin_url}, either zip or tar are supported, skipping..."
)
logger.error(f"Unknown file type for {plugin_url}, either zip or tar are supported, skipping...")
continue
except:
logger.error(

View File

@@ -43,9 +43,7 @@ try:
with lock:
response = None
try:
response = get(
"https://db-ip.com/db/download/ip-to-asn-lite", timeout=5
)
response = get("https://db-ip.com/db/download/ip-to-asn-lite", timeout=5)
except RequestException:
logger.warning("Unable to check if asn.mmdb is the latest version")
@@ -59,14 +57,10 @@ try:
_sha1.update(data)
if response.content.decode().find(_sha1.hexdigest()) != -1:
logger.info(
"asn.mmdb is already the latest version, skipping download..."
)
logger.info("asn.mmdb is already the latest version, skipping download...")
dl_mmdb = False
else:
logger.warning(
"Unable to check if asn.mmdb is the latest version, downloading it anyway..."
)
logger.warning("Unable to check if asn.mmdb is the latest version, downloading it anyway...")
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)

View File

@@ -43,9 +43,7 @@ try:
with lock:
response = None
try:
response = get(
"https://db-ip.com/db/download/ip-to-country-lite", timeout=5
)
response = get("https://db-ip.com/db/download/ip-to-country-lite", timeout=5)
except RequestException:
logger.warning("Unable to check if country.mmdb is the latest version")
@@ -59,14 +57,10 @@ try:
_sha1.update(data)
if response.content.decode().find(_sha1.hexdigest()) != -1:
logger.info(
"country.mmdb is already the latest version, skipping download..."
)
logger.info("country.mmdb is already the latest version, skipping download...")
dl_mmdb = False
else:
logger.warning(
"Unable to check if country.mmdb is the latest version, downloading it anyway..."
)
logger.warning("Unable to check if country.mmdb is the latest version, downloading it anyway...")
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)

View File

@@ -39,9 +39,7 @@ try:
bw_integration = "Autoconf"
elif integration_path.is_file():
bw_integration = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
bw_integration = "Docker"
token = getenv("CERTBOT_TOKEN", "")
@@ -49,9 +47,7 @@ try:
# Cluster case
if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
lock = Lock()
with lock:
@@ -69,9 +65,7 @@ try:
)
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}"
)
logger.error(f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}")
elif status != 200:
status = 1
logger.error(

View File

@@ -39,18 +39,14 @@ try:
bw_integration = "Autoconf"
elif integration_path.is_file():
bw_integration = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
bw_integration = "Docker"
token = getenv("CERTBOT_TOKEN", "")
# Cluster case
if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
lock = Lock()
with lock:
instances = db.get_instances()
@@ -60,14 +56,10 @@ try:
f"http://{instance['hostname']}:{instance['port']}",
host=instance["server_name"],
)
sent, err, status, resp = api.request(
"DELETE", "/lets-encrypt/challenge", data={"token": token}
)
sent, err, status, resp = api.request("DELETE", "/lets-encrypt/challenge", data={"token": token})
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}"
)
logger.error(f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}")
elif status != 200:
status = 1
logger.error(

View File

@@ -42,9 +42,7 @@ try:
bw_integration = "Autoconf"
elif integration_path.is_file():
bw_integration = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
bw_integration = "Docker"
token = getenv("CERTBOT_TOKEN", "")
@@ -64,9 +62,7 @@ try:
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
lock = Lock()
with lock:
@@ -77,19 +73,13 @@ try:
host = instance["server_name"]
api = API(endpoint, host=host)
sent, err, status, resp = api.request(
"POST", "/lets-encrypt/certificates", files=files
)
sent, err, status, resp = api.request("POST", "/lets-encrypt/certificates", files=files)
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.endpoint}/lets-encrypt/certificates : {err}"
)
logger.error(f"Can't send API request to {api.endpoint}/lets-encrypt/certificates : {err}")
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.endpoint}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}"
)
logger.error(f"Error while sending API request to {api.endpoint}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}")
else:
logger.info(
f"Successfully sent API request to {api.endpoint}/lets-encrypt/certificates",
@@ -97,18 +87,12 @@ try:
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.endpoint}/reload : {err}"
)
logger.error(f"Can't send API request to {api.endpoint}/reload : {err}")
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.endpoint}/reload : status = {resp['status']}, msg = {resp['msg']}"
)
logger.error(f"Error while sending API request to {api.endpoint}/reload : status = {resp['status']}, msg = {resp['msg']}")
else:
logger.info(
f"Successfully sent API request to {api.endpoint}/reload"
)
logger.info(f"Successfully sent API request to {api.endpoint}/reload")
# Linux case
else:
if (

View File

@@ -24,15 +24,13 @@ for deps_path in [
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db
from jobs import get_file_in_db, set_file_in_db # type: ignore
logger = setup_logger("LETS-ENCRYPT.new", getenv("LOG_LEVEL", "INFO"))
status = 0
def certbot_new(
domains: str, email: str, letsencrypt_path: Path, letsencrypt_job_path: Path
) -> int:
def certbot_new(domains: str, email: str, letsencrypt_path: Path, letsencrypt_job_path: Path) -> int:
return run(
[
join(sep, "usr", "share", "bunkerweb", "deps", "python", "bin", "certbot"),
@@ -60,8 +58,7 @@ def certbot_new(
+ (["--staging"] if getenv("USE_LETS_ENCRYPT_STAGING", "no") == "yes" else []),
stdin=DEVNULL,
stderr=STDOUT,
env=environ.copy()
| {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
env=environ.copy() | {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
).returncode
@@ -81,8 +78,7 @@ def certbot_check_domains(domains: list[str], letsencrypt_path: Path) -> int:
stdout=PIPE,
stderr=STDOUT,
text=True,
env=environ.copy()
| {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
env=environ.copy() | {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
)
if proc.returncode != 0:
logger.error(f"Error while checking certificates :\n{proc.stdout}")
@@ -91,10 +87,7 @@ def certbot_check_domains(domains: list[str], letsencrypt_path: Path) -> int:
needed_domains = set(domains)
for raw_domains in findall(r"^ Domains: (.*)$", proc.stdout, MULTILINE):
current_domains = raw_domains.split(" ")
if (
current_domains[0] == first_needed_domain
and set(current_domains) == needed_domains
):
if current_domains[0] == first_needed_domain and set(current_domains) == needed_domains:
return 1
return 0
@@ -108,10 +101,7 @@ try:
use_letsencrypt = True
elif getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
first_server
and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes"
):
if first_server and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes":
use_letsencrypt = True
break
@@ -123,12 +113,8 @@ try:
letsencrypt_path = Path(sep, "var", "cache", "bunkerweb", "letsencrypt")
letsencrypt_path.mkdir(parents=True, exist_ok=True)
letsencrypt_job_path = Path(
sep, "usr", "share", "bunkerweb", "core", "letsencrypt", "jobs"
)
Path(sep, "var", "lib", "bunkerweb", "letsencrypt").mkdir(
parents=True, exist_ok=True
)
letsencrypt_job_path = Path(sep, "usr", "share", "bunkerweb", "core", "letsencrypt", "jobs")
Path(sep, "var", "lib", "bunkerweb", "letsencrypt").mkdir(parents=True, exist_ok=True)
# Get env vars
bw_integration = "Linux"
@@ -142,9 +128,7 @@ try:
bw_integration = "Autoconf"
elif integration_path.is_file():
bw_integration = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
bw_integration = "Docker"
# Extract letsencrypt folder if it exists in db
@@ -209,9 +193,7 @@ try:
continue
else:
status = 1 if status == 0 else status
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
logger.info(f"Certificate generation succeeded for domain(s) : {domains}")
# Singlesite case
elif getenv("AUTO_LETS_ENCRYPT", "no") == "yes" and getenv("SERVER_NAME"):
@@ -243,9 +225,7 @@ try:
logger.error(f"Certificate generation failed for domain(s) : {domains}")
else:
status = 1
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
logger.info(f"Certificate generation succeeded for domain(s) : {domains}")
# Put new folder in cache
bio = BytesIO()

View File

@@ -1,6 +1,6 @@
#!/usr/bin/python3
from os import _exit, environ, getenv, listdir, sep
from os import _exit, environ, getenv, sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
@@ -23,7 +23,7 @@ for deps_path in [
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db
from jobs import get_file_in_db, set_file_in_db # type: ignore
def renew(domain: str, letsencrypt_path: Path) -> int:
@@ -53,8 +53,7 @@ def renew(domain: str, letsencrypt_path: Path) -> int:
],
stdin=DEVNULL,
stderr=STDOUT,
env=environ.copy()
| {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
env=environ.copy() | {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
check=False,
).returncode
@@ -69,10 +68,7 @@ try:
use_letsencrypt = True
elif getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
first_server
and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes"
):
if first_server and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes":
use_letsencrypt = True
break
@@ -83,9 +79,7 @@ try:
# Create directory if it doesn't exist
letsencrypt_path = Path(sep, "var", "cache", "bunkerweb", "letsencrypt")
letsencrypt_path.mkdir(parents=True, exist_ok=True)
Path(sep, "var", "lib", "bunkerweb", "letsencrypt").mkdir(
parents=True, exist_ok=True
)
Path(sep, "var", "lib", "bunkerweb", "letsencrypt").mkdir(parents=True, exist_ok=True)
# Get env vars
bw_integration = "Linux"
@@ -99,9 +93,7 @@ try:
bw_integration = "Autoconf"
elif integration_path.is_file():
bw_integration = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
bw_integration = "Docker"
# Extract letsencrypt folder if it exists in db
@@ -134,9 +126,7 @@ try:
getenv("AUTO_LETS_ENCRYPT", "no"),
)
!= "yes"
or not letsencrypt_path.joinpath(
"etc", "live", first_server, "cert.pem"
).exists()
or not letsencrypt_path.joinpath("etc", "live", first_server, "cert.pem").exists()
):
continue

View File

@@ -35,10 +35,7 @@ try:
"AUTO_LETS_ENCRYPT",
"GENERATE_SELF_SIGNED_SSL",
):
if (
getenv(f"{first_server}_{check_var}", getenv(check_var, "no"))
== "yes"
):
if getenv(f"{first_server}_{check_var}", getenv(check_var, "no")) == "yes":
need_default_cert = True
break
if need_default_cert:
@@ -99,9 +96,7 @@ try:
"Successfully generated self-signed certificate for default server",
)
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
cached, err = set_file_in_db(
"cert.pem",
@@ -109,13 +104,9 @@ try:
db,
)
if not cached:
logger.error(
f"Error while saving default-server-cert cert.pem file to db cache : {err}"
)
logger.error(f"Error while saving default-server-cert cert.pem file to db cache : {err}")
else:
logger.info(
"Successfully saved default-server-cert cert.pem file to db cache"
)
logger.info("Successfully saved default-server-cert cert.pem file to db cache")
cached, err = set_file_in_db(
"cert.key",
@@ -123,13 +114,9 @@ try:
db,
)
if not cached:
logger.error(
f"Error while saving default-server-cert cert.key file to db cache : {err}"
)
logger.error(f"Error while saving default-server-cert cert.key file to db cache : {err}")
else:
logger.info(
"Successfully saved default-server-cert cert.key file to db cache"
)
logger.info("Successfully saved default-server-cert cert.key file to db cache")
else:
logger.info(
"Skipping generation of self-signed certificate for default server (already present)",

View File

@@ -23,9 +23,7 @@ logger = setup_logger("UPDATE-CHECK", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
current_version = (
f"v{Path('/usr/share/bunkerweb/VERSION').read_text(encoding='utf-8').strip()}"
)
current_version = f"v{Path('/usr/share/bunkerweb/VERSION').read_text(encoding='utf-8').strip()}"
response = get(
"https://github.com/bunkerity/bunkerweb/releases/latest",

View File

@@ -52,10 +52,7 @@ try:
servers = servers.split(" ")
for first_server in servers:
if (
getenv(f"{first_server}_USE_REAL_IP", getenv("USE_REAL_IP", "no"))
== "yes"
):
if getenv(f"{first_server}_USE_REAL_IP", getenv("USE_REAL_IP", "no")) == "yes":
realip_activated = True
break
@@ -118,9 +115,7 @@ try:
i += 1
except:
status = 2
logger.error(
f"Exception while getting RealIP list from {url} :\n{format_exc()}"
)
logger.error(f"Exception while getting RealIP list from {url} :\n{format_exc()}")
tmp_realip_path.joinpath("combined.list").write_bytes(content)

View File

@@ -34,9 +34,7 @@ lock = Lock()
status = 0
def generate_cert(
first_server: str, days: str, subj: str, self_signed_path: Path
) -> Tuple[bool, int]:
def generate_cert(first_server: str, days: str, subj: str, self_signed_path: Path) -> Tuple[bool, int]:
if self_signed_path.joinpath(f"{first_server}.pem").is_file():
if (
run(
@@ -61,19 +59,10 @@ def generate_cert(
self_signed_path.joinpath(f"{first_server}.pem").read_bytes(),
default_backend(),
)
if sorted(
attribute.rfc4514_string() for attribute in certificate.subject
) != sorted(v for v in subj.split("/") if v):
logger.warning(
f"Subject of self-signed certificate for {first_server} is different from the one in the configuration, regenerating ..."
)
elif (
certificate.not_valid_after - certificate.not_valid_before
!= timedelta(days=int(days))
):
logger.warning(
f"Expiration date of self-signed certificate for {first_server} is different from the one in the configuration, regenerating ..."
)
if sorted(attribute.rfc4514_string() for attribute in certificate.subject) != sorted(v for v in subj.split("/") if v):
logger.warning(f"Subject of self-signed certificate for {first_server} is different from the one in the configuration, regenerating ...")
elif certificate.not_valid_after - certificate.not_valid_before != timedelta(days=int(days)):
logger.warning(f"Expiration date of self-signed certificate for {first_server} is different from the one in the configuration, regenerating ...")
else:
return True, 0
@@ -154,9 +143,7 @@ try:
continue
if not db:
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
ret, ret_status = generate_cert(
first_server,
@@ -174,9 +161,7 @@ try:
# Singlesite case
elif getenv("GENERATE_SELF_SIGNED_SSL", "no") == "yes" and getenv("SERVER_NAME"):
db = Database(
logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False
)
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None), pool=False)
first_server = getenv("SERVER_NAME", "").split(" ")[0]
ret, ret_status = generate_cert(

View File

@@ -10,10 +10,7 @@ from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@@ -63,10 +60,7 @@ try:
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
getenv(f"{first_server}_USE_WHITELIST", getenv("USE_WHITELIST", "no"))
== "yes"
):
if getenv(f"{first_server}_USE_WHITELIST", getenv("USE_WHITELIST", "no")) == "yes":
whitelist_activated = True
break
# Singlesite case
@@ -136,9 +130,7 @@ try:
resp = get(url, stream=True, timeout=10)
if resp.status_code != 200:
logger.warning(
f"Got status code {resp.status_code}, skipping..."
)
logger.warning(f"Got status code {resp.status_code}, skipping...")
continue
iterable = resp.iter_lines()
@@ -187,9 +179,7 @@ try:
status = 1
except:
status = 2
logger.error(
f"Exception while getting whitelist from {url} :\n{format_exc()}"
)
logger.error(f"Exception while getting whitelist from {url} :\n{format_exc()}")
except:
status = 2

View File

@@ -31,10 +31,7 @@ from model import (
Metadata,
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@@ -70,9 +67,7 @@ class Database:
self.__sql_engine = None
if not sqlalchemy_string:
sqlalchemy_string = getenv(
"DATABASE_URI", "sqlite:////var/lib/bunkerweb/db.sqlite3"
)
sqlalchemy_string = getenv("DATABASE_URI", "sqlite:////var/lib/bunkerweb/db.sqlite3")
if sqlalchemy_string.startswith("sqlite"):
if ui:
@@ -80,9 +75,7 @@ class Database:
sleep(1)
else:
with suppress(FileExistsError):
Path(dirname(sqlalchemy_string.split("///")[1])).mkdir(
parents=True, exist_ok=True
)
Path(dirname(sqlalchemy_string.split("///")[1])).mkdir(parents=True, exist_ok=True)
elif "+" in sqlalchemy_string and "+pymysql" not in sqlalchemy_string:
splitted = sqlalchemy_string.split("+")
sqlalchemy_string = f"{splitted[0]}:{':'.join(splitted[1].split(':')[1:])}"
@@ -101,9 +94,7 @@ class Database:
self.__logger.error(f"Invalid database URI: {sqlalchemy_string}")
error = True
except SQLAlchemyError:
self.__logger.error(
f"Error when trying to create the engine: {format_exc()}"
)
self.__logger.error(f"Error when trying to create the engine: {format_exc()}")
error = True
finally:
if error:
@@ -132,9 +123,7 @@ class Database:
_exit(1)
if "attempt to write a readonly database" in str(e):
self.__logger.warning(
"The database is read-only, waiting for it to become writable. Retrying in 5 seconds ..."
)
self.__logger.warning("The database is read-only, waiting for it to become writable. Retrying in 5 seconds ...")
self.__sql_engine.dispose(close=True)
self.__sql_engine = create_engine(
sqlalchemy_string,
@@ -152,9 +141,7 @@ class Database:
retries -= 1
sleep(5)
except BaseException:
self.__logger.error(
f"Error when trying to connect to the database: {format_exc()}"
)
self.__logger.error(f"Error when trying to connect to the database: {format_exc()}")
exit(1)
self.__logger.info("Database connection established")
@@ -162,9 +149,7 @@ class Database:
self.__session = sessionmaker()
self.__sql_session = scoped_session(self.__session)
self.__sql_session.remove()
self.__sql_session.configure(
bind=self.__sql_engine, autoflush=False, expire_on_commit=False
)
self.__sql_session.configure(bind=self.__sql_engine, autoflush=False, expire_on_commit=False)
self.suffix_rx = re_compile(r"_\d+$")
if sqlalchemy_string.startswith("sqlite"):
@@ -219,12 +204,7 @@ class Database:
"""Check if the autoconf is loaded"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.autoconf_loaded)
.filter_by(id=1)
.first()
)
metadata = session.query(Metadata).with_entities(Metadata.autoconf_loaded).filter_by(id=1).first()
return metadata is not None and metadata.autoconf_loaded
except (ProgrammingError, OperationalError):
return False
@@ -249,12 +229,7 @@ class Database:
"""Check if it's the scheduler's first start"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.scheduler_first_start)
.filter_by(id=1)
.first()
)
metadata = session.query(Metadata).with_entities(Metadata.scheduler_first_start).filter_by(id=1).first()
return metadata is not None and metadata.scheduler_first_start
except (ProgrammingError, OperationalError):
return True
@@ -263,12 +238,7 @@ class Database:
"""Check if the first configuration has been saved"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.first_config_saved)
.filter_by(id=1)
.first()
)
metadata = session.query(Metadata).with_entities(Metadata.first_config_saved).filter_by(id=1).first()
return metadata is not None and metadata.first_config_saved
except (ProgrammingError, OperationalError):
return False
@@ -277,12 +247,7 @@ class Database:
"""Check if the database is initialized"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.is_initialized)
.filter_by(id=1)
.first()
)
metadata = session.query(Metadata).with_entities(Metadata.is_initialized).filter_by(id=1).first()
return metadata is not None and metadata.is_initialized
except (ProgrammingError, OperationalError, DatabaseError):
return False
@@ -323,20 +288,15 @@ class Database:
)
return dict(
custom_configs_changed=metadata is not None
and metadata.custom_configs_changed,
external_plugins_changed=metadata is not None
and metadata.external_plugins_changed,
custom_configs_changed=metadata is not None and metadata.custom_configs_changed,
external_plugins_changed=metadata is not None and metadata.external_plugins_changed,
config_changed=metadata is not None and metadata.config_changed,
instances_changed=metadata is not None
and metadata.instances_changed,
instances_changed=metadata is not None and metadata.instances_changed,
)
except BaseException:
return format_exc()
def checked_changes(
self, changes: Optional[List[str]] = None, value: Optional[bool] = False
) -> str:
def checked_changes(self, changes: Optional[List[str]] = None, value: Optional[bool] = False) -> str:
"""Set changed bit for config, custom configs, instances and plugins"""
changes = changes or [
"config",
@@ -441,24 +401,12 @@ class Database:
to_put.append(Jobs(plugin_id=plugin["id"], **job))
if page:
core_ui_path = Path(
sep, "usr", "share", "bunkerweb", "core", plugin["id"], "ui"
)
path_ui = (
core_ui_path
if core_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
core_ui_path = Path(sep, "usr", "share", "bunkerweb", "core", plugin["id"], "ui")
path_ui = core_ui_path if core_ui_path.exists() else Path(sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui")
if path_ui.exists():
if {"template.html", "actions.py"}.issubset(
listdir(str(path_ui))
):
template = path_ui.joinpath(
"template.html"
).read_bytes()
if {"template.html", "actions.py"}.issubset(listdir(str(path_ui))):
template = path_ui.joinpath("template.html").read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
@@ -479,25 +427,17 @@ class Database:
return True, ""
def save_config(
self, config: Dict[str, Any], method: str, changed: Optional[bool] = True
) -> str:
def save_config(self, config: Dict[str, Any], method: str, changed: Optional[bool] = True) -> str:
"""Save the config in the database"""
to_put = []
with self.__db_session() as session:
# Delete all the old config
session.query(Global_values).filter(Global_values.method == method).delete()
session.query(Services_settings).filter(
Services_settings.method == method
).delete()
session.query(Services_settings).filter(Services_settings.method == method).delete()
if config:
config.pop("DATABASE_URI", None)
db_services = (
session.query(Services)
.with_entities(Services.id, Services.method)
.all()
)
db_services = session.query(Services).with_entities(Services.id, Services.method).all()
db_ids = [service.id for service in db_services]
services = config.get("SERVER_NAME", [])
@@ -505,17 +445,11 @@ class Database:
services = services.split(" ")
if db_services:
missing_ids = [
service.id
for service in db_services
if (service.method == method) and service.id not in services
]
missing_ids = [service.id for service in db_services if (service.method == method) and service.id not in services]
if missing_ids:
# Remove services that are no longer in the list
session.query(Services).filter(
Services.id.in_(missing_ids)
).delete()
session.query(Services).filter(Services.id.in_(missing_ids)).delete()
if config.get("MULTISITE", "no") == "yes":
global_values = []
@ -526,20 +460,11 @@ class Database:
suffix = int(key.split("_")[-1])
key = key[: -len(str(suffix)) - 1]
setting = (
session.query(Settings)
.with_entities(Settings.default)
.filter_by(id=key)
.first()
)
setting = session.query(Settings).with_entities(Settings.default).filter_by(id=key).first()
if not setting and services:
try:
server_name = next(
service
for service in services
if key.startswith(f"{service}_")
)
server_name = next(service for service in services if key.startswith(f"{service}_"))
except StopIteration:
continue
@ -548,21 +473,14 @@ class Database:
db_ids.append(server_name)
key = key.replace(f"{server_name}_", "")
setting = (
session.query(Settings)
.with_entities(Settings.default)
.filter_by(id=key)
.first()
)
setting = session.query(Settings).with_entities(Settings.default).filter_by(id=key).first()
if not setting:
continue
service_setting = (
session.query(Services_settings)
.with_entities(
Services_settings.value, Services_settings.method
)
.with_entities(Services_settings.value, Services_settings.method)
.filter_by(
service_id=server_name,
setting_id=key,
@ -572,10 +490,7 @@ class Database:
)
if not service_setting:
if key != "SERVER_NAME" and (
(key not in config and value == setting.default)
or (key in config and value == config[key])
):
if key != "SERVER_NAME" and ((key not in config and value == setting.default) or (key in config and value == config[key])):
continue
to_put.append(
@ -587,14 +502,8 @@ class Database:
method=method,
)
)
elif (
method in (service_setting.method, "autoconf")
and service_setting.value != value
):
if key != "SERVER_NAME" and (
(key not in config and value == setting.default)
or (key in config and value == config[key])
):
elif method in (service_setting.method, "autoconf") and service_setting.value != value:
if key != "SERVER_NAME" and ((key not in config and value == setting.default) or (key in config and value == config[key])):
session.query(Services_settings).filter(
Services_settings.service_id == server_name,
Services_settings.setting_id == key,
@ -616,9 +525,7 @@ class Database:
global_values.append(original_key)
global_value = (
session.query(Global_values)
.with_entities(
Global_values.value, Global_values.method
)
.with_entities(Global_values.value, Global_values.method)
.filter_by(
setting_id=key,
suffix=suffix,
@ -638,10 +545,7 @@ class Database:
method=method,
)
)
elif (
method in (global_value.method, "autoconf")
and global_value.value != value
):
elif method in (global_value.method, "autoconf") and global_value.value != value:
if value == setting.default:
session.query(Global_values).filter(
Global_values.setting_id == key,
@ -659,17 +563,8 @@ class Database:
}
)
else:
if config.get("SERVER_NAME", "") != "" and not (
session.query(Services)
.with_entities(Services.id)
.filter_by(id=config["SERVER_NAME"].split(" ")[0])
.first()
):
to_put.append(
Services(
id=config["SERVER_NAME"].split(" ")[0], method=method
)
)
if config.get("SERVER_NAME", "") != "" and not (session.query(Services).with_entities(Services.id).filter_by(id=config["SERVER_NAME"].split(" ")[0]).first()):
to_put.append(Services(id=config["SERVER_NAME"].split(" ")[0], method=method))
for key, value in config.items():
suffix = 0
@ -677,22 +572,12 @@ class Database:
suffix = int(key.split("_")[-1])
key = key[: -len(str(suffix)) - 1]
setting = (
session.query(Settings)
.with_entities(Settings.default)
.filter_by(id=key)
.first()
)
setting = session.query(Settings).with_entities(Settings.default).filter_by(id=key).first()
if not setting:
continue
global_value = (
session.query(Global_values)
.with_entities(Global_values.value, Global_values.method)
.filter_by(setting_id=key, suffix=suffix)
.first()
)
global_value = session.query(Global_values).with_entities(Global_values.value, Global_values.method).filter_by(setting_id=key, suffix=suffix).first()
if not global_value:
if value == setting.default:
@ -706,10 +591,7 @@ class Database:
method=method,
)
)
elif (
global_value.method == method
and value != global_value.value
):
elif global_value.method == method and value != global_value.value:
if value == setting.default:
session.query(Global_values).filter(
Global_values.setting_id == key,
@ -748,45 +630,32 @@ class Database:
message = ""
with self.__db_session() as session:
# Delete all the old config
session.query(Custom_configs).filter(
Custom_configs.method == method
).delete()
session.query(Custom_configs).filter(Custom_configs.method == method).delete()
to_put = []
endl = "\n"
for custom_config in custom_configs:
config = {
"data": custom_config["value"].encode("utf-8")
if isinstance(custom_config["value"], str)
else custom_config["value"],
"data": custom_config["value"].encode("utf-8") if isinstance(custom_config["value"], str) else custom_config["value"],
"method": method,
}
config["checksum"] = sha256(config["data"]).hexdigest()
if custom_config["exploded"][0]:
if (
not session.query(Services)
.with_entities(Services.id)
.filter_by(id=custom_config["exploded"][0])
.first()
):
if not session.query(Services).with_entities(Services.id).filter_by(id=custom_config["exploded"][0]).first():
message += f"{endl if message else ''}Service {custom_config['exploded'][0]} not found, please check your config"
config.update(
{
"service_id": custom_config["exploded"][0],
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"type": custom_config["exploded"][1].replace("-", "_").lower(),
"name": custom_config["exploded"][2],
}
)
else:
config.update(
{
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"type": custom_config["exploded"][1].replace("-", "_").lower(),
"name": custom_config["exploded"][2],
}
)
@ -817,11 +686,7 @@ class Database:
Custom_configs.data: config["data"],
Custom_configs.checksum: config["checksum"],
}
| (
{Custom_configs.method: "autoconf"}
if method == "autoconf"
else {}
)
| ({Custom_configs.method: "autoconf"} if method == "autoconf" else {})
)
if changed:
with suppress(ProgrammingError, OperationalError):
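
The `|` operator in the hunk above merges two dicts (Python 3.9+); the conditional right-hand side is a compact way to make part of the UPDATE payload optional. The same idea with plain string keys instead of ORM columns:

```python
method = "manual"

payload = {"data": b"...", "checksum": "abc123"} | (
    {"method": "autoconf"} if method == "autoconf" else {}
)

assert "method" not in payload  # only merged in for the autoconf case
```
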
@ -853,30 +718,12 @@ class Database:
.all()
):
default = setting.default or ""
config[setting.id] = (
default
if methods is False
else {"value": default, "global": True, "method": "default"}
)
config[setting.id] = default if methods is False else {"value": default, "global": True, "method": "default"}
global_values = (
session.query(Global_values)
.with_entities(
Global_values.value, Global_values.suffix, Global_values.method
)
.filter_by(setting_id=setting.id)
.all()
)
global_values = session.query(Global_values).with_entities(Global_values.value, Global_values.suffix, Global_values.method).filter_by(setting_id=setting.id).all()
for global_value in global_values:
config[
setting.id
+ (
f"_{global_value.suffix}"
if setting.multiple and global_value.suffix > 0
else ""
)
] = (
config[setting.id + (f"_{global_value.suffix}" if setting.multiple and global_value.suffix > 0 else "")] = (
global_value.value
if methods is False
else {
@ -889,11 +736,7 @@ class Database:
if setting.context == "multisite":
multisite.append(setting.id)
is_multisite = (
config.get("MULTISITE", {"value": "no"})["value"] == "yes"
if methods
else config.get("MULTISITE", "no") == "yes"
)
is_multisite = config.get("MULTISITE", {"value": "no"})["value"] == "yes" if methods else config.get("MULTISITE", "no") == "yes"
if is_multisite:
for service in session.query(Services).with_entities(Services.id).all():
@ -925,14 +768,7 @@ class Database:
)
for service_setting in service_settings:
config[
f"{service.id}_{key}"
+ (
f"_{service_setting.suffix}"
if service_setting.suffix > 0
else ""
)
] = (
config[f"{service.id}_{key}" + (f"_{service_setting.suffix}" if service_setting.suffix > 0 else "")] = (
service_setting.value
if methods is False
else {
@ -943,14 +779,8 @@ class Database:
)
if is_multisite:
servers = " ".join(
service.id for service in session.query(Services).all()
)
config["SERVER_NAME"] = (
servers
if methods is False
else {"value": servers, "global": True, "method": "default"}
)
servers = " ".join(service.id for service in session.query(Services).all())
config["SERVER_NAME"] = servers if methods is False else {"value": servers, "global": True, "method": "default"}
return config
@ -983,10 +813,7 @@ class Database:
services = []
config = self.get_config(methods=methods)
with self.__db_session() as session:
service_names = [
service.id
for service in session.query(Services).with_entities(Services.id).all()
]
service_names = [service.id for service in session.query(Services).with_entities(Services.id).all()]
for service in service_names:
service_settings = []
tmp_config = deepcopy(config)
@ -1016,11 +843,7 @@ class Database:
def update_job(self, plugin_id: str, job_name: str, success: bool) -> str:
"""Update the job last_run in the database"""
with self.__db_session() as session:
job = (
session.query(Jobs)
.filter_by(plugin_id=plugin_id, name=job_name)
.first()
)
job = session.query(Jobs).filter_by(plugin_id=plugin_id, name=job_name).first()
if not job:
return "Job not found"
@ -1038,9 +861,7 @@ class Database:
def delete_job_cache(self, file_name: str, *, job_name: Optional[str] = None):
job_name = job_name or basename(getsourcefile(_getframe(1))).replace(".py", "")
with self.__db_session() as session:
session.query(Jobs_cache).filter_by(
job_name=job_name, file_name=file_name
).delete()
session.query(Jobs_cache).filter_by(job_name=job_name, file_name=file_name).delete()
def update_job_cache(
self,
@ -1054,13 +875,7 @@ class Database:
"""Update the plugin cache in the database"""
job_name = job_name or basename(getsourcefile(_getframe(1))).replace(".py", "")
with self.__db_session() as session:
cache = (
session.query(Jobs_cache)
.filter_by(
job_name=job_name, service_id=service_id, file_name=file_name
)
.first()
)
cache = session.query(Jobs_cache).filter_by(job_name=job_name, service_id=service_id, file_name=file_name).first()
if not cache:
session.add(
@ -1085,18 +900,11 @@ class Database:
return ""
def update_external_plugins(
self, plugins: List[Dict[str, Any]], *, delete_missing: bool = True
) -> str:
def update_external_plugins(self, plugins: List[Dict[str, Any]], *, delete_missing: bool = True) -> str:
"""Update external plugins from the database"""
to_put = []
with self.__db_session() as session:
db_plugins = (
session.query(Plugins)
.with_entities(Plugins.id)
.filter_by(external=True)
.all()
)
db_plugins = session.query(Plugins).with_entities(Plugins.id).filter_by(external=True).all()
db_ids = []
if delete_missing and db_plugins:
@ -1160,27 +968,16 @@ class Database:
updates[Plugins.checksum] = plugin.get("checksum")
if updates:
session.query(Plugins).filter(
Plugins.id == plugin["id"]
).update(updates)
session.query(Plugins).filter(Plugins.id == plugin["id"]).update(updates)
db_plugin_settings = (
session.query(Settings)
.with_entities(Settings.id)
.filter_by(plugin_id=plugin["id"])
.all()
)
db_plugin_settings = session.query(Settings).with_entities(Settings.id).filter_by(plugin_id=plugin["id"]).all()
db_ids = [setting.id for setting in db_plugin_settings]
setting_ids = [setting for setting in settings]
missing_ids = [
setting for setting in db_ids if setting not in setting_ids
]
missing_ids = [setting for setting in db_ids if setting not in setting_ids]
if missing_ids:
# Remove settings that are no longer in the list
session.query(Settings).filter(
Settings.id.in_(missing_ids)
).delete()
session.query(Settings).filter(Settings.id.in_(missing_ids)).delete()
for setting, value in settings.items():
value.update(
@ -1208,9 +1005,7 @@ class Database:
if setting not in db_ids or not db_setting:
for select in value.pop("select", []):
to_put.append(
Selects(setting_id=value["id"], value=select)
)
to_put.append(Selects(setting_id=value["id"], value=select))
to_put.append(
Settings(
@ -1245,59 +1040,32 @@ class Database:
updates[Settings.multiple] = value.get("multiple")
if updates:
session.query(Settings).filter(
Settings.id == setting
).update(updates)
session.query(Settings).filter(Settings.id == setting).update(updates)
db_selects = (
session.query(Selects)
.with_entities(Selects.value)
.filter_by(setting_id=setting)
.all()
)
db_selects = session.query(Selects).with_entities(Selects.value).filter_by(setting_id=setting).all()
db_values = [select.value for select in db_selects]
select_values = value.get("select", [])
missing_values = [
select
for select in db_values
if select not in select_values
]
missing_values = [select for select in db_values if select not in select_values]
if missing_values:
# Remove selects that are no longer in the list
session.query(Selects).filter(
Selects.value.in_(missing_values)
).delete()
session.query(Selects).filter(Selects.value.in_(missing_values)).delete()
for select in value.get("select", []):
if select not in db_values:
to_put.append(
Selects(setting_id=setting, value=select)
)
to_put.append(Selects(setting_id=setting, value=select))
db_jobs = (
session.query(Jobs)
.with_entities(Jobs.name)
.filter_by(plugin_id=plugin["id"])
.all()
)
db_jobs = session.query(Jobs).with_entities(Jobs.name).filter_by(plugin_id=plugin["id"]).all()
db_names = [job.name for job in db_jobs]
job_names = [job["name"] for job in jobs]
missing_names = [job for job in db_names if job not in job_names]
if missing_names:
# Remove jobs that are no longer in the list
session.query(Jobs).filter(
Jobs.name.in_(missing_names)
).delete()
session.query(Jobs).filter(Jobs.name.in_(missing_names)).delete()
for job in jobs:
db_job = (
session.query(Jobs)
.with_entities(Jobs.file_name, Jobs.every, Jobs.reload)
.filter_by(name=job["name"], plugin_id=plugin["id"])
.first()
)
db_job = session.query(Jobs).with_entities(Jobs.file_name, Jobs.every, Jobs.reload).filter_by(name=job["name"], plugin_id=plugin["id"]).first()
if job["name"] not in db_names or not db_job:
job["file_name"] = job.pop("file")
@ -1322,28 +1090,14 @@ class Database:
if updates:
updates[Jobs.last_run] = None
session.query(Jobs_cache).filter(
Jobs_cache.job_name == job["name"]
).delete()
session.query(Jobs).filter(
Jobs.name == job["name"]
).update(updates)
session.query(Jobs_cache).filter(Jobs_cache.job_name == job["name"]).delete()
session.query(Jobs).filter(Jobs.name == job["name"]).update(updates)
tmp_ui_path = Path(
sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui"
)
path_ui = (
tmp_ui_path
if tmp_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
tmp_ui_path = Path(sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui")
path_ui = tmp_ui_path if tmp_ui_path.exists() else Path(sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui")
if path_ui.exists():
if {"template.html", "actions.py"}.issubset(
listdir(str(path_ui))
):
if {"template.html", "actions.py"}.issubset(listdir(str(path_ui))):
db_plugin_page = (
session.query(Plugin_pages)
.with_entities(
@ -1355,9 +1109,7 @@ class Database:
)
if not db_plugin_page:
template = path_ui.joinpath(
"template.html"
).read_bytes()
template = path_ui.joinpath("template.html").read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
@ -1376,10 +1128,7 @@ class Database:
template_checksum = file_hash(str(template_path))
actions_checksum = file_hash(str(actions_path))
if (
template_checksum
!= db_plugin_page.template_checksum
):
if template_checksum != db_plugin_page.template_checksum:
updates.update(
{
Plugin_pages.template_file: template_path.read_bytes(),
@ -1396,9 +1145,7 @@ class Database:
)
if updates:
session.query(Plugin_pages).filter(
Plugin_pages.plugin_id == plugin["id"]
).update(updates)
session.query(Plugin_pages).filter(Plugin_pages.plugin_id == plugin["id"]).update(updates)
continue
@ -1420,9 +1167,7 @@ class Database:
db_setting = session.query(Settings).filter_by(id=setting).first()
if db_setting is not None:
self.__logger.warning(
f"A setting with id {setting} already exists, therefore it will not be added."
)
self.__logger.warning(f"A setting with id {setting} already exists, therefore it will not be added.")
continue
value.update(
@ -1443,17 +1188,10 @@ class Database:
)
for job in jobs:
db_job = (
session.query(Jobs)
.with_entities(Jobs.file_name, Jobs.every, Jobs.reload)
.filter_by(name=job["name"], plugin_id=plugin["id"])
.first()
)
db_job = session.query(Jobs).with_entities(Jobs.file_name, Jobs.every, Jobs.reload).filter_by(name=job["name"], plugin_id=plugin["id"]).first()
if db_job is not None:
self.__logger.warning(
f"A job with the name {job['name']} already exists in the database, therefore it will not be added."
)
self.__logger.warning(f"A job with the name {job['name']} already exists in the database, therefore it will not be added.")
continue
job["file_name"] = job.pop("file")
@ -1461,21 +1199,11 @@ class Database:
to_put.append(Jobs(plugin_id=plugin["id"], **job))
if page:
tmp_ui_path = Path(
sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui"
)
path_ui = (
tmp_ui_path
if tmp_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
tmp_ui_path = Path(sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui")
path_ui = tmp_ui_path if tmp_ui_path.exists() else Path(sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui")
if path_ui.exists():
if {"template.html", "actions.py"}.issubset(
listdir(str(path_ui))
):
if {"template.html", "actions.py"}.issubset(listdir(str(path_ui))):
db_plugin_page = (
session.query(Plugin_pages)
.with_entities(
@ -1487,9 +1215,7 @@ class Database:
)
if not db_plugin_page:
template = path_ui.joinpath(
"template.html"
).read_bytes()
template = path_ui.joinpath("template.html").read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
@ -1508,10 +1234,7 @@ class Database:
template_checksum = file_hash(str(template_path))
actions_checksum = file_hash(str(actions_path))
if (
template_checksum
!= db_plugin_page.template_checksum
):
if template_checksum != db_plugin_page.template_checksum:
updates.update(
{
Plugin_pages.template_file: template_path.read_bytes(),
@ -1528,9 +1251,7 @@ class Database:
)
if updates:
session.query(Plugin_pages).filter(
Plugin_pages.plugin_id == plugin["id"]
).update(updates)
session.query(Plugin_pages).filter(Plugin_pages.plugin_id == plugin["id"]).update(updates)
with suppress(ProgrammingError, OperationalError):
metadata = session.query(Metadata).get(1)
@ -1545,9 +1266,7 @@ class Database:
return ""
def get_plugins(
self, *, external: bool = False, with_data: bool = False
) -> List[Dict[str, Any]]:
def get_plugins(self, *, external: bool = False, with_data: bool = False) -> List[Dict[str, Any]]:
"""Get all plugins from the database."""
plugins = []
with self.__db_session() as session:
@ -1581,12 +1300,7 @@ class Database:
if external and not plugin.external:
continue
page = (
session.query(Plugin_pages)
.with_entities(Plugin_pages.id)
.filter_by(plugin_id=plugin.id)
.first()
)
page = session.query(Plugin_pages).with_entities(Plugin_pages.id).filter_by(plugin_id=plugin.id).first()
data = {
"id": plugin.id,
"stream": plugin.stream,
@ -1597,11 +1311,7 @@ class Database:
"method": plugin.method,
"page": page is not None,
"settings": {},
} | (
{"data": plugin.data, "checksum": plugin.checksum}
if with_data
else {}
)
} | ({"data": plugin.data, "checksum": plugin.checksum} if with_data else {})
for setting in (
session.query(Settings)
@ -1630,13 +1340,7 @@ class Database:
} | ({"multiple": setting.multiple} if setting.multiple else {})
if setting.type == "select":
data["settings"][setting.id]["select"] = [
select.value
for select in session.query(Selects)
.with_entities(Selects.value)
.filter_by(setting_id=setting.id)
.all()
]
data["settings"][setting.id]["select"] = [select.value for select in session.query(Selects).with_entities(Selects.value).filter_by(setting_id=setting.id).all()]
plugins.append(data)
@ -1645,7 +1349,7 @@ class Database:
def get_plugins_errors(self) -> int:
"""Get plugins errors."""
with self.__db_session() as session:
return session.query(Jobs).filter(Jobs.success == False).count()
return session.query(Jobs).filter(Jobs.success == False).count() # noqa: E712
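
`Jobs.success == False` trips flake8's E712 (comparison to `False`), but in SQLAlchemy the `==` is overloaded to build SQL, so the commit silences the warning with `# noqa: E712` instead of rewriting it. `.is_(False)` is an equivalent filter that avoids the noqa; a runnable sketch with a minimal stand-in model:

```python
from sqlalchemy import Boolean, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Jobs(Base):  # minimal stand-in, not the real model
    __tablename__ = "jobs"
    id = Column(Integer, primary_key=True)
    success = Column(Boolean)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Jobs(success=True), Jobs(success=False)])
    session.commit()
    a = session.query(Jobs).filter(Jobs.success == False).count()  # noqa: E712
    b = session.query(Jobs).filter(Jobs.success.is_(False)).count()
    assert a == b == 1  # both filters select the same rows
```
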
def get_jobs(self) -> Dict[str, Dict[str, Any]]:
"""Get jobs."""
@ -1655,18 +1359,12 @@ class Database:
"every": job.every,
"reload": job.reload,
"success": job.success,
"last_run": job.last_run.strftime("%Y/%m/%d, %I:%M:%S %p")
if job.last_run is not None
else "Never",
"last_run": job.last_run.strftime("%Y/%m/%d, %I:%M:%S %p") if job.last_run is not None else "Never",
"cache": [
{
"service_id": cache.service_id,
"file_name": cache.file_name,
"last_update": cache.last_update.strftime(
"%Y/%m/%d, %I:%M:%S %p"
)
if cache.last_update is not None
else "Never",
"last_update": cache.last_update.strftime("%Y/%m/%d, %I:%M:%S %p") if cache.last_update is not None else "Never",
}
for cache in session.query(Jobs_cache)
.with_entities(
@ -1707,12 +1405,7 @@ class Database:
entities.append(Jobs_cache.data)
with self.__db_session() as session:
return (
session.query(Jobs_cache)
.with_entities(*entities)
.filter_by(job_name=job_name, file_name=file_name)
.first()
)
return session.query(Jobs_cache).with_entities(*entities).filter_by(job_name=job_name, file_name=file_name).first()
def get_jobs_cache_files(self) -> List[Dict[str, Any]]:
"""Get jobs cache files."""
@ -1735,24 +1428,15 @@ class Database:
)
]
def add_instance(
self, hostname: str, port: int, server_name: str, changed: Optional[bool] = True
) -> str:
def add_instance(self, hostname: str, port: int, server_name: str, changed: Optional[bool] = True) -> str:
"""Add instance."""
with self.__db_session() as session:
db_instance = (
session.query(Instances)
.with_entities(Instances.hostname)
.filter_by(hostname=hostname)
.first()
)
db_instance = session.query(Instances).with_entities(Instances.hostname).filter_by(hostname=hostname).first()
if db_instance is not None:
return f"Instance {hostname} already exists, will not be added."
session.add(
Instances(hostname=hostname, port=port, server_name=server_name)
)
session.add(Instances(hostname=hostname, port=port, server_name=server_name))
if changed:
with suppress(ProgrammingError, OperationalError):
@ -1767,9 +1451,7 @@ class Database:
return ""
def update_instances(
self, instances: List[Dict[str, Any]], changed: Optional[bool] = True
) -> str:
def update_instances(self, instances: List[Dict[str, Any]], changed: Optional[bool] = True) -> str:
"""Update instances."""
to_put = []
with self.__db_session() as session:
@ -1807,24 +1489,13 @@ class Database:
"port": instance.port,
"server_name": instance.server_name,
}
for instance in (
session.query(Instances)
.with_entities(
Instances.hostname, Instances.port, Instances.server_name
)
.all()
)
for instance in (session.query(Instances).with_entities(Instances.hostname, Instances.port, Instances.server_name).all())
]
def get_plugin_actions(self, plugin: str) -> Optional[Any]:
"""get actions file for the plugin"""
with self.__db_session() as session:
page = (
session.query(Plugin_pages)
.with_entities(Plugin_pages.actions_file)
.filter_by(plugin_id=plugin)
.first()
)
page = session.query(Plugin_pages).with_entities(Plugin_pages.actions_file).filter_by(plugin_id=plugin).first()
if not page:
return None
@ -1834,12 +1505,7 @@ class Database:
def get_plugin_template(self, plugin: str) -> Optional[Any]:
"""get template file for the plugin"""
with self.__db_session() as session:
page = (
session.query(Plugin_pages)
.with_entities(Plugin_pages.template_file)
.filter_by(plugin_id=plugin)
.first()
)
page = session.query(Plugin_pages).with_entities(Plugin_pages.template_file).filter_by(plugin_id=plugin).first()
if not page:
return None

View File

@ -16,9 +16,7 @@ from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.schema import UniqueConstraint
CONTEXTS_ENUM = Enum("global", "multisite", name="contexts_enum")
SETTINGS_TYPES_ENUM = Enum(
"password", "text", "check", "select", name="settings_types_enum"
)
SETTINGS_TYPES_ENUM = Enum("password", "text", "check", "select", name="settings_types_enum")
METHODS_ENUM = Enum("ui", "scheduler", "autoconf", "manual", name="methods_enum")
SCHEDULES_ENUM = Enum("once", "minute", "hour", "day", "week", name="schedules_enum")
CUSTOM_CONFIGS_TYPES_ENUM = Enum(
@ -57,9 +55,7 @@ class Plugins(Base):
data = Column(LargeBinary(length=(2**32) - 1), nullable=True)
checksum = Column(String(128), nullable=True)
settings = relationship(
"Settings", back_populates="plugin", cascade="all, delete-orphan"
)
settings = relationship("Settings", back_populates="plugin", cascade="all, delete-orphan")
jobs = relationship("Jobs", back_populates="plugin", cascade="all, delete-orphan")
pages = relationship("Plugin_pages", back_populates="plugin", cascade="all")
@ -88,12 +84,8 @@ class Settings(Base):
multiple = Column(String(128), nullable=True)
selects = relationship("Selects", back_populates="setting", cascade="all")
services = relationship(
"Services_settings", back_populates="setting", cascade="all"
)
global_value = relationship(
"Global_values", back_populates="setting", cascade="all"
)
services = relationship("Services_settings", back_populates="setting", cascade="all")
global_value = relationship("Global_values", back_populates="setting", cascade="all")
plugin = relationship("Plugins", back_populates="settings")
@ -118,12 +110,8 @@ class Services(Base):
id = Column(String(64), primary_key=True)
method = Column(METHODS_ENUM, nullable=False)
settings = relationship(
"Services_settings", back_populates="service", cascade="all"
)
custom_configs = relationship(
"Custom_configs", back_populates="service", cascade="all"
)
settings = relationship("Services_settings", back_populates="service", cascade="all")
custom_configs = relationship("Custom_configs", back_populates="service", cascade="all")
jobs_cache = relationship("Jobs_cache", back_populates="service", cascade="all")

View File

@ -57,14 +57,10 @@ class Configurator:
def get_settings(self) -> Dict[str, Any]:
return self.__settings
def get_plugins(
self, _type: Union[Literal["core"], Literal["external"]]
) -> List[Dict[str, Any]]:
def get_plugins(self, _type: Union[Literal["core"], Literal["external"]]) -> List[Dict[str, Any]]:
return self.__core_plugins if _type == "core" else self.__external_plugins
def get_plugins_settings(
self, _type: Union[Literal["core"], Literal["external"]]
) -> Dict[str, Any]:
def get_plugins_settings(self, _type: Union[Literal["core"], Literal["external"]]) -> Dict[str, Any]:
if _type == "core":
plugins = self.__core_plugins
else:
@ -77,7 +73,7 @@ class Configurator:
return plugins_settings
def __map_servers(self) -> Dict[str, List[str]]:
if not self.__multisite or not "SERVER_NAME" in self.__variables:
if not self.__multisite or "SERVER_NAME" not in self.__variables:
return {}
servers = {}
for server_name in self.__variables["SERVER_NAME"].strip().split(" "):
@ -96,11 +92,7 @@ class Configurator:
f"Ignoring {server_name}_SERVER_NAME because regex is not valid",
)
else:
names = (
self.__variables[f"{server_name}_SERVER_NAME"]
.strip()
.split(" ")
)
names = self.__variables[f"{server_name}_SERVER_NAME"].strip().split(" ")
servers[server_name] = names
return servers
@ -132,9 +124,7 @@ class Configurator:
if _type == "external":
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz", compresslevel=9
) as tar:
with tar_open(fileobj=plugin_content, mode="w:gz", compresslevel=9) as tar:
tar.add(
dirname(file),
arcname=basename(dirname(file)),
@ -170,7 +160,7 @@ class Configurator:
lines = f.readlines()
for line in lines:
line = line.strip()
if not line or line.startswith("#") or not "=" in line:
if not line or line.startswith("#") or "=" not in line:
continue
splitted = line.split("=", 1)
variables[splitted[0]] = splitted[1]
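
Besides the E713 fix (`not "=" in line` becoming `"=" not in line`), this hunk is a compact KEY=VALUE parser: blanks, comments and malformed lines are skipped, and `split("=", 1)` keeps any further `=` inside the value. The same logic as a standalone function:

```python
from typing import Dict


def parse_env(text: str) -> Dict[str, str]:
    variables = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith("#") or "=" not in line:
            continue  # skip blanks, comments and lines without a key/value pair
        key, value = line.split("=", 1)  # only split on the first "="
        variables[key] = value
    return variables


sample = "# a comment\nUSE_ANTIBOT=captcha\nREVERSE_PROXY_URL_1=/subdir\nnot a setting\n"
assert parse_env(sample) == {"USE_ANTIBOT": "captcha", "REVERSE_PROXY_URL_1": "/subdir"}
```
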
@ -268,9 +258,7 @@ class Configurator:
if variable in target:
return target, variable
for real_var, settings in target.items():
if "multiple" in settings and re_search(
f"^{real_var}_[0-9]+$", variable
):
if "multiple" in settings and re_search(f"^{real_var}_[0-9]+$", variable):
return target, real_var
return None, variable
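
The reflowed condition resolves a numbered variable such as `REVERSE_PROXY_URL_2` back to its base setting, but only when that setting is flagged as "multiple". A self-contained sketch of the lookup (the `settings` shape is assumed from plugin.json entries):

```python
from re import search as re_search
from typing import Optional

settings = {  # assumed shape, mirroring plugin.json "settings" entries
    "REVERSE_PROXY_URL": {"context": "multisite", "multiple": "reverse-proxy"},
    "USE_ANTIBOT": {"context": "multisite"},
}


def find_setting(variable: str) -> Optional[str]:
    if variable in settings:
        return variable
    for real_var, data in settings.items():
        if "multiple" in data and re_search(f"^{real_var}_[0-9]+$", variable):
            return real_var  # numbered instance of a "multiple" setting
    return None


assert find_setting("REVERSE_PROXY_URL_2") == "REVERSE_PROXY_URL"
assert find_setting("USE_ANTIBOT_2") is None  # not flagged as multiple
```
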

View File

@ -63,27 +63,19 @@ class Templator:
templates.append(template)
return templates
def __write_config(
self, subpath: Optional[str] = None, config: Optional[Dict[str, Any]] = None
):
def __write_config(self, subpath: Optional[str] = None, config: Optional[Dict[str, Any]] = None):
real_path = Path(self.__output, subpath or "", "variables.env")
real_path.parent.mkdir(parents=True, exist_ok=True)
real_path.write_text(
"\n".join(f"{k}={v}" for k, v in (config or self.__config).items())
)
real_path.write_text("\n".join(f"{k}={v}" for k, v in (config or self.__config).items()))
def __render_global(self):
self.__write_config()
templates = self.__find_templates(
["global", "http", "stream", "default-server-http"]
)
templates = self.__find_templates(["global", "http", "stream", "default-server-http"])
for template in templates:
self.__render_template(template)
def __render_server(self, server: str):
templates = self.__find_templates(
["modsec", "modsec-crs", "server-http", "server-stream"]
)
templates = self.__find_templates(["modsec", "modsec-crs", "server-http", "server-stream"])
if self.__config.get("MULTISITE", "no") == "yes":
config = self.__config.copy()
for variable, value in self.__config.items():

View File

@ -12,10 +12,7 @@ from time import sleep
from traceback import format_exc
from typing import Any, Dict
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -72,9 +69,7 @@ if __name__ == "__main__":
type=str,
help="path to the file containing environment variables",
)
parser.add_argument(
"--no-linux-reload", action="store_true", help="disable linux reload"
)
parser.add_argument("--no-linux-reload", action="store_true", help="disable linux reload")
args = parser.parse_args()
settings_path = Path(normpath(args.settings))
@ -183,10 +178,7 @@ if __name__ == "__main__":
)
templator.render()
if (
integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker")
and not args.no_linux_reload
):
if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker") and not args.no_linux_reload:
retries = 0
while not Path(sep, "var", "run", "bunkerweb", "nginx.pid").exists():
if retries == 5:

View File

@ -10,10 +10,7 @@ from time import sleep
from traceback import format_exc
from typing import Any
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -24,9 +21,7 @@ from Database import Database # type: ignore
from Configurator import Configurator
from API import API # type: ignore
custom_confs_rx = re_compile(
r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
)
custom_confs_rx = re_compile(r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$")
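
The regex above (now joined onto one line) splits a custom-configuration environment variable into an optional service prefix, a config type and a name. For example:

```python
from re import compile as re_compile

custom_confs_rx = re_compile(r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$")

match = custom_confs_rx.search("www.example.com_CUSTOM_CONF_SERVER_HTTP_my-rules")
assert match is not None
assert match.groups() == ("www.example.com", "SERVER_HTTP", "my-rules")

# without a service prefix, the first group is simply empty
assert custom_confs_rx.search("CUSTOM_CONF_MODSEC_rules").groups()[0] == ""
```
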
def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
@ -36,11 +31,7 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
custom_confs = []
apis = []
for var in (
instance.attrs["Config"]["Env"]
if _type == "Docker"
else instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]
):
for var in instance.attrs["Config"]["Env"] if _type == "Docker" else instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]:
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
@ -54,9 +45,7 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
),
}
)
logger.info(
f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}"
)
logger.info(f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}")
else:
tmp_config[splitted[0]] = splitted[1]
@ -168,9 +157,7 @@ if __name__ == "__main__":
# Check existences and permissions
logger.info("Checking arguments ...")
files = [settings_path] + (
[Path(normpath(args.variables))] if args.variables else []
)
files = [settings_path] + ([Path(normpath(args.variables))] if args.variables else [])
paths_rx = [core_path, plugins_path]
for file in files:
if not file.is_file():
@ -217,19 +204,13 @@ if __name__ == "__main__":
),
}
)
logger.info(
f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}"
)
logger.info(f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}")
db = Database(logger, config_files.get("DATABASE_URI", None), pool=False)
else:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
docker_client = DockerClient(base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock"))
while not docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
while not docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"}):
logger.info("Waiting for BunkerWeb instance ...")
sleep(5)
@ -239,9 +220,7 @@ if __name__ == "__main__":
custom_confs = []
apis = []
for instance in docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
for instance in docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"}):
for var in instance.attrs["Config"]["Env"]:
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
@ -256,16 +235,12 @@ if __name__ == "__main__":
),
}
)
logger.info(
f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}"
)
logger.info(f"Found custom conf env var {'for service ' + custom_conf[0] if custom_conf[0] else 'without service'} with type {custom_conf[1]} and name {custom_conf[2]}")
else:
tmp_config[splitted[0]] = splitted[1]
if not db and splitted[0] == "DATABASE_URI":
db = Database(
logger, sqlalchemy_string=splitted[1], pool=False
)
db = Database(logger, sqlalchemy_string=splitted[1], pool=False)
elif splitted[0] == "API_HTTP_PORT":
api_http_port = splitted[1]
elif splitted[0] == "API_SERVER_NAME":
@ -319,9 +294,7 @@ if __name__ == "__main__":
logger.info("Database tables initialized")
err = db.initialize_db(
version=Path(sep, "usr", "share", "bunkerweb", "VERSION")
.read_text()
.strip(),
version=Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text().strip(),
integration=integration,
)
@ -344,9 +317,7 @@ if __name__ == "__main__":
err = db.save_config(config_files, args.method, changed=False)
if err:
logger.warning(
f"Couldn't save config to database : {err}, config may not work as expected"
)
logger.warning(f"Couldn't save config to database : {err}, config may not work as expected")
else:
changes.append("config")
logger.info("Config successfully saved to database")
@ -355,9 +326,7 @@ if __name__ == "__main__":
err1 = db.save_custom_configs(custom_confs, args.method, changed=False)
if err1:
logger.warning(
f"Couldn't save custom configs to database : {err1}, custom configs may not work as expected"
)
logger.warning(f"Couldn't save custom configs to database : {err1}, custom configs may not work as expected")
else:
changes.append("custom_configs")
logger.info("Custom configs successfully saved to database")
@ -377,9 +346,7 @@ if __name__ == "__main__":
else:
if "instances" not in changes:
changes.append("instances")
logger.info(
f"Instance {endpoint_data[0]} successfully saved to database"
)
logger.info(f"Instance {endpoint_data[0]} successfully saved to database")
else:
err = db.add_instance(
"127.0.0.1",
@ -397,9 +364,7 @@ if __name__ == "__main__":
# update changes in db
ret = db.checked_changes(changes, value=True)
if ret:
logger.error(
f"An error occurred when setting the changes to checked in the database : {ret}"
)
logger.error(f"An error occurred when setting the changes to checked in the database : {ret}")
except SystemExit as e:
sys_exit(e.code)
except:

View File

@ -7,10 +7,7 @@ from sys import path as sys_path
from tarfile import open as tar_open
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -46,10 +43,7 @@ class ApiCaller:
config.load_incluster_config()
corev1 = kube_client.CoreV1Api()
for pod in corev1.list_pod_for_all_namespaces(watch=False).items:
if (
pod.metadata.annotations != None
and "bunkerweb.io/INSTANCE" in pod.metadata.annotations
):
if pod.metadata.annotations is not None and "bunkerweb.io/INSTANCE" in pod.metadata.annotations:
api_http_port = None
api_server_name = None
@ -66,20 +60,14 @@ class ApiCaller:
)
)
else:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
docker_client = DockerClient(base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock"))
if bw_integration == "Swarm":
for instance in docker_client.services.list(
filters={"label": "bunkerweb.INSTANCE"}
):
for instance in docker_client.services.list(filters={"label": "bunkerweb.INSTANCE"}):
api_http_port = None
api_server_name = None
for var in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"][
"Env"
]:
for var in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]:
if var.startswith("API_HTTP_PORT="):
api_http_port = var.replace("API_HTTP_PORT=", "", 1)
elif var.startswith("API_SERVER_NAME="):
@ -89,15 +77,12 @@ class ApiCaller:
self.__apis.append(
API(
f"http://{instance.name}.{task['NodeID']}.{task['ID']}:{api_http_port or getenv('API_HTTP_PORT', '5000')}",
host=api_server_name
or getenv("API_SERVER_NAME", "bwapi"),
host=api_server_name or getenv("API_SERVER_NAME", "bwapi"),
)
)
return
for instance in docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
for instance in docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"}):
api_http_port = None
api_server_name = None
@ -160,9 +145,7 @@ class ApiCaller:
def send_files(self, path: str, url: str) -> bool:
ret = True
with BytesIO() as tgz:
with tar_open(
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3
) as tf:
with tar_open(mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3) as tf:
tf.add(path, arcname=".")
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
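
`send_files` never writes the archive to disk: the gzipped tar is built inside a `BytesIO` buffer, rewound, and handed to the HTTP layer. A minimal standalone version of the packaging step:

```python
from io import BytesIO
from pathlib import Path
from tarfile import open as tar_open
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    Path(tmp, "variables.env").write_text("USE_ANTIBOT=captcha\n")

    with BytesIO() as tgz:
        # dereference=True archives the targets of symlinks, not the links
        with tar_open(mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3) as tf:
            tf.add(tmp, arcname=".")
        tgz.seek(0, 0)  # rewind so the buffer can be read from the start
        archive = tgz.read()

print(f"built a {len(archive)} byte tar.gz entirely in memory")
```
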

View File

@ -15,18 +15,10 @@ from logger import setup_logger
class ConfigCaller:
def __init__(self):
self.__logger = setup_logger("Config", "INFO")
self._settings = loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(
encoding="utf-8"
)
)
for plugin in glob(
join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
) + glob(join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")):
self._settings = loads(Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(encoding="utf-8"))
for plugin in glob(join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")) + glob(join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")):
try:
self._settings.update(
loads(Path(plugin).read_text(encoding="utf-8"))["settings"]
)
self._settings.update(loads(Path(plugin).read_text(encoding="utf-8"))["settings"])
except KeyError:
self.__logger.error(
f'Error while loading plugin metadata file at {plugin} : missing "settings" key',
@ -39,23 +31,15 @@ class ConfigCaller:
def _is_setting(self, setting) -> bool:
return setting in self._settings
def _is_setting_context(
self, setting: str, context: Union[Literal["global"], Literal["multisite"]]
) -> bool:
def _is_setting_context(self, setting: str, context: Union[Literal["global"], Literal["multisite"]]) -> bool:
if self._is_setting(setting):
return self._settings[setting]["context"] == context
elif match(r"^.+_\d+$", setting):
multiple_setting = "_".join(setting.split("_")[:-1])
return (
self._is_setting(multiple_setting)
and self._settings[multiple_setting]["context"] == context
and "multiple" in self._settings[multiple_setting]
)
return self._is_setting(multiple_setting) and self._settings[multiple_setting]["context"] == context and "multiple" in self._settings[multiple_setting]
return False
def _full_env(
self, env_instances: Dict[str, Any], env_services: Dict[str, Any]
) -> Dict[str, Any]:
def _full_env(self, env_instances: Dict[str, Any], env_services: Dict[str, Any]) -> Dict[str, Any]:
full_env = {}
# Fill with default values
for k, v in self._settings.items():
@ -63,11 +47,7 @@ class ConfigCaller:
# Replace with instances values
for k, v in env_instances.items():
full_env[k] = v
if (
not self._is_setting_context(k, "global")
and env_instances.get("MULTISITE", "no") == "yes"
and env_instances.get("SERVER_NAME", "") != ""
):
if not self._is_setting_context(k, "global") and env_instances.get("MULTISITE", "no") == "yes" and env_instances.get("SERVER_NAME", "") != "":
for server_name in env_instances["SERVER_NAME"].split(" "):
full_env[f"{server_name}_{k}"] = v
# Replace with services values
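
`_full_env` layers three sources in increasing priority: setting defaults, instance-level variables (fanned out per server name when multisite), then service-level variables. A simplified sketch of that precedence; the real code additionally restricts the fan-out to non-global settings, which is only crudely approximated here:

```python
from typing import Any, Dict


def full_env_sketch(defaults: Dict[str, Any], env_instances: Dict[str, Any], env_services: Dict[str, Any]) -> Dict[str, Any]:
    full_env = dict(defaults)  # lowest priority: defaults
    full_env.update(env_instances)  # then instance-wide values
    if env_instances.get("MULTISITE", "no") == "yes" and env_instances.get("SERVER_NAME", "") != "":
        for server_name in env_instances["SERVER_NAME"].split(" "):
            for k, v in env_instances.items():
                if k not in ("MULTISITE", "SERVER_NAME"):  # crude stand-in for the real context check
                    full_env[f"{server_name}_{k}"] = v
    full_env.update(env_services)  # highest priority: per-service values
    return full_env


env = full_env_sketch(
    {"USE_ANTIBOT": "no"},
    {"MULTISITE": "yes", "SERVER_NAME": "www.example.com", "USE_ANTIBOT": "captcha"},
    {"www.example.com_USE_ANTIBOT": "javascript"},
)
assert env["www.example.com_USE_ANTIBOT"] == "javascript"
```
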

View File

@ -70,9 +70,7 @@ def is_cached_file(
return is_cached and cached_file
def get_file_in_db(
file: Union[str, Path], db, *, job_name: Optional[str] = None
) -> Optional[bytes]:
def get_file_in_db(file: Union[str, Path], db, *, job_name: Optional[str] = None) -> Optional[bytes]:
cached_file = db.get_job_cache_file(
job_name or basename(getsourcefile(_getframe(1))).replace(".py", ""),
normpath(file),
@ -98,8 +96,7 @@ def set_file_in_db(
service_id,
name,
content,
job_name=job_name
or basename(getsourcefile(_getframe(1))).replace(".py", ""),
job_name=job_name or basename(getsourcefile(_getframe(1))).replace(".py", ""),
checksum=checksum,
)
@ -113,9 +110,7 @@ def set_file_in_db(
def del_file_in_db(name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
db.delete_job_cache(
name, job_name=basename(getsourcefile(_getframe(1))).replace(".py", "")
)
db.delete_job_cache(name, job_name=basename(getsourcefile(_getframe(1))).replace(".py", ""))
except:
return False, f"exception :\n{format_exc()}"
return ret, err
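
The recurring `basename(getsourcefile(_getframe(1))).replace(".py", "")` expression derives the default job name from the caller's file name, so job scripts never have to pass it explicitly. A tiny demonstration of the frame introspection involved (it needs to run from a file, not a REPL):

```python
from inspect import getsourcefile
from os.path import basename
from sys import _getframe


def caller_job_name() -> str:
    # _getframe(1) is the frame of whoever called this function;
    # that frame's source file name doubles as the job name
    return basename(getsourcefile(_getframe(1))).replace(".py", "")


def some_job():
    return caller_job_name()


# saved as mmdb-country.py and executed, this prints "mmdb-country"
print(some_job())
```
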
@ -145,9 +140,7 @@ def bytes_hash(bio: BufferedReader) -> str:
def cache_hash(cache: Union[str, Path], db=None) -> Optional[str]:
with suppress(BaseException):
return loads(Path(normpath(f"{cache}.md")).read_text(encoding="utf-8")).get(
"checksum", None
)
return loads(Path(normpath(f"{cache}.md")).read_text(encoding="utf-8")).get("checksum", None)
if db:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""),

View File

@ -30,24 +30,12 @@ basicConfig(
level=default_level,
)
getLogger("sqlalchemy.orm.mapper.Mapper").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.orm.relationships.RelationshipProperty").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.orm.strategies.LazyLoader").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.pool.impl.QueuePool").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.pool.impl.NullPool").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.engine.Engine").setLevel(
default_level if default_level != INFO else WARNING
)
getLogger("sqlalchemy.orm.mapper.Mapper").setLevel(default_level if default_level != INFO else WARNING)
getLogger("sqlalchemy.orm.relationships.RelationshipProperty").setLevel(default_level if default_level != INFO else WARNING)
getLogger("sqlalchemy.orm.strategies.LazyLoader").setLevel(default_level if default_level != INFO else WARNING)
getLogger("sqlalchemy.pool.impl.QueuePool").setLevel(default_level if default_level != INFO else WARNING)
getLogger("sqlalchemy.pool.impl.NullPool").setLevel(default_level if default_level != INFO else WARNING)
getLogger("sqlalchemy.engine.Engine").setLevel(default_level if default_level != INFO else WARNING)
# Edit the default levels of the logging module
addLevelName(CRITICAL, "🚨")
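
The six reflowed `setLevel` calls all apply the same computed level; since only the logger names differ, a loop would say the same thing more compactly (a possible follow-up refactor, not what this commit does):

```python
from logging import INFO, WARNING, getLogger

default_level = INFO  # stand-in for the level computed from the environment

sqlalchemy_level = default_level if default_level != INFO else WARNING
for name in (
    "sqlalchemy.orm.mapper.Mapper",
    "sqlalchemy.orm.relationships.RelationshipProperty",
    "sqlalchemy.orm.strategies.LazyLoader",
    "sqlalchemy.pool.impl.QueuePool",
    "sqlalchemy.pool.impl.NullPool",
    "sqlalchemy.engine.Engine",
):
    getLogger(name).setLevel(sqlalchemy_level)
```
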

View File

@ -21,9 +21,7 @@ from sys import path as sys_path
from threading import Lock, Semaphore, Thread
from traceback import format_exc
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("utils",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("utils",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -94,16 +92,12 @@ class JobScheduler(ApiCaller):
def __get_jobs(self):
jobs = {}
for plugin_file in glob(
join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
) + glob( # core plugins
join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")
): # external plugins
for plugin_file in glob(join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")) + glob(join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")):  # core plugins + external plugins
plugin_name = basename(dirname(plugin_file))
jobs[plugin_name] = []
try:
plugin_data = loads(Path(plugin_file).read_text(encoding="utf-8"))
if not "jobs" in plugin_data:
if "jobs" not in plugin_data:
continue
plugin_jobs = plugin_data["jobs"]
@ -118,34 +112,24 @@ class JobScheduler(ApiCaller):
"reload",
)
):
self.__logger.warning(
f"missing keys for job {job['name']} in plugin {plugin_name}, must have name, file, every and reload, ignoring job"
)
self.__logger.warning(f"missing keys for job {job['name']} in plugin {plugin_name}, must have name, file, every and reload, ignoring job")
plugin_jobs.pop(x)
continue
if not match(r"^[\w.-]{1,128}$", job["name"]):
self.__logger.warning(
f"Invalid name for job {job['name']} in plugin {plugin_name} (Can only contain numbers, letters, underscores and hyphens (min 1 characters and max 128)), ignoring job"
)
self.__logger.warning(f"Invalid name for job {job['name']} in plugin {plugin_name} (Can only contain numbers, letters, underscores and hyphens (min 1 characters and max 128)), ignoring job")
plugin_jobs.pop(x)
continue
elif not match(r"^[\w./-]{1,256}$", job["file"]):
self.__logger.warning(
f"Invalid file for job {job['name']} in plugin {plugin_name} (Can only contain numbers, letters, underscores, hyphens and slashes (min 1 characters and max 256)), ignoring job"
)
self.__logger.warning(f"Invalid file for job {job['name']} in plugin {plugin_name} (Can only contain numbers, letters, underscores, hyphens and slashes (min 1 characters and max 256)), ignoring job")
plugin_jobs.pop(x)
continue
elif job["every"] not in ("once", "minute", "hour", "day", "week"):
self.__logger.warning(
f"Invalid every for job {job['name']} in plugin {plugin_name} (Must be once, minute, hour, day or week), ignoring job"
)
self.__logger.warning(f"Invalid every for job {job['name']} in plugin {plugin_name} (Must be once, minute, hour, day or week), ignoring job")
plugin_jobs.pop(x)
continue
elif job["reload"] is not True and job["reload"] is not False:
self.__logger.warning(
f"Invalid reload for job {job['name']} in plugin {plugin_name} (Must be true or false), ignoring job"
)
self.__logger.warning(f"Invalid reload for job {job['name']} in plugin {plugin_name} (Must be true or false), ignoring job")
plugin_jobs.pop(x)
continue
@ -255,9 +239,7 @@ class JobScheduler(ApiCaller):
file = job["file"]
every = job["every"]
if every != "once":
self.__str_to_schedule(every).do(
self.__job_wrapper, path, plugin, name, file
)
self.__str_to_schedule(every).do(self.__job_wrapper, path, plugin, name, file)
except:
self.__logger.error(
f"Exception while scheduling jobs for plugin {plugin} : {format_exc()}",

View File

@ -5,7 +5,7 @@ from copy import deepcopy
from glob import glob
from hashlib import sha256
from io import BytesIO
from json import dumps, load as json_load
from json import load as json_load
from os import (
_exit,
chmod,
@ -29,10 +29,7 @@ from time import sleep
from traceback import format_exc
from typing import Any, Dict, List, Optional, Union
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -41,7 +38,6 @@ from dotenv import dotenv_values
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
from JobScheduler import JobScheduler
from ApiCaller import ApiCaller # type: ignore
RUN = True
SCHEDULER: Optional[JobScheduler] = None
@ -182,12 +178,7 @@ def dict_to_frozenset(d):
def api_to_instance(api):
hostname_port = (
api.endpoint.replace("http://", "")
.replace("https://", "")
.replace("/", "")
.split(":")
)
hostname_port = api.endpoint.replace("http://", "").replace("https://", "").replace("/", "").split(":")
return {
"hostname": hostname_port[0],
"env": {"API_HTTP_PORT": int(hostname_port[1]), "API_SERVER_NAME": api.host},
@ -228,27 +219,19 @@ if __name__ == "__main__":
INTEGRATION = "Autoconf"
elif integration_path.is_file():
INTEGRATION = integration_path.read_text(encoding="utf-8").strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
encoding="utf-8"
):
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
INTEGRATION = "Docker"
del integration_path, os_release_path
tmp_variables_path = (
normpath(args.variables)
if args.variables
else join(sep, "var", "tmp", "bunkerweb", "variables.env")
)
tmp_variables_path = normpath(args.variables) if args.variables else join(sep, "var", "tmp", "bunkerweb", "variables.env")
tmp_variables_path = Path(tmp_variables_path)
nginx_variables_path = Path(sep, "etc", "nginx", "variables.env")
dotenv_env = dotenv_values(str(tmp_variables_path))
db = Database(
logger,
sqlalchemy_string=dotenv_env.get(
"DATABASE_URI", getenv("DATABASE_URI", None)
),
sqlalchemy_string=dotenv_env.get("DATABASE_URI", getenv("DATABASE_URI", None)),
)
env = {}
@ -270,16 +253,7 @@ if __name__ == "__main__":
sleep(5)
env = db.get_config()
elif (
not tmp_variables_path.exists()
or not nginx_variables_path.exists()
or (
tmp_variables_path.read_text(encoding="utf-8")
!= nginx_variables_path.read_text(encoding="utf-8")
)
or db.is_initialized()
and db.get_config() != dotenv_env
):
elif not tmp_variables_path.exists() or not nginx_variables_path.exists() or (tmp_variables_path.read_text(encoding="utf-8") != nginx_variables_path.read_text(encoding="utf-8")) or db.is_initialized() and db.get_config() != dotenv_env:
# run the config saver
proc = subprocess_run(
[
@ -349,9 +323,7 @@ if __name__ == "__main__":
custom_conf = {
"value": content,
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
f"{path_exploded.pop()}" if path_exploded[-1] not in root_dirs else None,
path_exploded[-1],
file.replace(".conf", ""),
),
@ -360,10 +332,7 @@ if __name__ == "__main__":
saving = True
in_db = False
for db_conf in db_configs:
if (
db_conf["service_id"] == custom_conf["exploded"][0]
and db_conf["name"] == custom_conf["exploded"][2]
):
if db_conf["service_id"] == custom_conf["exploded"][0] and db_conf["name"] == custom_conf["exploded"][2]:
in_db = True
if db_conf["method"] != "manual":
saving = False
@ -376,9 +345,7 @@ if __name__ == "__main__":
if saving:
custom_configs.append(custom_conf)
changes = changes or {hash(dict_to_frozenset(d)) for d in custom_configs} != {
hash(dict_to_frozenset(d)) for d in db_configs
}
changes = changes or {hash(dict_to_frozenset(d)) for d in custom_configs} != {hash(dict_to_frozenset(d)) for d in db_configs}
if changes:
err = db.save_custom_configs(custom_configs, "manual")
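
The change check hashes every config dict into a set and compares the sets, so ordering differences between the filesystem and the database are ignored. Plain dicts are not hashable, hence `dict_to_frozenset`; its definition is elided from this hunk, but a plausible recursive reconstruction is:

```python
def dict_to_frozenset(d):
    # hypothetical reconstruction: make nested dicts/lists hashable
    if isinstance(d, dict):
        return frozenset((k, dict_to_frozenset(v)) for k, v in d.items())
    if isinstance(d, list):
        return tuple(dict_to_frozenset(v) for v in d)
    return d


old = [{"name": "a", "value": b"1"}, {"name": "b", "value": b"2"}]
new = [{"name": "b", "value": b"2"}, {"name": "a", "value": b"1"}]  # reordered, same content
assert {hash(dict_to_frozenset(d)) for d in old} == {hash(dict_to_frozenset(d)) for d in new}
```
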
@ -404,9 +371,7 @@ if __name__ == "__main__":
with open(filename, "r", encoding="utf-8") as f:
_dir = dirname(filename)
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz", compresslevel=9
) as tar:
with tar_open(fileobj=plugin_content, mode="w:gz", compresslevel=9) as tar:
tar.add(_dir, arcname=basename(_dir), recursive=True)
plugin_content.seek(0, 0)
value = plugin_content.getvalue()
@ -435,9 +400,7 @@ if __name__ == "__main__":
db_plugin.pop("method", None)
tmp_db_plugins.append(db_plugin)
changes = {hash(dict_to_frozenset(d)) for d in tmp_external_plugins} != {
hash(dict_to_frozenset(d)) for d in tmp_db_plugins
}
changes = {hash(dict_to_frozenset(d)) for d in tmp_external_plugins} != {hash(dict_to_frozenset(d)) for d in tmp_db_plugins}
if changes:
err = db.update_external_plugins(external_plugins, delete_missing=True)
@ -463,9 +426,7 @@ if __name__ == "__main__":
ret = db.set_scheduler_first_start()
if ret:
logger.error(
f"An error occurred when setting the scheduler first start : {ret}"
)
logger.error(f"An error occurred when setting the scheduler first start : {ret}")
stop(1)
FIRST_RUN = True
@ -494,9 +455,7 @@ if __name__ == "__main__":
ret = db.checked_changes(CHANGES)
if ret:
logger.error(
f"An error occurred when setting the changes to checked in the database : {ret}"
)
logger.error(f"An error occurred when setting the changes to checked in the database : {ret}")
stop(1)
if RUN_JOBS_ONCE:
@ -578,17 +537,13 @@ if __name__ == "__main__":
logger.info("Successfully sent stop signal to temp nginx")
i = 0
while i < 20:
if not Path(
sep, "var", "run", "bunkerweb", "nginx.pid"
).is_file():
if not Path(sep, "var", "run", "bunkerweb", "nginx.pid").is_file():
break
logger.warning("Waiting for temp nginx to stop ...")
sleep(1)
i += 1
if i >= 20:
logger.error(
"Timeout error while waiting for temp nginx to stop"
)
logger.error("Timeout error while waiting for temp nginx to stop")
else:
# Start nginx
logger.info("Starting nginx ...")
@ -623,9 +578,7 @@ if __name__ == "__main__":
# infinite schedule for the jobs
logger.info("Executing job scheduler ...")
Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").write_text(
"ok", encoding="utf-8"
)
Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").write_text("ok", encoding="utf-8")
while RUN and not NEED_RELOAD:
SCHEDULER.run_pending()
sleep(1)
@ -633,9 +586,7 @@ if __name__ == "__main__":
changes = db.check_changes()
if isinstance(changes, str):
logger.error(
f"An error occurred when checking for changes in the database : {changes}"
)
logger.error(f"An error occurred when checking for changes in the database : {changes}")
stop(1)
# check if the plugins have changed since last time
@ -644,9 +595,7 @@ if __name__ == "__main__":
if FIRST_RUN:
# run the config saver to save potential ignored external plugins settings
logger.info(
"Running config saver to save potential ignored external plugins settings ..."
)
logger.info("Running config saver to save potential ignored external plugins settings ...")
proc = subprocess_run(
[
"python",
@ -707,9 +656,7 @@ if __name__ == "__main__":
if CONFIGS_NEED_GENERATION:
CHANGES.append("custom_configs")
generate_custom_configs(
db.get_custom_configs(), original_path=configs_path
)
generate_custom_configs(db.get_custom_configs(), original_path=configs_path)
if PLUGINS_NEED_GENERATION:
CHANGES.append("external_plugins")

View File

@ -6,17 +6,12 @@ from sys import path as sys_path, modules as sys_modules
from pathlib import Path
os_release_path = Path(sep, "etc", "os-release")
if os_release_path.is_file() and "Alpine" not in os_release_path.read_text(
encoding="utf-8"
):
if os_release_path.is_file() and "Alpine" not in os_release_path.read_text(encoding="utf-8"):
sys_path.append(join(sep, "usr", "share", "bunkerweb", "deps", "python"))
del os_release_path
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("utils",), ("api",), ("db",))
]:
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("utils",), ("api",), ("db",))]:
if deps_path not in sys_path:
sys_path.append(deps_path)
@ -138,9 +133,7 @@ if not getenv("FLASK_DEBUG", False) and not regex_match(
r"^(?=.*?\p{Lowercase_Letter})(?=.*?\p{Uppercase_Letter})(?=.*?\d)(?=.*?[ !\"#$%&'()*+,\-./:;<=>?@[\\\]^_`{|}~]).{8,}$",
getenv("ADMIN_PASSWORD", "changeme"),
):
app.logger.error(
"The admin password is not strong enough. It must contain at least 8 characters, including at least 1 uppercase letter, 1 lowercase letter, 1 number and 1 special character (#@?!$%^&*-)."
)
app.logger.error("The admin password is not strong enough. It must contain at least 8 characters, including at least 1 uppercase letter, 1 lowercase letter, 1 number and 1 special character (#@?!$%^&*-).")
stop(1)
login_manager = LoginManager()
@ -173,9 +166,7 @@ docker_client = None
kubernetes_client = None
if INTEGRATION in ("Docker", "Swarm", "Autoconf"):
try:
docker_client: DockerClient = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
docker_client: DockerClient = DockerClient(base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock"))
except (docker_APIError, DockerException):
app.logger.warning("No docker host found")
elif INTEGRATION == "Kubernetes":
@ -203,11 +194,7 @@ while not db.is_initialized():
app.logger.info("Database is ready")
Path(sep, "var", "tmp", "bunkerweb", "ui.healthy").write_text("ok", encoding="utf-8")
bw_version = (
Path(sep, "usr", "share", "bunkerweb", "VERSION")
.read_text(encoding="utf-8")
.strip()
)
bw_version = Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text(encoding="utf-8").strip()
try:
app.config.update(
@ -238,9 +225,7 @@ app.jinja_env.globals.update(check_settings=check_settings)
csrf = CSRFProtect()
csrf.init_app(app)
LOG_RX = re_compile(
r"^(?P<date>\d+/\d+/\d+\s\d+:\d+:\d+)\s\[(?P<level>[a-z]+)\]\s\d+#\d+:\s(?P<message>[^\n]+)$"
)
LOG_RX = re_compile(r"^(?P<date>\d+/\d+/\d+\s\d+:\d+:\d+)\s\[(?P<level>[a-z]+)\]\s\d+#\d+:\s(?P<message>[^\n]+)$")
def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
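For context on the collapsed LOG_RX line above, here is a minimal standalone sketch of what the pattern captures; the sample log line is invented:

from re import compile as re_compile

LOG_RX = re_compile(r"^(?P<date>\d+/\d+/\d+\s\d+:\d+:\d+)\s\[(?P<level>[a-z]+)\]\s\d+#\d+:\s(?P<message>[^\n]+)$")
match = LOG_RX.search("2023/10/03 12:01:24 [error] 42#42: something went wrong")
if match:
    # named groups split an nginx error-log line into its three parts
    print(match.group("date"), match.group("level"), match.group("message"))
    # -> 2023/10/03 12:01:24 error something went wrong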
@ -248,9 +233,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
error = False
if method == "services":
editing = operation == "edit"
service_custom_confs = glob(
join(sep, "etc", "bunkerweb", "configs", "*", args[1])
)
service_custom_confs = glob(join(sep, "etc", "bunkerweb", "configs", "*", args[1]))
moved = False
if operation == "new":
@ -261,9 +244,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
if listdir(service_custom_conf):
move(
service_custom_conf,
service_custom_conf.replace(
f"{sep}{args[1]}", f"{sep}{args[2]}"
).replace(join(sep, "etc"), join(sep, "var", "tmp")),
service_custom_conf.replace(f"{sep}{args[1]}", f"{sep}{args[2]}").replace(join(sep, "etc"), join(sep, "var", "tmp")),
)
moved = True
operation, error = app.config["CONFIG"].edit_service(args[1], args[0])
@ -276,9 +257,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
app.config["TO_FLASH"].append({"content": operation, "type": "success"})
if editing and moved and args[1] != args[2] and service_custom_confs:
for tmp_service_custom_conf in glob(
join(sep, "var", "tmp", "bunkerweb", "configs", "*", args[2])
):
for tmp_service_custom_conf in glob(join(sep, "var", "tmp", "bunkerweb", "configs", "*", args[2])):
move(
tmp_service_custom_conf,
tmp_service_custom_conf.replace(
@ -289,9 +268,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
error = app.config["CONFIGFILES"].save_configs()
if error:
app.config["TO_FLASH"].append({"content": error, "type": "error"})
rmtree(
join(sep, "var", "tmp", "bunkerweb", "configs"), ignore_errors=True
)
rmtree(join(sep, "var", "tmp", "bunkerweb", "configs"), ignore_errors=True)
if method == "global_config":
operation = app.config["CONFIG"].edit_global_conf(args[0])
elif method == "plugins":
@ -313,9 +290,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
if operation:
if isinstance(operation, list):
for op in operation:
app.config["TO_FLASH"].append(
{"content": f"Reload failed for the instance {op}", "type": "error"}
)
app.config["TO_FLASH"].append({"content": f"Reload failed for the instance {op}", "type": "error"})
elif operation.startswith("Can't"):
app.config["TO_FLASH"].append({"content": operation, "type": "error"})
else:
@ -327,9 +302,7 @@ def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
@app.after_request
def set_csp_header(response):
"""Set the Content-Security-Policy header to prevent XSS attacks."""
response.headers[
"Content-Security-Policy"
] = "object-src 'none'; frame-ancestors 'self';"
response.headers["Content-Security-Policy"] = "object-src 'none'; frame-ancestors 'self';"
return response
@ -438,7 +411,7 @@ def instances():
# Manage instances
if request.method == "POST":
# Check operation
if not "operation" in request.form or not request.form["operation"] in (
if "operation" not in request.form or request.form["operation"] not in (
"reload",
"start",
"stop",
@ -448,7 +421,7 @@ def instances():
return redirect(url_for("loading", next=url_for("instances")))
# Check that all fields are present
if not "INSTANCE_ID" in request.form:
if "INSTANCE_ID" not in request.form:
flash("Missing INSTANCE_ID parameter.", "error")
return redirect(url_for("loading", next=url_for("instances")))
@ -465,12 +438,7 @@ def instances():
url_for(
"loading",
next=url_for("instances"),
message=(
f"{request.form['operation'].title()}ing"
if request.form["operation"] != "stop"
else "Stopping"
)
+ " instance",
message=(f"{request.form['operation'].title()}ing" if request.form["operation"] != "stop" else "Stopping") + " instance",
)
)
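The `not "operation" in ...` rewrites in this file apply flake8's E713 ("test for membership should be 'not in x'"). A self-contained sketch of the rule, with invented form data:

form = {"operation": "reload"}
if not "INSTANCE_ID" in form:  # flagged by flake8 (E713)
    print("Missing INSTANCE_ID parameter.")
if "INSTANCE_ID" not in form:  # preferred spelling, as rewritten in this commit
    print("Missing INSTANCE_ID parameter.")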
@ -489,7 +457,7 @@ def instances():
def services():
if request.method == "POST":
# Check operation
if not "operation" in request.form or not request.form["operation"] in (
if "operation" not in request.form or request.form["operation"] not in (
"new",
"edit",
"delete",
@ -501,10 +469,7 @@ def services():
variables = deepcopy(request.form.to_dict())
del variables["csrf_token"]
if (
not "OLD_SERVER_NAME" in request.form
and request.form["operation"] == "edit"
):
if "OLD_SERVER_NAME" not in request.form and request.form["operation"] == "edit":
flash("Missing OLD_SERVER_NAME parameter.", "error")
return redirect(url_for("loading", next=url_for("services")))
@ -527,19 +492,10 @@ def services():
elif value == "off":
value = "no"
if variable in variables and (
variable != "SERVER_NAME"
and value == config.get(variable, None)
or not value.strip()
):
if variable in variables and (variable != "SERVER_NAME" and value == config.get(variable, None) or not value.strip()):
del variables[variable]
if (
request.form["operation"] == "edit"
and len(variables) == 1
and "SERVER_NAME" in variables
and variables["SERVER_NAME"] == request.form.get("OLD_SERVER_NAME", "")
):
if request.form["operation"] == "edit" and len(variables) == 1 and "SERVER_NAME" in variables and variables["SERVER_NAME"] == request.form.get("OLD_SERVER_NAME", ""):
flash(
"The service was not edited because no values were changed.",
"error",
@ -553,13 +509,11 @@ def services():
# Delete
elif request.form["operation"] == "delete":
if not "SERVER_NAME" in request.form:
if "SERVER_NAME" not in request.form:
flash("Missing SERVER_NAME parameter.", "error")
return redirect(url_for("loading", next=url_for("services")))
error = app.config["CONFIG"].check_variables(
{"SERVER_NAME": request.form["SERVER_NAME"]}
)
error = app.config["CONFIG"].check_variables({"SERVER_NAME": request.form["SERVER_NAME"]})
if error:
return redirect(url_for("loading", next=url_for("services")))
@ -584,15 +538,11 @@ def services():
message = ""
if request.form["operation"] == "new":
message = (
f"Creating service {variables.get('SERVER_NAME', '').split(' ')[0]}"
)
message = f"Creating service {variables.get('SERVER_NAME', '').split(' ')[0]}"
elif request.form["operation"] == "edit":
message = f"Saving configuration for service {request.form.get('OLD_SERVER_NAME', '').split(' ')[0]}"
elif request.form["operation"] == "delete":
message = (
f"Deleting service {request.form.get('SERVER_NAME', '').split(' ')[0]}"
)
message = f"Deleting service {request.form.get('SERVER_NAME', '').split(' ')[0]}"
return redirect(url_for("loading", next=url_for("services"), message=message))
@ -644,9 +594,7 @@ def global_config():
del variables[variable]
if not variables:
flash(
"The global configuration was not edited because no values were changed."
)
flash("The global configuration was not edited because no values were changed.")
return redirect(url_for("loading", next=url_for("global_config")))
error = app.config["CONFIG"].check_variables(variables, True)
@ -688,7 +636,7 @@ def configs():
operation = ""
# Check operation
if not "operation" in request.form or not request.form["operation"] in (
if "operation" not in request.form or request.form["operation"] not in (
"new",
"edit",
"delete",
@ -720,21 +668,15 @@ def configs():
if "old_name" in variables:
variables["old_name"] = f"{variables['old_name']}.conf"
variables["content"] = BeautifulSoup(
variables["content"], "html.parser"
).get_text()
variables["content"] = BeautifulSoup(variables["content"], "html.parser").get_text()
error = False
if request.form["operation"] == "new":
if variables["type"] == "folder":
operation, error = app.config["CONFIGFILES"].create_folder(
variables["path"], variables["name"]
)
operation, error = app.config["CONFIGFILES"].create_folder(variables["path"], variables["name"])
elif variables["type"] == "file":
operation, error = app.config["CONFIGFILES"].create_file(
variables["path"], variables["name"], variables["content"]
)
operation, error = app.config["CONFIGFILES"].create_file(variables["path"], variables["name"], variables["content"])
elif request.form["operation"] == "edit":
if variables["type"] == "folder":
operation, error = app.config["CONFIGFILES"].edit_folder(
@ -774,10 +716,7 @@ def configs():
path_to_dict(
join(sep, "etc", "bunkerweb", "configs"),
db_data=db.get_custom_configs(),
services=app.config["CONFIG"]
.get_config(methods=False)
.get("SERVER_NAME", "")
.split(" "),
services=app.config["CONFIG"].get_config(methods=False).get("SERVER_NAME", "").split(" "),
)
],
dark_mode=app.config["DARK_MODE"],
@ -851,9 +790,7 @@ def plugins():
)
else:
try:
with tar_open(
str(tmp_ui_path.joinpath(file)), errorlevel=2
) as tar_file:
with tar_open(str(tmp_ui_path.joinpath(file)), errorlevel=2) as tar_file:
try:
tar_file.getmember("plugin.json")
except KeyError:
@ -882,24 +819,12 @@ def plugins():
)
if is_dir:
dirs = [
d
for d in listdir(str(temp_folder_path))
if temp_folder_path.joinpath(d).is_dir()
]
dirs = [d for d in listdir(str(temp_folder_path)) if temp_folder_path.joinpath(d).is_dir()]
if (
not dirs
or len(dirs) > 1
or not temp_folder_path.joinpath(
dirs[0], "plugin.json"
).is_file()
):
if not dirs or len(dirs) > 1 or not temp_folder_path.joinpath(dirs[0], "plugin.json").is_file():
raise KeyError
for file_name in listdir(
str(temp_folder_path.joinpath(dirs[0]))
):
for file_name in listdir(str(temp_folder_path.joinpath(dirs[0]))):
move(
str(temp_folder_path.joinpath(dirs[0], file_name)),
str(temp_folder_path.joinpath(file_name)),
@ -909,11 +834,7 @@ def plugins():
ignore_errors=True,
)
plugin_file = json_loads(
temp_folder_path.joinpath("plugin.json").read_text(
encoding="utf-8"
)
)
plugin_file = json_loads(temp_folder_path.joinpath("plugin.json").read_text(encoding="utf-8"))
if not all(key in plugin_file.keys() for key in PLUGIN_KEYS):
raise ValueError
@ -992,9 +913,7 @@ def plugins():
flash(f"{e}", "error")
finally:
if error != 1:
flash(
f"Successfully created plugin: <b><i>{folder_name}</i></b>"
)
flash(f"Successfully created plugin: <b><i>{folder_name}</i></b>")
error = 0
@ -1030,9 +949,7 @@ def plugins():
if tmp_ui_path.exists():
rmtree(str(tmp_ui_path), ignore_errors=True)
return redirect(
url_for("loading", next=url_for("plugins"), message="Reloading plugins")
)
return redirect(url_for("loading", next=url_for("plugins"), message="Reloading plugins"))
plugin_args = app.config["PLUGIN_ARGS"]
app.config["PLUGIN_ARGS"] = {}
@ -1051,11 +968,7 @@ def plugins():
csrf_token=generate_csrf,
url_for=url_for,
dark_mode=app.config["DARK_MODE"],
**(
plugin_args["args"]
if plugin_args.get("plugin", None) == plugin_id
else {}
),
**(plugin_args["args"] if plugin_args.get("plugin", None) == plugin_id else {}),
)
plugins = app.config["CONFIG"].get_plugins()
@ -1109,9 +1022,7 @@ def upload_plugin():
plugins.append(basename(dirname(file)))
if len(plugins) > 1:
tar_file.extractall(str(tmp_ui_path) + "/")
folder_name = uploaded_file.filename.replace(".tar.gz", "").replace(
".tar.xz", ""
)
folder_name = uploaded_file.filename.replace(".tar.gz", "").replace(".tar.xz", "")
if len(plugins) <= 1:
io.seek(0, 0)
@ -1120,12 +1031,8 @@ def upload_plugin():
for plugin in plugins:
with BytesIO() as tgz:
with tar_open(
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3
) as tf:
tf.add(
str(tmp_ui_path.joinpath(folder_name, plugin)), arcname=plugin
)
with tar_open(mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3) as tf:
tf.add(str(tmp_ui_path.joinpath(folder_name, plugin)), arcname=plugin)
tgz.seek(0, 0)
tmp_ui_path.joinpath(f"{plugin}.tar.gz").write_bytes(tgz.read())
@ -1195,15 +1102,8 @@ def custom_plugin(plugin):
sys_modules.pop("actions")
del actions
if (
request.method != "POST"
or error is True
or res is None
or isinstance(res, dict) is False
):
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
if request.method != "POST" or error is True or res is None or isinstance(res, dict) is False:
return redirect(url_for("loading", next=url_for("plugins", plugin_id=plugin)))
app.config["PLUGIN_ARGS"] = {"plugin": plugin, "args": res}
@ -1221,10 +1121,7 @@ def cache():
join(sep, "var", "cache", "bunkerweb"),
is_cache=True,
db_data=db.get_jobs_cache_files(),
services=app.config["CONFIG"]
.get_config(methods=False)
.get("SERVER_NAME", "")
.split(" "),
services=app.config["CONFIG"].get_config(methods=False).get("SERVER_NAME", "").split(" "),
)
],
dark_mode=app.config["DARK_MODE"],
@ -1274,9 +1171,7 @@ def logs_linux():
nginx_error_file = Path(sep, "var", "log", "bunkerweb", "error.log")
if nginx_error_file.is_file():
with open(nginx_error_file, encoding="utf-8") as f:
for line in f.readlines()[
int(last_update.split(".")[0]) if last_update else 0 :
]:
for line in f.readlines()[int(last_update.split(".")[0]) if last_update else 0 :]: # noqa: E203
match = LOG_RX.search(line)
if not match:
continue
@ -1288,15 +1183,10 @@ def logs_linux():
logs_error[-1] += f"\n{line}"
continue
logs_error.append(line)
elif (
all(f"[{log_level}]" != level for log_level in NGINX_LOG_LEVELS)
and temp_multiple_lines
):
elif all(f"[{log_level}]" != level for log_level in NGINX_LOG_LEVELS) and temp_multiple_lines:
temp_multiple_lines.append(line)
else:
logs_error.append(
f"{datetime.strptime(date, '%Y/%m/%d %H:%M:%S').replace(tzinfo=timezone.utc).timestamp()} {line}"
)
logs_error.append(f"{datetime.strptime(date, '%Y/%m/%d %H:%M:%S').replace(tzinfo=timezone.utc).timestamp()} {line}")
if temp_multiple_lines:
logs_error.append("\n".join(temp_multiple_lines))
@ -1305,12 +1195,8 @@ def logs_linux():
nginx_access_file = Path(sep, "var", "log", "bunkerweb", "access.log")
if nginx_access_file.is_file():
with open(nginx_access_file, encoding="utf-8") as f:
for line in f.readlines()[
int(last_update.split(".")[1]) if last_update else 0 :
]:
logs_access.append(
f"{datetime.strptime(line[line.find('[') + 1: line.find(']')], '%d/%b/%Y:%H:%M:%S %z').replace(tzinfo=timezone.utc).timestamp()} {line}"
)
for line in f.readlines()[int(last_update.split(".")[1]) if last_update else 0 :]: # noqa: E203
logs_access.append(f"{datetime.strptime(line[line.find('[') + 1: line.find(']')], '%d/%b/%Y:%H:%M:%S %z').replace(tzinfo=timezone.utc).timestamp()} {line}")
raw_logs = logs_error + logs_access
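The `# noqa: E203` markers added in this file exist because Black puts a space before the slice colon when the lower bound is an expression, which flake8's E203 ("whitespace before ':'") rejects; the comment silences that one check on that one line. Standalone sketch with invented data:

last_update = "12.34"
entries = ["l1", "l2", "l3"]
# Black-style spacing around ":" for a non-trivial lower bound:
tail = entries[int(last_update.split(".")[0]) if last_update else 0 :]  # noqa: E203
print(tail)  # -> [] (offset 12 is past the end of the invented list)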
@ -1339,17 +1225,8 @@ def logs_linux():
log_lower = log.lower()
error_type = (
"error"
if "[error]" in log_lower
or "[crit]" in log_lower
or "[alert]" in log_lower
or "" in log_lower
else (
"warn"
if "[warn]" in log_lower or "⚠️" in log_lower
else (
"info" if "[info]" in log_lower or "" in log_lower else "message"
)
)
if "[error]" in log_lower or "[crit]" in log_lower or "[alert]" in log_lower or "" in log_lower
else ("warn" if "[warn]" in log_lower or "⚠️" in log_lower else ("info" if "[info]" in log_lower or "" in log_lower else "message"))
)
logs.append(
@ -1370,9 +1247,7 @@ def logs_linux():
return jsonify(
{
"logs": logs,
"last_update": f"{count_error_logs + int(last_update.split('.')[0])}.{len(logs_access) + int(last_update.split('.')[1])}"
if last_update
else f"{count_error_logs}.{len(logs_access)}",
"last_update": f"{count_error_logs + int(last_update.split('.')[0])}.{len(logs_access) + int(last_update.split('.')[1])}" if last_update else f"{count_error_logs}.{len(logs_access)}",
}
)
@ -1398,10 +1273,7 @@ def logs_container(container_id):
422,
)
elif not last_update:
last_update = int(
datetime.now().timestamp()
- timedelta(days=1).total_seconds() # 1 day before
)
last_update = int(datetime.now().timestamp() - timedelta(days=1).total_seconds()) # 1 day before
else:
last_update = int(last_update) // 1000
@ -1474,19 +1346,8 @@ def logs_container(container_id):
{
"content": log,
"type": "error"
if "[error]" in log_lower
or "[crit]" in log_lower
or "[alert]" in log_lower
or "" in log_lower
else (
"warn"
if "[warn]" in log_lower or "⚠️" in log_lower
else (
"info"
if "[info]" in log_lower or "" in log_lower
else "message"
)
),
if "[error]" in log_lower or "[crit]" in log_lower or "[alert]" in log_lower or "" in log_lower
else ("warn" if "[warn]" in log_lower or "⚠️" in log_lower else ("info" if "[info]" in log_lower or "" in log_lower else "message")),
}
)
@ -1541,14 +1402,8 @@ def jobs_download():
@app.route("/login", methods=["GET", "POST"])
def login():
fail = False
if (
request.method == "POST"
and "username" in request.form
and "password" in request.form
):
if app.config["USER"].get_id() == request.form["username"] and app.config[
"USER"
].check_password(request.form["password"]):
if request.method == "POST" and "username" in request.form and "password" in request.form:
if app.config["USER"].get_id() == request.form["username"] and app.config["USER"].check_password(request.form["password"]):
# log the user in
next_url = request.form.get("next")
login_user(app.config["USER"])
@ -1588,9 +1443,7 @@ def darkmode():
def check_reloading():
if not app.config["RELOADING"] or app.config["LAST_RELOAD"] + 60 < time():
if app.config["RELOADING"]:
app.logger.warning(
"Reloading took too long, forcing the state to be reloaded"
)
app.logger.warning("Reloading took too long, forcing the state to be reloaded")
flash("Forced the status to be reloaded", "error")
app.config["RELOADING"] = False

View File

@ -14,11 +14,7 @@ from uuid import uuid4
class Config:
def __init__(self, db) -> None:
self.__settings = json_loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(
encoding="utf-8"
)
)
self.__settings = json_loads(Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(encoding="utf-8"))
self.__db = db
def __gen_conf(self, global_conf: dict, services_conf: list[dict]) -> None:
@ -42,11 +38,7 @@ class Config:
server_name = service["SERVER_NAME"].split(" ")[0]
for k in service:
key_without_server_name = k.replace(f"{server_name}_", "")
if (
plugins_settings[key_without_server_name]["context"] != "global"
if key_without_server_name in plugins_settings
else True
):
if plugins_settings[key_without_server_name]["context"] != "global" if key_without_server_name in plugins_settings else True:
if not k.startswith(server_name) or k in plugins_settings:
conf[f"{server_name}_{k}"] = service[k]
else:
@ -86,9 +78,7 @@ class Config:
**self.__settings,
}
def get_plugins(
self, *, external: bool = False, with_data: bool = False
) -> List[dict]:
def get_plugins(self, *, external: bool = False, with_data: bool = False) -> List[dict]:
plugins = self.__db.get_plugins(external=external, with_data=with_data)
plugins.sort(key=lambda x: x["name"])
@ -153,18 +143,13 @@ class Config:
setting = k
else:
setting = k[0 : k.rfind("_")]
if (
setting not in plugins_settings
or "multiple" not in plugins_settings[setting]
):
setting = k[0 : k.rfind("_")] # noqa: E203
if setting not in plugins_settings or "multiple" not in plugins_settings[setting]:
error = 1
flash(f"Variable {k} is not valid.", "error")
continue
if not (
_global ^ (plugins_settings[setting]["context"] == "global")
) and re_search(plugins_settings[setting]["regex"], v):
if not (_global ^ (plugins_settings[setting]["context"] == "global")) and re_search(plugins_settings[setting]["regex"], v):
check = True
if not check:
@ -175,9 +160,7 @@ class Config:
return error
def reload_config(self) -> None:
self.__gen_conf(
self.get_config(methods=False), self.get_services(methods=False)
)
self.__gen_conf(self.get_config(methods=False), self.get_services(methods=False))
def new_service(self, variables: dict) -> Tuple[str, int]:
"""Creates a new service from the given variables
@ -200,10 +183,7 @@ class Config:
services = self.get_services(methods=False)
server_name_splitted = variables["SERVER_NAME"].split(" ")
for service in services:
if (
service["SERVER_NAME"] == variables["SERVER_NAME"]
or service["SERVER_NAME"] in server_name_splitted
):
if service["SERVER_NAME"] == variables["SERVER_NAME"] or service["SERVER_NAME"] in server_name_splitted:
return (
f"Service {service['SERVER_NAME'].split(' ')[0]} already exists.",
1,
@ -236,20 +216,14 @@ class Config:
server_name_splitted = variables["SERVER_NAME"].split(" ")
old_server_name_splitted = old_server_name.split(" ")
for i, service in enumerate(deepcopy(services)):
if (
service["SERVER_NAME"] == variables["SERVER_NAME"]
or service["SERVER_NAME"] in server_name_splitted
):
if service["SERVER_NAME"] == variables["SERVER_NAME"] or service["SERVER_NAME"] in server_name_splitted:
if changed_server_name:
return (
f"Service {service['SERVER_NAME'].split(' ')[0]} already exists.",
1,
)
services.pop(i)
elif changed_server_name and (
service["SERVER_NAME"] == old_server_name
or service["SERVER_NAME"] in old_server_name_splitted
):
elif changed_server_name and (service["SERVER_NAME"] == old_server_name or service["SERVER_NAME"] in old_server_name_splitted):
services.pop(i)
services.append(variables)
@ -279,9 +253,7 @@ class Config:
str
the confirmation message
"""
self.__gen_conf(
self.get_config(methods=False) | variables, self.get_services(methods=False)
)
self.__gen_conf(self.get_config(methods=False) | variables, self.get_services(methods=False))
return "The global configuration has been edited."
def delete_service(self, service_name: str) -> Tuple[str, int]:
@ -317,9 +289,7 @@ class Config:
if not found:
return f"Can't delete missing {service_name} configuration.", 1
full_env["SERVER_NAME"] = " ".join(
[s for s in full_env["SERVER_NAME"].split(" ") if s != service_name]
)
full_env["SERVER_NAME"] = " ".join([s for s in full_env["SERVER_NAME"].split(" ") if s != service_name])
new_env = deepcopy(full_env)

View File

@ -29,12 +29,7 @@ def generate_custom_configs(
class ConfigFiles:
def __init__(self, logger, db):
self.__name_regex = re_compile(r"^[\w.-]{1,64}$")
self.__root_dirs = [
child["name"]
for child in path_to_dict(join(sep, "etc", "bunkerweb", "configs"))[
"children"
]
]
self.__root_dirs = [child["name"] for child in path_to_dict(join(sep, "etc", "bunkerweb", "configs"))["children"]]
self.__file_creation_blacklist = ["http", "stream"]
self.__logger = logger
self.__db = db
@ -68,9 +63,7 @@ class ConfigFiles:
{
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
f"{path_exploded.pop()}" if path_exploded[-1] not in root_dirs else None,
path_exploded[-1],
file.replace(".conf", ""),
),
@ -87,20 +80,14 @@ class ConfigFiles:
def check_name(self, name: str) -> bool:
return self.__name_regex.match(name) is not None
def check_path(
self, path: str, root_path: str = join(sep, "etc", "bunkerweb", "configs")
) -> str:
def check_path(self, path: str, root_path: str = join(sep, "etc", "bunkerweb", "configs")) -> str:
root_dir: str = path.split("/")[4]
if not (
path.startswith(root_path)
or root_path == join(sep, "etc", "bunkerweb", "configs")
and path.startswith(root_path)
and root_dir in self.__root_dirs
and (
not path.endswith(".conf")
or root_dir not in self.__file_creation_blacklist
or len(path.split("/")) > 5
)
and (not path.endswith(".conf") or root_dir not in self.__file_creation_blacklist or len(path.split("/")) > 5)
):
return f"{path} is not a valid path"
@ -110,9 +97,7 @@ class ConfigFiles:
dirs = "/".join(dirs)
if len(dirs) > 1:
for x in range(nbr_children - 1):
if not Path(
root_path, root_dir, "/".join(dirs.split("/")[0:-x])
).exists():
if not Path(root_path, root_dir, "/".join(dirs.split("/")[0:-x])).exists():
return f"{join(root_path, root_dir, '/'.join(dirs.split('/')[0:-x]))} doesn't exist"
return ""
@ -170,9 +155,7 @@ class ConfigFiles:
0,
)
def edit_file(
self, path: str, name: str, old_name: str, content: str
) -> Tuple[str, int]:
def edit_file(self, path: str, name: str, old_name: str, content: str) -> Tuple[str, int]:
new_path = join(dirname(path), name)
old_path = join(dirname(path), old_name)

View File

@ -34,15 +34,7 @@ class Instance:
self.name = name
self.hostname = hostname
self._type = _type
self.health = status == "up" and (
(
data.attrs["State"]["Health"]["Status"] == "healthy"
if "Health" in data.attrs["State"]
else False
)
if _type == "container" and data
else True
)
self.health = status == "up" and ((data.attrs["State"]["Health"]["Status"] == "healthy" if "Health" in data.attrs["State"] else False) if _type == "container" and data else True)
self.env = data
self.apiCaller = apiCaller or ApiCaller()
@ -133,13 +125,8 @@ class Instances:
instances = []
# Docker instances (containers or services)
if self.__docker_client is not None:
for instance in self.__docker_client.containers.list(
all=True, filters={"label": "bunkerweb.INSTANCE"}
):
env_variables = {
x[0]: x[1]
for x in [env.split("=") for env in instance.attrs["Config"]["Env"]]
}
for instance in self.__docker_client.containers.list(all=True, filters={"label": "bunkerweb.INSTANCE"}):
env_variables = {x[0]: x[1] for x in [env.split("=") for env in instance.attrs["Config"]["Env"]]}
instances.append(
Instance(
@ -160,9 +147,7 @@ class Instances:
)
)
elif self.__integration == "Swarm":
for instance in self.__docker_client.services.list(
filters={"label": "bunkerweb.INSTANCE"}
):
for instance in self.__docker_client.services.list(filters={"label": "bunkerweb.INSTANCE"}):
status = "down"
desired_tasks = instance.attrs["ServiceStatus"]["DesiredTasks"]
running_tasks = instance.attrs["ServiceStatus"]["RunningTasks"]
@ -173,9 +158,7 @@ class Instances:
api_http_port = None
api_server_name = None
for var in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"][
"Env"
]:
for var in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"]:
if var.startswith("API_HTTP_PORT="):
api_http_port = var.replace("API_HTTP_PORT=", "", 1)
elif var.startswith("API_SERVER_NAME="):
@ -201,16 +184,9 @@ class Instances:
)
)
elif self.__integration == "Kubernetes":
for pod in self.__kubernetes_client.list_pod_for_all_namespaces(
watch=False
).items:
if (
pod.metadata.annotations != None
and "bunkerweb.io/INSTANCE" in pod.metadata.annotations
):
env_variables = {
env.name: env.value or "" for env in pod.spec.containers[0].env
}
for pod in self.__kubernetes_client.list_pod_for_all_namespaces(watch=False).items:
if pod.metadata.annotations is not None and "bunkerweb.io/INSTANCE" in pod.metadata.annotations:
env_variables = {env.name: env.value or "" for env in pod.spec.containers[0].env}
status = "up"
if pod.status.conditions is not None:
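The `!= None` → `is not None` change on the annotations check in this hunk is flake8's E711 ("comparison to None should be 'if cond is not None:'"): identity, not equality. A runnable sketch:

annotations = None
if annotations != None:  # flagged by flake8 (E711): equality test against None
    pass
if annotations is not None:  # preferred: identity test, as in the hunk above
    pass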
@ -231,9 +207,7 @@ class Instances:
[
API(
f"http://{pod.status.pod_ip}:{env_variables.get('API_HTTP_PORT', '5000')}",
host=env_variables.get(
"API_SERVER_NAME", "bwapi"
),
host=env_variables.get("API_SERVER_NAME", "bwapi"),
)
]
),
@ -247,9 +221,7 @@ class Instances:
# Local instance
if Path(sep, "usr", "sbin", "nginx").exists():
env_variables = dotenv_values(
join(sep, "etc", "bunkerweb", "variables.env")
)
env_variables = dotenv_values(join(sep, "etc", "bunkerweb", "variables.env"))
instances.insert(
0,
@ -258,9 +230,7 @@ class Instances:
"local",
"127.0.0.1",
"local",
"up"
if Path(sep, "var", "run", "bunkerweb", "nginx.pid").exists()
else "down",
"up" if Path(sep, "var", "run", "bunkerweb", "nginx.pid").exists() else "down",
None,
ApiCaller(
[
@ -287,9 +257,7 @@ class Instances:
return not_reloaded or "Successfully reloaded instances"
def reload_instance(
self, _id: Optional[int] = None, instance: Optional[Instance] = None
) -> str:
def reload_instance(self, _id: Optional[int] = None, instance: Optional[Instance] = None) -> str:
if instance is None:
instance = self.__instance_from_id(_id)

View File

@ -30,9 +30,7 @@ class ReverseProxied(ProxyFix):
if x_for:
environ["REMOTE_ADDR"] = x_for
x_proto = self._get_real_value(
self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO")
)
x_proto = self._get_real_value(self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO"))
if x_proto:
environ["wsgi.url_scheme"] = x_proto
@ -53,16 +51,12 @@ class ReverseProxied(ProxyFix):
environ["HTTP_HOST"] = f"{host}:{x_port}"
environ["SERVER_PORT"] = x_port
x_prefix = self._get_real_value(
self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX")
)
x_prefix = self._get_real_value(self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX"))
if x_prefix:
environ["SCRIPT_NAME"] = x_prefix
environ["PATH_INFO"] = environ["PATH_INFO"][len(environ["SCRIPT_NAME"]) :]
environ[
"ABSOLUTE_URI"
] = f"{environ['wsgi.url_scheme']}://{environ['HTTP_HOST']}{environ['SCRIPT_NAME']}/"
environ["PATH_INFO"] = environ["PATH_INFO"][len(environ["SCRIPT_NAME"]) :] # noqa: E203
environ["ABSOLUTE_URI"] = f"{environ['wsgi.url_scheme']}://{environ['HTTP_HOST']}{environ['SCRIPT_NAME']}/"
environ["SESSION_COOKIE_DOMAIN"] = environ["HTTP_HOST"]
return self.app(environ, start_response)

View File

@ -78,18 +78,9 @@ def path_to_dict(
}
if conf["service_id"]:
d["children"][config_types.index(type_lower)]["children"][
[
x["name"]
for x in d["children"][config_types.index(type_lower)][
"children"
]
].index(conf["service_id"])
]["children"].append(file_info)
d["children"][config_types.index(type_lower)]["children"][[x["name"] for x in d["children"][config_types.index(type_lower)]["children"]].index(conf["service_id"])]["children"].append(file_info)
else:
d["children"][config_types.index(type_lower)]["children"].append(
file_info
)
d["children"][config_types.index(type_lower)]["children"].append(file_info)
else:
d = {
"name": "cache",
@ -130,9 +121,7 @@ def path_to_dict(
}
if conf["service_id"]:
d["children"][
[x["name"] for x in d["children"]].index(conf["service_id"])
]["children"].append(file_info)
d["children"][[x["name"] for x in d["children"]].index(conf["service_id"])]["children"].append(file_info)
else:
d["children"].append(file_info)

View File

@ -43,9 +43,7 @@ class AutoconfTest(Test):
mkdir("/tmp/www")
copy("./misc/integrations/autoconf.yml", "/tmp/autoconf/docker-compose.yml")
compose = "/tmp/autoconf/docker-compose.yml"
Test.replace_in_file(
compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest"
)
Test.replace_in_file(compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest")
Test.replace_in_file(
compose,
r"bunkerity/bunkerweb-autoconf:.*$",
@ -60,16 +58,9 @@ class AutoconfTest(Test):
with open(compose, "r") as f:
data = safe_load(f.read())
data["services"]["bunkerweb"]["volumes"] = ["/tmp/www:/var/www/html"]
if (
not "AUTO_LETS_ENCRYPT=yes"
in data["services"]["bunkerweb"]["environment"]
):
data["services"]["bunkerweb"]["environment"].append(
"AUTO_LETS_ENCRYPT=yes"
)
data["services"]["bunkerweb"]["environment"].append(
"USE_LETS_ENCRYPT_STAGING=yes"
)
if "AUTO_LETS_ENCRYPT=yes" not in data["services"]["bunkerweb"]["environment"]:
data["services"]["bunkerweb"]["environment"].append("AUTO_LETS_ENCRYPT=yes")
data["services"]["bunkerweb"]["environment"].append("USE_LETS_ENCRYPT_STAGING=yes")
with open(compose, "w") as f:
f.write(dump(data))
proc = run(
@ -134,9 +125,7 @@ class AutoconfTest(Test):
compose = f"/tmp/tests/{self._name}/autoconf.yml"
example_data = f"/tmp/tests/{self._name}/bw-data"
example_www = f"/tmp/tests/{self._name}/www"
Test.replace_in_file(
compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest"
)
Test.replace_in_file(compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest")
Test.replace_in_file(
compose,
r"bunkerity/bunkerweb-scheduler:.*$",
@ -217,6 +206,6 @@ class AutoconfTest(Test):
def _debug_fail(self):
autoconf = "/tmp/autoconf"
proc = run("docker-compose logs", shell=True, cwd=autoconf)
run("docker-compose logs", shell=True, cwd=autoconf)
test = f"/tmp/tests/{self._name}"
proc = run("docker-compose -f autoconf.yml logs", shell=True, cwd=test)
run("docker-compose -f autoconf.yml logs", shell=True, cwd=test)

View File

@ -19,15 +19,9 @@ class DockerTest(Test):
self._domains = {
r"www\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"auth\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"app1\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6)
+ "."
+ getenv("TEST_DOMAIN1_3"),
r"app1\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_3"),
}
self._check_domains()
@ -53,9 +47,7 @@ class DockerTest(Test):
test = "/tmp/tests/" + self._name
compose = "/tmp/tests/" + self._name + "/docker-compose.yml"
example_data = "/tmp/tests/" + self._name + "/bw-data"
Test.replace_in_file(
compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest"
)
Test.replace_in_file(compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest")
Test.replace_in_file(
compose,
r"bunkerity/bunkerweb-scheduler:.*$",
@ -69,9 +61,7 @@ class DockerTest(Test):
r"AUTO_LETS_ENCRYPT=yes",
"AUTO_LETS_ENCRYPT=yes\n - USE_LETS_ENCRYPT_STAGING=yes",
)
Test.replace_in_file(
compose, r"DISABLE_DEFAULT_SERVER=yes", "DISABLE_DEFAULT_SERVER=no"
)
Test.replace_in_file(compose, r"DISABLE_DEFAULT_SERVER=yes", "DISABLE_DEFAULT_SERVER=no")
for ex_domain, test_domain in self._domains.items():
Test.replace_in_files(test, ex_domain, test_domain)
Test.rename(test, ex_domain, test_domain)
@ -88,9 +78,7 @@ class DockerTest(Test):
)
if proc.returncode != 0:
raise (Exception("cp bw-data failed"))
proc = run(
"docker-compose pull --ignore-pull-failures", shell=True, cwd=test
)
proc = run("docker-compose pull --ignore-pull-failures", shell=True, cwd=test)
if proc.returncode != 0:
raise (Exception("docker-compose pull failed"))
proc = run("docker-compose up -d", shell=True, cwd=test)
@ -124,4 +112,4 @@ class DockerTest(Test):
def _debug_fail(self):
test = "/tmp/tests/" + self._name
proc = run("docker-compose logs", shell=True, cwd=test)
run("docker-compose logs", shell=True, cwd=test)

View File

@ -1,7 +1,7 @@
from Test import Test
from os.path import isdir, join, isfile
from os import chown, walk, getenv, listdir, mkdir
from shutil import copytree, rmtree, copy
from os.path import isfile
from os import getenv, mkdir
from shutil import rmtree, copy
from traceback import format_exc
from subprocess import run
from time import sleep
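The import trims above correspond to flake8's F401 ("imported but unused"): only names the module still references survive. An illustrative, self-contained sketch:

from os.path import isfile  # kept: referenced below
# from os import chown, walk  # would be F401 here: nothing below uses them

print(isfile("/etc/os-release"))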
@ -42,9 +42,7 @@ class KubernetesTest(Test):
for yaml in data:
if yaml["metadata"]["name"] == "bunkerweb":
for k, v in append_env.items():
yaml["spec"]["template"]["spec"]["containers"][0]["env"].append(
{"name": k, "value": v}
)
yaml["spec"]["template"]["spec"]["containers"][0]["env"].append({"name": k, "value": v})
for ele in yaml["spec"]["template"]["spec"]["containers"][0]["env"]:
if ele["name"] in replace_env:
ele["value"] = replace_env[ele["name"]]
@ -53,9 +51,7 @@ class KubernetesTest(Test):
"bunkerweb-controller",
"bunkerweb-scheduler",
]:
yaml["spec"]["template"]["spec"]["imagePullSecrets"] = [
{"name": "secret-registry"}
]
yaml["spec"]["template"]["spec"]["imagePullSecrets"] = [{"name": "secret-registry"}]
yamls.append(yaml)
with open(deploy, "w") as f:
f.write(dump_all(yamls))
@ -74,9 +70,7 @@ class KubernetesTest(Test):
r"bunkerity/bunkerweb-scheduler:.*$",
f"ghcr.io/bunkerity/scheduler-tests:{getenv('IMAGE_TAG')}",
)
proc = run(
"kubectl apply -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True
)
proc = run("kubectl apply -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True)
if proc.returncode != 0:
raise (Exception("kubectl apply bunkerweb failed (k8s stack)"))
healthy = False
@ -154,9 +148,7 @@ class KubernetesTest(Test):
try:
if not Test.end():
return False
proc = run(
"kubectl delete -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True
)
proc = run("kubectl delete -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True)
if proc.returncode != 0:
ret = False
rmtree("/tmp/kubernetes")
@ -173,8 +165,6 @@ class KubernetesTest(Test):
try:
super()._setup_test()
test = f"/tmp/tests/{self._name}"
deploy = f"/tmp/tests/{self._name}/kubernetes.yml"
example_data = f"./examples/{self._name}/bw-data"
for ex_domain, test_domain in self._domains.items():
Test.replace_in_files(test, ex_domain, test_domain)
Test.rename(test, ex_domain, test_domain)

View File

@ -17,7 +17,7 @@ class LinuxTest(Test):
r"app2\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"app3\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_3')}",
}
if not distro in ("ubuntu", "debian", "fedora", "centos", "rhel"):
if distro not in ("ubuntu", "debian", "fedora", "centos", "rhel"):
raise Exception(f"unknown distro {distro}")
self.__distro = distro
@ -33,7 +33,7 @@ class LinuxTest(Test):
if distro in ("ubuntu", "debian"):
cmd = "echo force-bad-version >> /etc/dpkg/dpkg.cfg ; apt install -y /opt/\\$(ls /opt | grep deb)"
elif distro in ("centos", "fedora", "rhel"):
cmd = "dnf install -y /opt/\$(ls /opt | grep rpm)"
cmd = "dnf install -y /opt/\\$(ls /opt | grep rpm)"
proc = LinuxTest.docker_exec(distro, cmd)
if proc.returncode != 0:
raise Exception("docker exec apt install failed (linux stack)")
@ -51,23 +51,17 @@ class LinuxTest(Test):
"./tests/www-deb.conf",
"/etc/php/8.1/fpm/pool.d/www.conf",
)
LinuxTest.docker_exec(
distro, "systemctl stop php8.1-fpm ; systemctl start php8.1-fpm"
)
LinuxTest.docker_exec(distro, "systemctl stop php8.1-fpm ; systemctl start php8.1-fpm")
elif distro == "debian":
LinuxTest.docker_cp(
distro,
"./tests/www-deb.conf",
"/etc/php/7.4/fpm/pool.d/www.conf",
)
LinuxTest.docker_exec(
distro, "systemctl stop php7.4-fpm ; systemctl start php7.4-fpm"
)
LinuxTest.docker_exec(distro, "systemctl stop php7.4-fpm ; systemctl start php7.4-fpm")
elif distro in ("centos", "fedora", "rhel"):
LinuxTest.docker_exec(distro, "dnf install -y php-fpm unzip")
LinuxTest.docker_cp(
distro, "./tests/www-rpm.conf", "/etc/php-fpm.d/www.conf"
)
LinuxTest.docker_cp(distro, "./tests/www-rpm.conf", "/etc/php-fpm.d/www.conf")
LinuxTest.docker_exec(
distro,
"mkdir /run/php ; chmod 777 /run/php ; systemctl stop php-fpm ; systemctl start php-fpm",
@ -92,9 +86,7 @@ class LinuxTest(Test):
if proc.returncode != 0:
ret = False
except:
log(
"LINUX", "", f"exception while running LinuxTest.end()\n{format_exc()}"
)
log("LINUX", "", f"exception while running LinuxTest.end()\n{format_exc()}")
return False
return ret
@ -117,9 +109,7 @@ class LinuxTest(Test):
)
if proc.returncode != 0:
raise Exception("docker exec setup failed (test)")
proc = self.docker_exec(
self.__distro, f"cp /opt/{self._name}/variables.env /etc/bunkerweb/"
)
proc = self.docker_exec(self.__distro, f"cp /opt/{self._name}/variables.env /etc/bunkerweb/")
if proc.returncode != 0:
raise Exception("docker exec cp variables.env failed (test)")
proc = self.docker_exec(
@ -128,9 +118,7 @@ class LinuxTest(Test):
)
if proc.returncode != 0:
raise (Exception("docker exec append variables.env failed (test)"))
proc = self.docker_exec(
self.__distro, "systemctl stop bunkerweb ; systemctl start bunkerweb"
)
proc = self.docker_exec(self.__distro, "systemctl stop bunkerweb ; systemctl start bunkerweb")
if proc.returncode != 0:
raise Exception("docker exec systemctl restart failed (linux stack)")
except:

View File

@ -35,16 +35,9 @@ class SwarmTest(Test):
compose = "/tmp/swarm/stack.yml"
with open(compose, "r") as f:
data = safe_load(f.read())
if (
not "AUTO_LETS_ENCRYPT=yes"
in data["services"]["bunkerweb"]["environment"]
):
data["services"]["bunkerweb"]["environment"].append(
"AUTO_LETS_ENCRYPT=yes"
)
data["services"]["bunkerweb"]["environment"].append(
"USE_LETS_ENCRYPT_STAGING=yes"
)
if "AUTO_LETS_ENCRYPT=yes" not in data["services"]["bunkerweb"]["environment"]:
data["services"]["bunkerweb"]["environment"].append("AUTO_LETS_ENCRYPT=yes")
data["services"]["bunkerweb"]["environment"].append("USE_LETS_ENCRYPT_STAGING=yes")
del data["services"]["bunkerweb"]["deploy"]["placement"]
with open(compose, "w") as f:
f.write(dump(data))
@ -87,7 +80,12 @@ class SwarmTest(Test):
i += 1
if not healthy:
proc = run(
"docker service logs bunkerweb_bunkerweb ; docker service logs bunkerweb_bw-autoconf ; docker service logs bunkerweb_bw-scheduler ; docker service logs bunkerweb_bw-db ; docker service logs bunkerweb_bw-redis ; docker stack ps --no-trunc bunkerweb",
"docker service logs bunkerweb_bunkerweb ;"
+ " docker service logs bunkerweb_bw-autoconf ;"
+ " docker service logs bunkerweb_bw-scheduler ;"
+ " docker service logs bunkerweb_bw-db ;"
+ " docker service logs bunkerweb_bw-redis ;"
+ " docker stack ps --no-trunc bunkerweb",
cwd="/tmp/swarm",
shell=True,
capture_output=True,
@ -116,9 +114,7 @@ class SwarmTest(Test):
ret = False
rmtree("/tmp/swarm")
except:
log(
"SWARM", "", f"exception while running SwarmTest.end()\n{format_exc()}"
)
log("SWARM", "", f"exception while running SwarmTest.end()\n{format_exc()}")
return False
return ret
@ -163,7 +159,7 @@ class SwarmTest(Test):
)
if proc2.returncode != 0:
raise (Exception("swarm stack is not healthy (cmd2 failed)"))
if not "Running" in proc2.stdout.decode():
if "Running" not in proc2.stdout.decode():
all_healthy = False
break
if all_healthy:
@ -188,9 +184,7 @@ class SwarmTest(Test):
proc = run(f'docker stack rm "{self._name}"', shell=True)
if proc.returncode != 0:
raise (Exception("docker stack rm failed"))
proc = run(
'docker config ls --format "{{ .ID }}"', shell=True, capture_output=True
)
proc = run('docker config ls --format "{{ .ID }}"', shell=True, capture_output=True)
if proc.returncode != 0:
raise (Exception("docker config ls failed"))
for config in proc.stdout.decode().splitlines():

View File

@ -1,13 +1,11 @@
from abc import ABC, abstractmethod
from sys import stderr
from abc import ABC
from time import time, sleep
from requests import get
from traceback import format_exc
from shutil import copytree
from os.path import isdir, join
from os import mkdir, makedirs, walk, chmod, rename
from os import mkdir, walk, rename
from re import sub, search, MULTILINE
from datetime import datetime
from subprocess import run
from logger import log
from string import ascii_lowercase, digits
@ -22,6 +20,7 @@ class Test(ABC):
self.__tests = tests
self._no_copy_container = no_copy_container
self.__delay = delay
self._domains = {}
log(
"TEST",
"",
@ -30,7 +29,7 @@ class Test(ABC):
# Class method
# called once before running all the different tests for a given integration
def init():
def init(self):
try:
if not isdir("/tmp/bw-data"):
mkdir("/tmp/bw-data")
@ -48,7 +47,7 @@ class Test(ABC):
# Class method
# called once all tests ended
def end():
def end(self):
return True
# helper to check domains
@ -146,6 +145,7 @@ class Test(ABC):
def _debug_fail(self):
pass
@staticmethod
def replace_in_file(path, old, new):
try:
with open(path, "r") as f:
@ -156,11 +156,13 @@ class Test(ABC):
except:
log("TEST", "⚠️", f"can't replace file {path} : {format_exc()}")
@staticmethod
def replace_in_files(path, old, new):
for root, dirs, files in walk(path):
for name in files:
Test.replace_in_file(join(root, name), old, new)
@staticmethod
def rename(path, old, new):
for root, dirs, files in walk(path):
for name in dirs + files:
@ -169,6 +171,7 @@ class Test(ABC):
if full_path != new_path:
rename(full_path, new_path)
@staticmethod
def random_string(length):
charset = ascii_lowercase + digits
return "".join(choice(charset) for i in range(length))

View File

@ -1,7 +1,6 @@
import subprocess
import sys
import tempfile
import os
import time
import pathlib
@ -74,9 +73,7 @@ if distro == "ubuntu":
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(bash_script)
f.flush()
subprocess.run(
["docker", "cp", f.name, "systemd-ubuntu:/data/install_nginx.sh"]
)
subprocess.run(["docker", "cp", f.name, "systemd-ubuntu:/data/install_nginx.sh"])
result = subprocess.run(
[
"docker",
@ -202,9 +199,7 @@ if distro == "ubuntu":
],
capture_output=True,
)
print(
"❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode()
)
print("❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode())
bunkerweb_ui_state = subprocess.run(
[
@ -329,12 +324,7 @@ if distro == "ubuntu":
print("❌ /usr/bin/bwcli found.")
# Checking Removing test
try:
if (
pathlib.Path("/usr/share/bunkerweb").is_dir()
or pathlib.Path("/var/tmp/bunkerweb").is_dir()
or pathlib.Path("/var/cache/bunkerweb").is_dir()
or pathlib.Path("/usr/bin/bwcli").is_file()
):
if pathlib.Path("/usr/share/bunkerweb").is_dir() or pathlib.Path("/var/tmp/bunkerweb").is_dir() or pathlib.Path("/var/cache/bunkerweb").is_dir() or pathlib.Path("/usr/bin/bwcli").is_file():
test_results["Removing test"] = "KO"
else:
test_results["Removing test"] = "OK"
@ -390,10 +380,7 @@ if distro == "ubuntu":
print("❌ /etc/bunkerweb found.")
# Checking Purging test
try:
if (
pathlib.Path("/var/lib/bunkerweb").is_dir()
or pathlib.Path("/etc/bunkerweb").is_dir()
):
if pathlib.Path("/var/lib/bunkerweb").is_dir() or pathlib.Path("/etc/bunkerweb").is_dir():
test_results["Purging test"] = "KO"
else:
test_results["Purging test"] = "OK"
@ -444,9 +431,7 @@ if distro == "ubuntu":
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(bash_script)
f.flush()
subprocess.run(
["docker", "cp", f.name, "systemd-ubuntu:/data/install_nginx.sh"]
)
subprocess.run(["docker", "cp", f.name, "systemd-ubuntu:/data/install_nginx.sh"])
result = subprocess.run(
[
"docker",
@ -613,9 +598,7 @@ elif distro == "debian":
f.write(bash_script)
f.flush()
subprocess.run(["docker", "cp", f.name, "systemd-debian:/tmp/install_nginx.sh"])
result = subprocess.run(
["docker", "exec", "-it", "systemd-debian", "bash", "/tmp/install_nginx.sh"]
)
result = subprocess.run(["docker", "exec", "-it", "systemd-debian", "bash", "/tmp/install_nginx.sh"])
if result.returncode != 0:
bunkerweb_logs = subprocess.run(
[
@ -731,9 +714,7 @@ elif distro == "debian":
],
capture_output=True,
)
print(
"❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode()
)
print("❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode())
bunkerweb_ui_state = subprocess.run(
[
@ -858,12 +839,7 @@ elif distro == "debian":
print("❌ /usr/bin/bwcli found.")
# Checking Removing test
try:
if (
pathlib.Path("/usr/share/bunkerweb").is_dir()
or pathlib.Path("/var/tmp/bunkerweb").is_dir()
or pathlib.Path("/var/cache/bunkerweb").is_dir()
or pathlib.Path("/usr/bin/bwcli").is_file()
):
if pathlib.Path("/usr/share/bunkerweb").is_dir() or pathlib.Path("/var/tmp/bunkerweb").is_dir() or pathlib.Path("/var/cache/bunkerweb").is_dir() or pathlib.Path("/usr/bin/bwcli").is_file():
test_results["Removing test"] = "KO"
else:
test_results["Removing test"] = "OK"
@ -919,10 +895,7 @@ elif distro == "debian":
print("❌ /etc/bunkerweb found.")
# Checking Purging test
try:
if (
pathlib.Path("/var/lib/bunkerweb").is_dir()
or pathlib.Path("/etc/bunkerweb").is_dir()
):
if pathlib.Path("/var/lib/bunkerweb").is_dir() or pathlib.Path("/etc/bunkerweb").is_dir():
test_results["Purging test"] = "KO"
else:
test_results["Purging test"] = "OK"
@ -973,9 +946,7 @@ elif distro == "debian":
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(bash_script)
f.flush()
subprocess.run(
["docker", "cp", f.name, "systemd-debian:/data/install_nginx.sh"]
)
subprocess.run(["docker", "cp", f.name, "systemd-debian:/data/install_nginx.sh"])
result = subprocess.run(
[
"docker",
@ -1138,9 +1109,7 @@ elif distro == "fedora":
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(bash_script)
f.flush()
subprocess.run(
["docker", "cp", f.name, "systemd-fedora:/data/install_nginx.sh"]
)
subprocess.run(["docker", "cp", f.name, "systemd-fedora:/data/install_nginx.sh"])
result = subprocess.run(
[
"docker",
@ -1265,9 +1234,7 @@ elif distro == "fedora":
],
capture_output=True,
)
print(
"❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode()
)
print("❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode())
bunkerweb_ui_state = subprocess.run(
[
@ -1546,9 +1513,7 @@ elif distro == "fedora":
subprocess.run(["docker", "start", "systemd-fedora"])
def check_container_status():
result = subprocess.run(
["docker", "inspect", "systemd-fedora"], stdout=subprocess.PIPE
)
result = subprocess.run(["docker", "inspect", "systemd-fedora"], stdout=subprocess.PIPE)
return "running" in str(result.stdout)
while True:
@ -1805,9 +1770,7 @@ elif distro == "rhel":
],
capture_output=True,
)
print(
"❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode()
)
print("❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode())
bunkerweb_ui_state = subprocess.run(
[
@ -2151,9 +2114,7 @@ elif distro == "centos":
]
)
# Building local systemd image
subprocess.run(
["docker", "build", "-t", "centos", "-f", "tests/Dockerfile-centos", "."]
)
subprocess.run(["docker", "build", "-t", "centos", "-f", "tests/Dockerfile-centos", "."])
subprocess.run(
[
"docker",
@ -2201,9 +2162,7 @@ elif distro == "centos":
"systemd-centos:/etc/yum.repos.d/nginx.repo",
]
)
subprocess.run(
["docker", "cp", f.name, "systemd-centos:/data/install_nginx.sh"]
)
subprocess.run(["docker", "cp", f.name, "systemd-centos:/data/install_nginx.sh"])
result = subprocess.run(
[
"docker",
@ -2329,9 +2288,7 @@ elif distro == "centos":
],
capture_output=True,
)
print(
"❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode()
)
print("❌ bunkerweb.service is not running. Logs:", bunkerweb_logs.stdout.decode())
bunkerweb_ui_state = subprocess.run(
[

View File

@ -14,9 +14,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -29,9 +27,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
firefox_options = Options()
@ -55,9 +51,7 @@ try:
exit(1)
elif test_type == "captcha":
if not driver.current_url.endswith(antibot_uri):
print(
"❌ Antibot is disabled or the endpoint is wrong ...", flush=True
)
print("❌ Antibot is disabled or the endpoint is wrong ...", flush=True)
exit(1)
try:
driver.find_element(By.XPATH, "//input[@name='captcha']")

View File

@ -14,9 +14,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -29,9 +27,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
firefox_options = Options()
@ -78,10 +74,8 @@ try:
"✅ Auth-basic is enabled and working in the expected location ...",
)
else:
print(f" Trying to access http://www.example.com ...", flush=True)
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
print(" Trying to access http://www.example.com ...", flush=True)
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code != 401:
print("❌ The page is accessible without auth-basic ...", flush=True)
@ -93,9 +87,7 @@ try:
f" Trying to access http://{auth_basic_username}:{auth_basic_password}@www.example.com ...",
flush=True,
)
driver.get(
f"http://{auth_basic_username}:{auth_basic_password}@www.example.com"
)
driver.get(f"http://{auth_basic_username}:{auth_basic_password}@www.example.com")
try:
driver.find_element(By.XPATH, "//img[@alt='NGINX Logo']")
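Dropping the `f` prefix on the first "Trying to access" print above is flake8's F541 ("f-string without any placeholders"): a literal with nothing to interpolate should be plain. Sketch with an invented host variable:

host = "www.example.com"
print("ℹ️ Trying to access http://www.example.com ...", flush=True)  # no placeholder: no f
print(f"ℹ️ Trying to access http://{host} ...", flush=True)          # placeholder present: keep f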

View File

@ -13,9 +13,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -28,15 +26,11 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_bad_behavior = getenv("USE_BAD_BEHAVIOR", "yes") == "yes"
bad_behavior_status_codes = getenv(
"BAD_BEHAVIOR_STATUS_CODES", "400 401 403 404 405 429 444"
)
bad_behavior_status_codes = getenv("BAD_BEHAVIOR_STATUS_CODES", "400 401 403 404 405 429 444")
bad_behavior_ban_time = getenv("BAD_BEHAVIOR_BAN_TIME", "86400")
bad_behavior_threshold = getenv("BAD_BEHAVIOR_THRESHOLD", "10")
bad_behavior_count_time = getenv("BAD_BEHAVIOR_COUNT_TIME", "60")
@ -55,10 +49,7 @@ try:
sleep(3)
status_code = get(
f"http://www.example.com",
headers={"Host": "www.example.com"},
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code == 403:
if not use_bad_behavior:
@ -74,10 +65,7 @@ try:
)
sleep(65)
status_code = get(
f"http://www.example.com",
headers={"Host": "www.example.com"},
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code == 403:
print("❌ Bad Behavior's ban time didn't changed ...", flush=True)
@ -103,9 +91,7 @@ try:
docker_host = getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
docker_client = DockerClient(base_url=docker_host)
bw_instances = docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
)
bw_instances = docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
if not bw_instances:
print("❌ BunkerWeb instance not found ...", flush=True)
@ -130,11 +116,7 @@ try:
if not found:
print("❌ Bad Behavior's count time didn't changed ...", flush=True)
exit(1)
elif (
use_bad_behavior
and bad_behavior_status_codes == "400 401 403 404 405 429 444"
and bad_behavior_threshold == "10"
):
elif use_bad_behavior and bad_behavior_status_codes == "400 401 403 404 405 429 444" and bad_behavior_threshold == "10":
print("❌ Bad Behavior is disabled, it shouldn't be ...", flush=True)
exit(1)

View File

@ -19,11 +19,7 @@ with get(mmdb_url, stream=True) as resp:
file_content.write(chunk)
file_content.seek(0)
output_path = (
Path(sep, "output", "ip_asn.txt")
if getenv("TEST_TYPE", "docker") == "docker"
else Path(".", "ip_asn.txt")
)
output_path = Path(sep, "output", "ip_asn.txt") if getenv("TEST_TYPE", "docker") == "docker" else Path(".", "ip_asn.txt")
with open_database(GzipFile(fileobj=file_content, mode="rb"), mode=MODE_FD) as reader: # type: ignore
dbip_asn = reader.get("1.0.0.3")

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
GLOBAL = getenv("GLOBAL", "no") == "yes"
@ -63,19 +59,14 @@ try:
status_code = get(
"http://www.example.com/admin",
headers={"Host": "www.example.com", "User-Agent": "BunkerBot"}
| ({"X-Forwarded-For": "1.0.0.3"} if GLOBAL else {}),
headers={"Host": "www.example.com", "User-Agent": "BunkerBot"} | ({"X-Forwarded-For": "1.0.0.3"} if GLOBAL else {}),
).status_code
if status_code == 403:
if not use_blacklist:
print(
"❌ The request was rejected, but the blacklist is disabled, exiting ..."
)
print("❌ The request was rejected, but the blacklist is disabled, exiting ...")
exit(1)
elif blacklist_rdns_global and (
blacklist_rdns != "" or blacklist_rdns_urls != ""
):
elif blacklist_rdns_global and (blacklist_rdns != "" or blacklist_rdns_urls != ""):
print(
"❌ Blacklist's RDNS global didn't work as expected, exiting ...",
)
@ -128,9 +119,7 @@ try:
flush=True,
)
exit(1)
elif blacklist_ip != "" and not any(
[blacklist_ignore_ip, blacklist_ignore_ip_urls, not use_blacklist]
):
elif blacklist_ip != "" and not any([blacklist_ignore_ip, blacklist_ignore_ip_urls, not use_blacklist]):
print("❌ Blacklist's IP didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_ip_urls != "":
@ -146,24 +135,16 @@ try:
print("❌ Blacklist's RDNS didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_rdns_urls != "" and blacklist_rdns_global:
print(
"❌ Blacklist's RDNS urls didn't work as expected, exiting ...", flush=True
)
print("❌ Blacklist's RDNS urls didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_asn != "" and not any(
[blacklist_ignore_asn, blacklist_ignore_asn_urls]
):
elif blacklist_asn != "" and not any([blacklist_ignore_asn, blacklist_ignore_asn_urls]):
print("❌ Blacklist's ASN didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_asn_urls != "":
print("❌ Blacklist's ASN urls didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_user_agent != "" and not any(
[blacklist_ignore_user_agent, blacklist_ignore_user_agent_urls]
):
print(
"❌ Blacklist's User Agent didn't work as expected, exiting ...", flush=True
)
elif blacklist_user_agent != "" and not any([blacklist_ignore_user_agent, blacklist_ignore_user_agent_urls]):
print("❌ Blacklist's User Agent didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_user_agent_urls != "":
print(
@ -171,9 +152,7 @@ try:
flush=True,
)
exit(1)
elif blacklist_uri != "" and not any(
[blacklist_ignore_uri, blacklist_ignore_uri_urls]
):
elif blacklist_uri != "" and not any([blacklist_ignore_uri, blacklist_ignore_uri_urls]):
print("❌ Blacklist's URI didn't work as expected, exiting ...", flush=True)
exit(1)
elif blacklist_uri_urls != "":
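Many of the reflowed requests in this file lean on the dict union operator from PEP 584 (Python 3.9+) to attach X-Forwarded-For only in the global case. The operator in isolation:

```python
# Dict union ("|", Python 3.9+): the right-hand dict is merged in only
# when the condition selects it over the empty dict.
GLOBAL = True  # stand-in for the test's GLOBAL flag

headers = {"Host": "www.example.com", "User-Agent": "BunkerBot"} | ({"X-Forwarded-For": "1.0.0.3"} if GLOBAL else {})
# -> {'Host': 'www.example.com', 'User-Agent': 'BunkerBot', 'X-Forwarded-For': '1.0.0.3'}
```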

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_brotli = getenv("USE_BROTLI", "no") == "yes"
@ -44,14 +40,10 @@ try:
response.raise_for_status()
if not use_brotli and response.headers.get("Content-Encoding", "").lower() == "br":
print(
f"❌ Content-Encoding header is present even if Brotli is deactivated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Content-Encoding header is present even if Brotli is deactivated, exiting ...\nheaders: {response.headers}")
exit(1)
elif use_brotli and response.headers.get("Content-Encoding", "").lower() != "br":
print(
f"❌ Content-Encoding header is not present or with the wrong value even if Brotli is activated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Content-Encoding header is not present or with the wrong value even if Brotli is activated, exiting ...\nheaders: {response.headers}")
exit(1)
print("✅ Brotli is working as expected ...", flush=True)

View File

@ -24,9 +24,7 @@ async def register(_: Request):
async def report(_: Request):
global report_num
report_num += 1
return JSONResponse(
status_code=200, content={"result": "ok", "data": "Report acknowledged."}
)
return JSONResponse(status_code=200, content={"result": "ok", "data": "Report acknowledged."})
@app.get("/db")
@ -51,9 +49,7 @@ async def reset(_: Request):
global instance_id, report_num
instance_id = None
report_num = 0
return JSONResponse(
status_code=200, content={"result": "ok", "data": "Reset done."}
)
return JSONResponse(status_code=200, content={"result": "ok", "data": "Reset done."})
if __name__ == "__main__":
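This file is the mock BunkerNet API, so the collapsed returns are FastAPI JSONResponse calls. A self-contained sketch of one handler in the same shape (the POST method and /report path are assumptions; the hunks only show the function bodies):

```python
# Minimal FastAPI handler using the one-line JSONResponse form above;
# the /report path and POST method are assumptions.
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

app = FastAPI()
report_num = 0

@app.post("/report")
async def report(_: Request):
    global report_num
    report_num += 1
    return JSONResponse(status_code=200, content={"result": "ok", "data": "Report acknowledged."})
```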

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_bunkernet = getenv("USE_BUNKERNET", "yes") == "yes"
@ -49,14 +45,9 @@ try:
print("✅ BunkerNet plugin is disabled and not registered ...", flush=True)
exit(0)
print(
" Sending a request to http://www.example.com/?id=/etc/passwd ...", flush=True
)
print(" Sending a request to http://www.example.com/?id=/etc/passwd ...", flush=True)
status_code = get(
f"http://www.example.com/?id=/etc/passwd",
headers={"Host": "www.example.com", "X-Forwarded-For": "1.0.0.3"},
).status_code
status_code = get("http://www.example.com/?id=/etc/passwd", headers={"Host": "www.example.com", "X-Forwarded-For": "1.0.0.3"}).status_code
print(f" Status code: {status_code}", flush=True)

View File

@ -15,9 +15,7 @@ try:
_, err = result.communicate()
if result.returncode != 0:
print(
f'❌ Command "ban" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}'
)
print(f'❌ Command "ban" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}')
exit(1)
print(err.decode(), flush=True)
@ -31,20 +29,14 @@ try:
_, err = result.communicate()
if result.returncode != 0:
print(
f'❌ Command "bans" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}'
)
print(f'❌ Command "bans" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}')
exit(1)
if b"- 127.0.0.1" not in err:
print(
f'❌ IP 127.0.0.1 not found in the output of "bans", exiting ...\noutput: {err.decode()}'
)
print(f'❌ IP 127.0.0.1 not found in the output of "bans", exiting ...\noutput: {err.decode()}')
exit(1)
elif b"List of bans for redis:" not in err:
print(
f'❌ Redis ban list not found in the output of "bans", exiting ...\noutput: {err.decode()}'
)
print(f'❌ Redis ban list not found in the output of "bans", exiting ...\noutput: {err.decode()}')
exit(1)
elif b"1 hour" not in err and b"59 minutes" not in err:
print(f"❌ Ban duration isn't 1 hour, exiting ...\noutput: {err.decode()}")
@ -59,9 +51,7 @@ try:
_, err = result.communicate()
if result.returncode != 0:
print(
f'❌ Command "unban" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}'
)
print(f'❌ Command "unban" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}')
exit(1)
print(err.decode(), flush=True)
@ -75,9 +65,7 @@ try:
_, err = result.communicate()
if result.returncode != 0:
print(
f'❌ Command "bans" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}'
)
print(f'❌ Command "bans" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}')
exit(1)
found = 0
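The Popen setup feeding these communicate() calls sits outside the hunks. A hypothetical reconstruction of the pattern with subprocess.run (the bwcli argv is inferred from the error messages):

```python
# Hypothetical sketch: run a bwcli subcommand and inspect stderr/exit code.
from subprocess import PIPE, run

result = run(["bwcli", "bans"], stdout=PIPE, stderr=PIPE)
err = result.stderr
if result.returncode != 0:
    print(f'❌ Command "bans" failed, exiting ...\noutput: {err.decode()}\nexit_code: {result.returncode}')
```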

View File

@ -7,9 +7,7 @@ try:
docker_host = getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
docker_client = DockerClient(base_url=docker_host)
bw_instances = docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
)
bw_instances = docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
if not bw_instances:
print("❌ BunkerWeb instance not found ...", flush=True)
@ -25,9 +23,7 @@ try:
result = bw_instance.exec_run("bwcli ban 127.0.0.1 -exp 3600")
if result.exit_code != 0:
print(
f'❌ Command "ban" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}'
)
print(f'❌ Command "ban" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}')
exit(1)
print(result.output.decode(), flush=True)
@ -40,25 +36,17 @@ try:
result = bw_instance.exec_run("bwcli bans")
if result.exit_code != 0:
print(
f'❌ Command "bans" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}'
)
print(f'❌ Command "bans" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}')
exit(1)
if b"- 127.0.0.1" not in result.output:
print(
f'❌ IP 127.0.0.1 not found in the output of "bans", exiting ...\noutput: {result.output.decode()}'
)
print(f'❌ IP 127.0.0.1 not found in the output of "bans", exiting ...\noutput: {result.output.decode()}')
exit(1)
elif b"List of bans for redis:" not in result.output:
print(
f'❌ Redis ban list not found in the output of "bans", exiting ...\noutput: {result.output.decode()}'
)
print(f'❌ Redis ban list not found in the output of "bans", exiting ...\noutput: {result.output.decode()}')
exit(1)
elif b"1 hour" not in result.output and b"59 minutes" not in result.output:
print(
f"❌ Ban duration isn't 1 hour, exiting ...\noutput: {result.output.decode()}"
)
print(f"❌ Ban duration isn't 1 hour, exiting ...\noutput: {result.output.decode()}")
exit(1)
print(result.output.decode(), flush=True)
@ -71,9 +59,7 @@ try:
result = bw_instance.exec_run("bwcli unban 127.0.0.1")
if result.exit_code != 0:
print(
f'❌ Command "unban" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}'
)
print(f'❌ Command "unban" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}')
exit(1)
print(result.output.decode(), flush=True)
@ -86,9 +72,7 @@ try:
result = bw_instance.exec_run("bwcli bans")
if result.exit_code != 0:
print(
f'❌ Command "bans" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}'
)
print(f'❌ Command "bans" failed, exiting ...\noutput: {result.output.decode()}\nexit_code: {result.exit_code}')
exit(1)
found = 0
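The Docker variant of the same test drives docker-py; the collapsed calls are the label filter and the in-container exec. In isolation:

```python
# docker-py sketch: locate the BunkerWeb container by label, run bwcli in it.
from docker import DockerClient

docker_client = DockerClient(base_url="unix:///var/run/docker.sock")
bw_instances = docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
if bw_instances:
    result = bw_instances[0].exec_run("bwcli bans")  # ExecResult(exit_code, output)
    print(result.exit_code, result.output.decode())
```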

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com/image.png", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com/image.png", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_client_cache = getenv("USE_CLIENT_CACHE", "no") == "yes"
@ -46,22 +42,16 @@ try:
flush=True,
)
response = get(
"http://www.example.com/image.png", headers={"Host": "www.example.com"}
)
response = get("http://www.example.com/image.png", headers={"Host": "www.example.com"})
response.raise_for_status()
if not use_client_cache:
if "Cache-Control" in response.headers:
print(
f"❌ Cache-Control header is present even if Client cache is deactivated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Cache-Control header is present even if Client cache is deactivated, exiting ...\nheaders: {response.headers}")
exit(1)
else:
if "Cache-Control" not in response.headers and default_cache_extensions:
print(
f"❌ Cache-Control header is not present even if Client cache is activated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Cache-Control header is not present even if Client cache is activated, exiting ...\nheaders: {response.headers}")
exit(1)
elif not default_cache_extensions and "Cache-Control" in response.headers:
print(
@ -69,16 +59,10 @@ try:
flush=True,
)
elif not client_cache_etag and "ETag" in response.headers:
print(
f"❌ ETag header is present even if Client cache ETag is deactivated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ ETag header is present even if Client cache ETag is deactivated, exiting ...\nheaders: {response.headers}")
exit(1)
elif default_cache_extensions and client_cache_control != response.headers.get(
"Cache-Control"
):
print(
f"❌ Cache-Control header is not equal to the expected value, exiting ...\nheaders: {response.headers}"
)
elif default_cache_extensions and client_cache_control != response.headers.get("Cache-Control"):
print(f"❌ Cache-Control header is not equal to the expected value, exiting ...\nheaders: {response.headers}")
exit(1)
print("✅ Client cache is working as expected ...", flush=True)

View File

@ -32,23 +32,14 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_cors = getenv("USE_CORS", "no") == "yes"
cors_allow_origin = (
getenv("CORS_ALLOW_ORIGIN", "*")
.replace("\\", "")
.replace("^", "")
.replace("$", "")
)
cors_allow_origin = getenv("CORS_ALLOW_ORIGIN", "*").replace("\\", "").replace("^", "").replace("$", "")
cors_expose_headers = getenv("CORS_EXPOSE_HEADERS", "Content-Length,Content-Range")
cors_max_age = getenv("CORS_MAX_AGE", "86400")
cors_allow_credentials = (
"true" if getenv("CORS_ALLOW_CREDENTIALS", "no") == "yes" else "false"
)
cors_allow_credentials = "true" if getenv("CORS_ALLOW_CREDENTIALS", "no") == "yes" else "false"
cors_allow_methods = getenv("CORS_ALLOW_METHODS", "GET, POST, OPTIONS")
cors_allow_headers = getenv(
"CORS_ALLOW_HEADERS",
@ -129,18 +120,12 @@ try:
response.raise_for_status()
if use_cors:
if (
cors_allow_credentials == "false"
and "Access-Control-Allow-Credentials" in response.headers
):
if cors_allow_credentials == "false" and "Access-Control-Allow-Credentials" in response.headers:
print(
f'❌ The Access-Control-Allow-Credentials header is present in the response headers while the setting CORS_ALLOW_CREDENTIALS is set to "no", it should not be ...\nheaders: {response.headers}',
)
exit(1)
elif (
cors_allow_credentials == "true"
and "Access-Control-Allow-Credentials" not in response.headers
):
elif cors_allow_credentials == "true" and "Access-Control-Allow-Credentials" not in response.headers:
print(
f'❌ The Access-Control-Allow-Credentials header is not present in the response headers while the setting CORS_ALLOW_CREDENTIALS is set to "yes", it should be ...\nheaders: {response.headers}',
)
@ -157,10 +142,7 @@ try:
("Access-Control-Allow-Headers", cors_allow_headers),
):
if use_cors:
if (
header == "Access-Control-Allow-Credentials"
and cors_allow_credentials == "false"
):
if header == "Access-Control-Allow-Credentials" and cors_allow_credentials == "false":
continue
if value != response.headers.get(header):
@ -190,8 +172,7 @@ try:
cors_max_age != "86400",
cors_allow_credentials == "true",
cors_allow_methods != "GET, POST, OPTIONS",
cors_allow_headers
!= "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range",
cors_allow_headers != "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range",
]
):
exit(0)

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
country = getenv("COUNTRY")
@ -40,15 +36,8 @@ try:
)
status_code = get(
f"http://www.example.com",
headers={
"Host": "www.example.com",
}
| (
{"X-Forwarded-For": "2.0.0.3" if country == "FR" else "8.0.0.3"}
if getenv("TEST_TYPE", "docker") == "linux"
else {}
),
"http://www.example.com",
headers={"Host": "www.example.com"} | ({"X-Forwarded-For": "2.0.0.3" if country == "FR" else "8.0.0.3"} if getenv("TEST_TYPE", "docker") == "linux" else {}),
).status_code
if status_code == 403:

View File

@ -31,9 +31,7 @@ try:
)
try:
get(
"https://www.example.com", headers={"Host": "www.example.com"}, verify=False
)
get("https://www.example.com", headers={"Host": "www.example.com"}, verify=False)
except RequestException:
print(
"❌ The request failed even though the Custom Cert is activated, exiting ...",

View File

@ -32,10 +32,7 @@ from bunkerweb.db.model import (
try:
database_uri = getenv("DATABASE_URI", "sqlite:////var/lib/bunkerweb/db.sqlite3")
if (
getenv("TEST_TYPE", "docker") == "docker"
and database_uri == "sqlite:////var/lib/bunkerweb/db.sqlite3"
):
if getenv("TEST_TYPE", "docker") == "docker" and database_uri == "sqlite:////var/lib/bunkerweb/db.sqlite3":
database_uri = "sqlite:////data/lib/db.sqlite3"
error = False
@ -130,12 +127,7 @@ try:
print(" Checking if database is initialized ...", flush=True)
with db_session() as session:
metadata = (
session.query(Metadata)
.with_entities(Metadata.is_initialized)
.filter_by(id=1)
.first()
)
metadata = session.query(Metadata).with_entities(Metadata.is_initialized).filter_by(id=1).first()
if metadata is None or not metadata.is_initialized:
print(
@ -188,10 +180,7 @@ try:
for global_value in global_values:
if global_value.setting_id in global_settings:
if (
global_value.value
!= global_settings[global_value.setting_id]["value"]
):
if global_value.value != global_settings[global_value.setting_id]["value"]:
print(
f"❌ The global value {global_value.setting_id} is in the database but is not correct, exiting ...\n{global_value.value} (database) != {global_settings[global_value.setting_id]['value']} (env)",
flush=True,
@ -218,9 +207,7 @@ try:
)
exit(1)
if not all(
[global_settings[global_value]["checked"] for global_value in global_settings]
):
if not all([global_settings[global_value]["checked"] for global_value in global_settings]):
print(
f"❌ Not all global values are in the database, exiting ...\nmissing values: {', '.join([global_value for global_value in global_settings if not global_settings[global_value]['checked']])}",
flush=True,
@ -246,10 +233,7 @@ try:
else:
for service_setting in services_settings:
if service_setting.setting_id in service_settings:
if (
service_setting.value
!= service_settings[service_setting.setting_id]["value"]
):
if service_setting.value != service_settings[service_setting.setting_id]["value"]:
print(
f"❌ The service value {service_setting.setting_id} is in the database but is not correct, exiting ...\n{service_setting.value} (database) != {service_settings[service_setting.setting_id]['value']} (env)",
flush=True,
@ -276,12 +260,7 @@ try:
)
exit(1)
if not all(
[
service_settings[service_setting]["checked"]
for service_setting in service_settings
]
):
if not all([service_settings[service_setting]["checked"] for service_setting in service_settings]):
print(
f"❌ Not all service values are in the database, exiting ...\nmissing values: {', '.join([service_setting for service_setting in service_settings if not service_settings[service_setting]['checked']])}",
flush=True,
@ -353,14 +332,11 @@ try:
)
exit(1)
if (
plugin.name != current_plugin[plugin.id]["name"]
or plugin.description != current_plugin[plugin.id]["description"]
or plugin.version != current_plugin[plugin.id]["version"]
or plugin.stream != current_plugin[plugin.id]["stream"]
):
if plugin.name != current_plugin[plugin.id]["name"] or plugin.description != current_plugin[plugin.id]["description"] or plugin.version != current_plugin[plugin.id]["version"] or plugin.stream != current_plugin[plugin.id]["stream"]:
print(
f"❌ The {'external' if plugin.external else 'core'} plugin {plugin.name} (id: {plugin.id}) is in the database but is not correct, exiting ...\n{dumps({'name': plugin.name, 'description': plugin.description, 'version': plugin.version, 'stream': plugin.stream})} (database) != {dumps({'name': current_plugin[plugin.id]['name'], 'description': current_plugin[plugin.id]['description'], 'version': current_plugin[plugin.id]['version'], 'stream': current_plugin[plugin.id]['stream']})} (file)",
f"❌ The {'external' if plugin.external else 'core'} plugin {plugin.name} (id: {plugin.id}) is in the database but is not correct, exiting ...\n"
+ f"{dumps({'name': plugin.name, 'description': plugin.description, 'version': plugin.version, 'stream': plugin.stream})}"
+ f" (database) != {dumps({'name': current_plugin[plugin.id]['name'], 'description': current_plugin[plugin.id]['description'], 'version': current_plugin[plugin.id]['version'], 'stream': current_plugin[plugin.id]['stream']})} (file)", # noqa: E501
flush=True,
)
exit(1)
@ -369,27 +345,19 @@ try:
for setting in settings:
if (
setting.name
!= current_plugin[plugin.id]["settings"][setting.id]["id"]
or setting.context
!= current_plugin[plugin.id]["settings"][setting.id]["context"]
or setting.default
!= current_plugin[plugin.id]["settings"][setting.id]["default"]
or setting.help
!= current_plugin[plugin.id]["settings"][setting.id]["help"]
or setting.label
!= current_plugin[plugin.id]["settings"][setting.id]["label"]
or setting.regex
!= current_plugin[plugin.id]["settings"][setting.id]["regex"]
or setting.type
!= current_plugin[plugin.id]["settings"][setting.id]["type"]
or setting.multiple
!= current_plugin[plugin.id]["settings"][setting.id].get(
"multiple", None
)
setting.name != current_plugin[plugin.id]["settings"][setting.id]["id"]
or setting.context != current_plugin[plugin.id]["settings"][setting.id]["context"]
or setting.default != current_plugin[plugin.id]["settings"][setting.id]["default"]
or setting.help != current_plugin[plugin.id]["settings"][setting.id]["help"]
or setting.label != current_plugin[plugin.id]["settings"][setting.id]["label"]
or setting.regex != current_plugin[plugin.id]["settings"][setting.id]["regex"]
or setting.type != current_plugin[plugin.id]["settings"][setting.id]["type"]
or setting.multiple != current_plugin[plugin.id]["settings"][setting.id].get("multiple", None)
):
print(
f"❌ The {'external' if plugin.external else 'core'} plugin {plugin.name} (id: {plugin.id}) is in the database but is not correct, exiting ...\n{dumps({'default': setting.default, 'help': setting.help, 'label': setting.label, 'regex': setting.regex, 'type': setting.type})} (database) != {dumps({'default': current_plugin[plugin.id]['settings'][setting.id]['default'], 'help': current_plugin[plugin.id]['settings'][setting.id]['help'], 'label': current_plugin[plugin.id]['settings'][setting.id]['label'], 'regex': current_plugin[plugin.id]['settings'][setting.id]['regex'], 'type': current_plugin[plugin.id]['settings'][setting.id]['type']})} (file)",
f"❌ The {'external' if plugin.external else 'core'} plugin {plugin.name} (id: {plugin.id}) is in the database but is not correct, exiting ...\n"
+ f"{dumps({'default': setting.default, 'help': setting.help, 'label': setting.label, 'regex': setting.regex, 'type': setting.type})}"
+ f" (database) != {dumps({'default': current_plugin[plugin.id]['settings'][setting.id]['default'], 'help': current_plugin[plugin.id]['settings'][setting.id]['help'], 'label': current_plugin[plugin.id]['settings'][setting.id]['label'], 'regex': current_plugin[plugin.id]['settings'][setting.id]['regex'], 'type': current_plugin[plugin.id]['settings'][setting.id]['type']})} (file)", # noqa: E501
flush=True,
)
exit(1)
@ -435,46 +403,26 @@ try:
)
exit(1)
index = next(
index
for (index, d) in enumerate(
current_plugin[job.plugin_id].get("jobs", [])
)
if d["name"] == job.name
)
index = next(index for (index, d) in enumerate(current_plugin[job.plugin_id].get("jobs", [])) if d["name"] == job.name)
core_job = current_plugin[job.plugin_id]["jobs"][index]
if (
job.name != core_job["name"]
or job.file_name != core_job["file"]
or job.every != core_job["every"]
or job.reload != core_job["reload"]
):
if job.name != core_job["name"] or job.file_name != core_job["file"] or job.every != core_job["every"] or job.reload != core_job["reload"]:
print(
f"❌ The job {job.name} (plugin_id: {job.plugin_id}) is in the database but is not correct, exiting ...\n{dumps({'name': job.name, 'file': job.file_name, 'every': job.every, 'reload': job.reload})} (database) != {dumps({'name': core_job['name'], 'file': core_job['file'], 'every': core_job['every'], 'reload': core_job['reload']})} (file)",
f"❌ The job {job.name} (plugin_id: {job.plugin_id}) is in the database but is not correct, exiting ...\n"
+ f"{dumps({'name': job.name, 'file': job.file_name, 'every': job.every, 'reload': job.reload})} (database) != {dumps({'name': core_job['name'], 'file': core_job['file'], 'every': core_job['every'], 'reload': core_job['reload']})} (file)", # noqa: E501
flush=True,
)
exit(1)
current_plugin[job.plugin_id]["jobs"][index]["checked"] = True
if not all(
[
all([job["checked"] for job in core_plugins[plugin].get("jobs", [])])
for plugin in core_plugins
]
):
if not all([all([job["checked"] for job in core_plugins[plugin].get("jobs", [])]) for plugin in core_plugins]):
print(
f"❌ Not all jobs from core plugins are in the database, exiting ...\nmissing jobs: {dumps({plugin: [job['name'] for job in core_plugins[plugin]['jobs'] if not job['checked']] for plugin in core_plugins})}",
flush=True,
)
exit(1)
elif not all(
[
all([job["checked"] for job in external_plugins[plugin].get("jobs", [])])
for plugin in external_plugins
]
):
elif not all([all([job["checked"] for job in external_plugins[plugin].get("jobs", [])]) for plugin in external_plugins]):
print(
f"❌ Not all jobs from external plugins are in the database, exiting ...\nmissing jobs: {dumps({plugin: [job['name'] for job in external_plugins[plugin]['jobs'] if not job['checked']] for plugin in external_plugins})}",
flush=True,
@ -519,11 +467,7 @@ try:
)
exit(1)
path_ui = (
Path(join("bunkerweb", "core", plugin_page.plugin_id, "ui"))
if Path(join("bunkerweb", "core", plugin_page.plugin_id, "ui")).exists()
else Path(join("external", plugin_page.plugin_id, "ui"))
)
path_ui = Path(join("bunkerweb", "core", plugin_page.plugin_id, "ui")) if Path(join("bunkerweb", "core", plugin_page.plugin_id, "ui")).exists() else Path(join("external", plugin_page.plugin_id, "ui"))
if not path_ui.exists():
print(
@ -556,9 +500,7 @@ try:
flush=True,
)
exit(1)
elif not all(
[external_plugins[plugin]["page_checked"] for plugin in external_plugins]
):
elif not all([external_plugins[plugin]["page_checked"] for plugin in external_plugins]):
print(
f"❌ Not all external plugins pages are in the database, exiting ...\nmissing plugins pages: {', '.join([plugin for plugin in external_plugins if not external_plugins[plugin]['page_checked']])}",
flush=True,
@ -569,9 +511,7 @@ try:
print(" ", flush=True)
print(" Checking if all custom configs are in the database ...", flush=True)
custom_confs_rx = re_compile(
r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(SERVICE_)?(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
)
custom_confs_rx = re_compile(r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(SERVICE_)?(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$")
global_custom_configs = {}
service_custom_configs = {}
@ -584,9 +524,7 @@ try:
service_custom_configs[custom_conf[3]] = {
"value": environ[env].encode(),
"type": custom_conf[2].lower(),
"method": "manual"
if getenv("TEST_TYPE", "docker") == "linux"
else "scheduler",
"method": "manual" if getenv("TEST_TYPE", "docker") == "linux" else "scheduler",
"checked": False,
}
continue
@ -594,9 +532,7 @@ try:
global_custom_configs[custom_conf[3]] = {
"value": environ[env].encode(),
"type": custom_conf[2].lower(),
"method": "manual"
if getenv("TEST_TYPE", "docker") == "linux"
else "scheduler",
"method": "manual" if getenv("TEST_TYPE", "docker") == "linux" else "scheduler",
"checked": False,
}
@ -614,21 +550,13 @@ try:
)
for custom_config in custom_configs:
if (
not multisite
and custom_config.name in global_custom_configs
and custom_config.service_id
):
if not multisite and custom_config.name in global_custom_configs and custom_config.service_id:
print(
f"❌ The custom config {custom_config.name} is in the database but should not be owned by the service {custom_config.service_id} because multisite is not enabled, exiting ...",
flush=True,
)
exit(1)
elif (
multisite
and custom_config.name in service_custom_configs
and not custom_config.service_id
):
elif multisite and custom_config.name in service_custom_configs and not custom_config.service_id:
print(
f"❌ The custom config {custom_config.name} is in the database but should be owned by the service bwadm.example.com because it's a service config, exiting ...",
flush=True,
@ -652,21 +580,13 @@ try:
flush=True,
)
exit(1)
elif (
custom_config.data.replace(b"# CREATED BY ENV\n", b"")
!= current_custom_configs[custom_config.name]["value"]
and custom_config.data.replace(b"# CREATED BY ENV\n", b"")
!= current_custom_configs[custom_config.name]["value"] + b"\n"
):
elif custom_config.data.replace(b"# CREATED BY ENV\n", b"") != current_custom_configs[custom_config.name]["value"] and custom_config.data.replace(b"# CREATED BY ENV\n", b"") != current_custom_configs[custom_config.name]["value"] + b"\n":
print(
f"❌ The custom config {custom_config.name} is in the database but the value differ, exiting ...\n{custom_config.data} (database) != {current_custom_configs[custom_config.name]['value']} (env)",
flush=True,
)
exit(1)
elif (
custom_config.method
!= current_custom_configs[custom_config.name]["method"]
):
elif custom_config.method != current_custom_configs[custom_config.name]["method"]:
print(
f"❌ The custom config {custom_config.name} is in the database but the method differ, exiting ...\n{custom_config.method} (database) != {current_custom_configs[custom_config.name]['method']} (env)",
flush=True,
@ -675,32 +595,20 @@ try:
current_custom_configs[custom_config.name]["checked"] = True
if not all(
[
global_custom_configs[custom_config]["checked"]
for custom_config in global_custom_configs
]
):
if not all([global_custom_configs[custom_config]["checked"] for custom_config in global_custom_configs]):
print(
f"❌ Not all global custom configs are in the database, exiting ...\nmissing custom configs: {', '.join([custom_config for custom_config in global_custom_configs if not global_custom_configs[custom_config]['checked']])}",
flush=True,
)
exit(1)
elif not all(
[
service_custom_configs[custom_config]["checked"]
for custom_config in service_custom_configs
]
):
elif not all([service_custom_configs[custom_config]["checked"] for custom_config in service_custom_configs]):
print(
f"❌ Not all service custom configs are in the database, exiting ...\nmissing custom configs: {', '.join([custom_config for custom_config in service_custom_configs if not service_custom_configs[custom_config]['checked']])}",
flush=True,
)
exit(1)
print(
"✅ All custom configs are in the database and have the right value", flush=True
)
print("✅ All custom configs are in the database and have the right value", flush=True)
except SystemExit:
exit(1)
except:
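One reflow in this file worth flagging: the job lookup becomes a single next() over an enumerated generator, which raises StopIteration when nothing matches. The pattern, plus the optional default that avoids the exception:

```python
# next() over an enumerated generator, as in the collapsed job lookup.
# The -1 default is an addition here; without it, no match raises StopIteration.
jobs = [{"name": "download"}, {"name": "cleanup"}]
index = next((i for i, d in enumerate(jobs) if d["name"] == "cleanup"), -1)
# index == 1
```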

View File

@ -28,11 +28,7 @@ try:
driver.get("https://www.dnsbl.info/dnsbl-list.php")
print(" Getting the DNSBL servers ...")
links: List[WebElement] = driver_wait.until(
EC.presence_of_all_elements_located(
(By.XPATH, "//table[@class='body_sub_body']//td")
)
)
links: List[WebElement] = driver_wait.until(EC.presence_of_all_elements_located((By.XPATH, "//table[@class='body_sub_body']//td")))
for link in links:
content = link.text
@ -41,18 +37,12 @@ try:
print(" Checking the DNSBL servers for a banned IP ...", flush=True)
output_path = (
Path(sep, "output", "dnsbl_ip.txt")
if getenv("TEST_TYPE", "docker") == "docker"
else Path(".", "dnsbl_ip.txt")
)
output_path = Path(sep, "output", "dnsbl_ip.txt") if getenv("TEST_TYPE", "docker") == "docker" else Path(".", "dnsbl_ip.txt")
for ip_address in [IPv4Address(f"{x}.0.0.3") for x in range(1, 256)]:
for dnsbl_server in dnsbl_servers:
with suppress(gaierror):
gethostbyname(
f"{ip_address.reverse_pointer.replace('.in-addr.arpa', '')}.{dnsbl_server}"
)
gethostbyname(f"{ip_address.reverse_pointer.replace('.in-addr.arpa', '')}.{dnsbl_server}")
print(
f"{ip_address} is banned on {dnsbl_server}, saving it to {output_path}",
flush=True,
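The collapsed gethostbyname() call is the standard DNSBL probe: reverse the IPv4 octets via reverse_pointer, drop the .in-addr.arpa suffix, and resolve the result under the blocklist zone. Standalone, with an illustrative zone name not taken from the diff:

```python
# DNSBL probe: the lookup only resolves if the IP is listed on the zone.
from contextlib import suppress
from ipaddress import IPv4Address
from socket import gaierror, gethostbyname

ip_address = IPv4Address("1.0.0.3")
dnsbl_server = "zen.spamhaus.org"  # illustrative zone, not from this commit
with suppress(gaierror):
    gethostbyname(f"{ip_address.reverse_pointer.replace('.in-addr.arpa', '')}.{dnsbl_server}")
    print(f"{ip_address} is banned on {dnsbl_server}", flush=True)
```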

View File

@ -11,9 +11,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}, timeout=3
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}, timeout=3).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -26,9 +24,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_dnsbl = getenv("USE_DNSBL", "yes") == "yes"
@ -43,24 +39,14 @@ try:
retries = 0
while not passed and retries < 10:
status_code = get(
f"http://www.example.com",
headers={"Host": "www.example.com"}
| (
{"X-Forwarded-For": getenv("IP_ADDRESS", "")}
if TEST_TYPE == "linux"
else {}
),
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"} | ({"X-Forwarded-For": getenv("IP_ADDRESS", "")} if TEST_TYPE == "linux" else {})).status_code
if status_code == 403:
if not use_dnsbl:
print("❌ The request was rejected, but DNSBL is disabled, exiting ...")
exit(1)
elif not dnsbl_list:
print(
"❌ The request was rejected, but DNSBL list is empty, exiting ..."
)
print("❌ The request was rejected, but DNSBL list is empty, exiting ...")
exit(1)
elif use_dnsbl and dnsbl_list:
if retries <= 10:
@ -80,9 +66,7 @@ try:
sleep(5)
continue
print(
f'❌ The request was not rejected, but DNSBL list is equal to "{dnsbl_list}", exiting ...'
)
print(f'❌ The request was not rejected, but DNSBL list is equal to "{dnsbl_list}", exiting ...')
exit(1)
passed = True

View File

@ -14,9 +14,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -29,18 +27,14 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
firefox_options = Options()
firefox_options.add_argument("--headless")
errors = getenv("ERRORS", "")
intercepted_error_codes = getenv(
"INTERCEPTED_ERROR_CODES", "400 401 403 404 405 413 429 500 501 502 503 504"
)
intercepted_error_codes = getenv("INTERCEPTED_ERROR_CODES", "400 401 403 404 405 413 429 500 501 502 503 504")
print(" Starting Firefox ...", flush=True)
with webdriver.Firefox(options=firefox_options) as driver:
@ -55,26 +49,15 @@ try:
default_message = None
with suppress(NoSuchElementException):
default_message = driver.find_element(
By.XPATH, "//p[contains(text(), 'This website is protected with')]"
)
default_message = driver.find_element(By.XPATH, "//p[contains(text(), 'This website is protected with')]")
if default_message and (
errors
or intercepted_error_codes
!= "400 401 403 404 405 413 429 500 501 502 503 504"
):
if default_message and (errors or intercepted_error_codes != "400 401 403 404 405 413 429 500 501 502 503 504"):
print(
"❌ The default error page is being displayed, exiting ...",
flush=True,
)
exit(1)
elif (
not default_message
and not errors
and intercepted_error_codes
== "400 401 403 404 405 413 429 500 501 502 503 504"
):
elif not default_message and not errors and intercepted_error_codes == "400 401 403 404 405 413 429 500 501 502 503 504":
print(
"❌ The default error page is not being displayed, exiting ...",
flush=True,
@ -84,9 +67,7 @@ try:
if errors:
custom_message = None
with suppress(NoSuchElementException):
custom_message = driver.find_element(
By.XPATH, "//h1[contains(text(), 'It Works!')]"
)
custom_message = driver.find_element(By.XPATH, "//h1[contains(text(), 'It Works!')]")
if not custom_message:
print(
@ -98,9 +79,7 @@ try:
if intercepted_error_codes != "400 401 403 404 405 413 429 500 501 502 503 504":
nginx_message = None
with suppress(NoSuchElementException):
nginx_message = driver.find_element(
By.XPATH, "//center[contains(text(), 'nginx')]"
)
nginx_message = driver.find_element(By.XPATH, "//center[contains(text(), 'nginx')]")
if not nginx_message:
print(

View File

@ -19,11 +19,7 @@ with get(mmdb_url, stream=True) as resp:
file_content.write(chunk)
file_content.seek(0)
output_path = (
Path(sep, "output", "ip_asn.txt")
if getenv("TEST_TYPE", "docker") == "docker"
else Path(".", "ip_asn.txt")
)
output_path = Path(sep, "output", "ip_asn.txt") if getenv("TEST_TYPE", "docker") == "docker" else Path(".", "ip_asn.txt")
with open_database(GzipFile(fileobj=file_content, mode="rb"), mode=MODE_FD) as reader: # type: ignore
dbip_asn = reader.get("1.0.0.3")

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_greylist = getenv("USE_GREYLIST", "yes") == "yes"
@ -48,12 +44,7 @@ try:
print(" Sending a request to http://www.example.com ...", flush=True)
status_code = get(
"http://www.example.com",
headers={"Host": "www.example.com"}
| (
{"X-Forwarded-For": "1.0.0.3"}
if getenv("TEST_TYPE", "docker") == "linux" and _global
else {}
),
headers={"Host": "www.example.com"} | ({"X-Forwarded-For": "1.0.0.3"} if getenv("TEST_TYPE", "docker") == "linux" and _global else {}),
).status_code
print(f" Status code: {status_code}", flush=True)
@ -62,28 +53,16 @@ try:
if status_code == 403:
if not use_greylist:
print(
"❌ Request was rejected, even though greylist is supposed to be disabled, exiting ..."
)
print("❌ Request was rejected, even though greylist is supposed to be disabled, exiting ...")
exit(1)
elif (greylist_ip or greylist_ip_urls) and not _global:
print(
"❌ Request was rejected, even though IP is supposed to be in the greylist, exiting ..."
)
print("❌ Request was rejected, even though IP is supposed to be in the greylist, exiting ...")
exit(1)
elif (
(greylist_rdns or greylist_rdns_urls)
and not greylist_rdns_global
and not _global
):
print(
"❌ Request was rejected, even though RDNS is supposed to be in the greylist, exiting ..."
)
elif (greylist_rdns or greylist_rdns_urls) and not greylist_rdns_global and not _global:
print("❌ Request was rejected, even though RDNS is supposed to be in the greylist, exiting ...")
exit(1)
elif (greylist_asn or greylist_asn_urls) and _global:
print(
"❌ Request was rejected, even though ASN is supposed to be in the greylist, exiting ..."
)
print("❌ Request was rejected, even though ASN is supposed to be in the greylist, exiting ...")
exit(1)
elif greylist_user_agent or greylist_user_agent_urls:
print(
@ -98,9 +77,7 @@ try:
print(f" Status code: {status_code}", flush=True)
if status_code == 403:
print(
"❌ Request was rejected, even though User Agent is supposed to be in the greylist ..."
)
print("❌ Request was rejected, even though User Agent is supposed to be in the greylist ...")
exit(1)
sleep(2)
@ -126,16 +103,12 @@ try:
" Sending a request to http://www.example.com/admin ...",
flush=True,
)
status_code = get(
"http://www.example.com/admin", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com/admin", headers={"Host": "www.example.com"}).status_code
print(f" Status code: {status_code}", flush=True)
if status_code == 403:
print(
"❌ Request was rejected, even though URI is supposed to be in the greylist ..."
)
print("❌ Request was rejected, even though URI is supposed to be in the greylist ...")
exit(1)
sleep(2)
@ -158,14 +131,10 @@ try:
exit(1)
else:
if (greylist_ip or greylist_ip_urls) and _global:
print(
"❌ Request was not rejected, but IP is not in the greylist, exiting ..."
)
print("❌ Request was not rejected, but IP is not in the greylist, exiting ...")
exit(1)
elif (greylist_rdns or greylist_rdns_urls) and _global:
print(
"❌ Request was not rejected, but RDNS is not in the greylist, exiting ..."
)
print("❌ Request was not rejected, but RDNS is not in the greylist, exiting ...")
exit(1)
elif (greylist_asn or greylist_asn_urls) and not _global:
print("❌ Request was rejected, but ASN is not in the greylist, exiting ...")

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_gzip = getenv("USE_GZIP", "no") == "yes"
@ -44,14 +40,10 @@ try:
response.raise_for_status()
if not use_gzip and response.headers.get("Content-Encoding", "").lower() == "gzip":
print(
f"❌ Content-Encoding header is present even if Gzip is deactivated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Content-Encoding header is present even if Gzip is deactivated, exiting ...\nheaders: {response.headers}")
exit(1)
elif use_gzip and response.headers.get("Content-Encoding", "").lower() != "gzip":
print(
f"❌ Content-Encoding header is not present or with the wrong value even if Gzip is activated, exiting ...\nheaders: {response.headers}"
)
print(f"❌ Content-Encoding header is not present or with the wrong value even if Gzip is activated, exiting ...\nheaders: {response.headers}")
exit(1)
print("✅ Gzip is working as expected ...", flush=True)

View File

@ -29,15 +29,11 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
custom_headers = getenv("CUSTOM_HEADER", "")
remove_headers = getenv(
"REMOVE_HEADERS", "Server X-Powered-By X-AspNet-Version X-AspNetMvc-Version"
)
remove_headers = getenv("REMOVE_HEADERS", "Server X-Powered-By X-AspNet-Version X-AspNetMvc-Version")
strict_transport_security = getenv("STRICT_TRANSPORT_SECURITY", "max-age=31536000")
cookie_flags = getenv("COOKIE_FLAGS", "* HttpOnly SameSite=Lax")
cookie_flags_1 = getenv("COOKIE_FLAGS_1")
@ -49,11 +45,17 @@ try:
referrer_policy = getenv("REFERRER_POLICY", "strict-origin-when-cross-origin")
permissions_policy = getenv(
"PERMISSIONS_POLICY",
"accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), hid=(), idle-detection=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), serial=(), usb=(), web-share=(), xr-spatial-tracking=()",
"accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(),"
+ " execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), hid=(), idle-detection=(), magnetometer=(), microphone=(), midi=(),"
+ " navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), serial=(), usb=(), web-share=(), xr-spatial-tracking=()",
)
feature_policy = getenv(
"FEATURE_POLICY",
"accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; battery 'none'; camera 'none'; display-capture 'none'; document-domain 'none'; encrypted-media 'none'; execution-while-not-rendered 'none'; execution-while-out-of-viewport 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; layout-animation 'none'; legacy-image-formats 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; navigation-override 'none'; payment 'none'; picture-in-picture 'none'; publickey-credentials-get 'none'; speaker-selection 'none'; sync-xhr 'none'; unoptimized-images 'none'; unsized-media 'none'; usb 'none'; screen-wake-lock 'none'; web-share 'none'; xr-spatial-tracking 'none';",
"accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; battery 'none'; camera 'none'; display-capture 'none'; document-domain 'none'; encrypted-media 'none';"
+ " execution-while-not-rendered 'none'; execution-while-out-of-viewport 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; layout-animation 'none';"
+ " legacy-image-formats 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; navigation-override 'none'; payment 'none'; picture-in-picture 'none';"
+ " publickey-credentials-get 'none'; speaker-selection 'none'; sync-xhr 'none'; unoptimized-images 'none'; unsized-media 'none'; usb 'none'; screen-wake-lock 'none'; web-share 'none';"
+ " xr-spatial-tracking 'none';",
)
x_frame_options = getenv("X_FRAME_OPTIONS", "SAMEORIGIN")
x_content_type_options = getenv("X_CONTENT_TYPE_OPTIONS", "nosniff")
@ -86,11 +88,7 @@ try:
flush=True,
)
exit(1)
elif (
ssl
and response.headers.get("Strict-Transport-Security")
!= strict_transport_security
):
elif ssl and response.headers.get("Strict-Transport-Security") != strict_transport_security:
print(
f'❌ Header "Strict-Transport-Security" doesn\'t have the right value. {response.headers.get("Strict-Transport-Security", "missing header")} (header) != {strict_transport_security} (env), exiting ...\nheaders: {response.headers}',
flush=True,
@ -167,11 +165,7 @@ try:
f"❌ Cookie {cookie.name} has the HttpOnly flag even though it's not supposed to, exiting ...\ncookie: name = {cookie.name}, secure = {cookie.secure}, HttpOnly = {cookie.has_nonstandard_attr('HttpOnly')}",
)
exit(1)
elif (
not cookie_flags_1
and "HttpOnly" in cookie_flags
and not cookie.has_nonstandard_attr("HttpOnly")
):
elif not cookie_flags_1 and "HttpOnly" in cookie_flags and not cookie.has_nonstandard_attr("HttpOnly"):
print(
f"❌ Cookie {cookie.name} doesn't have the HttpOnly flag even though it's set in the env, exiting ...\ncookie: name = {cookie.name}, secure = {cookie.secure}, HttpOnly = {cookie.has_nonstandard_attr('HttpOnly')}",
)
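The long Permissions-Policy and Feature-Policy defaults above are split with runtime "+" concatenation to satisfy the line-length rule. Adjacent string literals inside parentheses join at compile time and read the same; a shortened sketch of that alternative:

```python
# Compile-time concatenation of adjacent literals (value truncated here).
permissions_policy = (
    "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), "
    "camera=(), cross-origin-isolated=(), display-capture=()"
)
```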

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
inject_body = getenv("INJECT_BODY", "")
@ -35,10 +31,7 @@ try:
page_text = get("http://www.example.com", headers={"Host": "www.example.com"}).text
if inject_body not in page_text:
print(
f"❌ The service is ready but the injected body is not present, exiting ...",
flush=True,
)
print("❌ The service is ready but the injected body is not present, exiting ...", flush=True)
exit(1)
print(

View File

@ -42,19 +42,13 @@ try:
if any(response.status_code >= 500 for response in responses):
print("❌ An error occurred with the server, exiting ...", flush=True)
exit(1)
elif (
not any(response.status_code == 429 for response in responses)
and limit_conn_http1 == 1
):
elif not any(response.status_code == 429 for response in responses) and limit_conn_http1 == 1:
print(
f"❌ The limit_conn for HTTP1 directive is not working correctly, the limit was set to {limit_conn_http1} and the limit was not reached with 5 simultaneous connections, exiting ...",
flush=True,
)
exit(1)
elif (
any(response.status_code == 429 for response in responses)
and limit_conn_http1 > 1
):
elif any(response.status_code == 429 for response in responses) and limit_conn_http1 > 1:
print(
f"❌ The limit_conn for HTTP1 directive is not working correctly, the limit was set to {limit_conn_http1} and the limit was reached with 5 simultaneous connections, exiting ...",
flush=True,
@ -72,9 +66,7 @@ try:
request_number = 0
stopped = False
print(
" Sending requests to the service until it reaches the limit ...", flush=True
)
print(" Sending requests to the service until it reaches the limit ...", flush=True)
while status_code != 429:
with Client() as client:

View File

@ -20,14 +20,7 @@ try:
)
try:
response = head(
"http://"
+ (
"192.168.0.2"
if getenv("TEST_TYPE", "docker") == "docker"
else "127.0.0.1"
)
)
response = head("http://" + ("192.168.0.2" if getenv("TEST_TYPE", "docker") == "docker" else "127.0.0.1"))
if response.status_code != 403 and disabled_default_server:
print(
@ -68,9 +61,7 @@ try:
if deny_http_status == "403" or not disabled_default_server:
raise e
print(
"✅ Request got rejected with the expected deny_http_status", flush=True
)
print("✅ Request got rejected with the expected deny_http_status", flush=True)
exit(0)
else:
print(
@ -83,10 +74,7 @@ try:
ssl_protocols = getenv("SSL_PROTOCOLS", "TLSv1.2 TLSv1.3")
print(
f" Creating a socket and wrapping it with SSL an SSL context to test SSL_PROTOCOLS",
flush=True,
)
print(" Creating a socket and wrapping it with SSL an SSL context to test SSL_PROTOCOLS", flush=True)
sock = create_connection(("www.example.com", 443))
ssl_context = create_default_context()
@ -106,10 +94,7 @@ try:
if not listen_http:
exit(0)
else:
print(
f" Skipping SSL_PROTOCOLS test as SSL is disabled",
flush=True,
)
print(" Skipping SSL_PROTOCOLS test as SSL is disabled", flush=True)
sleep(1)
@ -132,9 +117,7 @@ try:
if response.status_code not in (404, 301):
response.raise_for_status()
if (
redirect_http_to_https or (auto_redirect_http_to_https and ssl_generated)
) and response.status_code != 301:
if (redirect_http_to_https or (auto_redirect_http_to_https and ssl_generated)) and response.status_code != 301:
print(
f"❌ Request didn't get redirected, even if {'auto ' if auto_redirect_http_to_https else ''}redirect_http_to_https is enabled, exiting ...",
flush=True,
@ -277,27 +260,16 @@ try:
check=True,
)
status_code, http_version = (
proc.stdout.splitlines()[-1].replace("'", "").strip().split(" ")
)
status_code, http_version = proc.stdout.splitlines()[-1].replace("'", "").strip().split(" ")
if status_code not in ("200", "404"):
print(
f"❌ Request didn't get accepted, exiting ...",
flush=True,
)
print("❌ Request didn't get accepted, exiting ...", flush=True)
exit(1)
elif ssl_generated and http2 and http_version != "2":
print(
f"❌ Request didn't get accepted with HTTP/2, exiting ...",
flush=True,
)
print("❌ Request didn't get accepted with HTTP/2, exiting ...", flush=True)
exit(1)
elif (not ssl_generated or not http2) and http_version != "1.1":
print(
f"❌ Request got accepted with HTTP/2, it shouldn't have, exiting ...",
flush=True,
)
print("❌ Request got accepted with HTTP/2, it shouldn't have, exiting ...", flush=True)
exit(1)
print(f"✅ Request got accepted with HTTP/{http_version}", flush=True)

View File

@ -1,5 +1,4 @@
from contextlib import suppress
from datetime import datetime
from os import getenv
from requests import get
from requests.exceptions import RequestException
@ -11,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -26,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_modsecurity = getenv("USE_MODSECURITY", "yes") == "yes"
@ -39,9 +34,7 @@ try:
flush=True,
)
status_code = get(
"http://www.example.com/?id=/etc/passwd", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com/?id=/etc/passwd", headers={"Host": "www.example.com"}).status_code
print(f" Status code: {status_code}", flush=True)

View File

@ -3,9 +3,7 @@ from os import getenv
from requests import get
from requests.exceptions import RequestException
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.options import Options
from selenium.common.exceptions import NoSuchElementException
from time import sleep
from traceback import format_exc
@ -14,9 +12,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -29,9 +25,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
firefox_options = Options()

View File

@ -1,10 +1,8 @@
from contextlib import suppress
from fastapi import FastAPI
from multiprocessing import Process
from os import getenv
from redis import Redis
from requests import get
from requests.exceptions import RequestException
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from time import sleep
@ -65,9 +63,7 @@ try:
if ip_to_check == "1.0.0.3":
print(" Testing Reverse Scan, starting FastAPI ...", flush=True)
app = FastAPI()
fastapi_proc = Process(
target=run, args=(app,), kwargs=dict(host="0.0.0.0", port=8080)
)
fastapi_proc = Process(target=run, args=(app,), kwargs=dict(host="0.0.0.0", port=8080))
fastapi_proc.start()
sleep(2)
@ -94,9 +90,7 @@ try:
port_to_check = "8080" if ip_to_check == "1.0.0.3" else "80"
key_value = redis_client.get(
f"plugin_reverse_scan_{ip_to_check}:{port_to_check}"
)
key_value = redis_client.get(f"plugin_reverse_scan_{ip_to_check}:{port_to_check}")
if key_value is None:
print(

View File

@ -11,9 +11,7 @@ from uvicorn import run
fastapi_proc = None
if getenv("TEST_TYPE", "docker") == "docker":
app = FastAPI()
fastapi_proc = Process(
target=run, args=(app,), kwargs=dict(host="0.0.0.0", port=80)
)
fastapi_proc = Process(target=run, args=(app,), kwargs=dict(host="0.0.0.0", port=80))
fastapi_proc.start()
sleep(1)
@ -22,10 +20,8 @@ try:
use_reverse_scan = getenv("USE_REVERSE_SCAN", "yes") == "yes"
reverse_scan_ports = getenv("REVERSE_SCAN_PORTS", "80")
print(f" Trying to access http://www.example.com ...", flush=True)
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
print(" Trying to access http://www.example.com ...", flush=True)
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
print(f" Status code: {status_code}", flush=True)

View File

@ -12,17 +12,11 @@ try:
ssl_generated = getenv("GENERATE_SELF_SIGNED_SSL", "no") == "yes"
self_signed_ssl_expiry = getenv("SELF_SIGNED_SSL_EXPIRY", "365")
self_signed_ssl_expiry = (
datetime.now()
+ timedelta(days=int(self_signed_ssl_expiry))
- timedelta(hours=1)
)
self_signed_ssl_expiry = datetime.now() + timedelta(days=int(self_signed_ssl_expiry)) - timedelta(hours=1)
self_signed_ssl_subj = getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/")
response = get(
f"http://www.example.com", headers={"Host": "www.example.com"}, verify=False
)
response = get("http://www.example.com", headers={"Host": "www.example.com"}, verify=False)
if not ssl_generated and response.status_code == 200:
print(
@ -33,9 +27,7 @@ try:
sleep(1)
response = get(
f"https://www.example.com", headers={"Host": "www.example.com"}, verify=False
)
response = get("https://www.example.com", headers={"Host": "www.example.com"}, verify=False)
if ssl_generated and response.status_code != 200:
print(
@ -57,12 +49,8 @@ try:
# Parse the PEM certificate
certificate = x509.load_pem_x509_certificate(pem_data.encode(), default_backend())
common_name = certificate.subject.get_attributes_for_oid(
x509.oid.NameOID.COMMON_NAME
)[0].value
check_self_signed_ssl_subj = self_signed_ssl_subj.replace("/", "").replace(
"CN=", ""
)
common_name = certificate.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME)[0].value
check_self_signed_ssl_subj = self_signed_ssl_subj.replace("/", "").replace("CN=", "")
if common_name != check_self_signed_ssl_subj:
print(
f"❌ The SSL generation is enabled and the Common Name (CN) is not {check_self_signed_ssl_subj} but {common_name}, exiting ...",

View File

@ -13,9 +13,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -28,9 +26,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
firefox_options = Options()
@ -61,38 +57,29 @@ try:
print(" Reloading BunkerWeb ...", flush=True)
if TEST_TYPE == "docker":
response = post(
f"http://192.168.0.2:5000/reload",
headers={"Host": "bwapi"},
)
response = post("http://192.168.0.2:5000/reload", headers={"Host": "bwapi"})
if response.status_code != 200:
print(
"❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True
)
print("❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True)
exit(1)
data = response.json()
if data["status"] != "success":
print(
"❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True
)
print("❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True)
exit(1)
sleep(5)
else:
proc = run(["sudo", "systemctl", "restart", "bunkerweb"], check=False)
if proc.returncode != 0:
print(
"❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True
)
print("❌ An error occurred when restarting BunkerWeb, exiting ...", flush=True)
exit(1)
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,

View File

@ -19,11 +19,7 @@ with get(mmdb_url, stream=True) as resp:
file_content.write(chunk)
file_content.seek(0)
output_path = (
Path(sep, "output", "ip_asn.txt")
if getenv("TEST_TYPE", "docker") == "docker"
else Path(".", "ip_asn.txt")
)
output_path = Path(sep, "output", "ip_asn.txt") if getenv("TEST_TYPE", "docker") == "docker" else Path(".", "ip_asn.txt")
with open_database(GzipFile(fileobj=file_content, mode="rb"), mode=MODE_FD) as reader: # type: ignore
dbip_asn = reader.get("1.0.0.3")
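
The open_database call above reads the gzipped MMDB straight from memory via MODE_FD, so nothing is unpacked to disk. Isolated, the technique looks like this (the URL is a placeholder, not the mirror the script downloads from):

    from gzip import GzipFile
    from io import BytesIO

    from maxminddb import MODE_FD, open_database
    from requests import get

    mmdb_url = "https://example.com/asn.mmdb.gz"  # placeholder URL
    buf = BytesIO(get(mmdb_url).content)

    # MODE_FD reads from any file-like object, here the gzip stream wrapped around the download
    with open_database(GzipFile(fileobj=buf, mode="rb"), mode=MODE_FD) as reader:
        print(reader.get("1.0.0.3"))  # ASN record for the probe IP, or None if unknown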

View File

@ -10,9 +10,7 @@ try:
retries = 0
while not ready:
with suppress(RequestException):
status_code = get(
"http://www.example.com", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com", headers={"Host": "www.example.com"}).status_code
if status_code >= 500:
print("❌ An error occurred with the server, exiting ...", flush=True)
@ -25,9 +23,7 @@ try:
exit(1)
elif not ready:
retries += 1
print(
"⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True
)
print("⚠️ Waiting for the service to be ready, retrying in 5s ...", flush=True)
sleep(5)
use_whitelist = getenv("USE_WHITELIST", "yes") == "yes"
@ -48,35 +44,20 @@ try:
print(" Sending a request to http://www.example.com ...", flush=True)
status_code = get(
"http://www.example.com",
headers={"Host": "www.example.com"}
| (
{"X-Forwarded-For": "1.0.0.3"}
if getenv("TEST_TYPE", "docker") == "linux" and _global
else {}
),
headers={"Host": "www.example.com"} | ({"X-Forwarded-For": "1.0.0.3"} if getenv("TEST_TYPE", "docker") == "linux" and _global else {}),
).status_code
print(f" Status code: {status_code}", flush=True)
if status_code == 403:
if (whitelist_ip or whitelist_ip_urls) and not _global:
print(
"❌ Request was rejected, even though IP is supposed to be in the whitelist, exiting ..."
)
print("❌ Request was rejected, even though IP is supposed to be in the whitelist, exiting ...")
exit(1)
elif (
(whitelist_rdns or whitelist_rdns_urls)
and not whitelist_rdns_global
and not _global
):
print(
"❌ Request was rejected, even though RDNS is supposed to be in the whitelist, exiting ..."
)
elif (whitelist_rdns or whitelist_rdns_urls) and not whitelist_rdns_global and not _global:
print("❌ Request was rejected, even though RDNS is supposed to be in the whitelist, exiting ...")
exit(1)
elif (whitelist_asn or whitelist_asn_urls) and _global:
print(
"❌ Request was rejected, even though ASN is supposed to be in the whitelist, exiting ..."
)
print("❌ Request was rejected, even though ASN is supposed to be in the whitelist, exiting ...")
exit(1)
elif whitelist_user_agent or whitelist_user_agent_urls:
print(
@ -91,9 +72,7 @@ try:
print(f" Status code: {status_code}", flush=True)
if status_code == 403:
print(
"❌ Request was rejected, even though User Agent is supposed to be in the whitelist ..."
)
print("❌ Request was rejected, even though User Agent is supposed to be in the whitelist ...")
exit(1)
print("✅ Request was not rejected, User Agent is in the whitelist ...")
@ -102,34 +81,24 @@ try:
" Sending a request to http://www.example.com/admin ...",
flush=True,
)
status_code = get(
"http://www.example.com/admin", headers={"Host": "www.example.com"}
).status_code
status_code = get("http://www.example.com/admin", headers={"Host": "www.example.com"}).status_code
print(f" Status code: {status_code}", flush=True)
if status_code == 403:
print(
"❌ Request was rejected, even though URI is supposed to be in the whitelist ..."
)
print("❌ Request was rejected, even though URI is supposed to be in the whitelist ...")
exit(1)
print("✅ Request was not rejected, URI is in the whitelist ...")
else:
if (whitelist_ip or whitelist_ip_urls) and _global:
print(
"❌ Request was not rejected, but IP is not in the whitelist, exiting ..."
)
print("❌ Request was not rejected, but IP is not in the whitelist, exiting ...")
exit(1)
elif (whitelist_rdns or whitelist_rdns_urls) and _global:
print(
"❌ Request was not rejected, but RDNS is not in the whitelist, exiting ..."
)
print("❌ Request was not rejected, but RDNS is not in the whitelist, exiting ...")
exit(1)
elif (whitelist_asn or whitelist_asn_urls) and not _global:
print(
"❌ Request was rejected, but ASN is not in the whitelist, exiting ..."
)
print("❌ Request was rejected, but ASN is not in the whitelist, exiting ...")
exit(1)
elif whitelist_user_agent or whitelist_user_agent_urls:
print("❌ Request was rejected, but User Agent is not in the whitelist ...")

View File

@ -3,7 +3,7 @@
from pathlib import Path
from sys import path, argv, exit
from glob import glob
from os import getenv, _exit
from os import _exit
from os.path import isfile
from traceback import format_exc
from json import loads
@ -23,7 +23,7 @@ if len(argv) <= 1:
exit(1)
test_type = argv[1]
if not test_type in ("linux", "docker", "autoconf", "swarm", "kubernetes", "ansible"):
if test_type not in ("linux", "docker", "autoconf", "swarm", "kubernetes", "ansible"):
log("TESTS", "", "Wrong type argument " + test_type)
exit(1)
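
The membership rewrites in this file apply flake8's E713 ("test for membership should be 'not in x'"), the same rule behind the 'b"BunkerWeb is ready" not in run(...)' hunks elsewhere in the commit; both spellings are equivalent, the second is simply idiomatic:

    test_type = "ansible"

    # E713: flagged spelling
    if not test_type in ("linux", "docker"):
        print("unsupported")

    # preferred spelling, identical behaviour
    if test_type not in ("linux", "docker"):
        print("unsupported")
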
@ -57,7 +57,7 @@ for example in glob("./examples/*"):
try:
with open(f"{example}/tests.json") as f:
tests = loads(f.read())
if not test_type in tests["kinds"]:
if test_type not in tests["kinds"]:
log(
"TESTS",
"",
@ -88,17 +88,11 @@ for example in glob("./examples/*"):
delay=delay,
)
elif test_type == "swarm":
test_obj = SwarmTest(
tests["name"], tests["timeout"], tests["tests"], delay=delay
)
test_obj = SwarmTest(tests["name"], tests["timeout"], tests["tests"], delay=delay)
elif test_type == "kubernetes":
test_obj = KubernetesTest(
tests["name"], tests["timeout"], tests["tests"], delay=delay
)
test_obj = KubernetesTest(tests["name"], tests["timeout"], tests["tests"], delay=delay)
elif test_type == "linux":
test_obj = LinuxTest(
tests["name"], tests["timeout"], tests["tests"], distro
)
test_obj = LinuxTest(tests["name"], tests["timeout"], tests["tests"], distro)
if not test_obj.run_tests():
log("TESTS", "", "Tests failed for " + tests["name"])
if test_type == "linux":
@ -110,10 +104,7 @@ for example in glob("./examples/*"):
log(
"TESTS",
"",
"Exception while executing test for example "
+ example
+ " : "
+ format_exc(),
"Exception while executing test for example " + example + " : " + format_exc(),
)
if test_type == "linux":
ret = end_fun(distro)

View File

@ -54,15 +54,9 @@ if "geckodriver" not in listdir(Path.cwd()):
print("Starting Firefox ...", flush=True)
def safe_get_element(
driver, by: By, _id: str, *, multiple: bool = False, error: bool = False
) -> Union[WebElement, List[WebElement]]:
def safe_get_element(driver, by: By, _id: str, *, multiple: bool = False, error: bool = False) -> Union[WebElement, List[WebElement]]:
try:
return WebDriverWait(driver, 4).until(
EC.presence_of_element_located((by, _id))
if not multiple
else EC.presence_of_all_elements_located((by, _id))
)
return WebDriverWait(driver, 4).until(EC.presence_of_element_located((by, _id)) if not multiple else EC.presence_of_all_elements_located((by, _id)))
except TimeoutException as e:
if error:
raise e
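
The collapsed safe_get_element above is a thin wrapper over WebDriverWait. A self-contained re-derivation, under the assumption that callers handle TimeoutException themselves (wait_for is a hypothetical name, not the helper the suite uses):

    from typing import List, Union

    from selenium.webdriver.common.by import By
    from selenium.webdriver.remote.webelement import WebElement
    from selenium.webdriver.support import expected_conditions as EC
    from selenium.webdriver.support.ui import WebDriverWait


    def wait_for(driver, by: By, selector: str, *, multiple: bool = False, timeout: int = 4) -> Union[WebElement, List[WebElement]]:
        # blocks up to `timeout` seconds, then raises selenium's TimeoutException
        condition = EC.presence_of_all_elements_located((by, selector)) if multiple else EC.presence_of_element_located((by, selector))
        return WebDriverWait(driver, timeout).until(condition)
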
@ -124,9 +118,7 @@ def assert_alert_message(driver, message: str):
print(f'Message "{message}" found in one of the messages in the list', flush=True)
assert_button_click(
driver, "//aside[@data-flash-sidebar='']/*[local-name() = 'svg']"
)
assert_button_click(driver, "//aside[@data-flash-sidebar='']/*[local-name() = 'svg']")
def access_page(
@ -141,11 +133,7 @@ def access_page(
assert_button_click(driver, button)
try:
title = driver_wait.until(
EC.presence_of_element_located(
(By.XPATH, "/html/body/div/header/div/nav/h6")
)
)
title = driver_wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/div/header/div/nav/h6")))
if title.text != name.replace(" ", "_").title():
print(f"Didn't get redirected to {name} page, exiting ...", flush=True)
@ -169,13 +157,7 @@ driver_func = partial(webdriver.Firefox, options=firefox_options)
if TEST_TYPE == "dev":
driver_func = partial(
webdriver.Firefox,
service=Service(
Service(
executable_path="./geckodriver"
if "geckodriver" in listdir(Path.cwd())
else "/usr/local/bin/geckodriver"
)
),
service=Service(executable_path="./geckodriver" if "geckodriver" in listdir(Path.cwd()) else "/usr/local/bin/geckodriver"),
options=firefox_options,
)
@ -234,11 +216,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
sleep(0.3)
try:
title = driver_wait.until(
EC.presence_of_element_located(
(By.XPATH, "/html/body/main/div[1]/div/h1")
)
)
title = driver_wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/main/div[1]/div/h1")))
if title.text != "Log in":
print("Didn't get redirected to login page, exiting ...", flush=True)
@ -284,11 +262,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
print(f"Trying to {action} BunkerWeb instance ...", flush=True)
try:
form = WebDriverWait(driver, 2).until(
EC.presence_of_element_located(
(By.XPATH, "//form[starts-with(@id, 'form-instance-')]")
)
)
form = WebDriverWait(driver, 2).until(EC.presence_of_element_located((By.XPATH, "//form[starts-with(@id, 'form-instance-')]")))
except TimeoutException:
print("No instance form found, exiting ...", flush=True)
exit(1)
@ -315,15 +289,13 @@ with webdriver.Firefox(options=firefox_options) as driver:
exit(1)
retries += 1
print(
"WARNING: message list doesn't contain the expected message or is empty, retrying..."
)
print("WARNING: message list doesn't contain the expected message or is empty, retrying...")
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -383,9 +355,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
exit(1)
retries += 1
print(
"WARNING: message list doesn't contain the expected message or is empty, retrying..."
)
print("WARNING: message list doesn't contain the expected message or is empty, retrying...")
print(
'Checking if the "DATASTORE_MEMORY_SIZE" input have the overridden value ...',
@ -414,9 +384,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
input_worker.clear()
input_worker.send_keys("ZZZ")
assert_button_click(
driver, "//form[@id='form-edit-global-configs']//button[@type='submit']"
)
assert_button_click(driver, "//form[@id='form-edit-global-configs']//button[@type='submit']")
assert_alert_message(
driver,
@ -442,8 +410,8 @@ with webdriver.Firefox(options=firefox_options) as driver:
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -510,9 +478,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
print("Checking the services page ...", flush=True)
try:
service = safe_get_element(
driver, By.XPATH, "//div[@data-services-service='']", error=True
)
service = safe_get_element(driver, By.XPATH, "//div[@data-services-service='']", error=True)
except TimeoutException:
print("Services not found, exiting ...", flush=True)
exit(1)
@ -559,17 +525,13 @@ with webdriver.Firefox(options=firefox_options) as driver:
)
try:
modal = safe_get_element(
driver, By.XPATH, "//div[@data-services-modal='']", error=True
)
modal = safe_get_element(driver, By.XPATH, "//div[@data-services-modal='']", error=True)
except TimeoutException:
print("Modal not found, exiting ...", flush=True)
exit(1)
if "hidden" in modal.get_attribute("class"):
print(
"Modal is hidden even though it shouldn't be, exiting ...", flush=True
)
print("Modal is hidden even though it shouldn't be, exiting ...", flush=True)
exit(1)
input_server_name = safe_get_element(driver, By.ID, "SERVER_NAME")
@ -588,9 +550,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
safe_get_element(driver, By.XPATH, "//button[@data-tab-handler='gzip']"),
)
gzip_select = safe_get_element(
driver, By.XPATH, "//button[@data-setting-select='gzip-comp-level']"
)
gzip_select = safe_get_element(driver, By.XPATH, "//button[@data-setting-select='gzip-comp-level']")
assert_button_click(driver, gzip_select)
@ -614,8 +574,8 @@ with webdriver.Firefox(options=firefox_options) as driver:
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -635,9 +595,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
)
try:
service = safe_get_element(
driver, By.XPATH, "//div[@data-services-service='']", error=True
)
service = safe_get_element(driver, By.XPATH, "//div[@data-services-service='']", error=True)
except TimeoutException:
print("Services not found, exiting ...", flush=True)
exit(1)
@ -650,9 +608,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
modal = safe_get_element(driver, By.XPATH, "//div[@data-services-modal='']")
if "hidden" in modal.get_attribute("class"):
print(
"Modal is hidden even though it shouldn't be, exiting ...", flush=True
)
print("Modal is hidden even though it shouldn't be, exiting ...", flush=True)
exit(1)
assert_button_click(
@ -662,18 +618,11 @@ with webdriver.Firefox(options=firefox_options) as driver:
gzip_true_select = safe_get_element(driver, By.ID, "GZIP_COMP_LEVEL")
if (
safe_get_element(
driver, By.XPATH, "//select[@id='GZIP_COMP_LEVEL']/option[@selected='']"
).get_attribute("value")
!= "6"
):
if safe_get_element(driver, By.XPATH, "//select[@id='GZIP_COMP_LEVEL']/option[@selected='']").get_attribute("value") != "6":
print("The value is not the expected one, exiting ...", flush=True)
exit(1)
assert_button_click(
driver, "//button[@data-services-modal-close='']/*[local-name() = 'svg']"
)
assert_button_click(driver, "//button[@data-services-modal-close='']/*[local-name() = 'svg']")
print("Creating a new service ...", flush=True)
@ -686,17 +635,11 @@ with webdriver.Firefox(options=firefox_options) as driver:
if TEST_TYPE == "docker":
assert_button_click(driver, "//button[@data-tab-handler='reverseproxy']")
assert_button_click(
driver, safe_get_element(driver, By.ID, "USE_REVERSE_PROXY")
)
assert_button_click(driver, safe_get_element(driver, By.ID, "USE_REVERSE_PROXY"))
assert_button_click(
driver, "//button[@data-services-multiple-add='reverse-proxy']"
)
assert_button_click(driver, "//button[@data-services-multiple-add='reverse-proxy']")
safe_get_element(driver, By.ID, "REVERSE_PROXY_HOST").send_keys(
"http://app1:8080"
)
safe_get_element(driver, By.ID, "REVERSE_PROXY_HOST").send_keys("http://app1:8080")
safe_get_element(driver, By.ID, "REVERSE_PROXY_URL").send_keys("/")
access_page(
@ -710,8 +653,8 @@ with webdriver.Firefox(options=firefox_options) as driver:
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -744,9 +687,7 @@ with webdriver.Firefox(options=firefox_options) as driver:
service = services[0]
if service.find_element(By.TAG_NAME, "h5").text.strip() != "app1.example.com":
print(
'The service "app1.example.com" is not present, exiting ...', flush=True
)
print('The service "app1.example.com" is not present, exiting ...', flush=True)
exit(1)
if service.find_element(By.TAG_NAME, "h6").text.strip() != "ui":
@ -829,8 +770,8 @@ with webdriver.Firefox(options=firefox_options) as driver:
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -889,12 +830,8 @@ with webdriver.Firefox(options=firefox_options) as driver:
)
assert_button_click(driver, "//li[@data-configs-add-file='']/button")
safe_get_element(
driver, By.XPATH, "//div[@data-configs-modal-path='']/input"
).send_keys("hello")
safe_get_element(
driver, By.XPATH, "//div[@data-configs-modal-editor='']/textarea"
).send_keys(
safe_get_element(driver, By.XPATH, "//div[@data-configs-modal-path='']/input").send_keys("hello")
safe_get_element(driver, By.XPATH, "//div[@data-configs-modal-editor='']/textarea").send_keys(
"""
location /hello {
default_type 'text/plain';
@ -916,8 +853,8 @@ location /hello {
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -940,13 +877,8 @@ location /hello {
driver.switch_to.default_content()
try:
if (
safe_get_element(driver, By.XPATH, "//pre", error=True).text.strip()
!= "hello app1"
):
print(
"The config hasn't been created correctly, exiting ...", flush=True
)
if safe_get_element(driver, By.XPATH, "//pre", error=True).text.strip() != "hello app1":
print("The config hasn't been created correctly, exiting ...", flush=True)
exit(1)
except TimeoutException:
print("The config hasn't been created, exiting ...", flush=True)
@ -981,8 +913,8 @@ location /hello {
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -1011,9 +943,7 @@ location /hello {
print("Trying to reload the plugins without adding any ...", flush=True)
reload_button = safe_get_element(
driver, By.XPATH, "//div[@data-plugins-upload='']//button[@type='submit']"
)
reload_button = safe_get_element(driver, By.XPATH, "//div[@data-plugins-upload='']//button[@type='submit']")
if reload_button.get_attribute("disabled") is None:
print("The reload button is not disabled, exiting ...", flush=True)
@ -1021,13 +951,9 @@ location /hello {
print("Trying to filter the plugins ...", flush=True)
safe_get_element(
driver, By.XPATH, "//input[@placeholder='key words']"
).send_keys("Anti")
safe_get_element(driver, By.XPATH, "//input[@placeholder='key words']").send_keys("Anti")
plugins = safe_get_element(
driver, By.XPATH, "//div[@data-plugins-list='']", multiple=True
)
plugins = safe_get_element(driver, By.XPATH, "//div[@data-plugins-list='']", multiple=True)
if len(plugins) != 1:
print("The filter is not working, exiting ...", flush=True)
@ -1035,9 +961,7 @@ location /hello {
print("The filter is working, trying to add a bad plugin ...", flush=True)
safe_get_element(
driver, By.XPATH, "//input[@type='file' and @name='file']"
).send_keys(join(Path.cwd(), "test.zip"))
safe_get_element(driver, By.XPATH, "//input[@type='file' and @name='file']").send_keys(join(Path.cwd(), "test.zip"))
sleep(2)
@ -1054,9 +978,7 @@ location /hello {
flush=True,
)
safe_get_element(
driver, By.XPATH, "//input[@type='file' and @name='file']"
).send_keys(join(Path.cwd(), "discord.zip"))
safe_get_element(driver, By.XPATH, "//input[@type='file' and @name='file']").send_keys(join(Path.cwd(), "discord.zip"))
sleep(2)
@ -1071,8 +993,8 @@ location /hello {
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -1115,8 +1037,8 @@ location /hello {
if TEST_TYPE == "linux":
retries = 0
while (
not b"BunkerWeb is ready"
in run(
b"BunkerWeb is ready"
not in run(
["sudo", "tail", "-n", "1", "/var/log/bunkerweb/error.log"],
stdout=PIPE,
check=True,
@ -1146,9 +1068,7 @@ location /hello {
print("The plugin has been deleted, trying cache page ...", flush=True)
access_page(
driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[7]/a", "cache"
)
access_page(driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[7]/a", "cache")
### CACHE PAGE
@ -1173,9 +1093,7 @@ location /hello {
print("The cache file content is correct, trying logs page ...", flush=True)
access_page(
driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[8]/a", "logs"
)
access_page(driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[8]/a", "logs")
### LOGS PAGE
@ -1201,9 +1119,7 @@ location /hello {
sleep(3)
logs_list = safe_get_element(
driver, By.XPATH, "//ul[@data-logs-list='']/li", multiple=True
)
logs_list = safe_get_element(driver, By.XPATH, "//ul[@data-logs-list='']/li", multiple=True)
if len(logs_list) == 0:
print("No logs found, exiting ...", flush=True)
@ -1212,9 +1128,7 @@ location /hello {
print("Logs found, trying auto refresh ...", flush=True)
assert_button_click(driver, safe_get_element(driver, By.ID, "live-update"))
assert_button_click(
driver, "//button[@id='submit-settings' and contains(text(), 'Go Live')]"
)
assert_button_click(driver, "//button[@id='submit-settings' and contains(text(), 'Go Live')]")
sleep(3)
@ -1236,9 +1150,7 @@ location /hello {
sleep(3)
logs_list = safe_get_element(
driver, By.XPATH, "//ul[@data-logs-list='']/li", multiple=True
)
logs_list = safe_get_element(driver, By.XPATH, "//ul[@data-logs-list='']/li", multiple=True)
print("Trying filters ...", flush=True)
@ -1303,17 +1215,13 @@ location /hello {
print("Date filter is working, trying jobs page ...", flush=True)
access_page(
driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[9]/a", "jobs"
)
access_page(driver, driver_wait, "/html/body/aside[1]/div[1]/div[2]/ul/li[9]/a", "jobs")
### JOBS PAGE
print("Trying to filter jobs ...", flush=True)
jobs_list = safe_get_element(
driver, By.XPATH, "//ul[@data-jobs-list='']/li", multiple=True
)
jobs_list = safe_get_element(driver, By.XPATH, "//ul[@data-jobs-list='']/li", multiple=True)
if len(jobs_list) == 0:
print("No jobs found, exiting ...", flush=True)
@ -1432,9 +1340,7 @@ location /hello {
sleep(0.3)
resp = get(
"http://www.example.com/admin/jobs/download?job_name=mmdb-country&file_name=country.mmdb"
)
resp = get("http://www.example.com/admin/jobs/download?job_name=mmdb-country&file_name=country.mmdb")
if resp.status_code != 200:
print("The cache download is not working, exiting ...", flush=True)
@ -1445,11 +1351,7 @@ location /hello {
assert_button_click(driver, "//a[@href='logout']")
try:
title = driver_wait.until(
EC.presence_of_element_located(
(By.XPATH, "/html/body/main/div[1]/div/h1")
)
)
title = driver_wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/main/div[1]/div/h1")))
if title.text != "Log in":
print("Didn't get redirected to login page, exiting ...", flush=True)