Starting work on path resolution refactor

Théophile Diot 2023-05-25 16:52:02 -04:00
parent b5a78c3aaa
commit 8dad7a0b79
No known key found for this signature in database
GPG Key ID: E752C80DB72BB014
29 changed files with 627 additions and 527 deletions

View File

@ -1,13 +1,21 @@
from os import getenv
from time import sleep
#!/usr/bin/python3
from ConfigCaller import ConfigCaller
from Database import Database
from logger import setup_logger
from os import getenv
from threading import Lock
from time import sleep
from typing import Literal, Optional, Union
from ConfigCaller import ConfigCaller # type: ignore
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
class Config(ConfigCaller):
def __init__(self, ctrl_type, lock=None):
def __init__(
self,
ctrl_type: Union[Literal["docker"], Literal["swarm"], Literal["kubernetes"]],
lock: Optional[Lock] = None,
):
super().__init__()
self.__ctrl_type = ctrl_type
self.__lock = lock
@ -77,6 +85,9 @@ class Config(ConfigCaller):
)
sleep(5)
if self.__lock:
self.__lock.acquire()
# update instances in database
err = self._db.update_instances(self.__instances)
if err:
@ -98,4 +109,7 @@ class Config(ConfigCaller):
f"Can't save autoconf custom configs in database: {err}, custom configs may not work as expected",
)
if self.__lock:
self.__lock.release()
return success
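
Note on the locking added in these hunks: the lock is acquired before the instance update and released only after the custom-configs save, so an exception in between would leave it held. A minimal sketch of a safer shape, using a hypothetical helper name and the update_instances method as it appears in this diff:

from threading import Lock
from typing import Optional

def update_instances_locked(db, instances, lock: Optional[Lock] = None) -> bool:
    # Guard the shared database handle with the optional lock and
    # guarantee release via try/finally, even if the update raises.
    if lock:
        lock.acquire()
    try:
        err = db.update_instances(instances)  # method name taken from this diff
        return not err
    finally:
        if lock:
            lock.release()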

View File

@ -1,14 +1,22 @@
#!/usr/bin/python3
from abc import ABC, abstractmethod
from os import getenv
from threading import Lock
from time import sleep
from typing import Literal, Optional, Union
from Config import Config
from logger import setup_logger
from logger import setup_logger # type: ignore
class Controller(ABC):
def __init__(self, ctrl_type, lock=None):
def __init__(
self,
ctrl_type: Union[Literal["docker"], Literal["swarm"], Literal["kubernetes"]],
lock: Optional[Lock] = None,
):
self._type = ctrl_type
self._instances = []
self._services = []
@ -27,7 +35,7 @@ class Controller(ABC):
self._config = Config(ctrl_type, lock)
self.__logger = setup_logger("Controller", getenv("LOG_LEVEL", "INFO"))
def wait(self, wait_time):
def wait(self, wait_time: int) -> list:
all_ready = False
while not all_ready:
self._instances = self.get_instances()
@ -59,8 +67,7 @@ class Controller(ABC):
def get_instances(self):
instances = []
for controller_instance in self._get_controller_instances():
for instance in self._to_instances(controller_instance):
instances.append(instance)
instances.extend(self._to_instances(controller_instance))
return instances
@abstractmethod
@ -86,10 +93,8 @@ class Controller(ABC):
def get_services(self):
services = []
for controller_service in self._get_controller_services():
for service in self._to_services(controller_service):
services.append(service)
for static_service in self._get_static_services():
services.append(static_service)
services.extend(self._to_services(controller_service))
services.extend(self._get_static_services())
return services
@abstractmethod
@ -106,8 +111,8 @@ class Controller(ABC):
def _is_service_present(self, server_name):
for service in self._services:
if not "SERVER_NAME" in service or service["SERVER_NAME"] == "":
if not "SERVER_NAME" in service or not service["SERVER_NAME"]:
continue
if server_name == service["SERVER_NAME"].split(" ")[0]:
if server_name == service["SERVER_NAME"].strip().split(" ")[0]:
return True
return False
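
The tightened check above now skips services whose SERVER_NAME is missing or falsy and compares only the first space-separated name after stripping whitespace. A standalone sketch of the same logic:

def is_service_present(services: list, server_name: str) -> bool:
    # Only the first space-separated server name counts as the identity.
    for service in services:
        if "SERVER_NAME" not in service or not service["SERVER_NAME"]:
            continue
        if server_name == service["SERVER_NAME"].strip().split(" ")[0]:
            return True
    return False

# A leading space no longer breaks the match:
assert is_service_present([{"SERVER_NAME": " www.example.com example.com"}], "www.example.com")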

View File

@ -1,11 +1,15 @@
#!/usr/bin/python3
from os import getenv
from typing import Any, Dict, List
from docker import DockerClient
from re import compile as re_compile
from traceback import format_exc
from docker.models.containers import Container
from Controller import Controller
from ConfigCaller import ConfigCaller
from logger import setup_logger
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class DockerController(Controller, ConfigCaller):
@ -18,13 +22,13 @@ class DockerController(Controller, ConfigCaller):
r"^bunkerweb.CUSTOM_CONF_(SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
)
def _get_controller_instances(self):
def _get_controller_instances(self) -> List[Container]:
return self.__client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
def _get_controller_services(self):
def _get_controller_services(self) -> List[Container]:
return self.__client.containers.list(filters={"label": "bunkerweb.SERVER_NAME"})
def _to_instances(self, controller_instance):
def _to_instances(self, controller_instance) -> List[dict]:
instance = {}
instance["name"] = controller_instance.name
instance["hostname"] = controller_instance.name
@ -40,18 +44,18 @@ class DockerController(Controller, ConfigCaller):
instance["env"][variable] = value
return [instance]
def _to_services(self, controller_service):
def _to_services(self, controller_service) -> List[dict]:
service = {}
for variable, value in controller_service.labels.items():
if not variable.startswith("bunkerweb."):
continue
real_variable = variable.replace("bunkerweb.", "", 1)
if not self._is_multisite_setting(real_variable):
if not self._is_setting_context(real_variable, "multisite"):
continue
service[real_variable] = value
return [service]
def _get_static_services(self):
def _get_static_services(self) -> List[dict]:
services = []
variables = {}
for instance in self.__client.containers.list(
@ -71,14 +75,14 @@ class DockerController(Controller, ConfigCaller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_multisite_setting(
real_variable
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
service[real_variable] = value
services.append(service)
return services
def get_configs(self):
def get_configs(self) -> Dict[str, Dict[str, Any]]:
configs = {config_type: {} for config_type in self._supported_config_types}
# get site configs from labels
for container in self.__client.containers.list(
@ -106,7 +110,7 @@ class DockerController(Controller, ConfigCaller):
] = value
return configs
def apply_config(self):
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)
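
Both label-driven paths in this file now go through the renamed _is_setting_context check. A sketch of the filtering they share, with the predicate passed in since it lives on ConfigCaller:

def labels_to_service(labels: dict, is_setting_context) -> dict:
    # Keep only "bunkerweb."-prefixed labels whose setting has multisite
    # context; strip the prefix once, from the left.
    service = {}
    for variable, value in labels.items():
        if not variable.startswith("bunkerweb."):
            continue
        real_variable = variable.replace("bunkerweb.", "", 1)
        if is_setting_context(real_variable, "multisite"):
            service[real_variable] = value
    return service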

View File

@ -60,7 +60,7 @@ RUN apk add --no-cache bash && \
chmod 750 /usr/share/bunkerweb/cli/main.py /usr/share/bunkerweb/helpers/*.sh /usr/bin/bwcli /usr/share/bunkerweb/autoconf/main.py /usr/share/bunkerweb/deps/python/bin/*
# Fix CVEs
# RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx

View File

@ -1,13 +1,16 @@
#!/usr/bin/python3
from os import getenv
from time import sleep
from traceback import format_exc
from typing import List
from kubernetes import client, config, watch
from kubernetes.client.exceptions import ApiException
from threading import Thread, Lock
from Controller import Controller
from ConfigCaller import ConfigCaller
from logger import setup_logger
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class IngressController(Controller, ConfigCaller):
@ -20,7 +23,7 @@ class IngressController(Controller, ConfigCaller):
self.__internal_lock = Lock()
self.__logger = setup_logger("Ingress-controller", getenv("LOG_LEVEL", "INFO"))
def _get_controller_instances(self):
def _get_controller_instances(self) -> list:
return [
pod
for pod in self.__corev1.list_pod_for_all_namespaces(watch=False).items
@ -30,7 +33,7 @@ class IngressController(Controller, ConfigCaller):
)
]
def _to_instances(self, controller_instance):
def _to_instances(self, controller_instance) -> List[dict]:
instance = {}
instance["name"] = controller_instance.metadata.name
instance["hostname"] = controller_instance.status.pod_ip
@ -48,7 +51,9 @@ class IngressController(Controller, ConfigCaller):
pod = container
break
if not pod:
self.__logger.warning(f"Missing container bunkerweb in pod {controller_instance.metadata.name}")
self.__logger.warning(
f"Missing container bunkerweb in pod {controller_instance.metadata.name}"
)
else:
for env in pod.env:
instance["env"][env.name] = env.value or ""
@ -65,10 +70,10 @@ class IngressController(Controller, ConfigCaller):
instance["env"][variable] = value
return [instance]
def _get_controller_services(self):
def _get_controller_services(self) -> list:
return self.__networkingv1.list_ingress_for_all_namespaces(watch=False).items
def _to_services(self, controller_service):
def _to_services(self, controller_service) -> List[dict]:
if not controller_service.spec or not controller_service.spec.rules:
return []
@ -145,15 +150,15 @@ class IngressController(Controller, ConfigCaller):
continue
variable = annotation.replace("bunkerweb.io/", "", 1)
server_name = service["SERVER_NAME"].split(" ")[0]
server_name = service["SERVER_NAME"].strip().split(" ")[0]
if not variable.startswith(f"{server_name}_"):
continue
variable = variable.replace(f"{server_name}_", "", 1)
if self._is_multisite_setting(variable):
if self._is_setting_context(variable, "multisite"):
service[variable] = value
return services
def _get_static_services(self):
def _get_static_services(self) -> List[dict]:
services = []
variables = {}
for instance in self.__corev1.list_pod_for_all_namespaces(watch=False).items:
@ -168,12 +173,10 @@ class IngressController(Controller, ConfigCaller):
if container.name == "bunkerweb":
pod = container
break
if not pod :
if not pod:
continue
variables = {
env.name: env.value or "" for env in pod.env
}
variables = {env.name: env.value or "" for env in pod.env}
if "SERVER_NAME" in variables and variables["SERVER_NAME"].strip():
for server_name in variables["SERVER_NAME"].strip().split(" "):
@ -181,14 +184,14 @@ class IngressController(Controller, ConfigCaller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_multisite_setting(
real_variable
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
service[real_variable] = value
services.append(service)
return services
def get_configs(self):
def get_configs(self) -> dict:
configs = {config_type: {} for config_type in self._supported_config_types}
for configmap in self.__corev1.list_config_map_for_all_namespaces(
watch=False
@ -302,7 +305,7 @@ class IngressController(Controller, ConfigCaller):
self.__logger.warning("Got exception, retrying in 10 seconds ...")
sleep(10)
def apply_config(self):
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)
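
The instance and static-service extraction above both locate the "bunkerweb" container inside a pod before reading its env. A condensed sketch of that lookup, assuming the kubernetes client objects used in this diff:

def pod_env(pod) -> dict:
    # A pod without a bunkerweb container contributes no variables.
    for container in pod.spec.containers:
        if container.name == "bunkerweb":
            return {env.name: env.value or "" for env in (container.env or [])}
    return {}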

View File

@ -1,13 +1,17 @@
#!/usr/bin/python3
from os import getenv
from time import sleep
from traceback import format_exc
from threading import Thread, Lock
from typing import Any, Dict, List
from docker import DockerClient
from base64 import b64decode
from docker.models.services import Service
from Controller import Controller
from ConfigCaller import ConfigCaller
from logger import setup_logger
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class SwarmController(Controller, ConfigCaller):
@ -18,13 +22,13 @@ class SwarmController(Controller, ConfigCaller):
self.__internal_lock = Lock()
self.__logger = setup_logger("Swarm-controller", getenv("LOG_LEVEL", "INFO"))
def _get_controller_instances(self):
def _get_controller_instances(self) -> List[Service]:
return self.__client.services.list(filters={"label": "bunkerweb.INSTANCE"})
def _get_controller_services(self):
def _get_controller_services(self) -> List[Service]:
return self.__client.services.list(filters={"label": "bunkerweb.SERVER_NAME"})
def _to_instances(self, controller_instance):
def _to_instances(self, controller_instance) -> List[dict]:
instances = []
instance_env = {}
for env in controller_instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"][
@ -48,18 +52,18 @@ class SwarmController(Controller, ConfigCaller):
)
return instances
def _to_services(self, controller_service):
def _to_services(self, controller_service) -> List[dict]:
service = {}
for variable, value in controller_service.attrs["Spec"]["Labels"].items():
if not variable.startswith("bunkerweb."):
continue
real_variable = variable.replace("bunkerweb.", "", 1)
if not self._is_multisite_setting(real_variable):
if not self._is_setting_context(real_variable, "multisite"):
continue
service[real_variable] = value
return [service]
def _get_static_services(self):
def _get_static_services(self) -> List[dict]:
services = []
variables = {}
for instance in self.__client.services.list(
@ -81,14 +85,14 @@ class SwarmController(Controller, ConfigCaller):
for variable, value in variables.items():
prefix = variable.split("_")[0]
real_variable = variable.replace(f"{prefix}_", "", 1)
if prefix == server_name and self._is_multisite_setting(
real_variable
if prefix == server_name and self._is_setting_context(
real_variable, "multisite"
):
service[real_variable] = value
services.append(service)
return services
def get_configs(self):
def get_configs(self) -> Dict[str, Dict[str, Any]]:
configs = {}
for config_type in self._supported_config_types:
configs[config_type] = {}
@ -127,7 +131,7 @@ class SwarmController(Controller, ConfigCaller):
)
return configs
def apply_config(self):
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)

View File

@ -1,21 +1,20 @@
#!/usr/bin/python3
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from signal import SIGINT, SIGTERM, signal
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from pathlib import Path
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from logger import setup_logger
from logger import setup_logger # type: ignore
from SwarmController import SwarmController
from IngressController import IngressController
from DockerController import DockerController
@ -70,12 +69,11 @@ try:
_exit(1)
# Process events
Path("/var/tmp/bunkerweb/autoconf.healthy").write_text("ok")
Path(sep, "var", "tmp", "bunkerweb", "autoconf.healthy").write_text("ok")
logger.info("Processing events ...")
controller.process_events()
except:
logger.error(f"Exception while running autoconf :\n{format_exc()}")
sys_exit(1)
finally:
Path("/var/tmp/bunkerweb/autoconf.healthy").unlink(missing_ok=True)
Path(sep, "var", "tmp", "bunkerweb", "autoconf.healthy").unlink(missing_ok=True)
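
The core idiom of this refactor, visible in the loop above: build absolute paths from os.sep and join (or Path) instead of hardcoding "/", and append to sys.path only when missing. On POSIX the results match the old literals exactly:

from os import sep
from os.path import join
from pathlib import Path

# Both asserts hold on POSIX, where sep == "/":
assert join(sep, "usr", "share", "bunkerweb", "utils") == "/usr/share/bunkerweb/utils"
assert str(Path(sep, "var", "tmp", "bunkerweb", "autoconf.healthy")) == "/var/tmp/bunkerweb/autoconf.healthy"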

View File

@ -1,52 +1,48 @@
#!/usr/bin/python3
from typing import Literal, Optional, Union
from requests import request
class API:
def __init__(self, endpoint, host="bwapi"):
def __init__(self, endpoint: str, host: str = "bwapi"):
self.__endpoint = endpoint
self.__host = host
def get_endpoint(self):
def get_endpoint(self) -> str:
return self.__endpoint
def get_host(self):
def get_host(self) -> str:
return self.__host
def request(self, method, url, data=None, files=None, timeout=(10, 30)):
def request(
self,
method: Union[Literal["POST"], Literal["GET"]],
url: str,
data: Optional[Union[dict, bytes]] = None,
files=None,
timeout=(10, 30),
) -> tuple[bool, str, Optional[int], Optional[dict]]:
try:
headers = {}
headers["User-Agent"] = "bwapi"
headers["Host"] = self.__host
kwargs = {}
if isinstance(data, dict):
resp = request(
method,
f"{self.__endpoint}{url}",
json=data,
timeout=timeout,
headers=headers,
)
kwargs["json"] = data
elif isinstance(data, bytes):
resp = request(
method,
f"{self.__endpoint}{url}",
data=data,
timeout=timeout,
headers=headers,
)
elif files:
resp = request(
method,
f"{self.__endpoint}{url}",
files=files,
timeout=timeout,
headers=headers,
)
elif not data:
resp = request(
method, f"{self.__endpoint}{url}", timeout=timeout, headers=headers
)
else:
return False, "unsupported data type", None, None
kwargs["data"] = data
elif data is not None:
return False, f"Unsupported data type: {type(data)}", None, None
if files:
kwargs["files"] = files
resp = request(
method,
f"{self.__endpoint}{url}",
timeout=timeout,
headers={"User-Agent": "bwapi", "Host": self.__host},
**kwargs,
)
except Exception as e:
return False, str(e), None, None
return False, f"Request failed: {e}", None, None
return True, "ok", resp.status_code, resp.json()
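
Hypothetical usage of the consolidated request() above: the data/files branches now only populate kwargs, and a single request() call carries the shared method, URL, timeout and headers (the endpoint below is illustrative):

api = API("http://bwapi:5000")
ok, err, status, resp = api.request("POST", "/reload", data={"foo": "bar"})
if not ok:
    print(f"request failed: {err}")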

View File

@ -1,17 +1,20 @@
from os import getenv
#!/usr/bin/python3
from dotenv import dotenv_values
from os import getenv, sep
from os.path import join
from pathlib import Path
from redis import StrictRedis
from sys import path as sys_path
from typing import Tuple
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
if join(sep, "usr", "share", "bunkerweb", "utils") not in sys_path:
sys_path.append(join(sep, "usr", "share", "bunkerweb", "utils"))
from API import API
from ApiCaller import ApiCaller
from logger import setup_logger
from API import API # type: ignore
from ApiCaller import ApiCaller # type: ignore
from logger import setup_logger # type: ignore
def format_remaining_time(seconds):
@ -37,14 +40,15 @@ def format_remaining_time(seconds):
class CLI(ApiCaller):
def __init__(self):
self.__logger = setup_logger("CLI", getenv("LOG_LEVEL", "INFO"))
db_path = Path(sep, "usr", "share", "bunkerweb", "db")
if not Path("/usr/share/bunkerweb/db").is_dir():
self.__variables = dotenv_values("/etc/nginx/variables.env")
if not db_path.is_dir():
self.__variables = dotenv_values(join(sep, "etc", "nginx", "variables.env"))
else:
if "/usr/share/bunkerweb/db" not in sys_path:
sys_path.append("/usr/share/bunkerweb/db")
if str(db_path) not in sys_path:
sys_path.append(str(db_path))
from Database import Database
from Database import Database # type: ignore
db = Database(
self.__logger,
@ -110,7 +114,7 @@ class CLI(ApiCaller):
)
self.__use_redis = False
if not Path("/usr/share/bunkerweb/db").is_dir() or self.__integration not in (
if not db_path.is_dir() or self.__integration not in (
"kubernetes",
"swarm",
"autoconf",
@ -129,18 +133,17 @@ class CLI(ApiCaller):
self.auto_setup(self.__integration)
def __detect_integration(self) -> str:
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
os_release_path = Path(sep, "etc", "os-release")
if self.__variables.get("KUBERNETES_MODE", "no").lower() == "yes":
return "kubernetes"
elif self.__variables.get("SWARM_MODE", "no").lower() == "yes":
return "swarm"
elif self.__variables.get("AUTOCONF_MODE", "no").lower() == "yes":
return "autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").is_file():
return Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip().lower()
elif (
Path("/etc/os-release").is_file()
and "Alpine" in Path("/etc/os-release").read_text()
):
elif integration_path.is_file():
return integration_path.read_text().strip().lower()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text():
return "docker"
return "linux"

View File

@ -1,20 +1,19 @@
#!/usr/bin/env python3
#!/usr/bin/python3
from argparse import ArgumentParser
from os import _exit, getenv
from sys import exit as sys_exit, path
from os import _exit, getenv, sep
from os.path import join
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/cli",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from logger import setup_logger
from logger import setup_logger # type: ignore
from CLI import CLI
if __name__ == "__main__":

View File

@ -58,7 +58,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(
bunkernet_id.decode()

View File

@ -59,7 +59,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(
bunkernet_id.decode()
@ -123,7 +123,7 @@ try:
# Update cache with new bunkernet ID
if db and registered:
with open("/var/cache/bunkerweb/bunkernet/instance.id", "rb") as f:
cached, err = set_file_in_db(f"bunkernet-register", f"instance.id", f, db)
cached, err = set_file_in_db(f"instance.id", f, db)
if not cached:
logger.error(f"Error while saving BunkerNet data to db cache : {err}")
else:
@ -154,7 +154,7 @@ try:
)
Path("/var/cache/bunkerweb/bunkernet/instance.id").unlink()
if db:
del_file_in_db("bunkernet-register", "instance.id", db)
del_file_in_db("instance.id", db)
_exit(2)
try:

View File

@ -2,8 +2,8 @@
from hashlib import sha256
from io import BytesIO
from os import getenv, listdir, chmod, _exit
from os.path import basename, dirname
from os import getenv, listdir, chmod, _exit, sep
from os.path import basename, dirname, join
from pathlib import Path
from stat import S_IEXEC
from sys import exit as sys_exit, path as sys_path
@ -37,18 +37,17 @@ status = 0
def install_plugin(plugin_dir) -> bool:
# Load plugin.json
with open(f"{plugin_dir}/plugin.json", "rb") as f:
metadata = loads(f.read())
metadata = loads(Path(plugin_dir, "plugin.json").read_text())
# Don't go further if plugin is already installed
if Path(f"/etc/bunkerweb/plugins/{metadata['id']}/plugin.json").is_file():
if Path("etc", "bunkerweb", "plugins", metadata["id"], "plugin.json").is_file():
logger.warning(
f"Skipping installation of plugin {metadata['id']} (already installed)",
)
return False
# Copy the plugin
copytree(plugin_dir, f"/etc/bunkerweb/plugins/{metadata['id']}")
copytree(plugin_dir, join(sep, "etc", "bunkerweb", "plugins", metadata["id"]))
# Add u+x permissions to jobs files
for job_file in glob(f"{plugin_dir}/jobs/*"):
for job_file in glob(join(plugin_dir, "jobs", "*")):
st = Path(job_file).stat()
chmod(job_file, st.st_mode | S_IEXEC)
logger.info(f"Plugin {metadata['id']} installed")
@ -57,7 +56,7 @@ def install_plugin(plugin_dir) -> bool:
try:
# Check if we have plugins to download
plugin_urls = getenv("EXTERNAL_PLUGIN_URLS", "")
plugin_urls = getenv("EXTERNAL_PLUGIN_URLS")
if not plugin_urls:
logger.info("No external plugins to download")
_exit(0)
@ -84,7 +83,7 @@ try:
continue
# Extract it to tmp folder
temp_dir = f"/var/tmp/bunkerweb/plugins-{uuid4()}"
temp_dir = join(sep, "var", "tmp", "bunkerweb", f"plugins-{uuid4()}")
try:
Path(temp_dir).mkdir(parents=True, exist_ok=True)
with ZipFile(BytesIO(req.content)) as zf:
@ -98,7 +97,7 @@ try:
# Install plugins
try:
for plugin_dir in glob(f"{temp_dir}/**/plugin.json", recursive=True):
for plugin_dir in glob(join(temp_dir, "**", "plugin.json"), recursive=True):
try:
if install_plugin(dirname(plugin_dir)):
plugin_nbr += 1
@ -118,17 +117,18 @@ try:
external_plugins = []
external_plugins_ids = []
for plugin in listdir("/etc/bunkerweb/plugins"):
path = f"/etc/bunkerweb/plugins/{plugin}"
if not Path(f"{path}/plugin.json").is_file():
plugins_dir = join(sep, "etc", "bunkerweb", "plugins")
for plugin in listdir(plugins_dir):
path = join(plugins_dir, plugin)
if not Path(path, "plugin.json").is_file():
logger.warning(f"Plugin {plugin} is not valid, deleting it...")
rmtree(path, ignore_errors=True)
continue
plugin_file = loads(Path(f"{path}/plugin.json").read_text())
plugin_file = loads(Path(path, "plugin.json").read_text())
plugin_content = BytesIO()
with tar_open(fileobj=plugin_content, mode="w:gz") as tar:
with tar_open(fileobj=plugin_content, mode="w:gz", compresslevel=9) as tar:
tar.add(path, arcname=basename(path))
plugin_content.seek(0)
value = plugin_content.getvalue()
@ -168,7 +168,7 @@ except:
status = 2
logger.error(f"Exception while running download-plugins.py :\n{format_exc()}")
for plugin_tmp in glob("/var/tmp/bunkerweb/plugins-*/"):
for plugin_tmp in glob(join(sep, "var", "tmp", "bunkerweb", "plugins-*")):
rmtree(plugin_tmp, ignore_errors=True)
sys_exit(status)

View File

@ -73,7 +73,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
@ -164,7 +164,7 @@ try:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db(f"certbot-new", f"folder.tgz", bio, db)
cached, err = set_file_in_db(f"folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:

View File

@ -56,7 +56,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
@ -113,7 +113,7 @@ try:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db("certbot-new", "folder.tgz", bio, db)
cached, err = set_file_in_db("folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:

View File

@ -1,26 +1,16 @@
#!/usr/bin/python3
from contextlib import contextmanager, suppress
from copy import deepcopy
from datetime import datetime
from hashlib import sha256
from logging import (
Logger,
)
from os import _exit, getenv, listdir
from os.path import dirname
from logging import Logger
from os import _exit, getenv, listdir, sep
from os.path import dirname, join
from pathlib import Path
from pymysql import install_as_MySQLdb
from re import compile as re_compile
from sys import path as sys_path
from typing import Any, Dict, List, Optional, Tuple
from sqlalchemy import create_engine, text, inspect
from sqlalchemy.exc import (
ArgumentError,
DatabaseError,
OperationalError,
ProgrammingError,
SQLAlchemyError,
)
from sqlalchemy.orm import scoped_session, sessionmaker
from time import sleep
from traceback import format_exc
@ -40,10 +30,25 @@ from model import (
Metadata,
)
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from jobs import file_hash
from jobs import file_hash # type: ignore
from pymysql import install_as_MySQLdb
from sqlalchemy import create_engine, text, inspect
from sqlalchemy.exc import (
ArgumentError,
DatabaseError,
OperationalError,
ProgrammingError,
SQLAlchemyError,
)
from sqlalchemy.orm import scoped_session, sessionmaker
install_as_MySQLdb()
@ -323,20 +328,25 @@ class Database:
to_put.append(Jobs(plugin_id=plugin["id"], **job))
if page:
core_ui_path = Path(
sep, "usr", "share", "bunkerweb", "core", plugin["id"], "ui"
)
path_ui = (
Path(f"/usr/share/bunkerweb/core/{plugin['id']}/ui")
if Path(
f"/usr/share/bunkerweb/core/{plugin['id']}/ui"
).exists()
else Path(f"/etc/bunkerweb/plugins/{plugin['id']}/ui")
core_ui_path
if core_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
if path_ui.exists():
if {"template.html", "actions.py"}.issubset(
listdir(str(path_ui))
):
template = Path(f"{path_ui}/template.html").read_bytes()
actions = Path(f"{path_ui}/actions.py").read_bytes()
template = path_ui.joinpath(
"template.html"
).read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
Plugin_pages(
@ -1195,10 +1205,15 @@ class Database:
Jobs.name == job["name"]
).update(updates)
tmp_ui_path = Path(
sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui"
)
path_ui = (
Path(f"/var/tmp/bunkerweb/ui/{plugin['id']}/ui")
if Path(f"/var/tmp/bunkerweb/ui/{plugin['id']}/ui").exists()
else Path(f"/etc/bunkerweb/plugins/{plugin['id']}/ui")
tmp_ui_path
if tmp_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
if path_ui.exists():
@ -1216,8 +1231,10 @@ class Database:
)
if not db_plugin_page:
template = Path(f"{path_ui}/template.html").read_bytes()
actions = Path(f"{path_ui}/actions.py").read_bytes()
template = path_ui.joinpath(
"template.html"
).read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
Plugin_pages(
@ -1230,10 +1247,10 @@ class Database:
)
else:
updates = {}
template_checksum = file_hash(
f"{path_ui}/template.html"
)
actions_checksum = file_hash(f"{path_ui}/actions.py")
template_path = path_ui.joinpath("template.html")
actions_path = path_ui.joinpath("actions.py")
template_checksum = file_hash(str(template_path))
actions_checksum = file_hash(str(actions_path))
if (
template_checksum
@ -1241,9 +1258,7 @@ class Database:
):
updates.update(
{
Plugin_pages.template_file: Path(
f"{path_ui}/template.html"
).read_bytes(),
Plugin_pages.template_file: template_path.read_bytes(),
Plugin_pages.template_checksum: template_checksum,
}
)
@ -1251,9 +1266,7 @@ class Database:
if actions_checksum != db_plugin_page.actions_checksum:
updates.update(
{
Plugin_pages.actions_file: Path(
f"{path_ui}/actions.py"
).read_bytes(),
Plugin_pages.actions_file: actions_path.read_bytes(),
Plugin_pages.actions_checksum: actions_checksum,
}
)
@ -1324,10 +1337,15 @@ class Database:
to_put.append(Jobs(plugin_id=plugin["id"], **job))
if page:
tmp_ui_path = Path(
sep, "var", "tmp", "bunkerweb", "ui", plugin["id"], "ui"
)
path_ui = (
Path(f"/var/tmp/bunkerweb/ui/{plugin['id']}/ui")
if Path(f"/var/tmp/bunkerweb/ui/{plugin['id']}/ui").exists()
else Path(f"/etc/bunkerweb/plugins/{plugin['id']}/ui")
tmp_ui_path
if tmp_ui_path.exists()
else Path(
sep, "etc", "bunkerweb", "plugins", plugin["id"], "ui"
)
)
if path_ui.exists():
@ -1345,8 +1363,10 @@ class Database:
)
if not db_plugin_page:
template = Path(f"{path_ui}/template.html").read_bytes()
actions = Path(f"{path_ui}/actions.py").read_bytes()
template = path_ui.joinpath(
"template.html"
).read_bytes()
actions = path_ui.joinpath("actions.py").read_bytes()
to_put.append(
Plugin_pages(
@ -1359,10 +1379,10 @@ class Database:
)
else:
updates = {}
template_checksum = file_hash(
f"{path_ui}/template.html"
)
actions_checksum = file_hash(f"{path_ui}/actions.py")
template_path = path_ui.joinpath("template.html")
actions_path = path_ui.joinpath("actions.py")
template_checksum = file_hash(str(template_path))
actions_checksum = file_hash(str(actions_path))
if (
template_checksum
@ -1370,9 +1390,7 @@ class Database:
):
updates.update(
{
Plugin_pages.template_file: Path(
f"{path_ui}/template.html"
).read_bytes(),
Plugin_pages.template_file: template_path.read_bytes(),
Plugin_pages.template_checksum: template_checksum,
}
)
@ -1380,9 +1398,7 @@ class Database:
if actions_checksum != db_plugin_page.actions_checksum:
updates.update(
{
Plugin_pages.actions_file: Path(
f"{path_ui}/actions.py"
).read_bytes(),
Plugin_pages.actions_file: actions_path.read_bytes(),
Plugin_pages.actions_checksum: actions_checksum,
}
)
@ -1600,7 +1616,7 @@ class Database:
)
if db_instance is not None:
return "An instance with the same hostname already exists."
return f"Instance {hostname} already exists, will not be added."
session.add(
Instances(hostname=hostname, port=port, server_name=server_name)
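
A pattern repeated throughout these Database hunks: compute the preferred ui directory once (core or tmp), then fall back to the external plugin's ui directory when it does not exist. A sketch with the preferred path passed in:

from os import sep
from pathlib import Path

def resolve_ui_path(plugin_id: str, preferred: Path) -> Path:
    # preferred is e.g. Path(sep, "usr", "share", "bunkerweb", "core", plugin_id, "ui")
    # or Path(sep, "var", "tmp", "bunkerweb", "ui", plugin_id, "ui")
    fallback = Path(sep, "etc", "bunkerweb", "plugins", plugin_id, "ui")
    return preferred if preferred.exists() else fallback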

View File

@ -1,3 +1,5 @@
#!/usr/bin/python3
from sqlalchemy import (
Boolean,
Column,

View File

@ -1,10 +1,12 @@
#!/usr/bin/python3
from glob import glob
from hashlib import sha256
from io import BytesIO
from json import loads
from logging import Logger
from os import listdir
from os.path import basename, dirname
from os import listdir, sep
from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile, search as re_search
from sys import path as sys_path
@ -12,8 +14,8 @@ from tarfile import open as tar_open
from traceback import format_exc
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
if join(sep, "usr", "share", "bunkerweb", "utils") not in sys_path:
sys_path.append(join(sep, "usr", "share", "bunkerweb", "utils"))
class Configurator:
@ -103,7 +105,7 @@ class Configurator:
def __load_plugins(self, path: str, _type: str = "core") -> List[Dict[str, Any]]:
plugins = []
files = glob(f"{path}/*/plugin.json")
files = glob(join(path, "*", "plugin.json"))
for file in files:
try:
data = self.__load_settings(file)
@ -128,7 +130,9 @@ class Configurator:
data.update(
{
"external": path.startswith("/etc/bunkerweb/plugins"),
"external": path.startswith(
join(sep, "etc", "bunkerweb", "plugins")
),
"page": "ui" in listdir(dirname(file)),
"method": "manual",
"data": value,

View File

@ -1,11 +1,17 @@
#!/usr/bin/python3
from glob import glob
from importlib import import_module
from os.path import basename, dirname
from os import sep
from os.path import basename, join
from pathlib import Path
from random import choice
from string import ascii_letters, digits
from sys import path as sys_path
from typing import Any, Dict, List, Optional
if join("usr", "share", "bunkerweb", "deps", "python") in sys_path:
sys_path.append(join("usr", "share", "bunkerweb", "deps", "python"))
from jinja2 import Environment, FileSystemLoader
@ -23,11 +29,7 @@ class Templator:
self.__core = core
self.__plugins = plugins
self.__output = output
if not self.__output.endswith("/"):
self.__output += "/"
self.__target = target
if not self.__target.endswith("/"):
self.__target += "/"
self.__config = config
self.__jinja_env = self.__load_jinja_env()
@ -41,9 +43,9 @@ class Templator:
def __load_jinja_env(self) -> Environment:
searchpath = [self.__templates]
for subpath in glob(f"{self.__core}/*") + glob(f"{self.__plugins}/*"):
for subpath in glob(join(self.__core, "*")) + glob(join(self.__plugins, "*")):
if Path(subpath).is_dir():
searchpath.append(f"{subpath}/confs")
searchpath.append(join(subpath, "confs"))
return Environment(
loader=FileSystemLoader(searchpath=searchpath),
lstrip_blocks=True,
@ -57,18 +59,17 @@ class Templator:
templates.append(template)
continue
for context in contexts:
if template.startswith(f"{context}/"):
if template.startswith(context):
templates.append(template)
return templates
def __write_config(
self, subpath: Optional[str] = None, config: Optional[Dict[str, Any]] = None
):
real_path = self.__output + (f"{subpath}/" if subpath else "") + "variables.env"
real_config = config or self.__config
Path(dirname(real_path)).mkdir(parents=True, exist_ok=True)
Path(real_path).write_text(
"\n".join(f"{k}={v}" for k, v in real_config.items())
real_path = Path(self.__output, subpath or "", "variables.env")
real_path.parent.mkdir(parents=True, exist_ok=True)
real_path.write_text(
"\n".join(f"{k}={v}" for k, v in (config or self.__config).items())
)
def __render_global(self):
@ -100,12 +101,12 @@ class Templator:
for variable, value in self.__config.items():
if variable.startswith(f"{server}_"):
config[variable.replace(f"{server}_", "", 1)] = value
config["NGINX_PREFIX"] = f"{self.__target}{server}/"
config["NGINX_PREFIX"] = join(self.__target, server) + "/"
server_key = f"{server}_SERVER_NAME"
if server_key not in self.__config:
config["SERVER_NAME"] = server
root_confs = [
for root_conf in (
"server.conf",
"access-lua.conf",
"init-lua.conf",
@ -114,9 +115,8 @@ class Templator:
"log-stream-lua.conf",
"preread-stream-lua.conf",
"server-stream.conf",
]
for root_conf in root_confs:
if template.endswith(f"/{root_conf}"):
):
if template.endswith(root_conf):
name = basename(template)
break
self.__render_template(template, subpath=subpath, config=config, name=name)
@ -136,16 +136,14 @@ class Templator:
real_config["has_variable"] = Templator.has_variable
real_config["random"] = Templator.random
real_config["read_lines"] = Templator.read_lines
real_path = (
self.__output + (f"/{subpath}/" if subpath else "") + (name or template)
)
real_path = Path(self.__output, subpath or "", name or template)
jinja_template = self.__jinja_env.get_template(template)
Path(dirname(real_path)).mkdir(parents=True, exist_ok=True)
Path(real_path).write_text(jinja_template.render(real_config))
real_path.parent.mkdir(parents=True, exist_ok=True)
real_path.write_text(jinja_template.render(real_config))
@staticmethod
def is_custom_conf(path: str) -> bool:
return bool(glob(f"{path}/*.conf"))
return bool(glob(join(path, "*.conf")))
@staticmethod
def has_variable(all_vars: Dict[str, Any], variable: str, value: Any) -> bool:
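
Both __write_config and __render_template now share the same three-step Path idiom for writes; a standalone sketch (the helper name is illustrative):

from pathlib import Path

def write_rendered(output: str, subpath: str, name: str, content: str) -> None:
    real_path = Path(output, subpath or "", name)  # an empty subpath collapses cleanly
    real_path.parent.mkdir(parents=True, exist_ok=True)
    real_path.write_text(content)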

View File

@ -2,7 +2,8 @@
from argparse import ArgumentParser
from glob import glob
from os import R_OK, W_OK, X_OK, access, getenv
from os import R_OK, W_OK, X_OK, access, getenv, sep
from os.path import join, normpath
from pathlib import Path
from shutil import rmtree
from subprocess import DEVNULL, STDOUT, run
@ -11,14 +12,14 @@ from time import sleep
from traceback import format_exc
from typing import Any, Dict
if "/usr/share/bunkerweb/deps/python" not in sys_path:
sys_path.append("/usr/share/bunkerweb/deps/python")
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
if "/usr/share/bunkerweb/api" not in sys_path:
sys_path.append("/usr/share/bunkerweb/api")
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from logger import setup_logger
from logger import setup_logger # type: ignore
from Configurator import Configurator
from Templator import Templator
@ -32,37 +33,37 @@ if __name__ == "__main__":
parser = ArgumentParser(description="BunkerWeb config generator")
parser.add_argument(
"--settings",
default="/usr/share/bunkerweb/settings.json",
default=join(sep, "usr", "share", "bunkerweb", "settings.json"),
type=str,
help="file containing the main settings",
)
parser.add_argument(
"--templates",
default="/usr/share/bunkerweb/confs",
default=join(sep, "usr", "share", "bunkerweb", "confs"),
type=str,
help="directory containing the main template files",
)
parser.add_argument(
"--core",
default="/usr/share/bunkerweb/core",
default=join(sep, "usr", "share", "bunkerweb", "core"),
type=str,
help="directory containing the core plugins",
)
parser.add_argument(
"--plugins",
default="/etc/bunkerweb/plugins",
default=join(sep, "etc", "bunkerweb", "plugins"),
type=str,
help="directory containing the external plugins",
)
parser.add_argument(
"--output",
default="/etc/nginx",
default=join(sep, "etc", "nginx"),
type=str,
help="where to write the rendered files",
)
parser.add_argument(
"--target",
default="/etc/nginx",
default=join(sep, "etc", "nginx"),
type=str,
help="where nginx will search for configurations files",
)
@ -76,46 +77,55 @@ if __name__ == "__main__":
)
args = parser.parse_args()
settings_path = Path(normpath(args.settings))
templates_path = Path(normpath(args.templates))
core_path = Path(normpath(args.core))
plugins_path = Path(normpath(args.plugins))
output_path = Path(normpath(args.output))
target_path = Path(normpath(args.target))
logger.info("Generator started ...")
logger.info(f"Settings : {args.settings}")
logger.info(f"Templates : {args.templates}")
logger.info(f"Core : {args.core}")
logger.info(f"Plugins : {args.plugins}")
logger.info(f"Output : {args.output}")
logger.info(f"Target : {args.target}")
logger.info(f"Settings : {settings_path}")
logger.info(f"Templates : {templates_path}")
logger.info(f"Core : {core_path}")
logger.info(f"Plugins : {plugins_path}")
logger.info(f"Output : {output_path}")
logger.info(f"Target : {target_path}")
integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
os_release_path = Path(sep, "etc", "os-release")
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
integration = "Kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
integration = "Swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").is_file():
integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
elif (
Path("/etc/os-release").is_file()
and "Alpine" in Path("/etc/os-release").read_text()
):
elif integration_path.is_file():
integration = integration_path.read_text().strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text():
integration = "Docker"
del integration_path, os_release_path
if args.variables:
logger.info(f"Variables : {args.variables}")
variables_path = Path(normpath(args.variables))
logger.info(f"Variables : {variables_path}")
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings, args.variables]
paths_rx = [args.core, args.plugins, args.templates]
paths_rwx = [args.output]
files = [settings_path, variables_path]
paths_rx = [core_path, plugins_path, templates_path]
paths_rwx = [output_path]
for file in files:
if not Path(file).is_file():
if not file.is_file():
logger.error(f"Missing file : {file}")
sys_exit(1)
elif not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for path in paths_rx + paths_rwx:
if not Path(path).is_dir():
if not path.is_dir():
logger.error(f"Missing directory : {path}")
sys_exit(1)
elif not access(path, R_OK | X_OK):
@ -133,13 +143,17 @@ if __name__ == "__main__":
# Compute the config
logger.info("Computing config ...")
config: Dict[str, Any] = Configurator(
args.settings, args.core, args.plugins, args.variables, logger
str(settings_path),
str(core_path),
str(plugins_path),
str(variables_path),
logger,
).get_config()
else:
if "/usr/share/bunkerweb/db" not in sys_path:
sys_path.append("/usr/share/bunkerweb/db")
if join(sep, "usr", "share", "bunkerweb", "db") not in sys_path:
sys_path.append(join(sep, "usr", "share", "bunkerweb", "db"))
from Database import Database
from Database import Database # type: ignore
db = Database(
logger,
@ -149,21 +163,22 @@ if __name__ == "__main__":
# Remove old files
logger.info("Removing old files ...")
files = glob(f"{args.output}/*")
files = glob(join(args.output, "*"))
for file in files:
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=False)
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
# Render the templates
logger.info("Rendering templates ...")
templator = Templator(
args.templates,
args.core,
args.plugins,
args.output,
args.target,
str(templates_path),
str(core_path),
str(plugins_path),
str(output_path),
str(target_path),
config,
)
templator.render()
@ -173,7 +188,7 @@ if __name__ == "__main__":
and not args.no_linux_reload
):
retries = 0
while not Path("/var/tmp/bunkerweb/nginx.pid").exists():
while not Path(sep, "var", "tmp", "bunkerweb", "nginx.pid").exists():
if retries == 5:
logger.error(
"BunkerWeb's nginx didn't start in time.",
@ -187,7 +202,7 @@ if __name__ == "__main__":
sleep(5)
proc = run(
["sudo", "/usr/sbin/nginx", "-s", "reload"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
)

View File

@ -1,10 +1,8 @@
#!/usr/bin/python3
from argparse import ArgumentParser
from glob import glob
from json import loads
from os import R_OK, X_OK, access, environ, getenv, listdir, walk
from os.path import join
from os import R_OK, X_OK, access, environ, getenv, listdir, sep, walk
from os.path import basename, join, normpath
from pathlib import Path
from re import compile as re_compile
from sys import exit as sys_exit, path as sys_path
@ -12,22 +10,19 @@ from time import sleep
from traceback import format_exc
from typing import Any
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from docker import DockerClient
from logger import setup_logger
from Database import Database
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
from Configurator import Configurator
from API import API
from API import API # type: ignore
custom_confs_rx = re_compile(
r"^([0-9a-z\.-]*)_?CUSTOM_CONF_(HTTP|SERVER_STREAM|STREAM|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
@ -97,19 +92,19 @@ if __name__ == "__main__":
parser = ArgumentParser(description="BunkerWeb config saver")
parser.add_argument(
"--settings",
default="/usr/share/bunkerweb/settings.json",
default=join(sep, "usr", "share", "bunkerweb", "settings.json"),
type=str,
help="file containing the main settings",
)
parser.add_argument(
"--core",
default="/usr/share/bunkerweb/core",
default=join(sep, "usr", "share", "bunkerweb", "core"),
type=str,
help="directory containing the core plugins",
)
parser.add_argument(
"--plugins",
default="/etc/bunkerweb/plugins",
default=join(sep, "etc", "bunkerweb", "plugins"),
type=str,
help="directory containing the external plugins",
)
@ -131,27 +126,32 @@ if __name__ == "__main__":
)
args = parser.parse_args()
settings_path = Path(normpath(args.settings))
core_path = Path(normpath(args.core))
plugins_path = Path(normpath(args.plugins))
logger.info("Save config started ...")
logger.info(f"Settings : {args.settings}")
logger.info(f"Core : {args.core}")
logger.info(f"Plugins : {args.plugins}")
logger.info(f"Settings : {settings_path}")
logger.info(f"Core : {core_path}")
logger.info(f"Plugins : {plugins_path}")
logger.info(f"Init : {args.init}")
integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
os_release_path = Path(sep, "etc", "os-release")
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
integration = "Kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
integration = "Swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").is_file():
integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
elif (
Path("/etc/os-release").is_file()
and "Alpine" in Path("/etc/os-release").read_text()
):
elif integration_path.is_file():
integration = integration_path.read_text().strip()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text():
integration = "Docker"
del integration_path, os_release_path
if args.init:
logger.info(f"Detected {integration} integration")
@ -160,7 +160,7 @@ if __name__ == "__main__":
apis = []
external_plugins = args.plugins
if not Path("/usr/sbin/nginx").exists() and args.method == "ui":
if not Path(sep, "usr", "sbin", "nginx").exists() and args.method == "ui":
db = Database(logger)
external_plugins = []
for plugin in db.get_plugins():
@ -168,17 +168,19 @@ if __name__ == "__main__":
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings] + ([args.variables] if args.variables else [])
paths_rx = [args.core, args.plugins]
files = [settings_path] + (
[Path(normpath(args.variables))] if args.variables else []
)
paths_rx = [core_path, plugins_path]
for file in files:
if not Path(file).is_file():
if not file.is_file():
logger.error(f"Missing file : {file}")
sys_exit(1)
if not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for path in paths_rx:
if not Path(path).is_dir():
if not path.is_dir():
logger.error(f"Missing directory : {path}")
sys_exit(1)
if not access(path, R_OK | X_OK):
@ -188,15 +190,16 @@ if __name__ == "__main__":
sys_exit(1)
if args.variables:
logger.info(f"Variables : {args.variables}")
variables_path = Path(normpath(args.variables))
logger.info(f"Variables : {variables_path}")
# Compute the config
logger.info("Computing config ...")
config = Configurator(
args.settings,
args.core,
str(settings_path),
str(core_path),
external_plugins,
args.variables,
str(variables_path),
logger,
)
config_files = config.get_config()
@ -214,27 +217,25 @@ if __name__ == "__main__":
),
}
)
root_dirs = listdir("/etc/bunkerweb/configs")
for root, dirs, files in walk("/etc/bunkerweb/configs", topdown=True):
if (
root != "configs"
and (dirs and not root.split("/")[-1] in root_dirs)
or files
):
configs_path = join(sep, "etc", "bunkerweb", "configs")
root_dirs = listdir(configs_path)
for root, dirs, files in walk(configs_path):
if files or (dirs and basename(root) not in root_dirs):
path_exploded = root.split("/")
for file in files:
custom_confs.append(
{
"value": Path(join(root, file)).read_text(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else "",
path_exploded[-1],
file.replace(".conf", ""),
),
}
)
with open(join(root, file), "r") as f:
custom_confs.append(
{
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else None,
path_exploded[-1],
file.replace(".conf", ""),
),
}
)
else:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
@ -282,12 +283,12 @@ if __name__ == "__main__":
elif splitted[0] == "API_SERVER_NAME":
api_server_name = splitted[1]
apis.append(
API(
f"http://{instance.name}:{api_http_port or getenv('API_HTTP_PORT', '5000')}",
host=api_server_name or getenv("API_SERVER_NAME", "bwapi"),
apis.append(
API(
f"http://{instance.name}:{api_http_port or getenv('API_HTTP_PORT', '5000')}",
host=api_server_name or getenv("API_SERVER_NAME", "bwapi"),
)
)
)
if not db:
db = Database(logger)
@ -330,7 +331,9 @@ if __name__ == "__main__":
logger.info("Database tables initialized")
err = db.initialize_db(
version=Path("/usr/share/bunkerweb/VERSION").read_text().strip(),
version=Path(sep, "usr", "share", "bunkerweb", "VERSION")
.read_text()
.strip(),
integration=integration,
)

View File

@ -1,3 +1,5 @@
#!/usr/bin/python3
from os import getegid, geteuid, stat
from stat import (
S_IRGRP,

View File

@ -1,20 +1,24 @@
#!/usr/bin/python3
from io import BytesIO
from os import getenv
from os import getenv, sep
from os.path import join
from sys import path as sys_path
from tarfile import open as taropen
from tarfile import open as tar_open
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from API import API # type: ignore
from logger import setup_logger
from API import API
if "/usr/share/bunkerweb/deps/python" not in sys_path:
sys_path.append("/usr/share/bunkerweb/deps/python")
from kubernetes import client as kube_client, config
from docker import DockerClient
from kubernetes import client as kube_client, config
class ApiCaller:
@ -154,7 +158,7 @@ class ApiCaller:
def _send_files(self, path: str, url: str) -> bool:
ret = True
with BytesIO() as tgz:
with taropen(
with tar_open(
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3
) as tf:
tf.add(path, arcname=".")

View File

@ -1,7 +1,13 @@
from json import JSONDecodeError, load
#!/usr/bin/python3
from glob import glob
from json import JSONDecodeError, loads
from os import sep
from os.path import join
from pathlib import Path
from re import match
from traceback import format_exc
from typing import Any, Dict, Literal, Union
from logger import setup_logger
@ -9,51 +15,43 @@ from logger import setup_logger
class ConfigCaller:
def __init__(self):
self.__logger = setup_logger("Config", "INFO")
with open("/usr/share/bunkerweb/settings.json", "r") as f:
self._settings = load(f)
for plugin in glob("/usr/share/bunkerweb/core/*/plugin.json") + glob(
"/etc/bunkerweb/plugins/*/plugin.json"
):
with open(plugin, "r") as f:
try:
self._settings.update(load(f)["settings"])
except KeyError:
self.__logger.error(
f'Error while loading plugin metadata file at {plugin} : missing "settings" key',
)
except JSONDecodeError:
self.__logger.error(
f"Exception while loading plugin metadata file at {plugin} :\n{format_exc()}",
)
self._settings = loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text()
)
for plugin in glob(
join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
) + glob(join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")):
try:
self._settings.update(loads(Path(plugin).read_text())["settings"])
except KeyError:
self.__logger.error(
f'Error while loading plugin metadata file at {plugin} : missing "settings" key',
)
except JSONDecodeError:
self.__logger.error(
f"Exception while loading plugin metadata file at {plugin} :\n{format_exc()}",
)
def _is_setting(self, setting):
def _is_setting(self, setting) -> bool:
return setting in self._settings
def _is_global_setting(self, setting):
def _is_setting_context(
self, setting: str, context: Union[Literal["global"], Literal["multisite"]]
) -> bool:
if self._is_setting(setting):
return self._settings[setting]["context"] == "global"
return self._settings[setting]["context"] == context
elif match(r"^.+_\d+$", setting):
multiple_setting = "_".join(setting.split("_")[:-1])
return (
self._is_setting(multiple_setting)
and self._settings[multiple_setting]["context"] == "global"
and self._settings[multiple_setting]["context"] == context
and "multiple" in self._settings[multiple_setting]
)
return False
def _is_multisite_setting(self, setting):
if self._is_setting(setting):
return self._settings[setting]["context"] == "multisite"
if match(r"^.+_\d+$", setting):
multiple_setting = "_".join(setting.split("_")[0:-1])
return (
self._is_setting(multiple_setting)
and self._settings[multiple_setting]["context"] == "multisite"
and "multiple" in self._settings[multiple_setting]
)
return False
def _full_env(self, env_instances, env_services):
def _full_env(
self, env_instances: Dict[str, Any], env_services: Dict[str, Any]
) -> Dict[str, Any]:
full_env = {}
# Fill with default values
for k, v in self._settings.items():
@ -62,13 +60,12 @@ class ConfigCaller:
for k, v in env_instances.items():
full_env[k] = v
if (
not self._is_global_setting(k)
not self._is_setting_context(k, "global")
and env_instances.get("MULTISITE", "no") == "yes"
and env_instances.get("SERVER_NAME", "") != ""
):
for server_name in env_instances["SERVER_NAME"].split(" "):
full_env[f"{server_name}_{k}"] = v
# Replace with services values
for k, v in env_services.items():
full_env[k] = v
full_env = full_env | env_services
return full_env
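
Hypothetical usage of the merged check above: the two removed helpers become one call parameterized by context, and numbered variants resolve to their base "multiple" setting (setting names below are illustrative, not taken from settings.json):

caller = ConfigCaller()
caller._is_setting_context("SOME_SETTING", "multisite")
caller._is_setting_context("SOME_SETTING_1", "multisite")  # matches ^.+_\d+$, checks the base setting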

View File

@ -1,14 +1,17 @@
#!/usr/bin/python3
from contextlib import suppress
from datetime import datetime
from hashlib import sha512
from inspect import getsourcefile
from io import BufferedReader
from json import dumps, loads
from os.path import basename
from pathlib import Path
from sys import _getframe
from threading import Lock
from traceback import format_exc
from typing import Optional, Tuple
from typing import Literal, Optional, Tuple, Union
lock = Lock()
@ -20,11 +23,16 @@ lock = Lock()
"""
def is_cached_file(file: str, expire: str, db=None) -> bool:
def is_cached_file(
file: str,
expire: Union[Literal["hour"], Literal["day"], Literal["week"], Literal["month"]],
db=None,
) -> bool:
is_cached = False
cached_file = None
try:
if not Path(f"{file}.md").is_file():
file_path = Path(f"{file}.md")
if not file_path.is_file():
if not db:
return False
cached_file = db.get_job_cache_file(
@ -37,7 +45,7 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
return False
cached_time = cached_file.last_update.timestamp()
else:
cached_time = loads(Path(f"{file}.md").read_text())["date"]
cached_time = loads(file_path.read_text())["date"]
current_time = datetime.now().timestamp()
if current_time < cached_time:
@ -48,6 +56,8 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
is_cached = diff_time < 3600
elif expire == "day":
is_cached = diff_time < 86400
elif expire == "week":
is_cached = diff_time < 604800
elif expire == "month":
is_cached = diff_time < 2592000
except:
@ -59,14 +69,16 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
return is_cached and cached_file
def get_file_in_db(job: str, file: str, db) -> bytes:
cached_file = db.get_job_cache_file(job, file)
def get_file_in_db(file: str, db) -> bytes:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""), file
)
if not cached_file:
return False
return cached_file.data
def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
def set_file_in_db(name: str, bio: BufferedReader, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
content = bio.read()
@ -87,10 +99,12 @@ def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
return ret, err
def del_file_in_db(job: str, name: str, db) -> Tuple[bool, str]:
def del_file_in_db(name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
db.delete_job_cache(job, name)
db.delete_job_cache(
basename(getsourcefile(_getframe(1))).replace(".py", ""), name
)
except:
return False, f"exception :\n{format_exc()}"
return ret, err
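
Note: get_file_in_db and del_file_in_db no longer take a job argument; the job name is now derived from the calling module's filename via frame introspection. A minimal sketch of the trick (CPython-specific, since _getframe is an implementation detail):

from inspect import getsourcefile
from os.path import basename
from sys import _getframe

def caller_job_name() -> str:
    # _getframe(1) is the direct caller's frame, so a job script named
    # e.g. "my-job.py" resolves to the cache key "my-job".
    return basename(getsourcefile(_getframe(1))).replace(".py", "")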

View File

@ -77,7 +77,7 @@ COPY --chown=root:scheduler src/bw/misc/country.mmdb /var/tmp/bunkerweb/country.mmdb
RUN chmod 770 /var/tmp/bunkerweb/asn.mmdb /var/tmp/bunkerweb/country.mmdb
# Fix CVEs
# RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx

View File

@ -3,8 +3,8 @@ from functools import partial
from glob import glob
from json import loads
from logging import Logger
from os import cpu_count, environ, getenv
from os.path import basename, dirname
from os import cpu_count, environ, getenv, sep
from os.path import basename, dirname, join
from pathlib import Path
from re import match
from typing import Any, Dict, Optional
@ -19,11 +19,15 @@ from sys import path as sys_path
from threading import Lock, Semaphore, Thread
from traceback import format_exc
sys_path.extend(("/usr/share/bunkerweb/utils", "/usr/share/bunkerweb/db"))
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("utils",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from ApiCaller import ApiCaller
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from ApiCaller import ApiCaller # type: ignore
class JobScheduler(ApiCaller):
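
Note: the import and glob changes above follow the path convention this refactor standardizes on: absolute paths are built from components with join(sep, ...) instead of hard-coded "/" literals. A sketch:

from os import sep
from os.path import join

core_plugins_glob = join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
# On POSIX this yields "/usr/share/bunkerweb/core/*/plugin.json".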
@ -49,11 +53,11 @@ class JobScheduler(ApiCaller):
def __get_jobs(self):
jobs = {}
for plugin_file in list(
glob("/usr/share/bunkerweb/core/*/plugin.json") # core plugins
) + list(
glob("/etc/bunkerweb/plugins/*/plugin.json") # external plugins
):
for plugin_file in glob(
join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
) + glob( # core plugins
join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")
): # external plugins
plugin_name = basename(dirname(plugin_file))
jobs[plugin_name] = []
try:
@ -104,7 +108,7 @@ class JobScheduler(ApiCaller):
plugin_jobs.pop(x)
continue
plugin_jobs[x]["path"] = f"{dirname(plugin_file)}/"
plugin_jobs[x]["path"] = dirname(plugin_file)
jobs[plugin_name] = plugin_jobs
except FileNotFoundError:
@ -131,7 +135,7 @@ class JobScheduler(ApiCaller):
if self.__integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
self.__logger.info("Reloading nginx ...")
proc = run(
["sudo", "/usr/sbin/nginx", "-s", "reload"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "reload"],
stdin=DEVNULL,
stderr=PIPE,
env=self.__env,
@ -160,7 +164,7 @@ class JobScheduler(ApiCaller):
ret = -1
try:
proc = run(
f"{path}jobs/{file}", stdin=DEVNULL, stderr=STDOUT, env=self.__env
join(path, "jobs", file), stdin=DEVNULL, stderr=STDOUT, env=self.__env
)
ret = proc.returncode
except BaseException:
@ -230,16 +234,13 @@ class JobScheduler(ApiCaller):
if reload:
try:
if self._get_apis():
self.__logger.info("Sending /var/cache/bunkerweb folder ...")
if not self._send_files("/var/cache/bunkerweb", "/cache"):
cache_path = join(sep, "var", "cache", "bunkerweb")
self.__logger.info(f"Sending {cache_path} folder ...")
if not self._send_files(cache_path, "/cache"):
success = False
self.__logger.error(
"Error while sending /var/cache/bunkerweb folder"
)
self.__logger.error(f"Error while sending {cache_path} folder")
else:
self.__logger.info(
"Successfully sent /var/cache/bunkerweb folder"
)
self.__logger.info(f"Successfully sent {cache_path} folder")
if not self.__reload():
success = False
except:

View File

@ -1,7 +1,6 @@
#!/usr/bin/python3
from argparse import ArgumentParser
from copy import deepcopy
from glob import glob
from hashlib import sha256
from io import BytesIO
@ -16,7 +15,7 @@ from os import (
sep,
walk,
)
from os.path import basename, dirname, join
from os.path import basename, dirname, join, normpath
from pathlib import Path
from shutil import copy, rmtree
from signal import SIGINT, SIGTERM, signal, SIGHUP
@ -28,21 +27,19 @@ from time import sleep
from traceback import format_exc
from typing import Any, Dict, List
if "/usr/share/bunkerweb/deps/python" not in sys_path:
sys_path.append("/usr/share/bunkerweb/deps/python")
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
if "/usr/share/bunkerweb/api" not in sys_path:
sys_path.append("/usr/share/bunkerweb/api")
if "/usr/share/bunkerweb/db" not in sys_path:
sys_path.append("/usr/share/bunkerweb/db")
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("api",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from dotenv import dotenv_values
from logger import setup_logger
from Database import Database
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
from JobScheduler import JobScheduler
from ApiCaller import ApiCaller
from ApiCaller import ApiCaller # type: ignore
run = True
scheduler = None
@ -69,7 +66,7 @@ def handle_reload(signum, frame):
try:
if scheduler is not None and run:
# Get the env by reading the .env file
env = dotenv_values("/etc/bunkerweb/variables.env")
env = dotenv_values(join(sep, "etc", "bunkerweb", "variables.env"))
if scheduler.reload(env):
logger.info("Reload successful")
else:
@ -88,8 +85,8 @@ signal(SIGHUP, handle_reload)
def stop(status):
Path("/var/tmp/bunkerweb/scheduler.pid").unlink(missing_ok=True)
Path("/var/tmp/bunkerweb/scheduler.healthy").unlink(missing_ok=True)
Path(sep, "var", "tmp", "bunkerweb", "scheduler.pid").unlink(missing_ok=True)
Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").unlink(missing_ok=True)
_exit(status)
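
Note: pathlib.Path joins its arguments the same way, so the pid and healthy files can also be addressed without a hard-coded "/". A sketch:

from os import sep
from pathlib import Path

pid_file = Path(sep, "var", "tmp", "bunkerweb", "scheduler.pid")
pid_file.unlink(missing_ok=True)  # no-op when the file is absent (Python 3.8+)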
@ -106,9 +103,9 @@ def generate_custom_configs(
tmp_path = join(original_path, custom_config["type"].replace("_", "-"))
if custom_config["service_id"]:
tmp_path = join(tmp_path, custom_config["service_id"])
tmp_path = join(tmp_path, f"{custom_config['name']}.conf")
Path(dirname(tmp_path)).mkdir(parents=True, exist_ok=True)
Path(tmp_path).write_bytes(custom_config["data"])
tmp_path = Path(tmp_path, f"{custom_config['name']}.conf")
tmp_path.parent.mkdir(parents=True, exist_ok=True)
tmp_path.write_bytes(custom_config["data"])
if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
logger.info("Sending custom configs to BunkerWeb")
@ -130,15 +127,14 @@ def generate_external_plugins(
logger.info("Generating new external plugins ...")
Path(original_path).mkdir(parents=True, exist_ok=True)
for plugin in plugins:
tmp_path = join(original_path, plugin["id"], f"{plugin['name']}.tar.gz")
plugin_dir = dirname(tmp_path)
Path(plugin_dir).mkdir(parents=True, exist_ok=True)
Path(tmp_path).write_bytes(plugin["data"])
with tar_open(tmp_path, "r:gz") as tar:
tmp_path = Path(original_path, plugin["id"], f"{plugin['name']}.tar.gz")
tmp_path.parent.mkdir(parents=True, exist_ok=True)
tmp_path.write_bytes(plugin["data"])
with tar_open(str(tmp_path), "r:gz") as tar:
tar.extractall(original_path)
Path(tmp_path).unlink()
tmp_path.unlink()
for job_file in glob(join(plugin_dir, "jobs", "*")):
for job_file in glob(join(str(tmp_path.parent), "jobs", "*")):
st = Path(job_file).stat()
chmod(job_file, st.st_mode | S_IEXEC)
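
Note: after extraction, each plugin job file gets the owner-execute bit OR-ed into its existing mode so it can be run directly. A sketch with a hypothetical job file:

from os import chmod
from pathlib import Path
from stat import S_IEXEC  # 0o100, owner execute

job_file = Path("/tmp/bw-demo/jobs/my-job.py")
job_file.parent.mkdir(parents=True, exist_ok=True)
job_file.touch()
st = job_file.stat()
chmod(job_file, st.st_mode | S_IEXEC)  # keep current bits, add execute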
@ -155,14 +151,17 @@ def generate_external_plugins(
if __name__ == "__main__":
try:
# Don't execute if pid file exists
if Path("/var/tmp/bunkerweb/scheduler.pid").is_file():
pid_path = Path(sep, "var", "tmp", "bunkerweb", "scheduler.pid")
if pid_path.is_file():
logger.error(
"Scheduler is already running, skipping execution ...",
)
_exit(1)
# Write pid to file
Path("/var/tmp/bunkerweb/scheduler.pid").write_text(str(getpid()))
pid_path.write_text(str(getpid()))
del pid_path
# Parse arguments
parser = ArgumentParser(description="Job scheduler for BunkerWeb")
@ -176,15 +175,22 @@ if __name__ == "__main__":
integration = "Linux"
api_caller = ApiCaller()
db_configs = None
tmp_variables_path = Path(
    normpath(args.variables)
    if args.variables
    else join(sep, "var", "tmp", "bunkerweb", "variables.env")
)
logger.info("Scheduler started ...")
# Checking if the argument variables is true.
if args.variables:
logger.info(f"Variables : {args.variables}")
logger.info(f"Variables : {tmp_variables_path}")
# Read env file
env = dotenv_values(args.variables)
env = dotenv_values(str(tmp_variables_path))
db = Database(
logger,
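
Note: with the corrected construction above, tmp_variables_path honors --variables when given and otherwise falls back to the scheduler's default env file. A sketch of the resolution rule as a standalone helper (the helper name is illustrative):

from os import sep
from os.path import normpath
from pathlib import Path
from typing import Optional

def resolve_variables_path(cli_value: Optional[str]) -> Path:
    if cli_value:
        return Path(normpath(cli_value))
    return Path(sep, "var", "tmp", "bunkerweb", "variables.env")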
@ -201,9 +207,11 @@ if __name__ == "__main__":
else:
# Read from database
integration = "Docker"
if Path("/usr/share/bunkerweb/INTEGRATION").exists():
with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
if integration_path.is_file():
integration = integration_path.read_text().strip()
del integration_path
api_caller.auto_setup(bw_integration=integration)
db = Database(
@ -224,18 +232,16 @@ if __name__ == "__main__":
"Autoconf is not loaded yet in the database, retrying in 5s ...",
)
sleep(5)
elif not Path(
"/var/tmp/bunkerweb/variables.env"
).exists() or db.get_config() != dotenv_values(
"/var/tmp/bunkerweb/variables.env"
elif not tmp_variables_path.is_file() or db.get_config() != dotenv_values(
str(tmp_variables_path)
):
# run the config saver
proc = subprocess_run(
[
"python",
"/usr/share/bunkerweb/gen/save_config.py",
join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
"--settings",
"/usr/share/bunkerweb/settings.json",
join(sep, "usr", "share", "bunkerweb", "settings.json"),
],
stdin=DEVNULL,
stderr=STDOUT,
@ -306,10 +312,11 @@ if __name__ == "__main__":
# Remove old custom configs files
logger.info("Removing old custom configs files ...")
for file in glob(join(configs_path, "*", "*")):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=True)
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
db_configs = db.get_custom_configs()
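
Note: the cleanup loops in this file now bind each glob hit to a Path once and test is_symlink() before is_dir(), so symlinked directories are unlinked rather than recursed into and wiped at their target. A sketch against a hypothetical root:

from glob import glob
from pathlib import Path
from shutil import rmtree

for hit in glob("/tmp/bw-demo/configs/*/*"):
    hit = Path(hit)
    if hit.is_symlink() or hit.is_file():
        hit.unlink()
    elif hit.is_dir():
        rmtree(str(hit), ignore_errors=True)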
@ -354,10 +361,11 @@ if __name__ == "__main__":
# Remove old external plugins files
logger.info("Removing old external plugins files ...")
for file in glob(join(plugins_dir, "*")):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=True)
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
@ -368,10 +376,8 @@ if __name__ == "__main__":
logger.info("Executing scheduler ...")
generate = not Path(
sep, "var", "tmp", "bunkerweb", "variables.env"
).exists() or env != dotenv_values(
join(sep, "var", "tmp", "bunkerweb", "variables.env")
generate = not tmp_variables_path.exists() or env != dotenv_values(
str(tmp_variables_path)
)
if not generate:
@ -383,7 +389,7 @@ if __name__ == "__main__":
while True:
# Instantiate scheduler
scheduler = JobScheduler(
env=deepcopy(env) | environ,
env=env.copy() | environ.copy(),
apis=api_caller._get_apis(),
logger=logger,
integration=integration,
@ -400,16 +406,16 @@ if __name__ == "__main__":
proc = subprocess_run(
[
"python3",
"/usr/share/bunkerweb/gen/main.py",
join(sep, "usr", "share", "bunkerweb", "gen", "main.py"),
"--settings",
"/usr/share/bunkerweb/settings.json",
join(sep, "usr", "share", "bunkerweb", "settings.json"),
"--templates",
"/usr/share/bunkerweb/confs",
join(sep, "usr", "share", "bunkerweb", "confs"),
"--output",
"/etc/nginx",
join(sep, "etc", "nginx"),
]
+ (
["--variables", args.variables]
["--variables", str(tmp_variables_path)]
if args.variables and first_run
else []
),
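
Note: the generator invocation above builds its argv as a list and only appends the --variables pair on the first run. A sketch with hypothetical flag values:

from os import sep
from os.path import join

first_run = True
tmp_variables_path = join(sep, "var", "tmp", "bunkerweb", "variables.env")

cmd = [
    "python3",
    join(sep, "usr", "share", "bunkerweb", "gen", "main.py"),
    "--settings", join(sep, "usr", "share", "bunkerweb", "settings.json"),
    "--templates", join(sep, "usr", "share", "bunkerweb", "confs"),
    "--output", join(sep, "etc", "nginx"),
] + (["--variables", tmp_variables_path] if first_run else [])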
@ -422,41 +428,49 @@ if __name__ == "__main__":
"Config generator failed, configuration will not work as expected...",
)
else:
copy("/etc/nginx/variables.env", "/var/tmp/bunkerweb/variables.env")
copy(
join(sep, "etc", "nginx", "variables.env"),
str(tmp_variables_path),
)
if len(api_caller._get_apis()) > 0:
if api_caller._get_apis():
# send nginx configs
logger.info("Sending /etc/nginx folder ...")
ret = api_caller._send_files("/etc/nginx", "/confs")
logger.info(f"Sending {join(sep, 'etc', 'nginx')} folder ...")
ret = api_caller._send_files(
join(sep, "etc", "nginx"), "/confs"
)
if not ret:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
)
try:
if len(api_caller._get_apis()) > 0:
if api_caller._get_apis():
cache_path = join(sep, "var", "cache", "bunkerweb")
# send cache
logger.info("Sending /var/cache/bunkerweb folder ...")
if not api_caller._send_files("/var/cache/bunkerweb", "/cache"):
logger.error("Error while sending /var/cache/bunkerweb folder")
logger.info(f"Sending {cache_path} folder ...")
if not api_caller._send_files(cache_path, "/cache"):
logger.error(f"Error while sending {cache_path} folder")
else:
logger.info("Successfully sent /var/cache/bunkerweb folder")
logger.info(f"Successfully sent {cache_path} folder")
# restart nginx
if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
# Stop temp nginx
logger.info("Stopping temp nginx ...")
proc = subprocess_run(
["sudo", "/usr/sbin/nginx", "-s", "stop"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "stop"],
stdin=DEVNULL,
stderr=STDOUT,
env=deepcopy(env),
env=env.copy(),
)
if proc.returncode == 0:
logger.info("Successfully sent stop signal to temp nginx")
i = 0
while i < 20:
if not Path("/var/tmp/bunkerweb/nginx.pid").is_file():
if not Path(
sep, "var", "tmp", "bunkerweb", "nginx.pid"
).is_file():
break
logger.warning("Waiting for temp nginx to stop ...")
sleep(1)
@ -469,10 +483,10 @@ if __name__ == "__main__":
# Start nginx
logger.info("Starting nginx ...")
proc = subprocess_run(
["sudo", "/usr/sbin/nginx"],
["sudo", join(sep, "usr", "sbin", "nginx")],
stdin=DEVNULL,
stderr=STDOUT,
env=deepcopy(env),
env=env.copy(),
)
if proc.returncode == 0:
logger.info("Successfully started nginx")
@ -501,26 +515,27 @@ if __name__ == "__main__":
# infinite schedule for the jobs
logger.info("Executing job scheduler ...")
Path("/var/tmp/bunkerweb/scheduler.healthy").write_text("ok")
Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").write_text("ok")
while run and not need_reload:
scheduler.run_pending()
sleep(1)
# check if the custom configs have changed since last time
tmp_db_configs = db.get_custom_configs()
tmp_db_configs: Dict[str, Any] = db.get_custom_configs()
if db_configs != tmp_db_configs:
logger.info("Custom configs changed, generating ...")
logger.debug(f"{tmp_db_configs=}")
logger.debug(f"{db_configs=}")
db_configs = deepcopy(tmp_db_configs)
db_configs = tmp_db_configs.copy()
# Remove old custom configs files
logger.info("Removing old custom configs files ...")
for file in glob(join(configs_path, "*", "*")):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=True)
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
generate_custom_configs(
db_configs,
@ -540,10 +555,10 @@ if __name__ == "__main__":
# Reloading the nginx server.
proc = subprocess_run(
# Reload nginx
["sudo", "/usr/sbin/nginx", "-s", "reload"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
env=deepcopy(env),
env=env.copy(),
)
if proc.returncode == 0:
logger.info("Successfully reloaded nginx")
@ -555,20 +570,23 @@ if __name__ == "__main__":
need_reload = True
# check if the plugins have changed since last time
tmp_external_plugins = db.get_plugins(external=True)
tmp_external_plugins: List[Dict[str, Any]] = db.get_plugins(
external=True
)
if external_plugins != tmp_external_plugins:
logger.info("External plugins changed, generating ...")
logger.debug(f"{tmp_external_plugins=}")
logger.debug(f"{external_plugins=}")
external_plugins = deepcopy(tmp_external_plugins)
external_plugins = tmp_external_plugins.copy()
# Remove old external plugins files
logger.info("Removing old external plugins files ...")
for file in glob(join(plugins_dir, "*")):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=True)
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
logger.info("Generating new external plugins ...")
generate_external_plugins(
@ -580,7 +598,7 @@ if __name__ == "__main__":
need_reload = True
# check if the config have changed since last time
tmp_env = db.get_config()
tmp_env: Dict[str, Any] = db.get_config()
tmp_env["DATABASE_URI"] = environ.get(
"DATABASE_URI", tmp_env["DATABASE_URI"]
)
@ -588,7 +606,7 @@ if __name__ == "__main__":
logger.info("Config changed, generating ...")
logger.debug(f"{tmp_env=}")
logger.debug(f"{env=}")
env = deepcopy(tmp_env)
env = tmp_env.copy()
need_reload = True
except:
logger.error(

View File

@ -63,7 +63,7 @@ RUN apk add --no-cache bash && \
chown root:ui /usr/share/bunkerweb/INTEGRATION
# Fix CVEs
# RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx