Refactor to make more sense and avoid specific errors

Théophile Diot 2023-06-01 10:09:38 -04:00
parent fff21746a9
commit a68fb0c06a
No known key found for this signature in database
GPG Key ID: E752C80DB72BB014
21 changed files with 147 additions and 185 deletions

View File

@ -3,7 +3,7 @@
from os import getenv
from threading import Lock
from time import sleep
from typing import Literal, Optional, Union
from typing import Optional
from ConfigCaller import ConfigCaller # type: ignore
from Database import Database # type: ignore
@ -11,13 +11,8 @@ from logger import setup_logger # type: ignore
class Config(ConfigCaller):
def __init__(
self,
ctrl_type: Union[Literal["docker"], Literal["swarm"], Literal["kubernetes"]],
lock: Optional[Lock] = None,
):
def __init__(self, lock: Optional[Lock] = None):
super().__init__()
self.__ctrl_type = ctrl_type
self.__lock = lock
self.__logger = setup_logger("Config", getenv("LOG_LEVEL", "INFO"))
self.__instances = []

View File

@ -11,12 +11,13 @@ from Config import Config
from logger import setup_logger # type: ignore
class Controller(ABC):
class Controller(Config):
def __init__(
self,
ctrl_type: Union[Literal["docker"], Literal["swarm"], Literal["kubernetes"]],
lock: Optional[Lock] = None,
):
super().__init__(lock)
self._type = ctrl_type
self._instances = []
self._services = []
@ -32,15 +33,16 @@ class Controller(ABC):
self._configs = {
config_type: {} for config_type in self._supported_config_types
}
self._config = Config(ctrl_type, lock)
self.__logger = setup_logger("Controller", getenv("LOG_LEVEL", "INFO"))
self._logger = setup_logger(
f"{self._type}-controller", getenv("LOG_LEVEL", "INFO")
)
def wait(self, wait_time: int) -> list:
all_ready = False
while not all_ready:
self._instances = self.get_instances()
if not self._instances:
self.__logger.warning(
self._logger.warning(
f"No instance found, waiting {wait_time}s ...",
)
sleep(wait_time)
@ -48,7 +50,7 @@ class Controller(ABC):
all_ready = True
for instance in self._instances:
if not instance["health"]:
self.__logger.warning(
self._logger.warning(
f"Instance {instance['name']} is not ready, waiting {wait_time}s ...",
)
sleep(wait_time)
@ -83,10 +85,10 @@ class Controller(ABC):
pass
def _set_autoconf_load_db(self):
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if not self._db.is_autoconf_loaded():
ret = self._db.set_autoconf_load(True)
if ret:
self.__logger.warning(
self._logger.warning(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)

View File

@ -1,6 +1,5 @@
#!/usr/bin/python3
from os import getenv
from typing import Any, Dict, List
from docker import DockerClient
from re import compile as re_compile
@ -8,16 +7,12 @@ from traceback import format_exc
from docker.models.containers import Container
from Controller import Controller
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class DockerController(Controller, ConfigCaller):
class DockerController(Controller):
def __init__(self, docker_host):
Controller.__init__(self, "docker")
ConfigCaller.__init__(self)
super().__init__("docker")
self.__client = DockerClient(base_url=docker_host)
self.__logger = setup_logger("docker-controller", getenv("LOG_LEVEL", "INFO"))
self.__custom_confs_rx = re_compile(
r"^bunkerweb.CUSTOM_CONF_(SERVER_HTTP|MODSEC_CRS|MODSEC)_(.+)$"
)
@ -111,9 +106,7 @@ class DockerController(Controller, ConfigCaller):
return configs
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)
return self.apply(self._instances, self._services, configs=self._configs)
def process_events(self):
self._set_autoconf_load_db()
@ -122,27 +115,22 @@ class DockerController(Controller, ConfigCaller):
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self._config.update_needed(
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
continue
self.__logger.info(
self._logger.info(
"Caught Docker event, deploying new configuration ..."
)
if not self.apply_config():
self.__logger.error("Error while deploying new configuration")
self._logger.error("Error while deploying new configuration")
else:
self.__logger.info(
self._logger.info(
"Successfully deployed new configuration 🚀",
)
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if ret:
self.__logger.warning(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)
self._set_autoconf_load_db()
except:
self.__logger.error(
self._logger.error(
f"Exception while processing events :\n{format_exc()}"
)
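
Taken together, the three files above collapse the old arrangement (Controller as an ABC that owned a separate Config object, with ConfigCaller mixed into every concrete controller) into a single inheritance chain, so apply(), update_needed() and _db are reachable directly on self. A condensed sketch of the new chain, with ConfigCaller stubbed out and the bodies trimmed to the lines shown in the diff:

from threading import Lock
from typing import Optional

class ConfigCaller:                      # stand-in for the real ConfigCaller (loads settings and plugin metadata)
    def __init__(self):
        ...

class Config(ConfigCaller):
    def __init__(self, lock: Optional[Lock] = None):
        super().__init__()
        self.__lock = lock

class Controller(Config):
    def __init__(self, ctrl_type: str, lock: Optional[Lock] = None):
        super().__init__(lock)
        self._type = ctrl_type            # the controller type now lives here, not on Config

class DockerController(Controller):
    def __init__(self, docker_host: str):
        super().__init__("docker")
        self.__docker_host = docker_host  # the real class builds a DockerClient(base_url=docker_host) here

DockerController("unix:///var/run/docker.sock")   # placeholder docker host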

View File

@ -1,6 +1,5 @@
#!/usr/bin/python3
from os import getenv
from time import sleep
from traceback import format_exc
from typing import List
@ -9,19 +8,15 @@ from kubernetes.client.exceptions import ApiException
from threading import Thread, Lock
from Controller import Controller
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class IngressController(Controller, ConfigCaller):
class IngressController(Controller):
def __init__(self):
Controller.__init__(self, "kubernetes")
ConfigCaller.__init__(self)
self.__internal_lock = Lock()
super().__init__("kubernetes", self.__internal_lock)
config.load_incluster_config()
self.__corev1 = client.CoreV1Api()
self.__networkingv1 = client.NetworkingV1Api()
self.__internal_lock = Lock()
self.__logger = setup_logger("Ingress-controller", getenv("LOG_LEVEL", "INFO"))
def _get_controller_instances(self) -> list:
return [
@ -51,7 +46,7 @@ class IngressController(Controller, ConfigCaller):
pod = container
break
if not pod:
self.__logger.warning(
self._logger.warning(
f"Missing container bunkerweb in pod {controller_instance.metadata.name}"
)
else:
@ -81,7 +76,7 @@ class IngressController(Controller, ConfigCaller):
# parse rules
for rule in controller_service.spec.rules:
if not rule.host:
self.__logger.warning(
self._logger.warning(
"Ignoring unsupported ingress rule without host.",
)
continue
@ -93,22 +88,22 @@ class IngressController(Controller, ConfigCaller):
location = 1
for path in rule.http.paths:
if not path.path:
self.__logger.warning(
self._logger.warning(
"Ignoring unsupported ingress rule without path.",
)
continue
elif not path.backend.service:
self.__logger.warning(
self._logger.warning(
"Ignoring unsupported ingress rule without backend service.",
)
continue
elif not path.backend.service.port:
self.__logger.warning(
self._logger.warning(
"Ignoring unsupported ingress rule without backend service port.",
)
continue
elif not path.backend.service.port.number:
self.__logger.warning(
self._logger.warning(
"Ignoring unsupported ingress rule without backend service port number.",
)
continue
@ -119,7 +114,7 @@ class IngressController(Controller, ConfigCaller):
).items
if not service_list:
self.__logger.warning(
self._logger.warning(
f"Ignoring ingress rule with service {path.backend.service.name} : service not found.",
)
continue
@ -137,7 +132,7 @@ class IngressController(Controller, ConfigCaller):
# parse tls
if controller_service.spec.tls: # TODO: support tls
self.__logger.warning("Ignoring unsupported tls.")
self._logger.warning("Ignoring unsupported tls.")
# parse annotations
if controller_service.metadata.annotations:
@ -204,12 +199,12 @@ class IngressController(Controller, ConfigCaller):
config_type = configmap.metadata.annotations["bunkerweb.io/CONFIG_TYPE"]
if config_type not in self._supported_config_types:
self.__logger.warning(
self._logger.warning(
f"Ignoring unsupported CONFIG_TYPE {config_type} for ConfigMap {configmap.metadata.name}",
)
continue
elif not configmap.data:
self.__logger.warning(
self._logger.warning(
f"Ignoring blank ConfigMap {configmap.metadata.name}",
)
continue
@ -218,7 +213,7 @@ class IngressController(Controller, ConfigCaller):
if not self._is_service_present(
configmap.metadata.annotations["bunkerweb.io/CONFIG_SITE"]
):
self.__logger.warning(
self._logger.warning(
f"Ignoring config {configmap.metadata.name} because {configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']} doesn't exist",
)
continue
@ -253,46 +248,41 @@ class IngressController(Controller, ConfigCaller):
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self._config.update_needed(
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
self.__internal_lock.release()
locked = False
continue
self.__logger.info(
self._logger.info(
f"Catched kubernetes event ({watch_type}), deploying new configuration ...",
)
try:
ret = self.apply_config()
if not ret:
self.__logger.error(
self._logger.error(
"Error while deploying new configuration ...",
)
else:
self.__logger.info(
self._logger.info(
"Successfully deployed new configuration 🚀",
)
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if ret:
self.__logger.warning(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)
self._set_autoconf_load_db()
except:
self.__logger.error(
self._logger.error(
f"Exception while deploying new configuration :\n{format_exc()}",
)
self.__internal_lock.release()
locked = False
except ApiException as e:
if e.status != 410:
self.__logger.error(
self._logger.error(
f"API exception while reading k8s event (type = {watch_type}) :\n{format_exc()}",
)
error = True
except:
self.__logger.error(
self._logger.error(
f"Unknown exception while reading k8s event (type = {watch_type}) :\n{format_exc()}",
)
error = True
@ -302,13 +292,11 @@ class IngressController(Controller, ConfigCaller):
locked = False
if error is True:
self.__logger.warning("Got exception, retrying in 10 seconds ...")
self._logger.warning("Got exception, retrying in 10 seconds ...")
sleep(10)
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)
return self.apply(self._instances, self._services, configs=self._configs)
def process_events(self):
self._set_autoconf_load_db()

View File

@ -1,6 +1,5 @@
#!/usr/bin/python3
from os import getenv
from time import sleep
from traceback import format_exc
from threading import Thread, Lock
@ -10,17 +9,13 @@ from base64 import b64decode
from docker.models.services import Service
from Controller import Controller
from ConfigCaller import ConfigCaller # type: ignore
from logger import setup_logger # type: ignore
class SwarmController(Controller, ConfigCaller):
class SwarmController(Controller):
def __init__(self, docker_host):
Controller.__init__(self, "swarm")
ConfigCaller.__init__(self)
super().__init__("swarm")
self.__client = DockerClient(base_url=docker_host)
self.__internal_lock = Lock()
self.__logger = setup_logger("Swarm-controller", getenv("LOG_LEVEL", "INFO"))
def _get_controller_instances(self) -> List[Service]:
return self.__client.services.list(filters={"label": "bunkerweb.INSTANCE"})
@ -110,7 +105,7 @@ class SwarmController(Controller, ConfigCaller):
config_type = config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_TYPE"]
config_name = config.name
if config_type not in self._supported_config_types:
self.__logger.warning(
self._logger.warning(
f"Ignoring unsupported CONFIG_TYPE {config_type} for Config {config_name}",
)
continue
@ -119,7 +114,7 @@ class SwarmController(Controller, ConfigCaller):
if not self._is_service_present(
config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_SITE"]
):
self.__logger.warning(
self._logger.warning(
f"Ignoring config {config_name} because {config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']} doesn't exist",
)
continue
@ -132,9 +127,7 @@ class SwarmController(Controller, ConfigCaller):
return configs
def apply_config(self) -> bool:
return self._config.apply(
self._instances, self._services, configs=self._configs
)
return self.apply(self._instances, self._services, configs=self._configs)
def __event(self, event_type):
while True:
@ -150,31 +143,31 @@ class SwarmController(Controller, ConfigCaller):
self._instances = self.get_instances()
self._services = self.get_services()
self._configs = self.get_configs()
if not self._config.update_needed(
if not self.update_needed(
self._instances, self._services, configs=self._configs
):
self.__internal_lock.release()
locked = False
continue
self.__logger.info(
self._logger.info(
f"Catched Swarm event ({event_type}), deploying new configuration ..."
)
if not self.apply_config():
self.__logger.error(
self._logger.error(
"Error while deploying new configuration"
)
else:
self.__logger.info(
self._logger.info(
"Successfully deployed new configuration 🚀",
)
except:
self.__logger.error(
self._logger.error(
f"Exception while processing Swarm event ({event_type}) :\n{format_exc()}"
)
self.__internal_lock.release()
locked = False
except:
self.__logger.error(
self._logger.error(
f"Exception while reading Swarm event ({event_type}) :\n{format_exc()}",
)
error = True
@ -183,7 +176,7 @@ class SwarmController(Controller, ConfigCaller):
self.__internal_lock.release()
locked = False
if error is True:
self.__logger.warning("Got exception, retrying in 10 seconds ...")
self._logger.warning("Got exception, retrying in 10 seconds ...")
sleep(10)
def process_events(self):

View File

@ -7,12 +7,16 @@ from requests import request
class API:
def __init__(self, endpoint: str, host: str = "bwapi"):
self.__endpoint = endpoint
if not self.__endpoint.endswith("/"):
self.__endpoint += "/"
self.__host = host
def get_endpoint(self) -> str:
@property
def endpoint(self) -> str:
return self.__endpoint
def get_host(self) -> str:
@property
def host(self) -> str:
return self.__host
def request(
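
The getter methods become read-only properties, so callers switch to attribute access (api.endpoint, api.host), as the updated jobs and ApiCaller further down show; the constructor also normalizes the endpoint to always end with a slash. A minimal sketch of the resulting class with a hypothetical endpoint value:

class API:
    def __init__(self, endpoint: str, host: str = "bwapi"):
        self.__endpoint = endpoint
        if not self.__endpoint.endswith("/"):
            self.__endpoint += "/"        # endpoints are normalized with a trailing slash
        self.__host = host

    @property
    def endpoint(self) -> str:            # read-only attribute access replaces get_endpoint()
        return self.__endpoint

    @property
    def host(self) -> str:                # replaces get_host()
        return self.__host

api = API("http://bunkerweb:5000")        # placeholder endpoint
assert api.endpoint == "http://bunkerweb:5000/"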

View File

@ -65,16 +65,16 @@ try:
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/challenge : {err}"
f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
f"Error while sending API request to {api.endpoint}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
f"Successfully sent API request to {api.endpoint}/lets-encrypt/challenge",
)
# Linux case

View File

@ -61,16 +61,16 @@ try:
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/challenge : {err}"
f"Can't send API request to {api.endpoint}/lets-encrypt/challenge : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
f"Error while sending API request to {api.endpoint}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
f"Successfully sent API request to {api.endpoint}/lets-encrypt/challenge",
)
# Linux case
else:

View File

@ -78,31 +78,31 @@ try:
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/certificates : {err}"
f"Can't send API request to {api.endpoint}/lets-encrypt/certificates : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}"
f"Error while sending API request to {api.endpoint}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/certificates",
f"Successfully sent API request to {api.endpoint}/lets-encrypt/certificates",
)
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/reload : {err}"
f"Can't send API request to {api.endpoint}/reload : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/reload : status = {resp['status']}, msg = {resp['msg']}"
f"Error while sending API request to {api.endpoint}/reload : status = {resp['status']}, msg = {resp['msg']}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/reload"
f"Successfully sent API request to {api.endpoint}/reload"
)
# Linux case
else:

View File

@ -11,7 +11,7 @@ from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile
from sys import _getframe, path as sys_path
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Dict, List, Optional, Tuple, Union
from time import sleep
from traceback import format_exc
@ -55,7 +55,7 @@ install_as_MySQLdb()
class Database:
def __init__(self, logger: Logger, sqlalchemy_string: str = None) -> None:
def __init__(self, logger: Logger, sqlalchemy_string: Optional[str] = None) -> None:
"""Initialize the database"""
self.__logger = logger
self.__sql_session = None
@ -257,7 +257,7 @@ class Database:
return ""
def check_changes(self) -> Dict[str, bool]:
def check_changes(self) -> Union[Dict[str, bool], str]:
"""Check if either the config, the custom configs or plugins have changed inside the database"""
with self.__db_session() as session:
try:
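
Two small typing fixes in this file: a parameter defaulting to None becomes an explicit Optional (implicit Optional is discouraged by PEP 484 and rejected by strict type checkers), and check_changes is annotated to admit a non-dict return, presumably an error string. A hedged sketch of both signatures:

from logging import Logger
from typing import Dict, Optional, Union

class Database:
    def __init__(self, logger: Logger, sqlalchemy_string: Optional[str] = None) -> None:
        # "str = None" was an implicit Optional; the explicit form matches the default
        self.__logger = logger
        self.__sqlalchemy_string = sqlalchemy_string   # attribute name is illustrative

    def check_changes(self) -> Union[Dict[str, bool], str]:
        # change flags on success, or (judging by the new annotation) an error message on failure
        return {"config_changed": False}               # placeholder return value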

View File

@ -298,7 +298,7 @@ class Configurator:
elif not self.__plugin_version_rx.match(plugin["version"]):
return (
False,
f"Invalid version for plugin {plugin['id']} (Must be in format \d+\.\d+(\.\d+)?)",
f"Invalid version for plugin {plugin['id']} (Must be in format \\d+\\.\\d+(\\.\\d+)?)",
)
elif plugin["stream"] not in ["yes", "no", "partial"]:
return (
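
The doubled backslashes matter because this message is a regular (non-raw) f-string: escape sequences like \d are invalid and trigger a DeprecationWarning (SyntaxWarning in newer Python releases). Doubling them, as the diff does, or switching to a raw string are equivalent; a quick self-contained check:

plugin = {"id": "example", "version": "1.0"}   # hypothetical plugin dict
msg = f"Invalid version for plugin {plugin['id']} (Must be in format \\d+\\.\\d+(\\.\\d+)?)"
assert msg.endswith(r"(Must be in format \d+\.\d+(\.\d+)?)")   # "\\d" renders as a literal \d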

View File

@ -371,10 +371,8 @@ if __name__ == "__main__":
if args.method != "ui":
if apis:
for api in apis:
endpoint_data = api.get_endpoint().replace("http://", "").split(":")
err = db.add_instance(
endpoint_data[0], endpoint_data[1], api.get_host()
)
endpoint_data = api.endpoint.replace("http://", "").split(":")
err = db.add_instance(endpoint_data[0], endpoint_data[1], api.host)
if err:
logger.warning(err)

View File

@ -131,23 +131,21 @@ class ApiCaller:
if not sent:
ret = False
self.__logger.error(
f"Can't send API request to {api.get_endpoint()}{url} : {err}",
f"Can't send API request to {api.endpoint}{url} : {err}",
)
else:
if status != 200:
ret = False
self.__logger.error(
f"Error while sending API request to {api.get_endpoint()}{url} : status = {resp['status']}, msg = {resp['msg']}",
f"Error while sending API request to {api.endpoint}{url} : status = {resp['status']}, msg = {resp['msg']}",
)
else:
self.__logger.info(
f"Successfully sent API request to {api.get_endpoint()}{url}",
f"Successfully sent API request to {api.endpoint}{url}",
)
if response:
instance = (
api.get_endpoint().replace("http://", "").split(":")[0]
)
instance = api.endpoint.replace("http://", "").split(":")[0]
if isinstance(resp, dict):
responses[instance] = resp
else:

View File

@ -16,13 +16,17 @@ class ConfigCaller:
def __init__(self):
self.__logger = setup_logger("Config", "INFO")
self._settings = loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text()
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(
encoding="utf-8"
)
)
for plugin in glob(
join(sep, "usr", "share", "bunkerweb", "core", "*", "plugin.json")
) + glob(join(sep, "etc", "bunkerweb", "plugins", "*", "plugin.json")):
try:
self._settings.update(loads(Path(plugin).read_text())["settings"])
self._settings.update(
loads(Path(plugin).read_text(encoding="utf-8"))["settings"]
)
except KeyError:
self.__logger.error(
f'Error while loading plugin metadata file at {plugin} : missing "settings" key',

View File

@ -70,12 +70,12 @@ def is_cached_file(
return is_cached and cached_file
def get_file_in_db(file: Union[str, Path], db) -> bytes:
def get_file_in_db(file: Union[str, Path], db) -> Optional[bytes]:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""), normpath(file)
)
if not cached_file:
return False
return None
return cached_file.data
@ -142,7 +142,9 @@ def bytes_hash(bio: BufferedReader) -> str:
def cache_hash(cache: Union[str, Path], db=None) -> Optional[str]:
with suppress(BaseException):
return loads(Path(normpath(f"{cache}.md")).read_text()).get("checksum", None)
return loads(Path(normpath(f"{cache}.md")).read_text(encoding="utf-8")).get(
"checksum", None
)
if db:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""),
@ -192,7 +194,8 @@ def cache_file(
)
else:
Path(f"{cache}.md").write_text(
dumps(dict(date=datetime.now().timestamp(), checksum=_hash))
dumps(dict(date=datetime.now().timestamp(), checksum=_hash)),
encoding="utf-8",
)
except:
return False, f"exception :\n{format_exc()}"

View File

@ -18,21 +18,7 @@ from typing import Optional, Union
class BWLogger(Logger):
def __init__(self, name, level=INFO):
self.name = name
return super(BWLogger, self).__init__(name, level)
def _log(
self,
level,
msg,
args,
exc_info=None,
extra=None,
stack_info=False,
stacklevel=1,
):
return super(BWLogger, self)._log(
level, msg, args, exc_info, extra, stack_info, stacklevel
)
super(BWLogger, self).__init__(name, level)
setLoggerClass(BWLogger)
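
The removed _log override only forwarded its arguments to Logger._log unchanged, so dropping it changes nothing; the other fix is that __init__ no longer returns the value of super().__init__() (an initializer must return None). The whole subclass reduces to the sketch below, with a hypothetical logger name for the usage check:

from logging import INFO, Logger, getLogger, setLoggerClass

class BWLogger(Logger):
    def __init__(self, name, level=INFO):
        self.name = name
        super().__init__(name, level)   # no "return": __init__ must return None

setLoggerClass(BWLogger)                # loggers created afterwards use this class
logger = getLogger("example")           # placeholder name
assert isinstance(logger, BWLogger)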

View File

@ -210,8 +210,12 @@ while not db.is_first_config_saved() or not env:
env = db.get_config()
logger.info("Database is ready")
Path(sep, "var", "tmp", "bunkerweb", "ui.healthy").write_text("ok")
bw_version = Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text().strip()
Path(sep, "var", "tmp", "bunkerweb", "ui.healthy").write_text("ok", encoding="utf-8")
bw_version = (
Path(sep, "usr", "share", "bunkerweb", "VERSION")
.read_text(encoding="utf-8")
.strip()
)
try:
app.config.update(
@ -243,8 +247,12 @@ plugin_id_rx = re_compile(r"^[\w_-]{1,64}$")
# Declare functions for jinja2
app.jinja_env.globals.update(check_settings=check_settings)
# CSRF protection
csrf = CSRFProtect()
csrf.init_app(app)
def manage_bunkerweb(method: str, operation: str = "reloads", *args):
def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
# Do the operation
if method == "services":
error = False
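
Moving operation behind *args makes it keyword-only: every positional argument after method now collects into *args, so call sites have to name the operation explicitly, which is exactly what the updated Thread(..., kwargs={"operation": ...}) calls further down do. A minimal illustration with placeholder values:

def manage_bunkerweb(method: str, *args, operation: str = "reloads"):
    print(method, args, operation)

manage_bunkerweb("instances", "instance-1", operation="reload")   # placeholder arguments
# -> instances ('instance-1',) reload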
@ -295,11 +303,6 @@ def load_user(user_id):
return User(user_id, vars["ADMIN_PASSWORD"])
# CSRF protection
csrf = CSRFProtect()
csrf.init_app(app)
@app.errorhandler(CSRFError)
def handle_csrf_error(_):
"""
@ -348,6 +351,7 @@ def home():
r = get(
"https://github.com/bunkerity/bunkerweb/releases/latest",
allow_redirects=True,
timeout=5,
)
r.raise_for_status()
except BaseException:
@ -418,7 +422,8 @@ def instances():
Thread(
target=manage_bunkerweb,
name="Reloading instances",
args=("instances", request.form["operation"], request.form["INSTANCE_ID"]),
args=("instances", request.form["INSTANCE_ID"]),
kwargs={"operation": request.form["operation"]},
).start()
return redirect(
@ -522,11 +527,11 @@ def services():
name="Reloading instances",
args=(
"services",
request.form["operation"],
variables,
request.form.get("OLD_SERVER_NAME", "").split(" ")[0],
variables.get("SERVER_NAME", "").split(" ")[0],
),
kwargs={"operation": request.form["operation"]},
).start()
message = ""
@ -589,7 +594,7 @@ def global_config():
if not variables:
flash(
f"The global configuration was not edited because no values were changed."
"The global configuration was not edited because no values were changed."
)
return redirect(url_for("loading", next=url_for("global_config")))
@ -606,7 +611,6 @@ def global_config():
name="Reloading instances",
args=(
"global_config",
"reloads",
variables,
),
).start()
@ -669,6 +673,8 @@ def configs():
variables["content"], "html.parser"
).get_text()
error = False
if request.form["operation"] == "new":
if variables["type"] == "folder":
operation, error = app.config["CONFIGFILES"].create_folder(
@ -852,7 +858,9 @@ def plugins():
)
plugin_file = json_loads(
temp_folder_path.joinpath("plugin.json").read_text()
temp_folder_path.joinpath("plugin.json").read_text(
encoding="utf-8"
)
)
if not all(key in plugin_file.keys() for key in PLUGIN_KEYS):
@ -1200,13 +1208,13 @@ def logs_linux():
nginx_error_file = Path(sep, "var", "log", "nginx", "error.log")
if nginx_error_file.is_file():
raw_logs_access = nginx_error_file.read_text().splitlines()[
raw_logs_access = nginx_error_file.read_text(encoding="utf-8").splitlines()[
int(last_update.split(".")[0]) if last_update else 0 :
]
nginx_access_file = Path(sep, "var", "log", "nginx", "access.log")
if nginx_access_file.is_file():
raw_logs_error = nginx_access_file.read_text().splitlines()[
raw_logs_error = nginx_access_file.read_text(encoding="utf-8").splitlines()[
int(last_update.split(".")[1]) if last_update else 0 :
]

View File

@ -15,24 +15,12 @@ from uuid import uuid4
class Config:
def __init__(self, db) -> None:
self.__settings = json_loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text()
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text(
encoding="utf-8"
)
)
self.__db = db
def __dict_to_env(self, filename: str, variables: dict) -> None:
"""Converts the content of a dict into an env file
Parameters
----------
filename : str
The path to save the env file
variables : dict
The dict to convert to env file
"""
Path(filename).write_text(
"\n".join(f"{k}={variables[k]}" for k in sorted(variables))
)
def __gen_conf(self, global_conf: dict, services_conf: list[dict]) -> None:
"""Generates the nginx configuration file from the given configuration
@ -43,7 +31,7 @@ class Config:
Raises
------
Exception
ConfigGenerationError
If an error occurred during the generation of the configuration file, raises this exception
"""
conf = deepcopy(global_conf)
@ -68,7 +56,11 @@ class Config:
conf["SERVER_NAME"] = " ".join(servers)
env_file = Path(sep, "tmp", f"{uuid4()}.env")
self.__dict_to_env(env_file, conf)
env_file.write_text(
"\n".join(f"{k}={conf[k]}" for k in sorted(conf)),
encoding="utf-8",
)
proc = run(
[
"python3",
@ -80,6 +72,7 @@ class Config:
],
stdin=DEVNULL,
stderr=STDOUT,
check=False,
)
if proc.returncode != 0:
@ -270,7 +263,7 @@ class Config:
self.__gen_conf(
self.get_config(methods=False) | variables, self.get_services(methods=False)
)
return f"The global configuration has been edited."
return "The global configuration has been edited."
def delete_service(self, service_name: str) -> Tuple[str, int]:
"""Deletes a service

View File

@ -14,9 +14,8 @@ from utils import path_to_dict
def generate_custom_configs(
custom_configs: List[Dict[str, Any]],
*,
original_path: str = join(sep, "etc", "bunkerweb", "configs"),
original_path: Path = Path(sep, "etc", "bunkerweb", "configs"),
):
original_path: Path = Path(original_path)
original_path.mkdir(parents=True, exist_ok=True)
for custom_config in custom_configs:
tmp_path = original_path.joinpath(custom_config["type"].replace("_", "-"))
@ -64,7 +63,7 @@ class ConfigFiles:
if files or (dirs and basename(root) not in root_dirs):
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r") as f:
with open(join(root, file), "r", encoding="utf-8") as f:
custom_configs.append(
{
"value": f.read(),
@ -148,7 +147,7 @@ class ConfigFiles:
def create_file(self, path: str, name: str, content: str) -> Tuple[str, int]:
file_path = Path(path, name)
file_path.parent.mkdir(exist_ok=True)
file_path.write_text(content)
file_path.write_text(content, encoding="utf-8")
return f"The file {file_path} was successfully created", 0
def edit_folder(self, path: str, name: str, old_name: str) -> Tuple[str, int]:
@ -178,7 +177,7 @@ class ConfigFiles:
old_path = join(dirname(path), old_name)
try:
file_content = Path(old_path).read_text()
file_content = Path(old_path).read_text(encoding="utf-8")
except FileNotFoundError:
return f"Could not find {old_path}", 1
@ -201,6 +200,6 @@ class ConfigFiles:
except OSError:
return f"Could not remove {old_path}", 1
Path(new_path).write_text(content)
Path(new_path).write_text(content, encoding="utf-8")
return f"The file {old_path} was successfully edited", 0

View File

@ -5,8 +5,8 @@ from bcrypt import checkpw, hashpw, gensalt
class User(UserMixin):
def __init__(self, id, password):
self.__id = id
def __init__(self, _id, password):
self.__id = _id
self.__password = hashpw(password.encode("utf-8"), gensalt())
def get_id(self):

View File

@ -2,7 +2,7 @@
from os import environ, urandom
from os.path import join
from typing import List
from typing import List, Optional
def get_variables():
@ -22,12 +22,15 @@ def get_variables():
def path_to_dict(
path,
path: str,
*,
is_cache: bool = False,
db_data: List[dict] = [],
services: List[str] = [],
db_data: Optional[List[dict]] = None,
services: Optional[List[dict]] = None,
) -> dict:
db_data = db_data or []
services = services or []
if not is_cache:
config_types = [
"http",