Make the Database support every feature + updates

TheophileDiot 2022-11-04 18:14:44 +01:00
parent 5f8353c114
commit 01b4145524
15 changed files with 750 additions and 269 deletions

View File

@@ -12,7 +12,7 @@ class Config(ConfigCaller):
self.__ctrl_type = ctrl_type
self.__lock = lock
self.__logger = setup_logger("Config", getenv("LOG_LEVEL", "INFO"))
self.__db = None
self._db = None
self.__instances = []
self.__services = []
self.__configs = []
@@ -55,8 +55,8 @@
self.__configs = configs
self.__config = self.__get_full_env()
if self.__db is None:
self.__db = Database(
if self._db is None:
self._db = Database(
self.__logger,
sqlalchemy_string=self.__config.get("DATABASE_URI", None),
bw_integration="Kubernetes"
@@ -64,19 +64,12 @@
else "Cluster",
)
while not self.__db.is_initialized():
while not self._db.is_initialized():
self.__logger.warning(
"Database is not initialized, retrying in 5 seconds ...",
)
sleep(5)
# save config to database
ret = self.__db.save_config(self.__config, "autoconf")
if ret:
self.__logger.error(
f"Can't save autoconf config in database: {ret}",
)
custom_configs = []
for config_type in self.__configs:
for file, data in self.__configs[config_type].items():
@@ -92,9 +85,18 @@
}
)
# save custom configs to database
ret = self.__db.save_custom_configs(custom_configs, "autoconf")
# save config to database
ret = self._db.save_config(self.__config, "autoconf")
if ret:
success = False
self.__logger.error(
f"Can't save autoconf config in database: {ret}",
)
# save custom configs to database
ret = self._db.save_custom_configs(custom_configs, "autoconf")
if ret:
success = False
self.__logger.error(
f"Can't save autoconf custom configs in database: {ret}",
)
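These database helpers share one error convention: save_config, save_custom_configs and friends return an empty string on success and an error message (often a formatted traceback) on failure. A minimal sketch of the calling pattern, with db standing in for the Database instance:

    err = db.save_config(config, "autoconf")
    if err:
        # a non-empty return value means the write failed
        logger.error(f"Can't save autoconf config in database: {err}")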

View File

@@ -132,6 +132,13 @@ class DockerController(Controller, ConfigCaller):
self.__logger.info(
"Successfully deployed new configuration 🚀",
)
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if ret:
self.__logger.error(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)
except:
self.__logger.error(
f"Exception while deploying new configuration :\n{format_exc()}",

View File

@@ -15,43 +15,23 @@ RUN apk add --no-cache --virtual build g++ gcc python3-dev musl-dev libffi-dev o
# can't exclude specific files/dir from . so we are copying everything by hand
COPY autoconf /opt/bunkerweb/autoconf
COPY bw/api /opt/bunkerweb/api
COPY bw/cli /opt/bunkerweb/cli
COPY bw/confs /opt/bunkerweb/confs
COPY bw/core /opt/bunkerweb/core
COPY bw/gen /opt/bunkerweb/gen
COPY bw/helpers /opt/bunkerweb/helpers
COPY bw/settings.json /opt/bunkerweb/settings.json
COPY db /opt/bunkerweb/db
COPY utils /opt/bunkerweb/utils
COPY VERSION /opt/bunkerweb/VERSION
# Add nginx user, drop bwcli, setup data folders, permissions and logging
RUN apk add --no-cache bash git && \
ln -s /usr/local/bin/python3 /usr/bin/python3 && \
RUN apk add --no-cache bash && \
addgroup -g 101 nginx && \
adduser -h /var/cache/nginx -g nginx -s /bin/sh -G nginx -D -H -u 101 nginx && \
apk add --no-cache bash && \
cp /opt/bunkerweb/helpers/bwcli /usr/local/bin && \
for dir in $(echo "cache configs configs/http configs/stream configs/server-http configs/server-stream configs/default-server-http configs/default-server-stream configs/modsec configs/modsec-crs cache/letsencrypt plugins www") ; do mkdir -p "/data/${dir}" && ln -s "/data/${dir}" "/opt/bunkerweb/${dir}" ; done && \
mkdir /opt/bunkerweb/tmp && \
chown -R root:nginx /opt/bunkerweb && \
chown -R nginx:nginx /opt/bunkerweb && \
find /opt/bunkerweb -type f -exec chmod 0740 {} \; && \
find /opt/bunkerweb -type d -exec chmod 0750 {} \; && \
chown -R nginx:nginx /data && \
chmod 770 /opt/bunkerweb/tmp && \
chmod 750 /opt/bunkerweb/gen/main.py /opt/bunkerweb/cli/main.py /usr/local/bin/bwcli /opt/bunkerweb/helpers/*.sh /opt/bunkerweb/autoconf/main.py /opt/bunkerweb/deps/python/bin/* && \
chown root:nginx /usr/local/bin/bwcli && \
mkdir /etc/nginx && \
chown -R nginx:nginx /etc/nginx && \
chmod -R 770 /etc/nginx
find /opt/bunkerweb -type d -exec chmod 0750 {} \;
# Fix CVEs
RUN apk add "libssl1.1>=1.1.1q-r0" "libcrypto1.1>=1.1.1q-r0" "git>=2.32.3-r0" "ncurses-libs>=6.2_p20210612-r1" "ncurses-terminfo-base>=6.2_p20210612-r1" "libtirpc>=1.3.2-r1" "libtirpc-conf>=1.3.2-r1" "zlib>=1.2.12-r2" "libxml2>=2.9.14-r1"
VOLUME /data /etc/nginx
VOLUME /data
WORKDIR /opt/bunkerweb/autoconf
USER root:nginx
CMD ["python", "/opt/bunkerweb/autoconf/main.py"]
CMD ["python3", "/opt/bunkerweb/autoconf/main.py"]

View File

@@ -250,6 +250,13 @@ class IngressController(Controller, ConfigCaller):
self.__logger.info(
"Successfully deployed new configuration 🚀",
)
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if ret:
self.__logger.error(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)
except:
self.__logger.error(
f"Exception while deploying new configuration :\n{format_exc()}",

View File

@@ -131,6 +131,13 @@ class SwarmController(Controller, ConfigCaller):
self.__logger.info(
"Successfully deployed new configuration 🚀",
)
if not self._config._db.is_autoconf_loaded():
ret = self._config._db.set_autoconf_load(True)
if ret:
self.__logger.error(
f"Can't set autoconf loaded metadata to true in database: {ret}",
)
except:
self.__logger.error(
f"Exception while deploying new configuration :\n{format_exc()}",

View File

@@ -33,16 +33,6 @@ signal(SIGINT, exit_handler)
signal(SIGTERM, exit_handler)
try:
# Setup /data folder if needed
proc = run(
["/opt/bunkerweb/helpers/data.sh", "AUTOCONF"],
stdin=DEVNULL,
stderr=STDOUT,
)
if proc.returncode != 0:
_exit(1)
# Instantiate the controller
if swarm:
logger.info("Swarm mode detected")

View File

@@ -4,8 +4,8 @@ from argparse import ArgumentParser
from glob import glob
from itertools import chain
from json import loads
from os import R_OK, W_OK, X_OK, access, environ, getenv, makedirs, path, remove, unlink
from os.path import dirname, exists, isdir, isfile, islink
from os import R_OK, W_OK, X_OK, access, environ, getenv, path, remove, unlink
from os.path import exists, isdir, isfile, islink
from re import compile as re_compile
from shutil import rmtree
from subprocess import DEVNULL, STDOUT, run
@@ -28,7 +28,6 @@ from Database import Database
from Configurator import Configurator
from Templator import Templator
from API import API
from ApiCaller import ApiCaller
if __name__ == "__main__":
@@ -163,6 +162,7 @@
for plugin in core_plugins[order]:
core_settings.update(plugin["settings"])
integration = "Linux"
if exists("/opt/bunkerweb/INTEGRATION"):
with open("/opt/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
@@ -182,8 +182,11 @@
if config_files.get("KUBERNETES_MODE", "no") == "yes":
bw_integration = "Kubernetes"
elif (
config_files.get("SWARM_MODE", "no") == "yes"
or config_files.get("AUTOCONF_MODE", "no") == "yes"
integration == "Docker"
or config_files.get(
"SWARM_MODE", config_files.get("AUTOCONF_MODE", "no")
)
== "yes"
):
bw_integration = "Cluster"
@@ -192,8 +195,9 @@
sqlalchemy_string=getenv("DATABASE_URI", None),
bw_integration=bw_integration,
)
is_initialized = db.is_initialized()
if args.init:
if not is_initialized:
ret, err = db.init_tables(
[
config.get_settings(),
@@ -215,55 +219,55 @@
else:
logger.info("Database tables initialized")
if not db.is_initialized():
logger.info(
"Database not initialized, initializing ...",
)
logger.info(
"Database not initialized, initializing ...",
)
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()}
for k, v in environ.items()
if custom_confs_rx.match(k)
]
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()}
for k, v in environ.items()
if custom_confs_rx.match(k)
]
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
if bw_integration == "Local":
err = db.save_config(config_files, args.method)
if bw_integration == "Local":
err = db.save_config(config_files, args.method)
if not err:
err1 = db.save_custom_configs(custom_confs, args.method)
else:
err = None
err1 = None
integration = "Linux"
if config_files.get("KUBERNETES_MODE", "no") == "yes":
integration = "Kubernetes"
elif config_files.get("SWARM_MODE", "no") == "yes":
integration = "Swarm"
elif config_files.get("AUTOCONF_MODE", "no") == "yes":
integration = "Autoconf"
elif exists("/opt/bunkerweb/INTEGRATION"):
with open("/opt/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
err2 = db.initialize_db(version=bw_version, integration=integration)
if err or err1 or err2:
logger.error(
f"Can't Initialize database : {err or err1 or err2}",
)
sys_exit(1)
else:
logger.info("Database initialized")
if not err:
err1 = db.save_custom_configs(custom_confs, args.method)
else:
logger.info(
"Database is already initialized, skipping ...",
)
err = None
err1 = None
sys_exit(0)
integration = "Linux"
if config_files.get("KUBERNETES_MODE", "no") == "yes":
integration = "Kubernetes"
elif config_files.get("SWARM_MODE", "no") == "yes":
integration = "Swarm"
elif config_files.get("AUTOCONF_MODE", "no") == "yes":
integration = "Autoconf"
elif exists("/opt/bunkerweb/INTEGRATION"):
with open("/opt/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
err2 = db.initialize_db(version=bw_version, integration=integration)
if err or err1 or err2:
logger.error(
f"Can't Initialize database : {err or err1 or err2}",
)
sys_exit(1)
else:
logger.info("Database initialized")
if args.init:
sys_exit(0)
elif is_initialized:
logger.info(
"Database is already initialized, skipping ...",
)
config = db.get_config()
elif integration == "Docker":
@@ -355,8 +359,6 @@
if db is None:
db = Database(logger)
api_caller = ApiCaller(apis=apis)
# Compute the config
logger.info("Computing config ...")
config = Configurator(
@@ -375,9 +377,6 @@
err = None
err1 = None
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
if err or err1:
logger.error(
f"Can't save config to database : {err or err1}",
@@ -425,8 +424,6 @@
if db is None:
db = Database(logger)
api_caller = ApiCaller(apis=apis)
# Compute the config
logger.info("Computing config ...")
config = Configurator(
@@ -493,9 +490,6 @@
err = None
err1 = None
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
if err or err1:
logger.error(
f"Can't save config to database : {err or err1}",
@@ -526,23 +520,17 @@
logger = setup_logger("Generator", config.get("LOG_LEVEL", "INFO"))
if integration == "Docker":
while not api_caller._send_to_apis("GET", "/ping"):
logger.warning(
"Waiting for BunkerWeb's temporary nginx to start, retrying in 5 seconds ...",
)
sleep(5)
elif bw_integration == "Local":
if bw_integration == "Local":
retries = 0
while not exists("/opt/bunkerweb/tmp/nginx.pid"):
if retries == 5:
logger.error(
"BunkerWeb's temporary nginx didn't start in time.",
"BunkerWeb's nginx didn't start in time.",
)
sys_exit(1)
logger.warning(
"Waiting for BunkerWeb's temporary nginx to start, retrying in 5 seconds ...",
"Waiting for BunkerWeb's nginx to start, retrying in 5 seconds ...",
)
retries += 1
sleep(5)
@@ -570,14 +558,7 @@
)
templator.render()
if integration == "Docker":
ret = api_caller._send_to_apis("POST", "/reload")
if not ret:
logger.error(
"reload failed",
)
sys_exit(1)
elif bw_integration == "Local":
if bw_integration == "Local":
cmd = "/usr/sbin/nginx -s reload"
proc = run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:

View File

@@ -1,10 +1,12 @@
from contextlib import contextmanager
from copy import deepcopy
from datetime import datetime
from hashlib import sha256
from logging import INFO, WARNING, Logger, getLogger
from os import _exit, getenv, listdir, path
from os.path import exists
from re import search
from sys import path as sys_path
from typing import Any, Dict, List, Optional, Tuple
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.exc import OperationalError, ProgrammingError, SQLAlchemyError
@@ -14,6 +16,11 @@ from traceback import format_exc
from model import *
if "/opt/bunkerweb/utils" not in sys_path:
sys_path.append("/opt/bunkerweb/utils")
from jobs import file_hash
class Database:
def __init__(
@@ -86,9 +93,7 @@
break
if not sqlalchemy_string:
sqlalchemy_string = getenv(
"DATABASE_URI", "sqlite:////opt/bunkerweb/cache/db.sqlite3"
)
sqlalchemy_string = getenv("DATABASE_URI", "sqlite:////data/db.sqlite3")
if sqlalchemy_string.startswith("sqlite"):
if not path.exists(sqlalchemy_string.split("///")[1]):
@@ -149,6 +154,36 @@
finally:
session.close()
def set_autoconf_load(self, value: bool = True) -> str:
"""Set the autoconf_loaded value"""
with self.__db_session() as session:
try:
metadata = session.query(Metadata).get(1)
if metadata is None:
return "The metadata are not set yet, try again"
metadata.autoconf_loaded = value
session.commit()
except BaseException:
return format_exc()
return ""
def is_autoconf_loaded(self) -> bool:
"""Check if the autoconf is loaded"""
with self.__db_session() as session:
try:
metadata = (
session.query(Metadata)
.with_entities(Metadata.autoconf_loaded)
.filter_by(id=1)
.first()
)
return metadata is not None and metadata.autoconf_loaded
except (ProgrammingError, OperationalError):
return False
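# Hedged usage sketch for the two helpers above, assuming a Database
# instance named db: set_autoconf_load() returns an empty string on
# success and an error message on failure, while is_autoconf_loaded()
# degrades to False when the metadata table is missing or unreadable.
#
#     err = db.set_autoconf_load(True)
#     if err:
#         logger.error(f"Can't set autoconf loaded metadata: {err}")
#     ready = db.is_autoconf_loaded()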
def is_first_config_saved(self) -> bool:
"""Check if the first configuration has been saved"""
with self.__db_session() as session:
@@ -274,7 +309,9 @@
Plugin_pages(
plugin_id=plugin["id"],
template_file=template,
template_checksum=sha256(template).hexdigest(),
actions_file=actions,
actions_checksum=sha256(actions).hexdigest(),
)
)
@@ -291,14 +328,10 @@
to_put = []
with self.__db_session() as session:
# Delete all the old config
session.execute(
Global_values.__table__.delete().where(Global_values.method == method)
)
session.execute(
Services_settings.__table__.delete().where(
Services_settings.method == method
)
)
session.query(Global_values).filter(Global_values.method == method).delete()
session.query(Services_settings).filter(
Services_settings.method == method
).delete()
if config:
if config["MULTISITE"] == "yes":
@@ -314,7 +347,7 @@
to_put.append(Services(id=server_name))
for key, value in deepcopy(config).items():
suffix = None
suffix = 0
if search(r"_\d+$", key):
suffix = int(key.split("_")[-1])
key = key[: -len(str(suffix)) - 1]
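As a worked example of the suffix parsing above (the setting name is hypothetical), a numbered multisite key is split into its base setting and an integer suffix, while unsuffixed keys keep suffix 0:

    from re import search

    key = "REVERSE_PROXY_URL_2"  # hypothetical numbered setting
    suffix = 0
    if search(r"_\d+$", key):
        suffix = int(key.split("_")[-1])    # 2
        key = key[: -len(str(suffix)) - 1]  # "REVERSE_PROXY_URL"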
@@ -326,10 +359,14 @@
.first()
)
if not setting:
continue
if server_name and key.startswith(server_name):
key = key.replace(f"{server_name}_", "")
service_setting = (
session.query(Services_settings)
.with_entities(Services_settings.value)
.filter_by(
service_id=server_name,
setting_id=key,
@ -338,12 +375,12 @@ class Database:
.first()
)
if not setting or (
value == setting.default and service_setting is None
):
continue
if service_setting is None:
if value == setting.default or (
key in config and value == config[key]
):
continue
to_put.append(
Services_settings(
service_id=server_name,
@@ -354,23 +391,41 @@
)
)
elif method == "autoconf":
service_setting.value = value
service_setting.method = method
to_put.append(service_setting)
if value == setting.default or (
key in config and value == config[key]
):
session.query(Services_settings).filter(
Services_settings.service_id == server_name,
Services_settings.setting_id == key,
Services_settings.suffix == suffix,
).delete()
elif service_setting.value != value:
session.query(Services_settings).filter(
Services_settings.service_id == server_name,
Services_settings.setting_id == key,
Services_settings.suffix == suffix,
).update(
{
Services_settings.value: value,
Services_settings.method: method,
}
)
elif key not in global_values:
global_values.append(key)
global_value = (
session.query(Global_values)
.filter_by(setting_id=key, suffix=suffix)
.with_entities(Global_values.value)
.filter_by(
setting_id=key,
suffix=suffix,
)
.first()
)
if not setting or (
value == setting.default and global_value is None
):
continue
if global_value is None:
if value == setting.default:
continue
to_put.append(
Global_values(
setting_id=key,
@@ -380,15 +435,27 @@
)
)
elif method == "autoconf":
global_value.value = value
global_value.method = method
to_put.append(global_value)
if value == setting.default:
session.query(Global_values).filter(
Global_values.setting_id == key,
Global_values.suffix == suffix,
).delete()
elif global_value.value != value:
session.query(Global_values).filter(
Global_values.setting_id == key,
Global_values.suffix == suffix,
).update(
{
Global_values.value: value,
Global_values.method: method,
}
)
else:
primary_server_name = config["SERVER_NAME"].split(" ")[0]
to_put.append(Services(id=primary_server_name))
for key, value in config.items():
suffix = None
suffix = 0
if search(r"_\d+$", key):
suffix = int(key.split("_")[-1])
key = key[: -len(str(suffix)) - 1]
@@ -403,15 +470,28 @@
if setting and value == setting.default:
continue
to_put.append(
Global_values(
setting_id=key,
value=value,
suffix=suffix,
method=method,
)
global_value = (
session.query(Global_values)
.with_entities(Global_values.method)
.filter_by(setting_id=key, suffix=suffix)
.first()
)
if global_value is None:
to_put.append(
Global_values(
setting_id=key,
value=value,
suffix=suffix,
method=method,
)
)
elif global_value.method == method:
session.query(Global_values).filter(
Global_values.setting_id == key,
Global_values.suffix == suffix,
).update({Global_values.value: value})
try:
metadata = session.query(Metadata).get(1)
if metadata is not None and not metadata.first_config_saved:
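The service and global branches above follow the same shape: a manual upsert where a value that collapses back to the setting's default deletes the stored override instead of updating it. A hedged generic sketch of that shape (Model and the surrounding names are illustrative, not the project's API):

    row = session.query(Model).filter_by(setting_id=key, suffix=suffix).first()
    if row is None:
        if value != default:
            session.add(Model(setting_id=key, suffix=suffix, value=value, method=method))
    elif method == "autoconf":
        if value == default:
            # override no longer needed: drop the row rather than update it
            session.query(Model).filter_by(setting_id=key, suffix=suffix).delete()
        elif row.value != value:
            session.query(Model).filter_by(setting_id=key, suffix=suffix).update(
                {"value": value, "method": method}
            )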
@@ -433,59 +513,83 @@
"""Save the custom configs in the database"""
with self.__db_session() as session:
# Delete all the old config
session.execute(
Custom_configs.__table__.delete().where(Custom_configs.method == method)
)
session.query(Custom_configs).filter(
Custom_configs.method == method
).delete()
to_put = []
for custom_config in custom_configs:
config = {
"data": custom_config["value"].replace("\\\n", "\n").encode("utf-8")
if isinstance(custom_config["value"], str)
else custom_config["value"].replace(b"\\\n", b"\n"),
"method": method,
}
if custom_config["exploded"][0]:
if (
not session.query(Services)
.with_entities(Services.id)
.filter_by(id=custom_config["exploded"][0])
if custom_configs:
for custom_config in custom_configs:
config = {
"data": custom_config["value"]
.replace("\\\n", "\n")
.encode("utf-8")
if isinstance(custom_config["value"], str)
else custom_config["value"].replace(b"\\\n", b"\n"),
"method": method,
}
config["checksum"] = sha256(config["data"]).hexdigest()
if custom_config["exploded"][0]:
if (
not session.query(Services)
.with_entities(Services.id)
.filter_by(id=custom_config["exploded"][0])
.first()
):
return f"Service {custom_config['exploded'][0]} not found, please check your config"
config.update(
{
"service_id": custom_config["exploded"][0],
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"name": custom_config["exploded"][2],
}
)
else:
config.update(
{
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"name": custom_config["exploded"][2],
}
)
custom_conf = (
session.query(Custom_configs)
.with_entities(Custom_configs.checksum, Custom_configs.method)
.filter_by(
service_id=config.get("service_id", None),
type=config["type"],
name=config["name"],
)
.first()
)
if custom_conf is None:
to_put.append(Custom_configs(**config))
elif config["checksum"] != custom_conf.checksum and (
method == custom_conf.method or method == "autoconf"
):
return f"Service {custom_config['exploded'][0]} not found, please check your config"
session.query(Custom_configs).filter(
Custom_configs.service_id == config.get("service_id", None),
Custom_configs.type == config["type"],
Custom_configs.name == config["name"],
).update(
{
Custom_configs.data: config["data"],
Custom_configs.checksum: config["checksum"],
}
| (
{Custom_configs.method: "autoconf"}
if method == "autoconf"
else {}
)
)
config.update(
{
"service_id": custom_config["exploded"][0],
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"name": custom_config["exploded"][2],
}
)
else:
config.update(
{
"type": custom_config["exploded"][1]
.replace("-", "_")
.lower(),
"name": custom_config["exploded"][2],
}
)
if (
method == "autoconf"
or session.query(Custom_configs)
.with_entities(Custom_configs.id)
.filter_by(
service_id=config.get("service_id", None),
type=config["type"],
name=config["name"],
)
.first()
is None
):
to_put.append(Custom_configs(**config))
try:
session.add_all(to_put)
session.commit()
@@ -626,7 +730,7 @@
for service in session.query(Services).with_entities(Services.id).all():
tmp_config = deepcopy(config)
for key, value in tmp_config.items():
for key, value in deepcopy(tmp_config).items():
if key.startswith(f"{service.id}_"):
tmp_config[key.replace(f"{service.id}_", "")] = value
@@ -697,17 +801,290 @@
return ""
def update_plugins(self, plugins: List[Dict[str, Any]]) -> str:
"""Add a new plugin to the database"""
def update_external_plugins(self, plugins: List[Dict[str, Any]]) -> str:
"""Update external plugins from the database"""
to_put = []
with self.__db_session() as session:
# Delete all old plugins
session.execute(Plugins.__table__.delete().where(Plugins.id != "default"))
db_plugins = (
session.query(Plugins)
.with_entities(Plugins.id)
.filter_by(external=True)
.all()
)
db_ids = []
if db_plugins is not None:
db_ids = [plugin.id for plugin in db_plugins]
ids = [plugin["id"] for plugin in plugins]
missing_ids = [
plugin.id for plugin in db_plugins if plugin.id not in ids
]
# Remove plugins that are no longer in the list
session.query(Plugins).filter(Plugins.id.in_(missing_ids)).delete()
for plugin in plugins:
settings = plugin.pop("settings", {})
jobs = plugin.pop("jobs", [])
pages = plugin.pop("pages", [])
plugin["external"] = True
if plugin["id"] in db_ids:
db_plugin = session.query(Plugins).get(plugin["id"])
if db_plugin is not None:
if db_plugin.external is False:
self.__logger.warning(
f"Plugin {plugin['id']} is not external, skipping update (updating a non-external plugin is forbidden for security reasons)"
)
continue
updates = {}
if plugin["order"] != db_plugin.order:
updates[Plugins.order] = plugin["order"]
if plugin["name"] != db_plugin.name:
updates[Plugins.name] = plugin["name"]
if plugin["description"] != db_plugin.description:
updates[Plugins.description] = plugin["description"]
if plugin["version"] != db_plugin.version:
updates[Plugins.version] = plugin["version"]
if updates:
session.query(Plugins).filter(
Plugins.id == plugin["id"]
).update(updates)
db_settings = (
session.query(Settings)
.filter_by(plugin_id=plugin["id"])
.all()
)
setting_ids = [setting["id"] for setting in settings.values()]
missing_ids = [
setting.id
for setting in db_settings
if setting.id not in setting_ids
]
# Remove settings that are no longer in the list
session.query(Settings).filter(
Settings.id.in_(missing_ids)
).delete()
for setting, value in settings.items():
value.update(
{
"plugin_id": plugin["id"],
"name": value["id"],
"id": setting,
}
)
db_setting = session.query(Settings).get(setting)
if db_setting is None:
for select in value.pop("select", []):
to_put.append(
Selects(setting_id=value["id"], value=select)
)
to_put.append(
Settings(
**value,
)
)
else:
updates = {}
if value["name"] != db_setting.name:
updates[Settings.name] = value["name"]
if value["context"] != db_setting.context:
updates[Settings.context] = value["context"]
if value["default"] != db_setting.default:
updates[Settings.default] = value["default"]
if value["help"] != db_setting.help:
updates[Settings.help] = value["help"]
if value["label"] != db_setting.label:
updates[Settings.label] = value["label"]
if value["regex"] != db_setting.regex:
updates[Settings.regex] = value["regex"]
if value["type"] != db_setting.type:
updates[Settings.type] = value["type"]
if value["multiple"] != db_setting.multiple:
updates[Settings.multiple] = value["multiple"]
if updates:
session.query(Settings).filter(
Settings.id == setting
).update(updates)
db_selects = (
session.query(Selects)
.filter_by(setting_id=setting)
.all()
)
select_values = [
select["value"]
for select in value.get("select", [])
]
missing_values = [
select.value
for select in db_selects
if select.value not in select_values
]
# Remove selects that are no longer in the list
session.query(Selects).filter(
Selects.setting_id == setting,
Selects.value.in_(missing_values),
).delete()
for select in value.get("select", []):
db_select = session.query(Selects).get(
(setting, select)
)
if db_select is None:
to_put.append(
Selects(setting_id=setting, value=select)
)
db_jobs = (
session.query(Jobs).filter_by(plugin_id=plugin["id"]).all()
)
job_names = [job["name"] for job in jobs]
missing_names = [
job.name for job in db_jobs if job.name not in job_names
]
# Remove jobs that are no longer in the list
session.query(Jobs).filter(
Jobs.name.in_(missing_names)
).delete()
for job in jobs:
db_job = session.query(Jobs).get(job["name"])
if job["name"] not in db_ids or db_job is None:
to_put.append(
Jobs(
plugin_id=plugin["id"],
**job,
)
)
else:
updates = {}
if job["file"] != db_job.file:
updates[Jobs.file] = job["file"]
if job["every"] != db_job.every:
updates[Jobs.every] = job["every"]
if job["reload"] != db_job.reload:
updates[Jobs.reload] = job["reload"]
if updates:
updates[Jobs.last_update] = None
session.query(Job_cache).filter_by(
job_name=job["name"]
).delete()
session.query(Jobs).filter(
Jobs.name == job["name"]
).update(updates)
if exists(f"/opt/bunkerweb/core/{plugin['id']}/ui"):
if {"template.html", "actions.py"}.issubset(
listdir(f"/opt/bunkerweb/core/{plugin['id']}/ui")
):
db_plugin_page = (
session.query(Plugin_pages)
.filter_by(plugin_id=plugin["id"])
.first()
)
if db_plugin_page is None:
with open(
f"/opt/bunkerweb/core/{plugin['id']}/ui/template.html",
"r",
) as file:
template = file.read().encode("utf-8")
with open(
f"/opt/bunkerweb/core/{plugin['id']}/ui/actions.py",
"r",
) as file:
actions = file.read().encode("utf-8")
to_put.append(
Plugin_pages(
plugin_id=plugin["id"],
template_file=template,
template_checksum=sha256(
template
).hexdigest(),
actions_file=actions,
actions_checksum=sha256(
actions
).hexdigest(),
)
)
else:
updates = {}
template_checksum = file_hash(
f"/opt/bunkerweb/core/{plugin['id']}/ui/template.html"
)
actions_checksum = file_hash(
f"/opt/bunkerweb/core/{plugin['id']}/ui/actions.py"
)
if (
template_checksum
!= db_plugin_page.template_checksum
):
with open(
f"/opt/bunkerweb/core/{plugin['id']}/ui/template.html",
"r",
) as file:
updates.update(
{
Plugin_pages.template_file: file.read().encode(
"utf-8"
),
Plugin_pages.template_checksum: template_checksum,
}
)
if (
actions_checksum
!= db_plugin_page.actions_checksum
):
with open(
f"/opt/bunkerweb/core/{plugin['id']}/ui/actions.py",
"r",
) as file:
updates.update(
{
Plugin_pages.actions_file: file.read().encode(
"utf-8"
),
Plugin_pages.actions_checksum: actions_checksum,
}
)
if updates:
session.query(Plugin_pages).filter(
Plugin_pages.plugin_id == plugin["id"]
).update(updates)
continue
to_put.append(Plugins(**plugin))
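update_external_plugins compares stored checksums against file_hash, imported from the shared jobs utilities but not shown in this diff. A plausible stand-in, consistent with the sha256 hex digests stored for plugin pages above:

    from hashlib import sha256

    def file_hash(file: str) -> str:
        # hypothetical equivalent of the helper imported from jobs;
        # streams the file so large pages are not read into memory at once
        digest = sha256()
        with open(file, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                digest.update(chunk)
        return digest.hexdigest()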
@@ -737,7 +1114,9 @@
Plugin_pages(
plugin_id=plugin["id"],
template_file=page["template_file"],
template_checksum=sha256(page["template_file"]).hexdigest(),
actions_file=page["actions_file"],
actions_checksum=sha256(page["actions_file"]).hexdigest(),
)
)

View File

@@ -48,6 +48,7 @@ class Plugins(Base):
name = Column(String(128), nullable=False)
description = Column(String(255), nullable=False)
version = Column(String(32), nullable=False)
external = Column(Boolean, default=False, nullable=False)
settings = relationship(
"Settings", back_populates="plugin", cascade="all, delete, delete-orphan"
@@ -166,7 +167,9 @@ class Plugin_pages(Base):
nullable=False,
)
template_file = Column(LargeBinary(length=(2**32) - 1), nullable=False)
template_checksum = Column(String(128), nullable=False)
actions_file = Column(LargeBinary(length=(2**32) - 1), nullable=False)
actions_checksum = Column(String(128), nullable=False)
plugin = relationship("Plugins", back_populates="pages")
@@ -191,7 +194,7 @@ class Job_cache(Base):
)
data = Column(LargeBinary(length=(2**32) - 1), nullable=True)
last_update = Column(DateTime, nullable=True)
checksum = Column(String(255), nullable=True)
checksum = Column(String(128), nullable=True)
job = relationship("Jobs", back_populates="cache")
service = relationship("Services", back_populates="jobs_cache")
@@ -215,6 +218,7 @@ class Custom_configs(Base):
type = Column(CUSTOM_CONFIGS_TYPES, nullable=False)
name = Column(String(255), nullable=False)
data = Column(LargeBinary(length=(2**32) - 1), nullable=False)
checksum = Column(String(128), nullable=False)
method = Column(METHODS_ENUM, nullable=False)
service = relationship("Services", back_populates="custom_configs")
@@ -254,5 +258,6 @@ class Metadata(Base):
id = Column(Integer, primary_key=True, default=1)
is_initialized = Column(Boolean, nullable=False)
first_config_saved = Column(Boolean, nullable=False)
integration = Column(INTEGRATIONS_ENUM, nullable=False)
version = Column(String(5), nullable=False)
autoconf_loaded = Column(Boolean, default=False, nullable=True)
integration = Column(INTEGRATIONS_ENUM, default="Unknown", nullable=False)
version = Column(String(5), default="1.5.0", nullable=False)
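Metadata acts as a single-row flag store: every reader in this commit fetches it with session.query(Metadata).get(1). A hedged sketch of the row once initialize_db has run (values are illustrative):

    metadata = Metadata(
        id=1,                   # the only row the code ever queries
        is_initialized=True,
        first_config_saved=False,
        autoconf_loaded=False,  # new column, flipped by set_autoconf_load()
        integration="Docker",   # defaults to "Unknown"
        version="1.5.0",
    )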

View File

@@ -1,5 +1,5 @@
mkdocs==1.2.3
mkdocs-material==8.2.5
mkdocs==1.4.2
mkdocs-material==8.5.7
pytablewriter==0.64.2
mike==1.1.2
jinja2<3.1.0

View File

@@ -32,8 +32,6 @@ elif [ "$KUBERNETES_MODE" == "yes" ] ; then
echo "Kubernetes" > /opt/bunkerweb/INTEGRATION
elif [ "$AUTOCONF_MODE" == "yes" ] ; then
echo "Autoconf" > /opt/bunkerweb/INTEGRATION
else
VARIABLES_PATH="/etc/nginx/variables.env"
fi
# Init database
@ -45,18 +43,14 @@ if [ "$?" -ne 0 ] ; then
fi
generate=yes
if [ -v VARIABLES_PATH ] && [ -f "/etc/nginx/variables.env" ] && grep -q "^TEMP_NGINX=no$" /etc/nginx/variables.env ; then
if [ -f "/etc/nginx/variables.env" ] && grep -q "^TEMP_NGINX=no$" /etc/nginx/variables.env ; then
log "ENTRYPOINT" "⚠️ " "Looks like BunkerWeb configuration is already generated, will not generate it again"
generate=no
fi
# execute jobs
log "ENTRYPOINT" " " "Executing scheduler ..."
if [ -v VARIABLES_PATH ] ; then
/opt/bunkerweb/scheduler/main.py --variables $VARIABLES_PATH --generate $generate
else
/opt/bunkerweb/scheduler/main.py --generate $generate
fi
/opt/bunkerweb/scheduler/main.py --generate $generate
log "ENTRYPOINT" " " "Scheduler stopped"
exit 0

View File

@@ -3,9 +3,20 @@
from argparse import ArgumentParser
from copy import deepcopy
from glob import glob
from os import _exit, getenv, getpid, makedirs, path, remove, unlink
from os.path import dirname, isdir, isfile, islink
from shutil import rmtree
from os import (
_exit,
chmod,
getenv,
getpid,
listdir,
makedirs,
path,
remove,
unlink,
walk,
)
from os.path import dirname, exists, isdir, isfile, islink, join
from shutil import chown, rmtree
from signal import SIGINT, SIGTERM, SIGUSR1, SIGUSR2, signal
from subprocess import PIPE, run as subprocess_run, DEVNULL, STDOUT
from sys import path as sys_path
@@ -106,6 +117,7 @@ if __name__ == "__main__":
help="Precise if the configuration needs to be generated directly or not",
)
args = parser.parse_args()
generate = args.generate == "yes"
logger.info("Scheduler started ...")
@@ -116,6 +128,8 @@
else "Cluster"
)
api_caller = ApiCaller()
if args.variables:
logger.info(f"Variables : {args.variables}")
@@ -127,9 +141,37 @@
"Kubernetes" if getenv("KUBERNETES_MODE", "no") == "yes" else "Cluster"
)
api_caller = ApiCaller()
integration = "Docker"
if exists("/opt/bunkerweb/INTEGRATION"):
with open("/opt/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
api_caller.auto_setup(bw_integration=bw_integration)
if integration == "Docker" and generate is True:
# run the generator
cmd = f"python /opt/bunkerweb/gen/main.py --settings /opt/bunkerweb/settings.json --templates /opt/bunkerweb/confs --output /etc/nginx{f' --variables {args.variables}' if args.variables else ''} --method scheduler"
proc = subprocess_run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:
logger.error(
"Config generator failed, configuration will not work as expected...",
)
# Fix permissions for the nginx folder
for root, dirs, files in walk("/etc/nginx", topdown=False):
for name in files + dirs:
chown(join(root, name), "scheduler", "scheduler")
chmod(join(root, name), 0o770)
if len(api_caller._get_apis()) > 0:
# send nginx configs
logger.info("Sending /etc/nginx folder ...")
ret = api_caller._send_files("/etc/nginx", "/confs")
if not ret:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
@@ -140,16 +182,65 @@
logger.warning(
"Database is not initialized, retrying in 5s ...",
)
sleep(3)
sleep(5)
if bw_integration == "Kubernetes" or integration in (
"Swarm",
"Kubernetes",
"Autoconf",
):
ret = db.set_autoconf_load(False)
if ret:
success = False
logger.error(
f"Can't set autoconf loaded metadata to false in database: {ret}",
)
while not db.is_autoconf_loaded():
logger.warning(
"Autoconf is not loaded yet in the database, retrying in 5s ...",
)
sleep(5)
env = db.get_config()
while not db.is_first_config_saved() or not env:
logger.warning(
"Database doesn't have any config saved yet, retrying in 5s ...",
)
sleep(3)
sleep(5)
env = db.get_config()
# Checking if any custom config has been created by the user
custom_configs = []
root_dirs = listdir("/opt/bunkerweb/configs")
for (root, dirs, files) in walk("/opt/bunkerweb/configs", topdown=True):
if (
root != "configs"
and (dirs and not root.split("/")[-1] in root_dirs)
or files
):
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r") as f:
custom_configs.append(
{
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else "",
path_exploded[-1],
file.replace(".conf", ""),
),
}
)
ret = db.save_custom_configs(custom_configs, "manual")
if ret:
logger.error(
f"Couldn't save manually created custom configs to database: {ret}",
)
custom_configs = db.get_custom_configs()
original_path = "/data/configs"
@@ -163,6 +254,16 @@
with open(tmp_path, "wb") as f:
f.write(custom_config["data"])
# Fix permissions for the custom configs folder
for root, dirs, files in walk("/data/configs", topdown=False):
for name in files + dirs:
chown(join(root, name), "scheduler", "scheduler")
if isdir(join(root, name)):
chmod(join(root, name), 0o750)
if isfile(join(root, name)):
chmod(join(root, name), 0o740)
if bw_integration != "Local":
logger.info("Sending custom configs to BunkerWeb")
ret = api_caller._send_files("/data/configs", "/custom_configs")
@@ -188,23 +289,40 @@
else:
logger.info("All jobs in run_once() were successful")
# run the generator
cmd = f"python /opt/bunkerweb/gen/main.py --settings /opt/bunkerweb/settings.json --templates /opt/bunkerweb/confs --output /etc/nginx{f' --variables {args.variables}' if args.variables else ''} --method scheduler"
proc = subprocess_run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:
logger.error(
"Config generator failed, configuration will not work as expected...",
)
if len(api_caller._get_apis()) > 0:
# send nginx configs
logger.info("Sending /etc/nginx folder ...")
ret = api_caller._send_files("/etc/nginx", "/confs")
if not ret:
if generate:
# run the generator
cmd = f"python /opt/bunkerweb/gen/main.py --settings /opt/bunkerweb/settings.json --templates /opt/bunkerweb/confs --output /etc/nginx{f' --variables {args.variables}' if args.variables else ''} --method scheduler"
proc = subprocess_run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
"Config generator failed, configuration will not work as expected...",
)
# Fix permissions for the nginx folder
for root, dirs, files in walk("/etc/nginx", topdown=False):
for name in files + dirs:
chown(join(root, name), "scheduler", "scheduler")
chmod(join(root, name), 0o770)
if len(api_caller._get_apis()) > 0:
# send nginx configs
logger.info("Sending /etc/nginx folder ...")
ret = api_caller._send_files("/etc/nginx", "/confs")
if not ret:
logger.error(
"Sending nginx configs failed, configuration will not work as expected...",
)
# Fix permissions for the cache folder
for root, dirs, files in walk("/data/cache", topdown=False):
for name in files + dirs:
chown(join(root, name), "scheduler", "scheduler")
if isdir(join(root, name)):
chmod(join(root, name), 0o750)
if isfile(join(root, name)):
chmod(join(root, name), 0o740)
try:
if len(api_caller._get_apis()) > 0:
# send cache
@@ -241,6 +359,7 @@
)
# infinite schedule for the jobs
generate = True
scheduler.setup()
logger.info("Executing job scheduler ...")
while run:
@@ -280,6 +399,16 @@
with open(tmp_path, "wb") as f:
f.write(custom_config["data"])
# Fix permissions for the custom configs folder
for root, dirs, files in walk("/data/configs", topdown=False):
for name in files + dirs:
chown(join(root, name), "scheduler", "scheduler")
if isdir(join(root, name)):
chmod(join(root, name), 0o750)
if isfile(join(root, name)):
chmod(join(root, name), 0o740)
if bw_integration != "Local":
logger.info("Sending custom configs to BunkerWeb")
ret = api_caller._send_files("/data/configs", "/custom_configs")
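The chown/chmod walk now appears several times in this file (0o770 for the nginx tree, 0o750/0o740 for configs and cache). A hedged refactor sketch capturing the repeated pattern; the function name is illustrative:

    from os import chmod, walk
    from os.path import isdir, join
    from shutil import chown

    def fix_permissions(folder: str, dir_mode: int = 0o750, file_mode: int = 0o740) -> None:
        """Recursively hand a tree to the scheduler user with the given modes."""
        for root, dirs, files in walk(folder, topdown=False):
            for name in files + dirs:
                target = join(root, name)
                chown(target, "scheduler", "scheduler")
                chmod(target, dir_mode if isdir(target) else file_mode)

    # e.g. fix_permissions("/data/configs") or fix_permissions("/etc/nginx", 0o770, 0o770)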

View File

@@ -142,7 +142,7 @@ try:
ABSOLUTE_URI=vars["ABSOLUTE_URI"],
INSTANCES=Instances(docker_client, bw_integration),
CONFIG=Config(logger, db),
CONFIGFILES=ConfigFiles(db),
CONFIGFILES=ConfigFiles(logger, db),
SESSION_COOKIE_DOMAIN=vars["ABSOLUTE_URI"]
.replace("http://", "")
.replace("https://", "")

View File

@@ -8,17 +8,18 @@ from utils import path_to_dict
class ConfigFiles:
def __init__(self, db):
def __init__(self, logger, db):
self.__name_regex = re_compile(r"^[a-zA-Z0-9_-]{1,64}$")
self.__root_dirs = [
child["name"]
for child in path_to_dict("/opt/bunkerweb/configs")["children"]
]
self.__file_creation_blacklist = ["http", "stream"]
self.__logger = logger
self.__db = db
def save_configs(self) -> str:
custom_configs = {}
custom_configs = []
root_dirs = listdir("/opt/bunkerweb/configs")
for (root, dirs, files) in walk("/opt/bunkerweb/configs", topdown=True):
if (
@@ -29,17 +30,22 @@
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r") as f:
custom_configs[
(
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else ""
)
+ f"CUSTOM_CONF_{path_exploded[-1].replace('-', '_').upper()}_{file.replace('.conf', '')}"
] = f.read()
custom_configs.append(
{
"value": f.read(),
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else "",
path_exploded[-1],
file.replace(".conf", ""),
),
}
)
ret = self.__db.save_custom_configs(custom_configs, "ui")
if ret:
self.__logger.error(f"Could not save custom configs: {ret}")
return "Couldn't save custom configs to database"
return ""

View File

@@ -1,8 +1,8 @@
from os import getenv
from os.path import exists
from typing import Any, Union
from subprocess import run
from docker.errors import APIError
from kubernetes import client as kube_client
from os.path import exists
from subprocess import run
from typing import Any, Union
from API import API
from ApiCaller import ApiCaller
@@ -46,9 +46,6 @@ class Instance:
def get_id(self) -> str:
return self._id
# def run_jobs(self) -> bool:
# return self.apiCaller._send_to_apis("POST", "/jobs")
def reload(self) -> bool:
return self.apiCaller._send_to_apis("POST", "/reload")
@@ -61,9 +58,6 @@
def restart(self) -> bool:
return self.apiCaller._send_to_apis("POST", "/restart")
def send_custom_configs(self) -> bool:
return self.apiCaller._send_files("/opt/bunkerweb/configs", "/custom_configs")
class Instances:
def __init__(self, docker_client, bw_integration: str):
@@ -115,7 +109,7 @@ class Instances:
is_swarm = True
try:
self.__docker.swarm.version
except:
except APIError:
is_swarm = False
if is_swarm: