Merge pull request #324 from TheophileDiot/1.5

Fix gen for Docker integration
This commit is contained in:
Théophile Diot 2022-10-20 15:17:26 +02:00 committed by GitHub
commit cfaeb10133
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 240 additions and 132 deletions

View File

@@ -216,20 +216,20 @@ class Config(ApiCaller, ConfigCaller):
# run(cmd.split(" "), stdin=DEVNULL, stdout=DEVNULL, stderr=STDOUT)
# send nginx configs
# send data folder
# reload nginx
ret = self._send_files("/etc/nginx", "/confs")
if not ret:
success = False
self.__logger.error(
"sending nginx configs failed, configuration will not work as expected...",
)
# send data/configs folder
ret = self._send_files("/data/configs", "/custom_configs")
if not ret:
success = False
self.__logger.error(
"sending custom configs failed, configuration will not work as expected...",
)
# reload nginx
ret = self._send_to_apis("POST", "/reload")
if not ret:
success = False

View File

@@ -13,6 +13,9 @@ sys_path.append("/opt/bunkerweb/deps/python")
sys_path.append("/opt/bunkerweb/utils")
sys_path.append("/opt/bunkerweb/api")
from docker import DockerClient
from docker.errors import DockerException
from logger import setup_logger
from API import API
@@ -85,23 +88,50 @@ try:
# Docker or Linux case
elif bw_integration == "Docker":
api = API(f"{getenv('BW_API_URL')}:5000")
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/reload : {err}"
try:
docker_client = DockerClient(base_url="tcp://docker-proxy:2375")
except DockerException:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
else:
if status != 200:
apis = []
for instance in docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
api = None
for var in instance.attrs["Config"]["Env"]:
if var.startswith("API_HTTP_PORT="):
api = API(
f"http://{instance.name}:{var.replace('API_HTTP_PORT=', '', 1)}"
)
break
if api:
apis.append(api)
else:
apis.append(
API(f"http://{instance.name}:{getenv('API_HTTP_PORT', '5000')}")
)
for api in apis:
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/reload : status = {resp['status']}, msg = {resp['msg']}"
f"Can't send API request to {api.get_endpoint()}/reload : {err}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/reload"
)
if status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/reload : status = {resp['status']}, msg = {resp['msg']}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/reload"
)
elif bw_integration == "Linux":
cmd = "/usr/sbin/nginx -s reload"
proc = run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)

View File

@@ -4,20 +4,31 @@ from logging import Logger
from re import search as re_search
from sys import path as sys_path
from traceback import format_exc
from typing import Union
sys_path.append("/opt/bunkerweb/utils")
class Configurator:
def __init__(
self, settings: str, core: str, plugins: str, variables: str, logger: Logger
self,
settings: str,
core: str,
plugins: str,
variables: Union[str, dict],
logger: Logger,
):
self.__logger = logger
self.__settings = self.__load_settings(settings)
self.__core = core
self.__plugins_settings = []
self.__plugins = self.__load_plugins(plugins, "plugins")
self.__variables = self.__load_variables(variables)
if isinstance(variables, str):
self.__variables = self.__load_variables(variables)
else:
self.__variables = variables
self.__multisite = (
"MULTISITE" in self.__variables and self.__variables["MULTISITE"] == "yes"
)
@@ -89,23 +100,26 @@ class Configurator:
variables[var] = value
return variables
def get_config(self, default: bool = False):
def get_config(self):
config = {}
# Extract default settings
default_settings = [self.__settings, self.__core, self.__plugins]
if default:
return default_settings
for settings in default_settings:
for setting, data in settings.items():
config[setting] = data["default"]
# Override with variables
for variable, value in self.__variables.items():
ret, err = self.__check_var(variable)
if ret:
config[variable] = value
elif (
not variable.startswith("PYTHON")
and variable != "GPG_KEY"
and variable != "LANG"
elif not variable.startswith("PYTHON") and variable not in (
"GPG_KEY",
"LANG",
"PATH",
"NGINX_VERSION",
"NJS_VERSION",
"PKG_RELEASE",
):
self.__logger.warning(f"Ignoring variable {variable} : {err}")
# Expand variables to each sites if MULTISITE=yes and if not present

View File

@@ -103,63 +103,67 @@ if __name__ == "__main__":
logger.info(f"Method : {args.method}")
logger.info(f"Init : {args.init}")
custom_confs_rx = re_compile(
r"^([0-9a-z\.\-]*)_?CUSTOM_CONF_(HTTP|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC|MODSEC_CRS)_(.+)$"
)
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings] + ([args.variables] if args.variables else [])
paths_rx = [args.core, args.plugins, args.templates]
paths_rwx = [args.output]
for file in files:
if not path.exists(file):
logger.error(f"Missing file : {file}")
sys_exit(1)
if not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for _path in paths_rx + paths_rwx:
if not path.isdir(_path):
logger.error(f"Missing directory : {_path}")
sys_exit(1)
if not access(_path, R_OK | X_OK):
logger.error(
f"Missing RX rights on directory : {_path}",
)
sys_exit(1)
for _path in paths_rwx:
if not access(_path, W_OK):
logger.error(
f"Missing W rights on directory : {_path}",
)
sys_exit(1)
# Check core plugins orders
logger.info("Checking core plugins orders ...")
core_plugins = {}
files = glob(f"{args.core}/*/plugin.json")
for file in files:
try:
with open(file) as f:
core_plugin = loads(f.read())
if core_plugin["order"] not in core_plugins:
core_plugins[core_plugin["order"]] = []
core_plugins[core_plugin["order"]].append(core_plugin)
except:
logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
)
core_settings = {}
for order in core_plugins:
if len(core_plugins[order]) > 1 and order != 999:
logger.warning(
f"Multiple plugins have the same order ({order}) : {', '.join(plugin['id'] for plugin in core_plugins[order])}. Therefore, the execution order will be random.",
)
for plugin in core_plugins[order]:
core_settings.update(plugin["settings"])
if args.variables or args.init:
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings, args.variables]
paths_rx = [args.core, args.plugins, args.templates]
paths_rwx = [args.output]
for file in files:
if not path.exists(file):
logger.error(f"Missing file : {file}")
sys_exit(1)
if not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for _path in paths_rx + paths_rwx:
if not path.isdir(_path):
logger.error(f"Missing directory : {_path}")
sys_exit(1)
if not access(_path, R_OK | X_OK):
logger.error(
f"Missing RX rights on directory : {_path}",
)
sys_exit(1)
for _path in paths_rwx:
if not access(_path, W_OK):
logger.error(
f"Missing W rights on directory : {_path}",
)
sys_exit(1)
# Check core plugins orders
logger.info("Checking core plugins orders ...")
core_plugins = {}
files = glob(f"{args.core}/*/plugin.json")
for file in files:
try:
with open(file) as f:
core_plugin = loads(f.read())
if core_plugin["order"] not in core_plugins:
core_plugins[core_plugin["order"]] = []
core_plugins[core_plugin["order"]].append(core_plugin)
except:
logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
)
core_settings = {}
for order in core_plugins:
if len(core_plugins[order]) > 1 and order != 999:
logger.warning(
f"Multiple plugins have the same order ({order}) : {', '.join(plugin['id'] for plugin in core_plugins[order])}. Therefore, the execution order will be random.",
)
for plugin in core_plugins[order]:
core_settings.update(plugin["settings"])
# Compute the config
logger.info("Computing config ...")
config = Configurator(
@@ -167,10 +171,8 @@ if __name__ == "__main__":
)
config_files = config.get_config()
if config_files.get("LOG_LEVEL", "INFO") != logger.level:
logger = setup_logger(
"Generator", config_files.get("LOG_LEVEL", "INFO")
)
if config_files.get("LOG_LEVEL", logger.level) != logger.level:
logger = setup_logger("Generator", config_files["LOG_LEVEL"])
bw_integration = None
if config_files.get("SWARM_MODE", "no") == "yes":
@@ -263,9 +265,6 @@ if __name__ == "__main__":
"Database not initialized, initializing ...",
)
custom_confs_rx = re_compile(
r"^([0-9a-z\.\-]*)_?CUSTOM_CONF_(HTTP|DEFAULT_SERVER_HTTP|SERVER_HTTP|MODSEC|MODSEC_CRS)_(.+)$"
)
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()}
for k, v in environ.items()
@@ -297,7 +296,7 @@ if __name__ == "__main__":
with open("/opt/bunkerweb/INTEGRATION", "r") as f:
bw_integration = f.read().strip()
if bw_integration in ("Docker", "Linux"):
if bw_integration == "Linux":
err = db.save_config(config_files, args.method)
if not err:
@@ -324,6 +323,88 @@ if __name__ == "__main__":
sys_exit(0)
config = config_files
elif args.method != "autoconf":
bw_integration = "Docker"
try:
docker_client = DockerClient(base_url="tcp://docker-proxy:2375")
except DockerException:
docker_client = DockerClient(
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
tmp_config = {}
custom_confs = []
apis = []
db = None
for instance in docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
api = None
for var in instance.attrs["Config"]["Env"]:
if custom_confs_rx.match(var.split("=", 1)[0]):
splitted = var.split("=", 1)
custom_confs.append(
{
"value": var.pop(0),
"exploded": custom_confs_rx.search(
"=".join(var)
).groups(),
}
)
else:
tmp_config[var.split("=", 1)[0]] = var.split("=", 1)[1]
if var.startswith("DATABASE_URI="):
db = Database(logger, var.replace("DATABASE_URI=", "", 1))
elif var.startswith("API_HTTP_PORT="):
api = API(
f"http://{instance.name}:{var.replace('API_HTTP_PORT=', '', 1)}"
)
if api:
apis.append(api)
else:
apis.append(
API(f"http://{instance.name}:{getenv('API_HTTP_PORT', '5000')}")
)
if db is None:
db = Database(logger)
api_caller = ApiCaller(apis=apis)
# Compute the config
logger.info("Computing config ...")
config = Configurator(
args.settings, core_settings, args.plugins, tmp_config, logger
)
config_files = config.get_config()
if config_files.get("LOG_LEVEL", logger.level) != logger.level:
logger = setup_logger("Generator", config_files["LOG_LEVEL"])
err = db.save_config(config_files, args.method)
if not err:
err1 = db.save_custom_configs(custom_confs, args.method)
else:
err = None
err1 = None
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
if err or err1:
logger.error(
f"Can't save config to database : {err or err1}",
)
sys_exit(1)
else:
logger.info("Config successfully saved to database")
config = config_files
else:
db = None
@@ -347,33 +428,16 @@ if __name__ == "__main__":
base_url=getenv("DOCKER_HOST", "unix:///var/run/docker.sock")
)
apis = []
for instance in docker_client.containers.list(
filters={"label": "bunkerweb.INSTANCE"}
):
api = None
for var in instance.attrs["Config"]["Env"]:
if var.startswith("DATABASE_URI="):
db = Database(logger, var.replace("DATABASE_URI=", "", 1))
break
if db is None:
for var in instance.attrs["Config"]["Env"]:
if var.startswith("DATABASE_URI="):
db = Database(
logger, var.replace("DATABASE_URI=", "", 1)
)
elif var.startswith("API_HTTP_PORT="):
api = API(
f"http://{instance.name}:{var.replace('API_HTTP_PORT=', '', 1)}"
)
if api:
apis.append(api)
else:
apis.append(
API(
f"http://{instance.name}:{getenv('API_HTTP_PORT', '5000')}"
)
)
api_caller = ApiCaller(apis=apis)
if db:
break
if db is None:
db = Database(logger)

View File

@@ -5,7 +5,7 @@
log "$1" "" "Setup and check /data folder ..."
# Create folders if missing and check permissions
rwx_folders=("cache" "letsencrypt")
rwx_folders=("cache" "cache/letsencrypt")
rx_folders=("configs" "configs/http" "configs/stream" "configs/server-http" "configs/server-stream" "configs/default-server-http" "configs/default-server-stream" "configs/modsec" "configs/modsec-crs" "plugins" "www")
for folder in "${rwx_folders[@]}" ; do
if [ ! -d "/data/${folder}" ] ; then

View File

@@ -18,7 +18,6 @@ API_WHITELIST_IP=127.0.0.0/8 10.20.30.0/24
AUTOCONF_MODE=no
SWARM_MODE=no
KUBERNETES_MODE=no
BW_API_URL=http://mybunker
USE_BROTLI=no
BROTLI_TYPES=application/atom+xml application/javascript application/json application/rss+xml application/vnd.ms-fontobject application/x-font-opentype application/x-font-truetype application/x-font-ttf application/x-javascript application/xhtml+xml application/xml font/eot font/opentype font/otf font/truetype image/svg+xml image/vnd.microsoft.icon image/x-icon image/x-win-bitmap text/css text/javascript text/plain text/xml
BROTLI_MIN_LENGTH=1000

View File

@@ -51,6 +51,6 @@ VOLUME /data /etc/nginx
WORKDIR /opt/bunkerweb/scheduler
USER scheduler:scheduler
USER root:scheduler
ENTRYPOINT ["/opt/bunkerweb/scheduler/entrypoint.sh"]

View File

@@ -8,9 +8,9 @@ fi
# trap SIGTERM and SIGINT
function trap_exit() {
log "ENTRYPOINT" "" "Caught stop operation"
log "ENTRYPOINT" " " "Caught stop operation"
if [ -f "/opt/bunkerweb/tmp/scheduler.pid" ] ; then
log "ENTRYPOINT" "" "Stopping job scheduler ..."
log "ENTRYPOINT" " " "Stopping job scheduler ..."
kill -s TERM "$(cat /opt/bunkerweb/tmp/scheduler.pid)"
fi
}
@@ -18,10 +18,10 @@ trap "trap_exit" TERM INT QUIT
# trap SIGHUP
function trap_reload() {
log "ENTRYPOINT" "" "Caught reload operation"
log "ENTRYPOINT" " " "Caught reload operation"
/opt/bunkerweb/helpers/scheduler-restart.sh
if [ $? -ne 0 ] ; then
log "ENTRYPOINT" "" "Error while restarting scheduler"
log "ENTRYPOINT" " " "Error while restarting scheduler"
fi
}
trap "trap_reload" HUP
@@ -44,10 +44,21 @@ if [ "$?" -ne 0 ] ; then
exit 1
fi
if [ -v VARIABLES_PATH ] && [ -f "/etc/nginx/variables.env" ] && grep -q "^TEMP_NGINX=no$" /etc/nginx/variables.env ; then
log "ENTRYPOINT" "⚠️ " "Looks like BunkerWeb configuration is already generated, will not generate it again"
elif [ "$SWARM_MODE" != "yes" ] && [ "$KUBERNETES_MODE" != "yes" ] && [ "$AUTOCONF_MODE" != "yes" ] ; then
# Generate configuration and send config to bunkerweb
/opt/bunkerweb/gen/main.py --method scheduler
if [ "$?" -ne 0 ] ; then
log "ENTRYPOINT" "❌" "Scheduler generator failed"
exit 1
fi
fi
# execute jobs
log "ENTRYPOINT" "" "Executing jobs ..."
log "ENTRYPOINT" " " "Executing jobs ..."
if [ -v VARIABLES_PATH ] ; then
/opt/bunkerweb/scheduler/main.py --variables /tmp/variables.env --run
/opt/bunkerweb/scheduler/main.py --variables $VARIABLES_PATH --run
else
/opt/bunkerweb/scheduler/main.py --run
fi
@@ -56,23 +67,13 @@ if [ "$?" -ne 0 ] ; then
exit 1
fi
if [ -v VARIABLES_PATH ] && [ -f "/etc/nginx/variables.env" ] && grep -q "^TEMP_NGINX=no$" /etc/nginx/variables.env ; then
log "ENTRYPOINT" "⚠️" "Looks like BunkerWeb configuration is already generated, will not generate it again"
elif [ "$SWARM_MODE" != "yes" ] && [ "$KUBERNETES_MODE" != "yes" ] && [ "$AUTOCONF_MODE" != "yes" ] ; then
# Generate configuration and send config to bunkerweb
/opt/bunkerweb/gen/main.py --variables /tmp/variables.env --method scheduler
if [ "$?" -ne 0 ] ; then
log "ENTRYPOINT" "❌" "Scheduler generator failed"
exit 1
fi
fi
log "ENTRYPOINT" "" "Executing job scheduler ..."
log "ENTRYPOINT" " " "Executing job scheduler ..."
if [ -v VARIABLES_PATH ] ; then
/opt/bunkerweb/scheduler/main.py --variables $VARIABLES_PATH
else
/opt/bunkerweb/scheduler/main.py
fi
log "ENTRYPOINT" "" "Scheduler stopped"
log "ENTRYPOINT" " " "Scheduler stopped"
exit 0