Merge pull request #343 from TheophileDiot/1.5

1.5
Théophile Diot 2022-11-09 15:54:38 +01:00 committed by GitHub
commit 06aa73fcfe
13 changed files with 236 additions and 248 deletions

View File

@@ -29,16 +29,16 @@ COPY --from=builder /opt/bunkerweb /opt/bunkerweb
# Copy files
# can't exclude deps from . so we are copying everything by hand
COPY bw/api /opt/bunkerweb/api
COPY bw/confs /opt/bunkerweb/confs
COPY bw/core /opt/bunkerweb/core
COPY bw/cli /opt/bunkerweb/cli
COPY bw/gen /opt/bunkerweb/gen
COPY bw/helpers /opt/bunkerweb/helpers
COPY bw/loading /opt/bunkerweb/loading
COPY bw/lua /opt/bunkerweb/lua
COPY bw/misc /opt/bunkerweb/misc
COPY bw/gen /opt/bunkerweb/gen
COPY bw/settings.json /opt/bunkerweb/settings.json
COPY db /opt/bunkerweb/db
COPY bw/confs /opt/bunkerweb/confs
COPY bw/loading /opt/bunkerweb/loading
COPY utils /opt/bunkerweb/utils
COPY VERSION /opt/bunkerweb/VERSION
@@ -55,7 +55,7 @@ RUN apk add --no-cache bash python3 libgcc libstdc++ openssl git && \
find /opt/bunkerweb -type f -exec chmod 0740 {} \; && \
find /opt/bunkerweb -type d -exec chmod 0750 {} \; && \
chmod 770 /opt/bunkerweb/cache /opt/bunkerweb/tmp && \
chmod 750 /opt/bunkerweb/cli/main.py /opt/bunkerweb/helpers/*.sh /usr/local/bin/bwcli /opt/bunkerweb/deps/python/bin/* && \
chmod 750 /opt/bunkerweb/cli/main.py /opt/bunkerweb/gen/main.py /opt/bunkerweb/helpers/*.sh /usr/local/bin/bwcli /opt/bunkerweb/deps/python/bin/* && \
chown root:nginx /usr/local/bin/bwcli && \
chown -R nginx:nginx /etc/nginx && \
mkdir /var/log/letsencrypt /var/lib/letsencrypt && \

View File

@@ -1,4 +1,4 @@
FROM python:3.11-rc-alpine
FROM python:3.11-alpine
# Copy python requirements
COPY bw/deps/requirements.txt /opt/bunkerweb/deps/requirements.txt

View File

@@ -110,7 +110,7 @@ if __name__ == "__main__":
)
args = parser.parse_args()
logger.info("First gen started ...")
logger.info("Save config started ...")
logger.info(f"Settings : {args.settings}")
logger.info(f"Core : {args.core}")
logger.info(f"Plugins : {args.plugins}")
@@ -128,138 +128,76 @@ if __name__ == "__main__":
integration = f.read().strip()
logger.info(f"Detected {integration} integration")
config_files = None
db = None
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings] + ([args.variables] if args.variables else [])
paths_rx = [args.core, args.plugins]
for file in files:
if not path.exists(file):
logger.error(f"Missing file : {file}")
sys_exit(1)
if not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for _path in paths_rx:
if not path.isdir(_path):
logger.error(f"Missing directory : {_path}")
sys_exit(1)
if not access(_path, R_OK | X_OK):
logger.error(
f"Missing RX rights on directory : {_path}",
)
sys_exit(1)
# Check core plugins orders
logger.info("Checking core plugins orders ...")
core_plugins = {}
files = glob(f"{args.core}/*/plugin.json")
for file in files:
try:
with open(file) as f:
core_plugin = loads(f.read())
if core_plugin["order"] not in core_plugins:
core_plugins[core_plugin["order"]] = []
core_plugins[core_plugin["order"]].append(core_plugin)
except:
logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
)
core_settings = {}
for order in core_plugins:
if len(core_plugins[order]) > 1 and order != 999:
logger.warning(
f"Multiple plugins have the same order ({order}) : {', '.join(plugin['id'] for plugin in core_plugins[order])}. Therefore, the execution order will be random.",
)
for plugin in core_plugins[order]:
core_settings.update(plugin["settings"])
if args.variables:
logger.info(f"Variables : {args.variables}")
# Check existences and permissions
logger.info("Checking arguments ...")
files = [args.settings, args.variables]
paths_rx = [args.core, args.plugins]
for file in files:
if not path.exists(file):
logger.error(f"Missing file : {file}")
sys_exit(1)
if not access(file, R_OK):
logger.error(f"Can't read file : {file}")
sys_exit(1)
for _path in paths_rx:
if not path.isdir(_path):
logger.error(f"Missing directory : {_path}")
sys_exit(1)
if not access(_path, R_OK | X_OK):
logger.error(
f"Missing RX rights on directory : {_path}",
)
sys_exit(1)
# Check core plugins orders
logger.info("Checking core plugins orders ...")
core_plugins = {}
files = glob(f"{args.core}/*/plugin.json")
for file in files:
try:
with open(file) as f:
core_plugin = loads(f.read())
if core_plugin["order"] not in core_plugins:
core_plugins[core_plugin["order"]] = []
core_plugins[core_plugin["order"]].append(core_plugin)
except:
logger.error(
f"Exception while loading JSON from {file} : {format_exc()}",
)
core_settings = {}
for order in core_plugins:
if len(core_plugins[order]) > 1 and order != 999:
logger.warning(
f"Multiple plugins have the same order ({order}) : {', '.join(plugin['id'] for plugin in core_plugins[order])}. Therefore, the execution order will be random.",
)
for plugin in core_plugins[order]:
core_settings.update(plugin["settings"])
# Compute the config
logger.info("Computing config ...")
config = Configurator(
args.settings, core_settings, args.plugins, args.variables, logger
)
config_files = config.get_config()
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
is_initialized = db.is_initialized()
if not is_initialized:
ret, err = db.init_tables(
[
config.get_settings(),
list(chain.from_iterable(core_plugins.values())),
config.get_plugins_settings(),
]
)
# Initialize database tables
if err:
logger.error(
f"Exception while initializing database : {err}",
)
sys_exit(1)
elif ret is False:
logger.info(
"Database tables are already initialized, skipping creation ...",
)
else:
logger.info("Database tables initialized")
logger.info(
"Database not initialized, initializing ...",
)
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()}
for k, v in environ.items()
if custom_confs_rx.match(k)
]
with open("/opt/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
err = db.save_config(config_files, "scheduler")
if not err:
err1 = db.save_custom_configs(custom_confs, "scheduler")
if not err1:
err2 = db.initialize_db(
version=bw_version, integration=integration
)
if err or err1 or err2:
logger.error(
f"Can't initialize database : {err or err1 or err2}",
)
sys_exit(1)
else:
logger.info("Database initialized")
if args.init:
sys_exit(0)
elif is_initialized:
logger.info(
"Database is already initialized, skipping ...",
)
sys_exit(0)
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()}
for k, v in environ.items()
if custom_confs_rx.match(k)
]
elif integration == "Kubernetes":
corev1 = kube_client.CoreV1Api()
tmp_config = {}
apis = []
db = None
for pod in corev1.list_pod_for_all_namespaces(watch=False).items:
if (
@@ -343,7 +281,6 @@ if __name__ == "__main__":
tmp_config = {}
custom_confs = []
apis = []
db = None
for instance in (
docker_client.containers.list(filters={"label": "bunkerweb.INSTANCE"})
@@ -365,11 +302,57 @@ if __name__ == "__main__":
db = Database(logger)
# Compute the config
logger.info("Computing config ...")
config = Configurator(
args.settings, args.core, args.plugins, tmp_config, logger
)
config_files = config.get_config()
if config_files is None:
logger.info("Computing config ...")
config = Configurator(
args.settings, core_settings, args.plugins, tmp_config, logger
)
config_files = config.get_config()
if not db.is_initialized():
logger.info(
"Database not initialized, initializing ...",
)
ret, err = db.init_tables(
[
config.get_settings(),
list(chain.from_iterable(core_plugins.values())),
config.get_plugins_settings(),
]
)
# Initialize database tables
if err:
logger.error(
f"Exception while initializing database : {err}",
)
sys_exit(1)
elif ret is False:
logger.info(
"Database tables are already initialized, skipping creation ...",
)
else:
logger.info("Database tables initialized")
with open("/opt/bunkerweb/VERSION", "r") as f:
version = f.read().strip()
err = db.initialize_db(version=version, integration=integration)
if err:
logger.error(
f"Can't initialize database : {err}",
)
sys_exit(1)
else:
logger.info("Database initialized")
else:
logger.info(
"Database is already initialized, skipping ...",
)
if args.init:
sys_exit(0)
err = db.save_config(config_files, "scheduler")
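
A note on the custom_confs_rx comprehension above: each matching environment variable is exploded into its regex groups before being saved to the database. A minimal sketch of that mechanic, using a hypothetical pattern (the real custom_confs_rx is defined earlier in this file and may capture different groups):

from re import compile as re_compile

# Hypothetical pattern for illustration only; the real custom_confs_rx
# may accept more config types and also capture a service name.
custom_confs_rx = re_compile(r"^CUSTOM_CONF_(HTTP|SERVER_HTTP|MODSEC|MODSEC_CRS)_(.+)$")

environ = {"CUSTOM_CONF_HTTP_my_rules": "add_header X-Demo on;"}  # sample input

custom_confs = [
    {"value": v, "exploded": custom_confs_rx.search(k).groups()}
    for k, v in environ.items()
    if custom_confs_rx.match(k)
]
print(custom_confs)
# [{'value': 'add_header X-Demo on;', 'exploded': ('HTTP', 'my_rules')}]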

View File

@@ -40,6 +40,14 @@ elif [ "$AUTOCONF_MODE" == "yes" ] ; then
echo "Autoconf" > /opt/bunkerweb/INTEGRATION
fi
if [ -f "/etc/nginx/variables.env" ] ; then
log "ENTRYPOINT" "⚠️ " "Looks like BunkerWeb has already been loaded, will not generate temp config"
else
# generate "temp" config
echo -e "IS_LOADING=yes\nSERVER_NAME=\nAPI_HTTP_PORT=${API_HTTP_PORT:-5000}\nAPI_SERVER_NAME=${API_SERVER_NAME:-bwapi}\nAPI_WHITELIST_IP=${API_WHITELIST_IP:-127.0.0.0/8}" > /tmp/variables.env
python3 /opt/bunkerweb/gen/main.py --variables /tmp/variables.env
fi
# start nginx
log "ENTRYPOINT" "ℹ️" "Starting nginx ..."
nginx -g "daemon off;" &
@@ -52,4 +60,4 @@ while [ -f "/opt/bunkerweb/tmp/nginx.pid" ] ; do
done
log "ENTRYPOINT" "ℹ️" "BunkerWeb stopped"
exit 0
exit 0
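
For readers less fluent in shell parameter expansion, the temp-config echo above behaves like the following Python sketch: each API_* variable falls back to a default when unset (the container runs the one-line echo, not this script):

from os import getenv

# Mirror of the "temp" loading config written by the entrypoint.
loading_env = {
    "IS_LOADING": "yes",
    "SERVER_NAME": "",
    "API_HTTP_PORT": getenv("API_HTTP_PORT", "5000"),
    "API_SERVER_NAME": getenv("API_SERVER_NAME", "bwapi"),
    "API_WHITELIST_IP": getenv("API_WHITELIST_IP", "127.0.0.0/8"),
}
with open("/tmp/variables.env", "w") as f:
    f.write("\n".join(f"{k}={v}" for k, v in loading_env.items()) + "\n")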

View File

@@ -8,7 +8,7 @@ from os.path import exists
from re import search
from sys import path as sys_path
from typing import Any, Dict, List, Optional, Tuple
from sqlalchemy import create_engine, inspect, text
from sqlalchemy import create_engine, inspect
from sqlalchemy.exc import OperationalError, ProgrammingError, SQLAlchemyError
from sqlalchemy.orm import scoped_session, sessionmaker
from time import sleep
@@ -683,7 +683,7 @@ class Database:
return services
def update_job(self, plugin_id: str, job_name: str) -> str:
def update_job(self, plugin_id: str, job_name: str, success: bool) -> str:
"""Update the job last_run in the database"""
with self.__db_session() as session:
job = (
@@ -696,6 +696,7 @@
return "Job not found"
job.last_run = datetime.now()
job.success = success
try:
session.commit()
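
With the extra success parameter, callers now persist the outcome of a run together with its timestamp. A minimal usage sketch (the plugin and job names are illustrative; update_job returns an empty string on success and an error message such as "Job not found" otherwise):

# db is a Database instance and logger a Logger, as elsewhere in the codebase.
err = db.update_job("letsencrypt", "certbot-renew", success=True)
if err:
    logger.warning(f"Couldn't update job state in the database: {err}")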

View File

@@ -150,6 +150,7 @@ class Jobs(Base):
file = Column(String(255), nullable=False)
every = Column(SCHEDULES_ENUM, nullable=False)
reload = Column(Boolean, nullable=False)
success = Column(Boolean, nullable=True)
last_run = Column(DateTime, nullable=True)
plugin = relationship("Plugins", back_populates="jobs")
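
The new nullable success column makes the last outcome of each job queryable. A hedged sketch of reading it back with plain SQLAlchemy (the engine URI and import path are assumptions for illustration):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from model import Jobs  # assumed import path for the declarative models

engine = create_engine("sqlite:////opt/bunkerweb/db/db.sqlite3")  # illustrative URI
Session = sessionmaker(bind=engine)

with Session() as session:
    # success is nullable: None means the job has not run yet.
    for job in session.query(Jobs).filter(Jobs.success.is_(False)):
        print(f"Job {job.name} failed on its last run at {job.last_run}")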

View File

@@ -1,4 +1,4 @@
FROM python:3.11-rc-alpine
FROM python:3.11-alpine
# Copy python requirements
COPY bw/deps/requirements.txt /opt/bunkerweb/deps/requirements.txt

View File

@@ -117,17 +117,17 @@ class JobScheduler(ApiCaller):
f"Error while executing job {name} from plugin {plugin}",
)
success = False
elif success and proc.returncode < 2:
err = self.__db.update_job(plugin, name)
if not err:
self.__logger.info(
f"Successfully executed job {name} from plugin {plugin} and updated database",
)
else:
self.__logger.warning(
f"Successfully executed job {name} from plugin {plugin} but failed to update database: {err}",
)
err = self.__db.update_job(plugin, name, success)
if not err:
self.__logger.info(
f"Successfully executed job {name} from plugin {plugin} and updated database",
)
else:
self.__logger.warning(
f"Successfully executed job {name} from plugin {plugin} but failed to update database: {err}",
)
return success
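
The refactor above updates the database row after every run, carrying the success flag, instead of only when the job succeeded. A condensed, hypothetical version of the wrapper logic after this change (names simplified; the return-code convention is taken from the removed elif branch):

from subprocess import DEVNULL, STDOUT, run as subprocess_run

def run_job(db, logger, plugin: str, name: str, command: str) -> bool:
    """Hypothetical condensation of the job wrapper: run the job, derive
    success from the return code, then always record the outcome."""
    proc = subprocess_run(command, shell=True, stdin=DEVNULL, stderr=STDOUT)
    success = proc.returncode < 2  # return codes 0 and 1 are treated as success
    err = db.update_job(plugin, name, success)
    if not err:
        logger.info(f"Successfully executed job {name} from plugin {plugin} and updated database")
    else:
        logger.warning(f"Successfully executed job {name} from plugin {plugin} but failed to update database: {err}")
    return success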

View File

@@ -37,23 +37,19 @@ elif [ "$AUTOCONF_MODE" == "yes" ] ; then
echo "Autoconf" > /opt/bunkerweb/INTEGRATION
fi
# Init database
get_env > "/tmp/variables.env"
/opt/bunkerweb/gen/save_config.py --variables /tmp/variables.env --init
if [ "$?" -ne 0 ] ; then
log "ENTRYPOINT" "❌" "Scheduler generator failed"
exit 1
fi
generate=yes
if [ -f "/etc/nginx/variables.env" ] && grep -q "^TEMP_NGINX=no$" /etc/nginx/variables.env ; then
log "ENTRYPOINT" "⚠️ " "Looks like BunkerWeb configuration is already generated, will not generate it again"
generate=no
if ! grep -q "Docker" /opt/bunkerweb/INTEGRATION ; then
# Init database
get_env > "/tmp/variables.env"
/opt/bunkerweb/gen/save_config.py --variables /tmp/variables.env --init
if [ "$?" -ne 0 ] ; then
log "ENTRYPOINT" "❌" "Scheduler generator failed"
exit 1
fi
fi
# execute jobs
log "ENTRYPOINT" "ℹ️ " "Executing scheduler ..."
/opt/bunkerweb/scheduler/main.py --generate $generate
/opt/bunkerweb/scheduler/main.py
log "ENTRYPOINT" "ℹ️ " "Scheduler stopped"
exit 0

View File

@@ -16,7 +16,7 @@ from os import (
walk,
)
from os.path import dirname, exists, isdir, isfile, islink, join
from shutil import chown, rmtree
from shutil import chown, copy, rmtree
from signal import SIGINT, SIGTERM, SIGUSR1, SIGUSR2, signal
from subprocess import PIPE, run as subprocess_run, DEVNULL, STDOUT
from sys import path as sys_path
@@ -110,14 +110,8 @@ if __name__ == "__main__":
type=str,
help="path to the file containing environment variables",
)
parser.add_argument(
"--generate",
default="no",
type=str,
help="Precise if the configuration needs to be generated directly or not",
)
args = parser.parse_args()
generate = args.generate == "yes"
generate = False
integration = "Linux"
api_caller = ApiCaller()
@@ -136,27 +130,11 @@ if __name__ == "__main__":
integration = f.read().strip()
api_caller.auto_setup(bw_integration=integration)
if integration == "Docker" and generate is True:
# run the config saver
cmd = f"python /opt/bunkerweb/gen/save_config.py --settings /opt/bunkerweb/settings.json"
proc = subprocess_run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:
logger.error(
"Config saver failed, configuration will not work as expected...",
)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
while not db.is_initialized():
logger.warning(
"Database is not initialized, retrying in 5s ...",
)
sleep(5)
if integration in (
"Swarm",
"Kubernetes",
@@ -174,6 +152,23 @@ if __name__ == "__main__":
"Autoconf is not loaded yet in the database, retrying in 5s ...",
)
sleep(5)
elif integration == "Docker" and (
not exists("/opt/bunkerweb/tmp/variables.env")
or db.get_config() != dotenv_values("/opt/bunkerweb/tmp/variables.env")
):
# run the config saver
cmd = f"python /opt/bunkerweb/gen/save_config.py --settings /opt/bunkerweb/settings.json"
proc = subprocess_run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode != 0:
logger.error(
"Config saver failed, configuration will not work as expected...",
)
while not db.is_initialized():
logger.warning(
"Database is not initialized, retrying in 5s ...",
)
sleep(5)
env = db.get_config()
while not db.is_first_config_saved() or not env:
@@ -208,7 +203,10 @@ if __name__ == "__main__":
}
)
old_configs = None
if custom_confs:
old_configs = db.get_custom_configs()
ret = db.save_custom_configs(custom_confs, "manual")
if ret:
logger.error(
@@ -217,16 +215,19 @@ if __name__ == "__main__":
custom_configs = db.get_custom_configs()
original_path = "/data/configs"
makedirs(original_path, exist_ok=True)
for custom_config in custom_configs:
tmp_path = f"{original_path}/{custom_config['type'].replace('_', '-')}"
if custom_config["service_id"]:
tmp_path += f"/{custom_config['service_id']}"
tmp_path += f"/{custom_config['name']}.conf"
makedirs(dirname(tmp_path), exist_ok=True)
with open(tmp_path, "wb") as f:
f.write(custom_config["data"])
if old_configs != custom_configs:
original_path = "/data/configs"
makedirs(original_path, exist_ok=True)
for custom_config in custom_configs:
tmp_path = (
f"{original_path}/{custom_config['type'].replace('_', '-')}"
)
if custom_config["service_id"]:
tmp_path += f"/{custom_config['service_id']}"
tmp_path += f"/{custom_config['name']}.conf"
makedirs(dirname(tmp_path), exist_ok=True)
with open(tmp_path, "wb") as f:
f.write(custom_config["data"])
# Fix permissions for the custom configs folder
for root, dirs, files in walk("/data/configs", topdown=False):
@@ -248,6 +249,15 @@ if __name__ == "__main__":
)
logger.info("Executing scheduler ...")
generate = not exists(
"/opt/bunkerweb/tmp/variables.env"
) or env != dotenv_values("/opt/bunkerweb/tmp/variables.env")
if generate is False:
logger.warning(
"Looks like BunkerWeb configuration is already generated, will not generate it again ..."
)
while True:
# Instantiate scheduler
scheduler = JobScheduler(
@@ -278,6 +288,8 @@ if __name__ == "__main__":
chown(join(root, name), "scheduler", "scheduler")
chmod(join(root, name), 0o770)
copy("/etc/nginx/variables.env", "/opt/bunkerweb/tmp/variables.env")
if len(api_caller._get_apis()) > 0:
# send nginx configs
logger.info("Sending /etc/nginx folder ...")
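
The generate guard added above compares the environment stored in the database with the snapshot copied to /opt/bunkerweb/tmp/variables.env after each generation (hence the new copy call). A standalone sketch of that check, assuming python-dotenv, whose dotenv_values this file already uses:

from os.path import exists
from dotenv import dotenv_values

def needs_generation(env: dict, snapshot: str = "/opt/bunkerweb/tmp/variables.env") -> bool:
    """True when the nginx config must be (re)generated: either no snapshot
    of the last generated environment exists, or it differs from the
    configuration currently held in the database."""
    return not exists(snapshot) or env != dict(dotenv_values(snapshot))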

View File

@@ -1,4 +1,4 @@
FROM python:3.11-rc-alpine
FROM python:3.11-alpine
# Copy files
# can't exclude specific files/dir from . so we are copying everything by hand

View File

@@ -627,7 +627,11 @@ def configs():
db_configs = db.get_custom_configs()
return render_template(
"configs.html",
folders=[path_to_dict("/opt/bunkerweb/configs", db_configs=db_configs)],
folders=[
path_to_dict(
"/opt/bunkerweb/configs", db_configs=db_configs, integration=integration
)
],
)
@@ -1147,7 +1151,11 @@ def logs():
first_instance = instances[0] if instances else None
return render_template(
"logs.html", first_instance=first_instance, instances=instances
"logs.html",
first_instance=first_instance,
instances=instances,
is_swarm=getenv("SWARM_MODE", "no") == "yes",
is_kubernetes=getenv("KUBERNETES_MODE", "no") == "yes",
)
@@ -1204,7 +1212,11 @@ def logs_linux():
for line in raw_logs_error:
line_lower = line.lower()
if "[info]" in line.lower() and line.endswith(":") or "[error]" in line.lower():
if (
("[info]" in line_lower or "ℹ️" in line_lower)
and line.endswith(":")
or ("[error]" in line_lower or "❌" in line_lower)
):
if temp_multiple_lines:
logs_error.append("\n".join(temp_multiple_lines))
@@ -1244,37 +1256,19 @@ def logs_linux():
or "❌" in log_lower
else (
"warn"
if "[warn]" in log_lower
else ("info" if "[info]" in log_lower else "message")
if "[warn]" in log_lower or "⚠️" in log_lower
else (
"info" if "[info]" in log_lower or "ℹ️" in log_lower else "message"
)
)
)
if "\n" in log:
splitted_one_line = log.split("\n")
logs.append(
{
"content": " ".join(
splitted_one_line.pop(0).strip().split(" ")[1:]
),
"type": error_type,
"separator": True,
}
)
for splitted_log in splitted_one_line:
logs.append(
{
"content": splitted_log,
"type": error_type,
}
)
else:
logs.append(
{
"content": " ".join(log.strip().split(" ")[1:]),
"type": error_type,
}
)
logs.append(
{
"content": " ".join(log.strip().split(" ")[1:]),
"type": error_type,
}
)
count_error_logs = 0
for log in logs_error:
@@ -1336,8 +1330,12 @@ def logs_container(container_id):
or "❌" in log_lower
else (
"warn"
if "[warn]" in log_lower
else ("info" if "[info]" in log_lower else "message")
if "[warn]" in log_lower or "⚠️" in log_lower
else (
"info"
if "[info]" in log_lower or "ℹ️" in log_lower
else "message"
)
),
}
)
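
The added pictogram checks let the UI classify log lines that carry BunkerWeb's emoji prefixes rather than bracketed levels. The nested conditional expressions above boil down to this standalone helper (a sketch for clarity, not the code the UI ships):

def classify_log_line(log: str) -> str:
    """Map a raw log line to the severity class used by the templates."""
    log_lower = log.lower()
    if "[error]" in log_lower or "❌" in log_lower:
        return "error"
    if "[warn]" in log_lower or "⚠️" in log_lower:
        return "warn"
    if "[info]" in log_lower or "ℹ️" in log_lower:
        return "info"
    return "message"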

View File

@@ -423,50 +423,39 @@ def path_to_dict(
}
for conf in db_configs:
type_lower = conf["type"].replace("_", "-")
file_info = {
"name": conf["name"],
"name": f"{conf['name']}.conf",
"type": "file",
"path": f"{path}/{conf['type'].replace('_', '-')}{'/' + conf['service_id'] if conf['service_id'] else ''}/{conf['name']}.conf",
"path": f"{path}/{type_lower}{'/' + conf['service_id'] if conf['service_id'] else ''}/{conf['name']}.conf",
"can_edit": conf["method"] == "ui",
"can_download": is_cache,
"content": b64encode(conf["data"]).decode("utf-8"),
}
if (
d["children"][config_types.index(conf["type"].replace("_", "-"))][
"children"
]
d["children"][config_types.index(type_lower)]["children"]
and conf["service_id"]
and conf["service_id"]
in [
x["name"]
for x in d["children"][
config_types.index(conf["type"].replace("_", "-"))
]["children"]
for x in d["children"][config_types.index(type_lower)]["children"]
]
):
d["children"][config_types.index(conf["type"].replace("_", "-"))][
"children"
][
d["children"][config_types.index(type_lower)]["children"][
[
x["name"]
for x in d["children"][
config_types.index(conf["type"].replace("_", "-"))
]["children"]
for x in d["children"][config_types.index(type_lower)][
"children"
]
].index(conf["service_id"])
][
"children"
].append(
file_info
)
]["children"].append(file_info)
else:
d["children"][config_types.index(conf["type"].replace("_", "-"))][
"children"
].append(
d["children"][config_types.index(type_lower)]["children"].append(
{
"name": conf["service_id"],
"type": "folder",
"path": f"{path}/{conf['type']}/{conf['service_id']}",
"path": f"{path}/{type_lower}/{conf['service_id']}",
"can_create_files": True,
"can_create_folders": False,
"can_edit": True,