Refactor path resolution for the UI + optimize the plugin upload

Théophile Diot 2023-05-25 19:29:02 -04:00
parent 6e80c7b8de
commit 168dfc4390
No known key found for this signature in database
GPG key ID: E752C80DB72BB014
8 changed files with 250 additions and 430 deletions
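
The diffs below apply one pattern over and over: hardcoded POSIX literals such as "/var/tmp/bunkerweb/ui" become paths assembled from os.sep components with pathlib.Path(sep, ...) or os.path.join(sep, ...). A minimal sketch of the pattern, using an illustrative path rather than any particular one from the commit:

#!/usr/bin/python3
from os import sep
from os.path import join
from pathlib import Path

# Before: the separator is baked into the string literal.
ui_tmp_old = "/var/tmp/bunkerweb/ui"

# After: the separator comes from os.sep, so the same expression stays
# correct if the components or the platform separator ever change.
ui_tmp_path = Path(sep, "var", "tmp", "bunkerweb", "ui")  # pathlib object
ui_tmp_str = join(sep, "var", "tmp", "bunkerweb", "ui")   # plain string

assert str(ui_tmp_path) == ui_tmp_old == ui_tmp_str  # holds on POSIX systems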

View file

@ -1,3 +1,5 @@
#!/usr/bin/python3
from copy import deepcopy
from functools import partial
from glob import glob

View file

@ -1,23 +1,24 @@
#!/usr/bin/python3
from os import _exit, getenv, getpid, listdir, sep
from os.path import basename, dirname, join
from sys import path as sys_path, modules as sys_modules
from pathlib import Path
if Path("/etc/os-release").is_file():
with open("/etc/os-release", "r") as f:
if (
"/usr/share/bunkerweb/deps/python" not in sys_path
and "Alpine" not in f.read()
):
sys_path.append("/usr/share/bunkerweb/deps/python")
os_release_path = Path(sep, "etc", "os-release")
if os_release_path.is_file() and "Alpine" not in os_release_path.read_text():
sys_path.append(join(sep, "usr", "share", "bunkerweb", "deps", "python"))
if "/usr/share/bunkerweb/utils" not in sys_path:
sys_path.append("/usr/share/bunkerweb/utils")
if "/usr/share/bunkerweb/api" not in sys_path:
sys_path.append("/usr/share/bunkerweb/api")
if "/usr/share/bunkerweb/db" not in sys_path:
sys_path.append("/usr/share/bunkerweb/db")
del os_release_path
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("utils",), ("api",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from bs4 import BeautifulSoup
from contextlib import suppress
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from dateutil.parser import parse as dateutil_parse
@ -48,13 +49,11 @@ from flask_wtf.csrf import CSRFProtect, CSRFError, generate_csrf
from hashlib import sha256
from importlib.machinery import SourceFileLoader
from io import BytesIO
from json import JSONDecodeError, dumps, load as json_load
from json import JSONDecodeError, dumps, loads as json_loads
from jinja2 import Template
from kubernetes import client as kube_client
from kubernetes import config as kube_config
from kubernetes.client.exceptions import ApiException as kube_ApiException
from os import _exit, getenv, getpid, listdir
from os.path import basename, dirname
from re import compile as re_compile
from regex import match as regex_match
from requests import get
@ -80,8 +79,8 @@ from utils import (
get_variables,
path_to_dict,
)
from logger import setup_logger
from Database import Database
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
logger = setup_logger("UI", getenv("LOG_LEVEL", "INFO"))
@ -94,10 +93,8 @@ def stop_gunicorn():
def stop(status, stop=True):
if Path("/var/tmp/bunkerweb/ui.pid").exists():
Path("/var/tmp/bunkerweb/ui.pid").unlink()
if Path("/var/tmp/bunkerweb/ui.healthy").exists():
Path("/var/tmp/bunkerweb/ui.healthy").unlink()
Path(sep, "var", "tmp", "bunkerweb", "ui.pid").unlink(exist_ok=True)
Path(sep, "var", "tmp", "bunkerweb", "ui.healthy").unlink(exist_ok=True)
if stop is True:
stop_gunicorn()
_exit(status)
@ -112,8 +109,12 @@ def handle_stop(signum, frame):
signal(SIGINT, handle_stop)
signal(SIGTERM, handle_stop)
if not Path("/var/tmp/bunkerweb/ui.pid").is_file():
Path("/var/tmp/bunkerweb/ui.pid").write_text(str(getpid()))
sbin_nginx_path = Path(sep, "usr", "sbin", "nginx")
pid_file = Path(sep, "var", "tmp", "bunkerweb", "ui.pid")
if not pid_file.is_file():
pid_file.write_text(str(getpid()))
del pid_file
# Flask app
app = Flask(
@ -168,15 +169,17 @@ PLUGIN_KEYS = [
]
integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
integration = "Kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
integration = "Swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
integration = f.read().strip()
elif integration_path.is_file():
integration = integration_path.read_text().strip()
del integration_path
docker_client = None
kubernetes_client = None
@ -208,10 +211,8 @@ while not db.is_first_config_saved() or not env:
env = db.get_config()
logger.info("Database is ready")
Path("/var/tmp/bunkerweb/ui.healthy").write_text("ok")
with open("/usr/share/bunkerweb/VERSION", "r") as f:
bw_version = f.read().strip()
Path(sep, "var", "tmp", "bunkerweb", "ui.healthy").write_text("ok")
bw_version = Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text().strip()
try:
app.config.update(
@ -715,7 +716,7 @@ def configs():
"configs.html",
folders=[
path_to_dict(
"/etc/bunkerweb/configs",
join(sep, "etc", "bunkerweb", "configs"),
db_data=db.get_custom_configs(),
services=app.config["CONFIG"]
.get_config(methods=False)["SERVER_NAME"]
@ -729,6 +730,7 @@ def configs():
@app.route("/plugins", methods=["GET", "POST"])
@login_required
def plugins():
tmp_ui_path = Path(sep, "var", "tmp", "bunkerweb", "ui")
if request.method == "POST":
operation = ""
error = 0
@ -755,9 +757,7 @@ def plugins():
)
flash(f"Deleted plugin {variables['name']} successfully")
else:
if not Path("/var/tmp/bunkerweb/ui").exists() or not listdir(
"/var/tmp/bunkerweb/ui"
):
if not tmp_ui_path.exists() or not listdir(str(tmp_ui_path)):
flash("Please upload new plugins to reload plugins", "error")
return redirect(url_for("loading", next=url_for("plugins")))
@ -766,161 +766,25 @@ def plugins():
new_plugins = []
new_plugins_ids = []
for file in listdir("/var/tmp/bunkerweb/ui"):
if not Path(f"/var/tmp/bunkerweb/ui/{file}").is_file():
for file in listdir(str(tmp_ui_path)):
if not tmp_ui_path.joinpath(file).is_file():
continue
files_count += 1
folder_name = ""
temp_folder_name = file.split(".")[0]
temp_folder_path = tmp_ui_path.joinpath(temp_folder_name)
is_dir = False
try:
if file.endswith(".zip"):
try:
with ZipFile(f"/var/tmp/bunkerweb/ui/{file}") as zip_file:
with ZipFile(str(tmp_ui_path.joinpath(file))) as zip_file:
try:
zip_file.getinfo("plugin.json")
zip_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
)
with open(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/plugin.json",
"r",
) as f:
plugin_file = json_load(f)
if not all(
key in plugin_file.keys() for key in PLUGIN_KEYS
):
raise ValueError
folder_name = plugin_file["id"]
if not app.config["CONFIGFILES"].check_name(
folder_name
):
errors += 1
error = 1
flash(
f"Invalid plugin name for {temp_folder_name}. (Can only contain numbers, letters, underscores and hyphens (min 4 characters and max 64))",
"error",
)
raise Exception
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content,
mode="w:gz",
compresslevel=9,
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except KeyError:
zip_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
)
dirs = [
d
for d in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
)
if Path(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{d}"
).is_dir()
]
if (
not dirs
or len(dirs) > 1
or not Path(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/plugin.json"
).is_file()
):
raise KeyError
with open(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/plugin.json",
"r",
) as f:
plugin_file = json_load(f)
if not all(
key in plugin_file.keys() for key in PLUGIN_KEYS
):
raise ValueError
folder_name = plugin_file["id"]
if not app.config["CONFIGFILES"].check_name(
folder_name
):
errors += 1
error = 1
flash(
f"Invalid plugin name for {temp_folder_name}. (Can only contain numbers, letters, underscores and hyphens (min 4 characters and max 64))",
"error",
)
raise Exception
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
)
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content,
mode="w:gz",
compresslevel=9,
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
is_dir = True
zip_file.extractall(str(temp_folder_path))
except BadZipFile:
errors += 1
error = 1
@ -931,152 +795,13 @@ def plugins():
else:
try:
with tar_open(
f"/var/tmp/bunkerweb/ui/{file}",
errorlevel=2,
str(tmp_ui_path.joinpath(file)), errorlevel=2
) as tar_file:
try:
tar_file.getmember("plugin.json")
tar_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
)
with open(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/plugin.json",
"r",
) as f:
plugin_file = json_load(f)
if not all(
key in plugin_file.keys() for key in PLUGIN_KEYS
):
raise ValueError
folder_name = plugin_file["id"]
if not app.config["CONFIGFILES"].check_name(
folder_name
):
errors += 1
error = 1
flash(
f"Invalid plugin name for {temp_folder_name}. (Can only contain numbers, letters, underscores and hyphens (min 4 characters and max 64))",
"error",
)
raise Exception
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content,
mode="w:gz",
compresslevel=9,
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except KeyError:
tar_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
)
dirs = [
d
for d in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
)
if Path(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{d}"
).is_dir()
]
if (
not dirs
or len(dirs) > 1
or not Path(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/plugin.json"
).is_file()
):
raise KeyError
with open(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/plugin.json",
"r",
) as f:
plugin_file = json_load(f)
if not all(
key in plugin_file.keys() for key in PLUGIN_KEYS
):
raise ValueError
folder_name = plugin_file["id"]
if not app.config["CONFIGFILES"].check_name(
folder_name
):
errors += 1
error = 1
flash(
f"Invalid plugin name for {temp_folder_name}. (Can only contain numbers, letters, underscores and hyphens (min 4 characters and max 64))",
"error",
)
raise Exception
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
)
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content,
mode="w:gz",
compresslevel=9,
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
is_dir = True
tar_file.extractall(str(temp_folder_path))
except ReadError:
errors += 1
error = 1
@ -1098,6 +823,78 @@ def plugins():
f"The file plugin.json in {file} is not valid ({folder_name or temp_folder_name})",
"error",
)
if is_dir:
dirs = [
d
for d in listdir(str(temp_folder_path))
if temp_folder_path.joinpath(d).is_dir()
]
if (
not dirs
or len(dirs) > 1
or not temp_folder_path.joinpath(
dirs[0], "plugin.json"
).is_file()
):
raise KeyError
for file_name in listdir(
str(temp_folder_path.joinpath(dirs[0]))
):
move(
str(temp_folder_path.joinpath(dirs[0], file_name)),
str(temp_folder_path.joinpath(file_name)),
)
rmtree(
str(temp_folder_path.joinpath(dirs[0])),
ignore_errors=True,
)
plugin_file = json_loads(
temp_folder_path.joinpath("plugin.json").read_text()
)
if not all(key in plugin_file.keys() for key in PLUGIN_KEYS):
raise ValueError
folder_name = plugin_file["id"]
if not app.config["CONFIGFILES"].check_name(folder_name):
errors += 1
error = 1
flash(
f"Invalid plugin name for {temp_folder_name}. (Can only contain numbers, letters, underscores and hyphens (min 4 characters and max 64))",
"error",
)
raise Exception
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content,
mode="w:gz",
compresslevel=9,
) as tar:
tar.add(
str(temp_folder_path),
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui" in listdir(str(temp_folder_path)),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except KeyError:
errors += 1
error = 1
@ -1171,9 +968,8 @@ def plugins():
).start()
# Remove tmp folder
if Path("/var/tmp/bunkerweb/ui").exists():
with suppress(OSError):
rmtree("/var/tmp/bunkerweb/ui")
if tmp_ui_path.exists():
rmtree(str(tmp_ui_path), ignore_errors=True)
return redirect(
url_for("loading", next=url_for("plugins"), message="Reloading plugins")
@ -1229,7 +1025,8 @@ def upload_plugin():
if not request.files:
return {"status": "ko"}, 400
Path("/var/tmp/bunkerweb/ui").mkdir(parents=True, exist_ok=True)
tmp_ui_path = Path(sep, "var", "tmp", "bunkerweb", "ui")
tmp_ui_path.mkdir(parents=True, exist_ok=True)
for uploaded_file in request.files.values():
if not uploaded_file.filename.endswith((".zip", ".tar.gz", ".tar.xz")):
@ -1244,7 +1041,7 @@ def upload_plugin():
if file.endswith("plugin.json"):
plugins.append(basename(dirname(file)))
if len(plugins) > 1:
zip_file.extractall("/var/tmp/bunkerweb/ui/")
zip_file.extractall(str(tmp_ui_path) + "/")
folder_name = uploaded_file.filename.replace(".zip", "")
else:
with tar_open(fileobj=io) as tar_file:
@ -1252,16 +1049,14 @@ def upload_plugin():
if file.endswith("plugin.json"):
plugins.append(basename(dirname(file)))
if len(plugins) > 1:
tar_file.extractall("/var/tmp/bunkerweb/ui/")
tar_file.extractall(str(tmp_ui_path) + "/")
folder_name = uploaded_file.filename.replace(".tar.gz", "").replace(
".tar.xz", ""
)
if len(plugins) <= 1:
io.seek(0, 0)
Path(f"/var/tmp/bunkerweb/ui/{uploaded_file.filename}").write_bytes(
io.read()
)
tmp_ui_path.joinpath(uploaded_file.filename).write_bytes(io.read())
return {"status": "ok"}, 201
for plugin in plugins:
@ -1270,13 +1065,12 @@ def upload_plugin():
mode="w:gz", fileobj=tgz, dereference=True, compresslevel=3
) as tf:
tf.add(
f"/var/tmp/bunkerweb/ui/{folder_name}/{plugin}",
arcname=plugin,
str(tmp_ui_path.joinpath(folder_name, plugin)), arcname=plugin
)
tgz.seek(0, 0)
Path(f"/var/tmp/bunkerweb/ui/{plugin}.tar.gz").write_bytes(tgz.read())
tmp_ui_path.joinpath(f"{plugin}.tar.gz").write_bytes(tgz.read())
rmtree(f"/var/tmp/bunkerweb/ui/{folder_name}", ignore_errors=True)
rmtree(str(tmp_ui_path.joinpath(folder_name)), ignore_errors=True)
return {"status": "ok"}, 201
@ -1336,7 +1130,7 @@ def custom_plugin(plugin):
)
error = True
finally:
if Path("/usr/sbin/nginx").is_file():
if sbin_nginx_path.is_file():
# Remove the custom plugin from the shared library
sys_path.pop()
sys_modules.pop("actions")
@ -1365,7 +1159,7 @@ def cache():
"cache.html",
folders=[
path_to_dict(
"/var/cache/bunkerweb",
join(sep, "var", "cache", "bunkerweb"),
is_cache=True,
db_data=db.get_jobs_cache_files(),
services=app.config["CONFIG"]
@ -1390,7 +1184,7 @@ def logs():
@app.route("/logs/local", methods=["GET"])
@login_required
def logs_linux():
if not Path("/usr/sbin/nginx").is_file():
if not sbin_nginx_path.is_file():
return (
jsonify(
{
@ -1405,24 +1199,17 @@ def logs_linux():
raw_logs_access = []
raw_logs_error = []
if last_update:
if Path("/var/log/nginx/error.log").exists():
with open("/var/log/nginx/error.log", "r") as f:
raw_logs_error = f.read().splitlines()[int(last_update.split(".")[0]) :]
nginx_error_file = Path(sep, "var", "log", "nginx", "error.log")
if nginx_error_file.is_file():
raw_logs_error = nginx_error_file.read_text().splitlines()[
int(last_update.split(".")[0]) if last_update else 0 :
]
if Path("/var/log/nginx/access.log").exists():
with open("/var/log/nginx/access.log", "r") as f:
raw_logs_access = f.read().splitlines()[
int(last_update.split(".")[1]) :
]
else:
if Path("/var/log/nginx/error.log").exists():
with open("/var/log/nginx/error.log", "r") as f:
raw_logs_error = f.read().splitlines()
if Path("/var/log/nginx/access.log").exists():
with open("/var/log/nginx/access.log", "r") as f:
raw_logs_access = f.read().splitlines()
nginx_access_file = Path(sep, "var", "log", "nginx", "access.log")
if nginx_access_file.is_file():
raw_logs_access = nginx_access_file.read_text().splitlines()[
int(last_update.split(".")[1]) if last_update else 0 :
]
logs_error = []
temp_multiple_lines = []
@ -1474,6 +1261,9 @@ def logs_linux():
logs = []
for log in raw_logs:
if "[48;2" in log or not log.strip():
continue
log_lower = log.lower()
error_type = (
"error"

View file

@ -1,24 +1,22 @@
#!/usr/bin/python3
from copy import deepcopy
from hashlib import sha256
from io import BytesIO
from os import sep
from os.path import join
from flask import flash
from glob import iglob
from json import load as json_load
from os import listdir
from os.path import basename
from json import loads as json_loads
from pathlib import Path
from re import search as re_search
from subprocess import run, DEVNULL, STDOUT
from tarfile import open as tar_open
from typing import List, Tuple
from uuid import uuid4
class Config:
def __init__(self, db) -> None:
with open("/usr/share/bunkerweb/settings.json", "r") as f:
self.__settings: dict = json_load(f)
self.__settings = json_loads(
Path(sep, "usr", "share", "bunkerweb", "settings.json").read_text()
)
self.__db = db
def __dict_to_env(self, filename: str, variables: dict) -> None:
@ -69,14 +67,14 @@ class Config:
servers.append(server_name)
conf["SERVER_NAME"] = " ".join(servers)
env_file = f"/tmp/{uuid4()}.env"
env_file = Path(sep, "tmp", f"{uuid4()}.env")
self.__dict_to_env(env_file, conf)
proc = run(
[
"python3",
"/usr/share/bunkerweb/gen/save_config.py",
join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
"--variables",
env_file,
str(env_file),
"--method",
"ui",
],
@ -87,7 +85,7 @@ class Config:
if proc.returncode != 0:
raise Exception(f"Error from generator (return code = {proc.returncode})")
Path(env_file).unlink()
env_file.unlink()
def get_plugins_settings(self) -> dict:
return {
@ -99,7 +97,6 @@ class Config:
self, *, external: bool = False, with_data: bool = False
) -> List[dict]:
plugins = self.__db.get_plugins(external=external, with_data=with_data)
plugins.sort(key=lambda x: x["name"])
general_plugin = None
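
The Config hunks above keep the same save flow but build the temporary env file from path components and reuse the resulting Path object for both the subprocess argument and the cleanup. A minimal sketch of that round trip, with a hypothetical variables dict standing in for the full configuration:

#!/usr/bin/python3
from os import sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from uuid import uuid4

variables = {"SERVER_NAME": "www.example.com", "USE_ANTIBOT": "no"}  # illustrative

# uuid-named temp file built from components instead of f"/tmp/{uuid4()}.env"
env_file = Path(sep, "tmp", f"{uuid4()}.env")
env_file.write_text("".join(f"{key}={value}\n" for key, value in variables.items()))

proc = run(
    [
        "python3",
        join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
        "--variables",
        str(env_file),  # pass an explicit string on the command line
        "--method",
        "ui",
    ],
    stdin=DEVNULL,
    stderr=STDOUT,
)
if proc.returncode != 0:
    raise Exception(f"Error from generator (return code = {proc.returncode})")

env_file.unlink()  # the Path object is unlinked directly, no Path(env_file) rewrap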

View file

@ -1,6 +1,8 @@
#!/usr/bin/python3
from glob import glob
from os import listdir, replace, walk
from os.path import dirname, join
from os import listdir, replace, sep, walk
from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile
from shutil import rmtree, move as shutil_move
@ -12,16 +14,17 @@ from utils import path_to_dict
def generate_custom_configs(
custom_configs: List[Dict[str, Any]],
*,
original_path: str = "/etc/bunkerweb/configs",
original_path: str = join(sep, "etc", "bunkerweb", "configs"),
):
Path(original_path).mkdir(parents=True, exist_ok=True)
original_path: Path = Path(original_path)
original_path.mkdir(parents=True, exist_ok=True)
for custom_config in custom_configs:
tmp_path = f"{original_path}/{custom_config['type'].replace('_', '-')}"
tmp_path = original_path.joinpath(custom_config["type"].replace("_", "-"))
if custom_config["service_id"]:
tmp_path += f"/{custom_config['service_id']}"
tmp_path += f"/{custom_config['name']}.conf"
Path(dirname(tmp_path)).mkdir(parents=True, exist_ok=True)
Path(tmp_path).write_bytes(custom_config["data"])
tmp_path = tmp_path.joinpath(custom_config["service_id"])
tmp_path = tmp_path.joinpath(f"{custom_config['name']}.conf")
tmp_path.parent.mkdir(parents=True, exist_ok=True)
tmp_path.write_bytes(custom_config["data"])
class ConfigFiles:
@ -29,36 +32,36 @@ class ConfigFiles:
self.__name_regex = re_compile(r"^[\w.-]{1,64}$")
self.__root_dirs = [
child["name"]
for child in path_to_dict("/etc/bunkerweb/configs")["children"]
for child in path_to_dict(join(sep, "etc", "bunkerweb", "configs"))[
"children"
]
]
self.__file_creation_blacklist = ["http", "stream"]
self.__logger = logger
self.__db = db
if not Path("/usr/sbin/nginx").is_file():
if not Path(sep, "usr", "sbin", "nginx").is_file():
custom_configs = self.__db.get_custom_configs()
if custom_configs:
self.__logger.info("Refreshing custom configs ...")
# Remove old custom configs files
for file in glob("/etc/bunkerweb/configs/*"):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=True)
for file in glob(join(sep, "etc", "bunkerweb", "configs", "*")):
file = Path(file)
if file.is_symlink() or file.is_file():
file.unlink()
elif file.is_dir():
rmtree(str(file), ignore_errors=True)
generate_custom_configs(custom_configs)
self.__logger.info("Custom configs refreshed successfully")
def save_configs(self) -> str:
custom_configs = []
root_dirs = listdir("/etc/bunkerweb/configs")
for root, dirs, files in walk("/etc/bunkerweb/configs", topdown=True):
if (
root != "configs"
and (dirs and not root.split("/")[-1] in root_dirs)
or files
):
configs_path = join(sep, "etc", "bunkerweb", "configs")
root_dirs = listdir(configs_path)
for root, dirs, files in walk(configs_path):
if files or (dirs and basename(root) not in root_dirs):
path_exploded = root.split("/")
for file in files:
with open(join(root, file), "r") as f:
@ -68,7 +71,7 @@ class ConfigFiles:
"exploded": (
f"{path_exploded.pop()}"
if path_exploded[-1] not in root_dirs
else "",
else None,
path_exploded[-1],
file.replace(".conf", ""),
),
@ -85,11 +88,13 @@ class ConfigFiles:
def check_name(self, name: str) -> bool:
return self.__name_regex.match(name) is not None
def check_path(self, path: str, root_path: str = "/etc/bunkerweb/configs/") -> str:
def check_path(
self, path: str, root_path: str = join(sep, "etc", "bunkerweb", "configs")
) -> str:
root_dir: str = path.split("/")[4]
if not (
path.startswith(root_path)
or root_path == "/etc/bunkerweb/configs/"
or root_path == join(sep, "etc", "bunkerweb", "configs")
and path.startswith(root_path)
and root_dir in self.__root_dirs
and (
@ -100,25 +105,32 @@ class ConfigFiles:
):
return f"{path} is not a valid path"
if root_path == "/etc/bunkerweb/configs/":
if root_path == join(sep, "etc", "bunkerweb", "configs"):
dirs = path.split("/")[5:]
nbr_children = len(dirs)
dirs = "/".join(dirs)
if len(dirs) > 1:
for x in range(nbr_children - 1):
if not Path(
f"{root_path}{root_dir}/{'/'.join(dirs.split('/')[0:-x])}"
root_path, root_dir, "/".join(dirs.split("/")[0:-x])
).exists():
return f"{root_path}{root_dir}/{'/'.join(dirs.split('/')[0:-x])} doesn't exist"
return f"{join(root_path, root_dir, '/'.join(dirs.split('/')[0:-x]))} doesn't exist"
return ""
def delete_path(self, path: str) -> Tuple[str, int]:
try:
if Path(path).is_file() or Path(f"{path}.conf").is_file():
Path(f"{path}.conf").unlink()
path: Path = Path(path)
if path.is_file():
path.unlink()
elif path.is_dir():
rmtree(str(path), ignore_errors=False)
else:
rmtree(path, ignore_errors=True)
path = Path(f"{path}.conf")
if path.is_file():
path.unlink()
else:
rmtree(str(path), ignore_errors=False)
except OSError:
return f"Could not delete {path}", 1
@ -127,16 +139,16 @@ class ConfigFiles:
def create_folder(self, path: str, name: str) -> Tuple[str, int]:
folder_path = join(path, name) if not path.endswith(name) else path
try:
Path(folder_path).mkdir()
Path(folder_path).mkdir(parents=True)
except OSError:
return f"Could not create {folder_path}", 1
return f"The folder {folder_path} was successfully created", 0
def create_file(self, path: str, name: str, content: str) -> Tuple[str, int]:
file_path = join(path, name)
Path(path).mkdir(exist_ok=True)
Path(file_path).write_text(content)
file_path = Path(path, name)
file_path.parent.mkdir(exist_ok=True)
file_path.write_text(content)
return f"The file {file_path} was successfully created", 0
def edit_folder(self, path: str, name: str, old_name: str) -> Tuple[str, int]:
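
The generate_custom_configs refactor above builds each target as configs-root / type (with "_" turned into "-") / optional service_id / "<name>.conf", then relies on Path.parent and mkdir(parents=True, exist_ok=True) instead of dirname() plus string concatenation. A minimal sketch with a made-up custom_config record in the shape the database returns:

#!/usr/bin/python3
from os import sep
from pathlib import Path

configs_root = Path(sep, "etc", "bunkerweb", "configs")

custom_config = {  # hypothetical record
    "type": "server_http",
    "service_id": "www.example.com",
    "name": "my-rule",
    "data": b"# custom nginx snippet\n",
}

target = configs_root.joinpath(custom_config["type"].replace("_", "-"))
if custom_config["service_id"]:
    target = target.joinpath(custom_config["service_id"])
target = target.joinpath(f"{custom_config['name']}.conf")

target.parent.mkdir(parents=True, exist_ok=True)
target.write_bytes(custom_config["data"])
# -> /etc/bunkerweb/configs/server-http/www.example.com/my-rule.conf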

View file

@ -1,17 +1,15 @@
#!/usr/bin/python3
from os import sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import path as sys_path
from typing import Any, Optional, Union
from API import API
from ApiCaller import ApiCaller
if "/usr/share/bunkerweb/deps/python" not in sys_path:
sys_path.append("/usr/share/bunkerweb/deps/python")
from API import API # type: ignore
from ApiCaller import ApiCaller # type: ignore
from dotenv import dotenv_values
from kubernetes import config
class Instance:
@ -56,7 +54,7 @@ class Instance:
if self._type == "local":
return (
run(
["sudo", "/usr/sbin/nginx", "-s", "reload"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
@ -69,7 +67,7 @@ class Instance:
if self._type == "local":
return (
run(
["sudo", "/usr/sbin/nginx"],
["sudo", join(sep, "usr", "sbin", "nginx")],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
@ -82,7 +80,7 @@ class Instance:
if self._type == "local":
return (
run(
["sudo", "/usr/sbin/nginx", "-s", "stop"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "stop"],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
@ -95,7 +93,7 @@ class Instance:
if self._type == "local":
return (
run(
["sudo", "/usr/sbin/nginx", "-s", "restart"],
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "restart"],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
@ -240,9 +238,11 @@ class Instances:
)
# Local instance
if Path("/usr/sbin/nginx").exists():
if Path(sep, "usr", "sbin", "nginx").exists():
apiCaller = ApiCaller()
env_variables = dotenv_values("/etc/bunkerweb/variables.env")
env_variables = dotenv_values(
join(sep, "etc", "bunkerweb", "variables.env")
)
apiCaller._set_apis(
[
API(
@ -259,7 +259,9 @@ class Instances:
"local",
"127.0.0.1",
"local",
"up" if Path("/var/tmp/bunkerweb/nginx.pid").exists() else "down",
"up"
if Path(sep, "var", "tmp", "bunkerweb", "nginx.pid").exists()
else "down",
None,
apiCaller,
),

View file

@ -1,3 +1,6 @@
#!/usr/bin/python3
class ReverseProxied(object):
def __init__(self, app):
self.app = app

View file

@ -1,3 +1,5 @@
#!/usr/bin/python3
from flask_login import UserMixin
from bcrypt import checkpw, hashpw, gensalt

View file

@ -1,19 +1,22 @@
#!/usr/bin/python3
from os import environ, urandom
from os.path import join
from typing import List
import os
def get_variables():
vars = {}
vars["DOCKER_HOST"] = "unix:///var/run/docker.sock"
vars["ABSOLUTE_URI"] = ""
vars["FLASK_SECRET"] = os.urandom(32)
vars["FLASK_SECRET"] = urandom(32)
vars["FLASK_ENV"] = "development"
vars["ADMIN_USERNAME"] = "admin"
vars["ADMIN_PASSWORD"] = "changeme"
for k in vars:
if k in os.environ:
vars[k] = os.environ[k]
if k in environ:
vars[k] = environ[k]
return vars
@ -48,7 +51,7 @@ def path_to_dict(
{
"name": config,
"type": "folder",
"path": f"{path}/{config}",
"path": join(path, config),
"can_create_files": True,
"can_create_folders": False,
"can_edit": False,
@ -57,7 +60,7 @@ def path_to_dict(
{
"name": service,
"type": "folder",
"path": f"{path}/{config}/{service}",
"path": join(path, config, service),
"can_create_files": True,
"can_create_folders": False,
"can_edit": False,
@ -76,7 +79,12 @@ def path_to_dict(
file_info = {
"name": f"{conf['name']}.conf",
"type": "file",
"path": f"{path}/{type_lower}{'/' + conf['service_id'] if conf['service_id'] else ''}/{conf['name']}.conf",
"path": join(
path,
type_lower,
conf["service_id"] if conf["service_id"] else "",
f"{conf['name']}.conf",
),
"can_edit": conf["method"] == "ui",
"can_delete": True,
"can_download": True,
@ -109,7 +117,7 @@ def path_to_dict(
{
"name": service,
"type": "folder",
"path": f"{path}/{service}",
"path": join(path, service),
"can_create_files": False,
"can_create_folders": False,
"can_edit": False,
@ -122,9 +130,13 @@ def path_to_dict(
for conf in db_data:
file_info = {
"name": f"{conf['job_name']}/{conf['file_name']}",
"name": join(conf["job_name"], conf["file_name"]),
"type": "file",
"path": f"{path}{'/' + conf['service_id'] if conf['service_id'] else ''}/{conf['file_name']}",
"path": join(
path,
conf["service_id"] if conf["service_id"] else "",
conf["file_name"],
),
"can_edit": False,
"can_delete": False,
"can_download": True,