Fix UI with Linux

Théophile Diot 2023-04-25 19:27:30 +02:00
parent c7bc493e35
commit ab509c2705
No known key found for this signature in database
GPG key ID: E752C80DB72BB014
6 changed files with 354 additions and 447 deletions

View file

@@ -393,7 +393,7 @@ if __name__ == "__main__":
logger.warning(err)
else:
err = db.add_instance(
"localhost",
"127.0.0.1",
config_files.get("API_HTTP_PORT", 5000),
config_files.get("API_SERVER_NAME", "bwapi"),
)

View file

@@ -18,7 +18,7 @@ start() {
fi
source /etc/bunkerweb/ui.env
export $(cat /etc/bunkerweb/ui.env)
- python3 -m gunicorn --graceful-timeout=0 --bind=127.0.0.1:7000 --chdir /usr/share/bunkerweb/ui/ --workers=1 --threads=2 main:app &
+ python3 -m gunicorn --graceful-timeout=0 --bind=127.0.0.1:7000 --chdir /usr/share/bunkerweb/ui/ --workers=1 --threads=4 main:app &
echo $! > /var/tmp/bunkerweb/ui.pid
}

View file

@@ -318,6 +318,7 @@ if __name__ == "__main__":
if Path("/var/lib/bunkerweb/db.sqlite3").exists():
chmod("/var/lib/bunkerweb/db.sqlite3", 0o760)
+ first_run = True
while True:
# Instantiate scheduler
scheduler = JobScheduler(
@@ -346,7 +347,11 @@ if __name__ == "__main__":
"--output",
"/etc/nginx",
]
+ (["--variables", args.variables] if args.variables else []),
+ (
["--variables", args.variables]
if args.variables and first_run
else []
),
stdin=DEVNULL,
stderr=STDOUT,
)
@@ -431,6 +436,7 @@ if __name__ == "__main__":
generate = True
scheduler.setup()
need_reload = False
+ first_run = False
# infinite schedule for the jobs
logger.info("Executing job scheduler ...")
@@ -439,83 +445,82 @@ if __name__ == "__main__":
scheduler.run_pending()
sleep(1)
if not args.variables:
# check if the custom configs have changed since last time
tmp_custom_configs = db.get_custom_configs()
if custom_configs != tmp_custom_configs:
logger.info("Custom configs changed, generating ...")
logger.debug(f"{tmp_custom_configs=}")
logger.debug(f"{custom_configs=}")
custom_configs = deepcopy(tmp_custom_configs)
# check if the custom configs have changed since last time
tmp_custom_configs = db.get_custom_configs()
if custom_configs != tmp_custom_configs:
logger.info("Custom configs changed, generating ...")
logger.debug(f"{tmp_custom_configs=}")
logger.debug(f"{custom_configs=}")
custom_configs = deepcopy(tmp_custom_configs)
# Remove old custom configs files
logger.info("Removing old custom configs files ...")
for file in glob("/data/configs/*"):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=False)
# Remove old custom configs files
logger.info("Removing old custom configs files ...")
for file in glob("/data/configs/*"):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=False)
logger.info("Generating new custom configs ...")
generate_custom_configs(custom_configs, integration, api_caller)
logger.info("Generating new custom configs ...")
generate_custom_configs(custom_configs, integration, api_caller)
# reload nginx
logger.info("Reloading nginx ...")
if integration not in (
"Autoconf",
"Swarm",
"Kubernetes",
"Docker",
):
# Reloading the nginx server.
proc = subprocess_run(
# Reload nginx
["/usr/sbin/nginx", "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
env=deepcopy(env),
)
if proc.returncode == 0:
logger.info("Successfully reloaded nginx")
else:
logger.error(
f"Error while reloading nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
)
else:
need_reload = True
# check if the plugins have changed since last time
tmp_external_plugins = db.get_plugins(external=True)
if external_plugins != tmp_external_plugins:
logger.info("External plugins changed, generating ...")
logger.debug(f"{tmp_external_plugins=}")
logger.debug(f"{external_plugins=}")
external_plugins = deepcopy(tmp_external_plugins)
# Remove old external plugins files
logger.info("Removing old external plugins files ...")
for file in glob("/data/plugins/*"):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=False)
logger.info("Generating new external plugins ...")
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
integration,
api_caller,
# reload nginx
logger.info("Reloading nginx ...")
if integration not in (
"Autoconf",
"Swarm",
"Kubernetes",
"Docker",
):
# Reloading the nginx server.
proc = subprocess_run(
# Reload nginx
["/usr/sbin/nginx", "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
env=deepcopy(env),
)
if proc.returncode == 0:
logger.info("Successfully reloaded nginx")
else:
logger.error(
f"Error while reloading nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
)
else:
need_reload = True
# check if the config have changed since last time
tmp_env = db.get_config()
if env != tmp_env:
logger.info("Config changed, generating ...")
logger.debug(f"{tmp_env=}")
logger.debug(f"{env=}")
env = deepcopy(tmp_env)
need_reload = True
# check if the plugins have changed since last time
tmp_external_plugins = db.get_plugins(external=True)
if external_plugins != tmp_external_plugins:
logger.info("External plugins changed, generating ...")
logger.debug(f"{tmp_external_plugins=}")
logger.debug(f"{external_plugins=}")
external_plugins = deepcopy(tmp_external_plugins)
# Remove old external plugins files
logger.info("Removing old external plugins files ...")
for file in glob("/data/plugins/*"):
if Path(file).is_symlink() or Path(file).is_file():
Path(file).unlink()
elif Path(file).is_dir():
rmtree(file, ignore_errors=False)
logger.info("Generating new external plugins ...")
generate_external_plugins(
db.get_plugins(external=True, with_data=True),
integration,
api_caller,
)
need_reload = True
# check if the config have changed since last time
tmp_env = db.get_config()
if env != tmp_env:
logger.info("Config changed, generating ...")
logger.debug(f"{tmp_env=}")
logger.debug(f"{env=}")
env = deepcopy(tmp_env)
need_reload = True
except:
logger.error(
f"Exception while executing scheduler : {format_exc()}",

View file

@@ -52,11 +52,10 @@ from json import JSONDecodeError, dumps, load as json_load
from jinja2 import Template
from kubernetes import client as kube_client
from kubernetes.client.exceptions import ApiException as kube_ApiException
- from os import _exit, chmod, getenv, getpid, listdir, walk
- from os.path import join
+ from os import _exit, getenv, getpid, listdir
from re import match as re_match
from requests import get
- from shutil import move, rmtree, copytree, chown
+ from shutil import move, rmtree, copytree
from signal import SIGINT, signal, SIGTERM
from subprocess import PIPE, Popen, call
from tarfile import CompressionError, HeaderError, ReadError, TarError, open as tar_open
@@ -250,8 +249,6 @@ def manage_bunkerweb(method: str, operation: str = "reloads", *args):
operation = app.config["INSTANCES"].stop_instance(args[0])
elif operation == "restart":
operation = app.config["INSTANCES"].restart_instance(args[0])
elif Path("/usr/sbin/nginx").is_file():
operation = app.config["INSTANCES"].reload_instances()
else:
operation = "The scheduler will be in charge of reloading the instances."
@@ -728,36 +725,17 @@ def plugins():
flash(f"Can't delete internal plugin {variables['name']}", "error")
return redirect(url_for("loading", next=url_for("plugins"))), 500
if not Path("/usr/sbin/nginx").is_file():
plugins = app.config["CONFIG"].get_plugins()
for plugin in deepcopy(plugins):
if plugin["external"] is False or plugin["id"] == variables["name"]:
del plugins[plugins.index(plugin)]
plugins = app.config["CONFIG"].get_plugins()
for plugin in deepcopy(plugins):
if plugin["external"] is False or plugin["id"] == variables["name"]:
del plugins[plugins.index(plugin)]
err = db.update_external_plugins(plugins)
if err:
flash(
f"Couldn't update external plugins to database: {err}",
"error",
)
else:
variables["path"] = f"/etc/bunkerweb/plugins/{variables['name']}"
operation = app.config["CONFIGFILES"].check_path(
variables["path"], "/etc/bunkerweb/plugins/"
err = db.update_external_plugins(plugins)
if err:
flash(
f"Couldn't update external plugins to database: {err}",
"error",
)
if operation:
flash(operation, "error")
return redirect(url_for("loading", next=url_for("plugins"))), 500
operation, error = app.config["CONFIGFILES"].delete_path(
variables["path"]
)
if error:
flash(operation, "error")
return redirect(url_for("loading", next=url_for("plugins")))
else:
if not Path("/var/tmp/bunkerweb/ui").exists() or not listdir(
"/var/tmp/bunkerweb/ui"
@@ -811,43 +789,32 @@ def plugins():
)
raise Exception
if not Path("/usr/sbin/nginx").is_file():
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
else:
if Path(
f"/etc/bunkerweb/plugins/{folder_name}"
).exists():
raise FileExistsError
copytree(
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
f"/etc/bunkerweb/plugins/{folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except KeyError:
zip_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
@@ -895,54 +862,43 @@ def plugins():
)
raise Exception
if not Path("/usr/sbin/nginx").is_file():
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
)
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
new_plugins_ids.append(folder_name)
else:
if Path(
f"/etc/bunkerweb/plugins/{folder_name}"
).exists():
raise FileExistsError
plugin_content.seek(0)
value = plugin_content.getvalue()
copytree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}",
f"/etc/bunkerweb/plugins/{folder_name}",
)
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except BadZipFile:
errors += 1
error = 1
@@ -985,43 +941,32 @@ def plugins():
)
raise Exception
if not Path("/usr/sbin/nginx").is_file():
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
else:
if Path(
f"/etc/bunkerweb/plugins/{folder_name}"
).exists():
raise FileExistsError
copytree(
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
f"/etc/bunkerweb/plugins/{folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except KeyError:
tar_file.extractall(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
@@ -1069,54 +1014,43 @@ def plugins():
)
raise Exception
if not Path("/usr/sbin/nginx").is_file():
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
for file_name in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
):
move(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}/{file_name}",
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{file_name}",
)
rmtree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}"
)
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
plugin_content.seek(0)
value = plugin_content.getvalue()
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
plugin_content = BytesIO()
with tar_open(
fileobj=plugin_content, mode="w:gz"
) as tar:
tar.add(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}",
arcname=temp_folder_name,
recursive=True,
)
new_plugins_ids.append(folder_name)
else:
if Path(
f"/etc/bunkerweb/plugins/{folder_name}"
).exists():
raise FileExistsError
plugin_content.seek(0)
value = plugin_content.getvalue()
copytree(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}/{dirs[0]}",
f"/etc/bunkerweb/plugins/{folder_name}",
)
new_plugins.append(
plugin_file
| {
"external": True,
"page": "ui"
in listdir(
f"/var/tmp/bunkerweb/ui/{temp_folder_name}"
),
"method": "ui",
"data": value,
"checksum": sha256(value).hexdigest(),
}
)
new_plugins_ids.append(folder_name)
except ReadError:
errors += 1
error = 1
@@ -1185,12 +1119,6 @@ def plugins():
if errors >= files_count:
return redirect(url_for("loading", next=url_for("plugins")))
- # Fix permissions for plugins folders
- for root, dirs, files in walk("/etc/bunkerweb/plugins", topdown=False):
- for name in files + dirs:
- chown(join(root, name), "root", 101)
- chmod(join(root, name), 0o770)
plugins = app.config["CONFIG"].get_plugins(external=True, with_data=True)
for plugin in deepcopy(plugins):
if plugin["id"] in new_plugins_ids:
@@ -1232,26 +1160,10 @@ def plugins():
plugin_id = request.args.get("plugin_id")
template = None
if not Path("/usr/sbin/nginx").is_file():
page = db.get_plugin_template(plugin_id)
page = db.get_plugin_template(plugin_id)
if page is not None:
template = Template(page.decode("utf-8"))
else:
page_path = ""
if Path(f"/etc/bunkerweb/plugins/{plugin_id}/ui/template.html").exists():
page_path = f"/etc/bunkerweb/plugins/{plugin_id}/ui/template.html"
elif Path(
f"/usr/share/bunkerweb/core/{plugin_id}/ui/template.html"
).exists():
page_path = f"/usr/share/bunkerweb/core/{plugin_id}/ui/template.html"
else:
flash(f"Plugin {plugin_id} not found", "error")
if page_path:
with open(page_path, "r") as f:
template = Template(f.read())
if page is not None:
template = Template(page.decode("utf-8"))
if template is not None:
return template.render(
@@ -1312,67 +1224,33 @@ def custom_plugin(plugin):
)
return redirect(url_for("loading", next=url_for("plugins", plugin_id=plugin)))
if not Path("/usr/sbin/nginx").is_file():
module = db.get_plugin_actions(plugin)
module = db.get_plugin_actions(plugin)
if module is None:
flash(
f"The <i>actions.py</i> file for the plugin <b>{plugin}</b> does not exist",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
try:
# Try to import the custom plugin
with NamedTemporaryFile(mode="wb", suffix=".py", delete=True) as temp:
temp.write(module)
temp.flush()
temp.seek(0)
loader = SourceFileLoader("actions", temp.name)
actions = loader.load_module()
except:
flash(
f"An error occurred while importing the plugin <b>{plugin}</b>:<br/>{format_exc()}",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
else:
if (
not Path(f"/etc/bunkerweb/plugins/{plugin}/ui/actions.py").exists()
and not Path(f"/usr/share/bunkerweb/core/{plugin}/ui/actions.py").exists()
):
flash(
f"The <i>actions.py</i> file for the plugin <b>{plugin}</b> does not exist",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
# Add the custom plugin to sys.path
sys_path.append(
(
"/etc/bunkerweb/plugins"
if Path(f"/etc/bunkerweb/plugins/{plugin}/ui/actions.py").exists()
else "/usr/share/bunkerweb/core"
)
+ f"/{plugin}/ui/"
if module is None:
flash(
f"The <i>actions.py</i> file for the plugin <b>{plugin}</b> does not exist",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
try:
# Try to import the custom plugin
with NamedTemporaryFile(mode="wb", suffix=".py", delete=True) as temp:
temp.write(module)
temp.flush()
temp.seek(0)
loader = SourceFileLoader("actions", temp.name)
actions = loader.load_module()
except:
flash(
f"An error occurred while importing the plugin <b>{plugin}</b>:<br/>{format_exc()}",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
try:
# Try to import the custom plugin
import actions
except:
flash(
f"An error occurred while importing the plugin <b>{plugin}</b>:<br/>{format_exc()}",
"error",
)
return redirect(
url_for("loading", next=url_for("plugins", plugin_id=plugin))
)
error = False
res = None
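For reference, a standalone sketch of the database-backed plugin loading pattern used above: the plugin's actions.py is fetched as bytes from the database, written to a temporary file, and imported from there instead of being read from /etc/bunkerweb/plugins. The helper name and the commented-out db call are illustrative assumptions.

from importlib.machinery import SourceFileLoader
from tempfile import NamedTemporaryFile

def load_actions_module(module_bytes: bytes):
    # Write the plugin's actions.py bytes to a temporary file and import it from there.
    with NamedTemporaryFile(mode="wb", suffix=".py", delete=True) as temp:
        temp.write(module_bytes)
        temp.flush()
        loader = SourceFileLoader("actions", temp.name)
        return loader.load_module()  # same (deprecated) loader call as in the diff

# module_bytes = db.get_plugin_actions("myplugin")  # None when the plugin ships no actions.py
# actions = load_actions_module(module_bytes) if module_bytes is not None else None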

View file

@@ -23,23 +23,22 @@ class Config:
self.__logger = logger
self.__db = db
if not Path("/usr/sbin/nginx").exists():
while not self.__db.is_initialized():
self.__logger.warning(
"Database is not initialized, retrying in 5s ...",
)
sleep(5)
while not self.__db.is_initialized():
self.__logger.warning(
"Database is not initialized, retrying in 5s ...",
)
sleep(5)
env = self.__db.get_config()
while not self.__db.is_first_config_saved() or not env:
self.__logger.warning(
"Database doesn't have any config saved yet, retrying in 5s ...",
)
sleep(5)
env = self.__db.get_config()
while not self.__db.is_first_config_saved() or not env:
self.__logger.warning(
"Database doesn't have any config saved yet, retrying in 5s ...",
)
sleep(5)
env = self.__db.get_config()
self.__logger.info("Database is ready")
Path("/var/tmp/bunkerweb/ui.healthy").write_text("ok")
self.__logger.info("Database is ready")
Path("/var/tmp/bunkerweb/ui.healthy").write_text("ok")
def __env_to_dict(self, filename: str) -> dict:
"""Converts the content of an env file into a dict
@@ -144,21 +143,6 @@ class Config:
def get_plugins(
self, *, external: bool = False, with_data: bool = False
) -> List[dict]:
if not Path("/usr/sbin/nginx").exists():
plugins = self.__db.get_plugins(external=external, with_data=with_data)
plugins.sort(key=lambda x: x["name"])
if not external:
general_plugin = None
for x, plugin in enumerate(plugins):
if plugin["name"] == "General":
general_plugin = plugin
del plugins[x]
break
plugins.insert(0, general_plugin)
return plugins
plugins = []
for foldername in list(iglob("/etc/bunkerweb/plugins/*")) + (
@@ -231,12 +215,6 @@ class Config:
dict
The nginx variables env file as a dict
"""
if Path("/usr/sbin/nginx").exists():
return {
k: ({"value": v, "method": "ui"} if methods else v)
for k, v in self.__env_to_dict("/etc/nginx/variables.env").items()
}
return self.__db.get_config(methods=methods)
def get_services(self, methods: bool = True) -> list[dict]:
@@ -247,22 +225,6 @@ class Config:
list
The services
"""
if Path("/usr/sbin/nginx").exists():
services = []
plugins_settings = self.get_plugins_settings()
for filename in iglob("/etc/nginx/**/variables.env"):
service = filename.split("/")[3]
env = {
k.replace(f"{service}_", ""): (
{"value": v, "method": "ui"} if methods else v
)
for k, v in self.__env_to_dict(filename).items()
if k.startswith(f"{service}_") or k in plugins_settings
}
services.append(env)
return services
return self.__db.get_services_settings(methods=methods)
def check_variables(self, variables: dict, _global: bool = False) -> int:
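To make the behaviour concrete, here is a minimal sketch of the database-readiness wait that Config now performs unconditionally (method names and log messages are taken from the hunk above; the db and logger arguments stand in for the project's objects):

from time import sleep

def wait_for_database(db, logger) -> dict:
    # Block until the database schema exists and a first configuration has been saved.
    while not db.is_initialized():
        logger.warning("Database is not initialized, retrying in 5s ...")
        sleep(5)
    env = db.get_config()
    while not db.is_first_config_saved() or not env:
        logger.warning("Database doesn't have any config saved yet, retrying in 5s ...")
        sleep(5)
        env = db.get_config()
    logger.info("Database is ready")
    return env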

View file

@@ -1,10 +1,18 @@
from pathlib import Path
from subprocess import run
from sys import path as sys_path
from typing import Any, Optional, Union
from API import API
from ApiCaller import ApiCaller
if "/usr/share/bunkerweb/deps/python" not in sys_path:
sys_path.append("/usr/share/bunkerweb/deps/python")
from dotenv import dotenv_values
from kubernetes import config
class Instance:
_id: str
@@ -45,15 +53,51 @@ class Instance:
return self._id
def reload(self) -> bool:
if self._type == "local":
return (
run(
["sudo", "systemctl", "reload", "bunkerweb"],
capture_output=True,
).returncode
== 0
)
return self.apiCaller._send_to_apis("POST", "/reload")
def start(self) -> bool:
if self._type == "local":
return (
run(
["sudo", "systemctl", "start", "bunkerweb"],
capture_output=True,
).returncode
== 0
)
return self.apiCaller._send_to_apis("POST", "/start")
def stop(self) -> bool:
if self._type == "local":
return (
run(
["sudo", "systemctl", "stop", "bunkerweb"],
capture_output=True,
).returncode
== 0
)
return self.apiCaller._send_to_apis("POST", "/stop")
def restart(self) -> bool:
if self._type == "local":
return (
run(
["sudo", "systemctl", "restart", "bunkerweb"],
capture_output=True,
).returncode
== 0
)
return self.apiCaller._send_to_apis("POST", "/restart")
@@ -114,6 +158,30 @@ class Instances:
if desired_tasks > 0 and (desired_tasks == running_tasks):
status = "up"
+ apis = []
+ for instance in self.__docker_client.services.list(
+ filters={"label": "bunkerweb.INSTANCE"}
+ ):
+ api_http_port = None
+ api_server_name = None
+ for var in instance.attrs["Spec"]["TaskTemplate"]["ContainerSpec"][
+ "Env"
+ ]:
+ if var.startswith("API_HTTP_PORT="):
+ api_http_port = var.replace("API_HTTP_PORT=", "", 1)
+ elif var.startswith("API_SERVER_NAME="):
+ api_server_name = var.replace("API_SERVER_NAME=", "", 1)
+ for task in instance.tasks():
+ apis.append(
+ API(
+ f"http://{instance.name}.{task['NodeID']}.{task['ID']}:{api_http_port or '5000'}",
+ host=api_server_name or "bwapi",
+ )
+ )
+ apiCaller = ApiCaller(apis=apis)
instances.append(
Instance(
instance.id,
@@ -122,7 +190,7 @@ class Instances:
"service",
status,
instance,
- None,
+ apiCaller,
)
)
elif self.__integration == "Kubernetes":
@@ -137,15 +205,30 @@ class Instances:
e.name: e.value for e in pod.spec.containers[0].env
}
apiCaller = ApiCaller()
apiCaller._set_apis(
[
API(
f"http://{pod.status.pod_ip}:{env_variables.get('API_HTTP_PORT', '5000')}",
env_variables.get("API_SERVER_NAME", "bwapi"),
apis = []
config.load_incluster_config()
corev1 = self.__kubernetes_client.CoreV1Api()
for pod in corev1.list_pod_for_all_namespaces(watch=False).items:
if (
pod.metadata.annotations != None
and "bunkerweb.io/INSTANCE" in pod.metadata.annotations
):
api_http_port = None
api_server_name = None
for pod_env in pod.spec.containers[0].env:
if pod_env.name == "API_HTTP_PORT":
api_http_port = pod_env.value or "5000"
elif pod_env.name == "API_SERVER_NAME":
api_server_name = pod_env.value or "bwapi"
apis.append(
API(
f"http://{pod.status.pod_ip}:{api_http_port or '5000'}",
host=api_server_name or "bwapi",
)
)
]
)
apiCaller = ApiCaller(apis=apis)
status = "up"
if pod.status.conditions is not None:
@@ -173,6 +256,17 @@ class Instances:
# Local instance
if Path("/usr/sbin/nginx").exists():
apiCaller = ApiCaller()
+ env_variables = dotenv_values("/etc/bunkerweb/variables.env")
+ apiCaller._set_apis(
+ [
+ API(
+ f"http://127.0.0.1:{env_variables.get('API_HTTP_PORT', '5000')}",
+ env_variables.get("API_SERVER_NAME", "bwapi"),
+ )
+ ]
+ )
instances.insert(
0,
Instance(
@@ -181,6 +275,8 @@ class Instances:
"127.0.0.1",
"local",
"up" if Path("/var/tmp/bunkerweb/nginx.pid").exists() else "down",
None,
apiCaller,
),
)
@@ -204,17 +300,7 @@ class Instances:
if instance is None:
instance = self.__instance_from_id(id)
- result = True
- if instance._type == "local":
- result = (
- run(
- ["sudo", "systemctl", "restart", "bunkerweb"], capture_output=True
- ).returncode
- != 0
- )
- elif instance._type == "container":
- # result = instance.run_jobs()
- result = result & instance.reload()
+ result = instance.reload()
if result:
return f"Instance {instance.name} has been reloaded."
@@ -223,16 +309,8 @@ class Instances:
def start_instance(self, id) -> str:
instance = self.__instance_from_id(id)
- result = True
- if instance._type == "local":
- proc = run(
- ["sudo", "/usr/share/bunkerweb/ui/linux.sh", "start"],
- capture_output=True,
- )
- result = proc.returncode == 0
- elif instance._type == "container":
- result = instance.start()
+ result = instance.start()
if result:
return f"Instance {instance.name} has been started."
@@ -241,16 +319,8 @@ class Instances:
def stop_instance(self, id) -> str:
instance = self.__instance_from_id(id)
- result = True
- if instance._type == "local":
- proc = run(
- ["sudo", "/usr/share/bunkerweb/ui/linux.sh", "stop"],
- capture_output=True,
- )
- result = proc.returncode == 0
- elif instance._type == "container":
- result = instance.stop()
+ result = instance.stop()
if result:
return f"Instance {instance.name} has been stopped."
@@ -259,16 +329,8 @@ class Instances:
def restart_instance(self, id) -> str:
instance = self.__instance_from_id(id)
- result = True
- if instance._type == "local":
- proc = run(
- ["sudo", "/usr/share/bunkerweb/ui/linux.sh", "restart"],
- capture_output=True,
- )
- result = proc.returncode == 0
- elif instance._type == "container":
- result = instance.restart()
+ result = instance.restart()
if result:
return f"Instance {instance.name} has been restarted."