#!/usr/bin/python3

from argparse import ArgumentParser
from copy import deepcopy
from glob import glob
from os import (
    _exit,
    chmod,
    environ,
    getenv,
    getpid,
    listdir,
    stat,
    walk,
)
from os.path import dirname, join
from pathlib import Path
from shutil import copy, rmtree
from signal import SIGINT, SIGTERM, signal, SIGHUP
from stat import S_IEXEC
from subprocess import run as subprocess_run, DEVNULL, STDOUT
from sys import path as sys_path
from tarfile import open as tar_open
from time import sleep
from traceback import format_exc
from typing import Any, Dict, List
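
# Make BunkerWeb's bundled dependencies and internal modules importable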
if "/usr/share/bunkerweb/deps/python" not in sys_path:
    sys_path.append("/usr/share/bunkerweb/deps/python")
if "/usr/share/bunkerweb/utils" not in sys_path:
    sys_path.append("/usr/share/bunkerweb/utils")
if "/usr/share/bunkerweb/api" not in sys_path:
    sys_path.append("/usr/share/bunkerweb/api")
if "/usr/share/bunkerweb/db" not in sys_path:
    sys_path.append("/usr/share/bunkerweb/db")

from dotenv import dotenv_values

from logger import setup_logger
from Database import Database
from JobScheduler import JobScheduler
from ApiCaller import ApiCaller
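
# Module-level state shared with the signal handlers defined below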
run = True
scheduler = None
reloading = False
logger = setup_logger("Scheduler", getenv("LOG_LEVEL", "INFO"))
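

# Catch SIGINT/SIGTERM: clear any scheduled jobs and exit cleanly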
def handle_stop(signum, frame):
    global run, scheduler
    run = False
    if scheduler is not None:
        scheduler.clear()
    stop(0)


signal(SIGINT, handle_stop)
signal(SIGTERM, handle_stop)


# Function to catch SIGHUP and reload the scheduler
def handle_reload(signum, frame):
    global reloading, run, scheduler
    reloading = True
    try:
        if scheduler is not None and run:
            # Get the env by reading the .env file
            env = dotenv_values("/etc/bunkerweb/variables.env")
            if scheduler.reload(env):
                logger.info("Reload successful")
            else:
                logger.error("Reload failed")
        else:
            logger.warning(
                "Ignored reload operation because scheduler is not running ...",
            )
    except:
        logger.error(
            f"Exception while reloading scheduler : {format_exc()}",
        )


signal(SIGHUP, handle_reload)


def stop(status):
    Path("/var/tmp/bunkerweb/scheduler.pid").unlink(missing_ok=True)
    Path("/var/tmp/bunkerweb/scheduler.healthy").unlink(missing_ok=True)
    _exit(status)
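

# Write each custom config to <original_path>/<type>[/<service_id>]/<name>.conf and,
# for container-based integrations, push the configs folder to the BunkerWeb instances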
def generate_custom_configs(
    custom_configs: List[Dict[str, Any]],
    integration: str,
    api_caller: ApiCaller,
    *,
    original_path: str = "/data/configs",
):
    Path(original_path).mkdir(parents=True, exist_ok=True)
    for custom_config in custom_configs:
        tmp_path = f"{original_path}/{custom_config['type'].replace('_', '-')}"
        if custom_config["service_id"]:
            tmp_path += f"/{custom_config['service_id']}"
        tmp_path += f"/{custom_config['name']}.conf"
        Path(dirname(tmp_path)).mkdir(parents=True, exist_ok=True)
        Path(tmp_path).write_bytes(custom_config["data"])

    if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
        logger.info("Sending custom configs to BunkerWeb")
        ret = api_caller._send_files("/data/configs", "/custom_configs")

        if not ret:
            logger.error(
                "Sending custom configs failed, configuration will not work as expected...",
            )
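

# Unpack each external plugin archive into <original_path>, make the plugin's job scripts
# executable and, for container-based integrations, push the folder to the BunkerWeb instances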
def generate_external_plugins(
    plugins: List[Dict[str, Any]],
    integration: str,
    api_caller: ApiCaller,
    *,
    original_path: str = "/data/plugins",
):
    Path(original_path).mkdir(parents=True, exist_ok=True)
    for plugin in plugins:
        tmp_path = f"{original_path}/{plugin['id']}/{plugin['name']}.tar.gz"
        plugin_dir = dirname(tmp_path)
        Path(plugin_dir).mkdir(parents=True, exist_ok=True)
        Path(tmp_path).write_bytes(plugin["data"])
        with tar_open(tmp_path, "r:gz") as tar:
            tar.extractall(original_path)
        Path(tmp_path).unlink()

        for job_file in glob(f"{plugin_dir}/jobs/*"):
            st = stat(job_file)
            chmod(job_file, st.st_mode | S_IEXEC)

    if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
        logger.info("Sending plugins to BunkerWeb")
        ret = api_caller._send_files("/data/plugins", "/plugins")

        if not ret:
            logger.error(
                "Sending plugins failed, configuration will not work as expected...",
            )
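

# Entry point: enforce a single running instance, load the configuration, then run the scheduling loop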
if __name__ == "__main__":
    try:
        # Don't execute if pid file exists
        if Path("/var/tmp/bunkerweb/scheduler.pid").is_file():
            logger.error(
                "Scheduler is already running, skipping execution ...",
            )
            _exit(1)

        # Write pid to file
        Path("/var/tmp/bunkerweb/scheduler.pid").write_text(str(getpid()))

        # Parse arguments
        parser = ArgumentParser(description="Job scheduler for BunkerWeb")
        parser.add_argument(
            "--variables",
            type=str,
            help="path to the file containing environment variables",
        )
        args = parser.parse_args()
        generate = False
        integration = "Linux"
        api_caller = ApiCaller()

        # Define db here because otherwise it will be undefined for Linux
        db = Database(
            logger,
            sqlalchemy_string=getenv("DATABASE_URI", None),
        )
        # END Define db because otherwise it will be undefined for Linux

        logger.info("Scheduler started ...")

        # Check if the --variables argument was given
        if args.variables:
            logger.info(f"Variables : {args.variables}")

            # Read env file
            env = dotenv_values(args.variables)

            db = Database(
                logger,
                sqlalchemy_string=env.get("DATABASE_URI", None),
            )

            while not db.is_initialized():
                logger.warning(
                    "Database is not initialized, retrying in 5s ...",
                )
                sleep(5)
        else:
            # Read from database
            integration = "Docker"
            if Path("/usr/share/bunkerweb/INTEGRATION").exists():
                with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
                    integration = f.read().strip()

            api_caller.auto_setup(bw_integration=integration)
            db = Database(
                logger,
                sqlalchemy_string=getenv("DATABASE_URI", None),
            )

            if integration in (
                "Swarm",
                "Kubernetes",
                "Autoconf",
            ):
                while not db.is_autoconf_loaded():
                    logger.warning(
                        "Autoconf is not loaded yet in the database, retrying in 5s ...",
                    )
                    sleep(5)
            elif integration == "Docker" and (
                not Path("/var/tmp/bunkerweb/variables.env").exists()
                or db.get_config() != dotenv_values("/var/tmp/bunkerweb/variables.env")
            ):
                # run the config saver
                proc = subprocess_run(
                    [
                        "python",
                        "/usr/share/bunkerweb/gen/save_config.py",
                        "--settings",
                        "/usr/share/bunkerweb/settings.json",
                    ],
                    stdin=DEVNULL,
                    stderr=STDOUT,
                )
                if proc.returncode != 0:
                    logger.error(
                        "Config saver failed, configuration will not work as expected...",
                    )

                while not db.is_initialized():
                    logger.warning(
                        "Database is not initialized, retrying in 5s ...",
                    )
                    sleep(5)

            env = db.get_config()
            while not db.is_first_config_saved() or not env:
                logger.warning(
                    "Database doesn't have any config saved yet, retrying in 5s ...",
                )
                sleep(5)
                env = db.get_config()

            env["DATABASE_URI"] = db.get_database_uri()

        # Checking if any custom config has been created by the user
        custom_confs = []
        root_dirs = listdir("/etc/bunkerweb/configs")
        for root, dirs, files in walk("/etc/bunkerweb/configs", topdown=True):
            if (
                root != "configs"
                and (dirs and not root.split("/")[-1] in root_dirs)
                or files
            ):
                path_exploded = root.split("/")
                for file in files:
                    with open(join(root, file), "r") as f:
                        custom_confs.append(
                            {
                                "value": f.read(),
                                "exploded": (
                                    f"{path_exploded.pop()}"
                                    if path_exploded[-1] not in root_dirs
                                    else "",
                                    path_exploded[-1],
                                    file.replace(".conf", ""),
                                ),
                            }
                        )
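
        # Save any manually created configs to the database and only regenerate
        # the files on disk if the stored custom configs actually changed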
        old_configs = None
        if custom_confs:
            old_configs = db.get_custom_configs()

            err = db.save_custom_configs(custom_confs, "manual")
            if err:
                logger.error(
                    f"Couldn't save some manually created custom configs to database: {err}",
                )

        custom_configs = db.get_custom_configs()

        if old_configs != custom_configs:
            generate_custom_configs(custom_configs, integration, api_caller)
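
        # Sync external plugins stored in the database to the local /data/plugins folder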
        external_plugins = db.get_plugins(external=True)
        if external_plugins:
            generate_external_plugins(
                db.get_plugins(external=True, with_data=True),
                integration,
                api_caller,
            )

        logger.info("Executing scheduler ...")
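
        # Only regenerate the configuration when the cached variables.env is missing or outdated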
        generate = not Path(
            "/var/tmp/bunkerweb/variables.env"
        ).exists() or env != dotenv_values("/var/tmp/bunkerweb/variables.env")

        if not generate:
            logger.warning(
                "Looks like BunkerWeb configuration is already generated, will not generate it again ..."
            )

        if Path("/var/lib/bunkerweb/db.sqlite3").exists():
            chmod("/var/lib/bunkerweb/db.sqlite3", 0o760)
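
        # Main loop: run the jobs, (re)generate and push the configuration, then watch for changes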
        while True:
            # Instantiate scheduler
            scheduler = JobScheduler(
                env=deepcopy(env) | environ,
                apis=api_caller._get_apis(),
                logger=logger,
                integration=integration,
            )

            # Only run jobs once
            if not scheduler.run_once():
                logger.error("At least one job in run_once() failed")
            else:
                logger.info("All jobs in run_once() were successful")

            if generate:
                # run the generator
                proc = subprocess_run(
                    [
                        "python3",
                        "/usr/share/bunkerweb/gen/main.py",
                        "--settings",
                        "/usr/share/bunkerweb/settings.json",
                        "--templates",
                        "/usr/share/bunkerweb/confs",
                        "--output",
                        "/etc/nginx",
                    ]
                    + (["--variables", args.variables] if args.variables else []),
                    stdin=DEVNULL,
                    stderr=STDOUT,
                )

                if proc.returncode != 0:
                    logger.error(
                        "Config generator failed, configuration will not work as expected...",
                    )
                else:
                    copy("/etc/nginx/variables.env", "/var/tmp/bunkerweb/variables.env")

                    if len(api_caller._get_apis()) > 0:
                        # send nginx configs
                        logger.info("Sending /etc/nginx folder ...")
                        ret = api_caller._send_files("/etc/nginx", "/confs")
                        if not ret:
                            logger.error(
                                "Sending nginx configs failed, configuration will not work as expected...",
                            )
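
            # Push the cache folder to the instances, then restart nginx (Linux) or ask the
            # instances to reload (container-based integrations)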
            try:
                if len(api_caller._get_apis()) > 0:
                    # send cache
                    logger.info("Sending /data/cache folder ...")
                    if not api_caller._send_files("/data/cache", "/cache"):
                        logger.error("Error while sending /data/cache folder")
                    else:
                        logger.info("Successfully sent /data/cache folder")

                # restart nginx
                if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
                    # Stop temp nginx
                    logger.info("Stopping temp nginx ...")
                    proc = subprocess_run(
                        ["/usr/sbin/nginx", "-s", "stop"],
                        stdin=DEVNULL,
                        stderr=STDOUT,
                        env=deepcopy(env),
                    )
                    if proc.returncode == 0:
                        logger.info("Successfully sent stop signal to temp nginx")
                        i = 0
                        while i < 20:
                            if not Path("/var/tmp/bunkerweb/nginx.pid").is_file():
                                break
                            logger.warning("Waiting for temp nginx to stop ...")
                            sleep(1)
                            i += 1

                        if i >= 20:
                            logger.error(
                                "Timeout error while waiting for temp nginx to stop"
                            )
                        else:
                            # Start nginx
                            logger.info("Starting nginx ...")
                            proc = subprocess_run(
                                ["/usr/sbin/nginx"],
                                stdin=DEVNULL,
                                stderr=STDOUT,
                                env=deepcopy(env),
                            )
                            if proc.returncode == 0:
                                logger.info("Successfully started nginx")
                            else:
                                logger.error(
                                    f"Error while starting nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
                                )
                    else:
                        logger.error(
                            f"Error while sending stop signal to temp nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
                        )
                else:
                    if api_caller._send_to_apis("POST", "/reload"):
                        logger.info("Successfully reloaded nginx")
                    else:
                        logger.error("Error while reloading nginx")
            except:
                logger.error(
                    f"Exception while reloading after running jobs once scheduling : {format_exc()}",
                )

            generate = True
            scheduler.setup()
            need_reload = False

            # infinite schedule for the jobs
            logger.info("Executing job scheduler ...")
            Path("/var/tmp/bunkerweb/scheduler.healthy").write_text("ok")
            while run and not need_reload:
                scheduler.run_pending()
                sleep(1)
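
                # When the configuration lives in the database, watch for changes to custom
                # configs, external plugins and settings, and regenerate or reload as needed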
                if not args.variables:
                    # check if the custom configs have changed since last time
                    tmp_custom_configs = db.get_custom_configs()
                    if custom_configs != tmp_custom_configs:
                        logger.info("Custom configs changed, generating ...")
                        logger.debug(f"{tmp_custom_configs=}")
                        logger.debug(f"{custom_configs=}")
                        custom_configs = deepcopy(tmp_custom_configs)

                        # Remove old custom configs files
                        logger.info("Removing old custom configs files ...")
                        for file in glob("/data/configs/*"):
                            if Path(file).is_symlink() or Path(file).is_file():
                                Path(file).unlink()
                            elif Path(file).is_dir():
                                rmtree(file, ignore_errors=False)

                        logger.info("Generating new custom configs ...")
                        generate_custom_configs(custom_configs, integration, api_caller)

                        # reload nginx
                        logger.info("Reloading nginx ...")
                        if integration not in (
                            "Autoconf",
                            "Swarm",
                            "Kubernetes",
                            "Docker",
                        ):
                            proc = subprocess_run(
                                ["/usr/sbin/nginx", "-s", "reload"],
                                stdin=DEVNULL,
                                stderr=STDOUT,
                                env=deepcopy(env),
                            )
                            if proc.returncode == 0:
                                logger.info("Successfully reloaded nginx")
                            else:
                                logger.error(
                                    f"Error while reloading nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
                                )
                        else:
                            need_reload = True

                    # check if the plugins have changed since last time
                    tmp_external_plugins = db.get_plugins(external=True)
                    if external_plugins != tmp_external_plugins:
                        logger.info("External plugins changed, generating ...")
                        logger.debug(f"{tmp_external_plugins=}")
                        logger.debug(f"{external_plugins=}")
                        external_plugins = deepcopy(tmp_external_plugins)

                        # Remove old external plugins files
                        logger.info("Removing old external plugins files ...")
                        for file in glob("/data/plugins/*"):
                            if Path(file).is_symlink() or Path(file).is_file():
                                Path(file).unlink()
                            elif Path(file).is_dir():
                                rmtree(file, ignore_errors=False)

                        logger.info("Generating new external plugins ...")
                        generate_external_plugins(
                            db.get_plugins(external=True, with_data=True),
                            integration,
                            api_caller,
                        )
                        need_reload = True

                    # check if the config has changed since last time
                    tmp_env = db.get_config()
                    if env != tmp_env:
                        logger.info("Config changed, generating ...")
                        logger.debug(f"{tmp_env=}")
                        logger.debug(f"{env=}")
                        env = deepcopy(tmp_env)
                        need_reload = True
    except:
        logger.error(
            f"Exception while executing scheduler : {format_exc()}",
        )
        stop(1)