#!/usr/bin/python3
from argparse import ArgumentParser
from copy import deepcopy
from glob import glob
from hashlib import sha256
from io import BytesIO
from json import load as json_load
from os import (
    _exit,
    chmod,
    environ,
    getenv,
    getpid,
    listdir,
    sep,
    walk,
)
from os.path import basename, dirname, join, normpath
from pathlib import Path
from shutil import copy, rmtree
from signal import SIGINT, SIGTERM, signal, SIGHUP
from stat import S_IEXEC
from subprocess import run as subprocess_run, DEVNULL, STDOUT
from sys import path as sys_path
from tarfile import open as tar_open
from threading import Thread
from time import sleep
from traceback import format_exc
from typing import Any, Dict, List, Optional, Union
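
# Make BunkerWeb's bundled dependencies and internal packages (utils, api, db) importable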
for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
    if deps_path not in sys_path:
        sys_path.append(deps_path)

from dotenv import dotenv_values
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
from JobScheduler import JobScheduler
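
# Module-level state shared with the signal handlers defined below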
RUN = True
SCHEDULER: Optional[JobScheduler] = None
INTEGRATION = "Linux"
CACHE_PATH = join(sep, "var", "cache", "bunkerweb")
SCHEDULER_TMP_ENV_PATH = Path(sep, "var", "tmp", "bunkerweb", "scheduler.env")
SCHEDULER_TMP_ENV_PATH.parent.mkdir(parents=True, exist_ok=True)
SCHEDULER_TMP_ENV_PATH.touch()

logger = setup_logger("Scheduler", getenv("LOG_LEVEL", "INFO"))
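

# SIGINT/SIGTERM handler: clear any scheduled jobs and exit cleanly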
def handle_stop(signum, frame):
    if SCHEDULER is not None:
        SCHEDULER.clear()
    stop(0)


signal(SIGINT, handle_stop)
signal(SIGTERM, handle_stop)


# Function to catch SIGHUP and reload the scheduler
def handle_reload(signum, frame):
    try:
        if SCHEDULER is not None and RUN:
            # Get the env by reading the .env file
            tmp_env = dotenv_values(join(sep, "etc", "bunkerweb", "variables.env"))
            if SCHEDULER.reload(tmp_env):
                logger.info("Reload successful")
            else:
                logger.error("Reload failed")
        else:
            logger.warning(
                "Ignored reload operation because scheduler is not running ...",
            )
    except:
        logger.error(
            f"Exception while reloading scheduler : {format_exc()}",
        )


signal(SIGHUP, handle_reload)
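

# Remove the pid and healthcheck files, then exit with the given status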
def stop(status):
    Path(sep, "var", "run", "bunkerweb", "scheduler.pid").unlink(missing_ok=True)
    Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").unlink(missing_ok=True)
    _exit(status)
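

# Write the custom configs stored in the database to disk and, when APIs are set up,
# push them to the BunkerWeb instances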
def generate_custom_configs(
    configs: List[Dict[str, Any]],
    *,
    original_path: Union[Path, str] = join(sep, "etc", "bunkerweb", "configs"),
):
    if not isinstance(original_path, Path):
        original_path = Path(original_path)

    # Remove old custom configs files
    logger.info("Removing old custom configs files ...")
    for file in glob(str(original_path.joinpath("*", "*"))):
        file = Path(file)
        if file.is_symlink() or file.is_file():
            file.unlink()
        elif file.is_dir():
            rmtree(str(file), ignore_errors=True)

    if configs:
        logger.info("Generating new custom configs ...")
        original_path.mkdir(parents=True, exist_ok=True)
        for custom_config in configs:
            tmp_path = original_path.joinpath(
                custom_config["type"].replace("_", "-"),
                custom_config["service_id"] or "",
                f"{custom_config['name']}.conf",
            )
            tmp_path.parent.mkdir(parents=True, exist_ok=True)
            tmp_path.write_bytes(custom_config["data"])

    if SCHEDULER and SCHEDULER.apis:
        logger.info("Sending custom configs to BunkerWeb")
        ret = SCHEDULER.send_files(original_path, "/custom_configs")

        if not ret:
            logger.error(
                "Sending custom configs failed, configuration will not work as expected...",
            )
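

# Extract the external plugins stored in the database to disk, make their job scripts
# executable and, when APIs are set up, push them to the BunkerWeb instances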
def generate_external_plugins(
    plugins: List[Dict[str, Any]],
    *,
    original_path: Union[Path, str] = join(sep, "etc", "bunkerweb", "plugins"),
):
    if not isinstance(original_path, Path):
        original_path = Path(original_path)

    # Remove old external plugins files
    logger.info("Removing old external plugins files ...")
    for file in glob(str(original_path.joinpath("*"))):
        file = Path(file)
        if file.is_symlink() or file.is_file():
            file.unlink()
        elif file.is_dir():
            rmtree(str(file), ignore_errors=True)

    if plugins:
        logger.info("Generating new external plugins ...")
        original_path.mkdir(parents=True, exist_ok=True)
        for plugin in plugins:
            tmp_path = original_path.joinpath(plugin["id"], f"{plugin['name']}.tar.gz")
            tmp_path.parent.mkdir(parents=True, exist_ok=True)
            tmp_path.write_bytes(plugin["data"])
            with tar_open(str(tmp_path), "r:gz") as tar:
                tar.extractall(original_path)
            tmp_path.unlink()

            for job_file in glob(join(str(tmp_path.parent), "jobs", "*")):
                st = Path(job_file).stat()
                chmod(job_file, st.st_mode | S_IEXEC)

    if SCHEDULER and SCHEDULER.apis:
        logger.info("Sending plugins to BunkerWeb")
        ret = SCHEDULER.send_files(original_path, "/plugins")

        if not ret:
            logger.error(
                "Sending plugins failed, configuration will not work as expected...",
            )
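

# Recursively convert dicts/lists into hashable frozensets/tuples so whole
# configuration dicts can be compared by hash, e.g.:
#   dict_to_frozenset({"a": [2, 1]}) == frozenset({("a", (1, 2))})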
def dict_to_frozenset(d):
    if isinstance(d, list):
        return tuple(sorted(d))
    elif isinstance(d, dict):
        return frozenset((k, dict_to_frozenset(v)) for k, v in d.items())
    return d
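

# Turn an API object into the instance dict stored in the database; e.g. an endpoint
# like "http://10.20.30.40:5000" (illustrative values) yields
#   {"hostname": "10.20.30.40", "env": {"API_HTTP_PORT": 5000, "API_SERVER_NAME": api.host}}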
def api_to_instance(api):
    hostname_port = api.endpoint.replace("http://", "").replace("https://", "").replace("/", "").split(":")
    return {
        "hostname": hostname_port[0],
        "env": {"API_HTTP_PORT": int(hostname_port[1]), "API_SERVER_NAME": api.host},
    }


if __name__ == "__main__":
    try:
        # Don't execute if pid file exists
        pid_path = Path(sep, "var", "run", "bunkerweb", "scheduler.pid")
        if pid_path.is_file():
            logger.error(
                "Scheduler is already running, skipping execution ...",
            )
            _exit(1)

        # Write pid to file
        pid_path.write_text(str(getpid()), encoding="utf-8")

        del pid_path

        # Parse arguments
        parser = ArgumentParser(description="Job scheduler for BunkerWeb")
        parser.add_argument(
            "--variables",
            type=str,
            help="path to the file containing environment variables",
        )
        args = parser.parse_args()
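
        # Detect the integration: orchestrator environment variables take precedence,
        # then the INTEGRATION marker file, then an Alpine-based os-release (Docker image)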
        integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
        os_release_path = Path(sep, "etc", "os-release")
        if getenv("KUBERNETES_MODE", "no").lower() == "yes":
            INTEGRATION = "Kubernetes"
        elif getenv("SWARM_MODE", "no").lower() == "yes":
            INTEGRATION = "Swarm"
        elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
            INTEGRATION = "Autoconf"
        elif integration_path.is_file():
            INTEGRATION = integration_path.read_text(encoding="utf-8").strip()
        elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(encoding="utf-8"):
            INTEGRATION = "Docker"

        del integration_path, os_release_path

        tmp_variables_path = normpath(args.variables) if args.variables else join(sep, "var", "tmp", "bunkerweb", "variables.env")
        tmp_variables_path = Path(tmp_variables_path)
        nginx_variables_path = Path(sep, "etc", "nginx", "variables.env")
        dotenv_env = dotenv_values(str(tmp_variables_path))
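
        # Connect to the database; a DATABASE_URI set in the variables file takes
        # precedence over one set in the environment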
        db = Database(
            logger,
            sqlalchemy_string=dotenv_env.get("DATABASE_URI", getenv("DATABASE_URI", None)),
        )

        env = {}
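
        # Load the configuration: autoconf-based integrations wait for the database to be
        # populated, otherwise the local variables.env is (re)saved to the database first
        # whenever it differs from the stored configuration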
        if INTEGRATION in (
            "Swarm",
            "Kubernetes",
            "Autoconf",
        ):
            while not db.is_initialized():
                logger.warning(
                    "Database is not initialized, retrying in 5s ...",
                )
                sleep(5)

            while not db.is_autoconf_loaded():
                logger.warning(
                    "Autoconf is not loaded yet in the database, retrying in 5s ...",
                )
                sleep(5)

            env = db.get_config()
        elif not tmp_variables_path.exists() or not nginx_variables_path.exists() or (tmp_variables_path.read_text(encoding="utf-8") != nginx_variables_path.read_text(encoding="utf-8")) or db.is_initialized() and db.get_config() != dotenv_env:
            # run the config saver
            proc = subprocess_run(
                [
                    "python3",
                    join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
                    "--settings",
                    join(sep, "usr", "share", "bunkerweb", "settings.json"),
                ]
                + (["--variables", str(tmp_variables_path)] if args.variables else []),
                stdin=DEVNULL,
                stderr=STDOUT,
                check=False,
            )
            if proc.returncode != 0:
                logger.error(
                    "Config saver failed, configuration will not work as expected...",
                )

            while not db.is_initialized():
                logger.warning(
                    "Database is not initialized, retrying in 5s ...",
                )
                sleep(5)

            env = db.get_config()
            while not db.is_first_config_saved() or not env:
                logger.warning(
                    "Database doesn't have any config saved yet, retrying in 5s ...",
                )
                sleep(5)
                env = db.get_config()
        else:
            env = db.get_config()

        env["DATABASE_URI"] = db.database_uri

        # Instantiate scheduler
        SCHEDULER = JobScheduler(env.copy() | environ.copy(), logger, INTEGRATION)

        if INTEGRATION in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
            # Automatically setup the scheduler apis
            while not SCHEDULER.apis:
                SCHEDULER.auto_setup()

                if not SCHEDULER.apis:
                    logger.warning(
                        "No BunkerWeb API found, retrying in 5s ...",
                    )
                    sleep(5)
            db.update_instances([api_to_instance(api) for api in SCHEDULER.apis])

        scheduler_first_start = db.is_scheduler_first_start()

        logger.info("Scheduler started ...")

        # Checking if any custom config has been created by the user
        custom_configs = []
        db_configs = db.get_custom_configs()
        configs_path = Path(sep, "etc", "bunkerweb", "configs")
        root_dirs = listdir(str(configs_path))
        changes = False
        for root, dirs, files in walk(str(configs_path)):
            if files or (dirs and basename(root) not in root_dirs):
                path_exploded = root.split("/")
                for file in files:
                    content = Path(join(root, file)).read_text(encoding="utf-8")
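                    # "exploded" = (service id or None, config type, config name)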
                    custom_conf = {
                        "value": content,
                        "exploded": (
                            f"{path_exploded.pop()}" if path_exploded[-1] not in root_dirs else None,
                            path_exploded[-1],
                            file.replace(".conf", ""),
                        ),
                    }

                    saving = True
                    in_db = False
                    for db_conf in db_configs:
                        if db_conf["service_id"] == custom_conf["exploded"][0] and db_conf["name"] == custom_conf["exploded"][2]:
                            in_db = True
                            if db_conf["method"] != "manual":
                                saving = False
                                break

                    if not in_db and content.startswith("# CREATED BY ENV"):
                        saving = False
                        changes = True

                    if saving:
                        custom_configs.append(custom_conf)

        changes = changes or {hash(dict_to_frozenset(d)) for d in custom_configs} != {hash(dict_to_frozenset(d)) for d in db_configs}

        if changes:
            err = db.save_custom_configs(custom_configs, "manual")
            if err:
                logger.error(
                    f"Couldn't save some manually created custom configs to database: {err}",
                )

        if (scheduler_first_start and db_configs) or changes:
            Thread(
                target=generate_custom_configs,
                args=(db.get_custom_configs(),),
                kwargs={"original_path": configs_path},
            ).start()

        del custom_configs, db_configs

        # Check if any external plugin has been added by the user
        external_plugins = []
        db_plugins = db.get_plugins(external=True)
        plugins_dir = Path(sep, "etc", "bunkerweb", "plugins")
        for filename in glob(str(plugins_dir.joinpath("*", "plugin.json"))):
            with open(filename, "r", encoding="utf-8") as f:
                _dir = dirname(filename)
                plugin_content = BytesIO()
                with tar_open(fileobj=plugin_content, mode="w:gz", compresslevel=9) as tar:
                    tar.add(_dir, arcname=basename(_dir), recursive=True)
                plugin_content.seek(0, 0)
                value = plugin_content.getvalue()

                external_plugins.append(
                    json_load(f)
                    | {
                        "external": True,
                        "page": Path(_dir, "ui").exists(),
                        "method": "manual",
                        "data": value,
                        "checksum": sha256(value).hexdigest(),
                    }
                )
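
        # Compare the on-disk plugins with the ones in the database, ignoring the
        # fields that legitimately differ (data, checksum, jobs, method)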
        tmp_external_plugins = []
        for external_plugin in deepcopy(external_plugins):
            external_plugin.pop("data", None)
            external_plugin.pop("checksum", None)
            external_plugin.pop("jobs", None)
            external_plugin.pop("method", None)
            tmp_external_plugins.append(external_plugin)

        tmp_db_plugins = []
        for db_plugin in db_plugins.copy():
            db_plugin.pop("method", None)
            tmp_db_plugins.append(db_plugin)

        changes = {hash(dict_to_frozenset(d)) for d in tmp_external_plugins} != {hash(dict_to_frozenset(d)) for d in tmp_db_plugins}

        if changes:
            err = db.update_external_plugins(external_plugins, delete_missing=True)
            if err:
                logger.error(
                    f"Couldn't save some manually added plugins to database: {err}",
                )

        if (scheduler_first_start and db_plugins) or changes:
            generate_external_plugins(
                db.get_plugins(external=True, with_data=True),
                original_path=plugins_dir,
            )
            SCHEDULER.update_jobs()

        del tmp_external_plugins, external_plugins, db_plugins

        logger.info("Executing scheduler ...")

        del dotenv_env

        if scheduler_first_start:
            ret = db.set_scheduler_first_start()

            if ret:
                logger.error(f"An error occurred when setting the scheduler first start : {ret}")
                stop(1)

        FIRST_RUN = True
        CONFIG_NEED_GENERATION = True
        RUN_JOBS_ONCE = True
        CHANGES = []
        threads = []
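
        # Helpers run in background threads to push the generated nginx configuration
        # and the jobs cache to the BunkerWeb instances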
        def send_nginx_configs():
            logger.info(f"Sending {join(sep, 'etc', 'nginx')} folder ...")
            ret = SCHEDULER.send_files(join(sep, "etc", "nginx"), "/confs")

            if not ret:
                logger.error(
                    "Sending nginx configs failed, configuration will not work as expected...",
                )

        def send_nginx_cache():
            logger.info(f"Sending {CACHE_PATH} folder ...")

            if not SCHEDULER.send_files(CACHE_PATH, "/cache"):
                logger.error(f"Error while sending {CACHE_PATH} folder")
            else:
                logger.info(f"Successfully sent {CACHE_PATH} folder")
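
        # Main loop: acknowledge handled changes, run the jobs, regenerate the nginx
        # configuration, reload the instances, then watch the database for new changes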
        while True:
            threads.clear()
            ret = db.checked_changes(CHANGES)

            if ret:
                logger.error(f"An error occurred when setting the changes to checked in the database : {ret}")
                stop(1)

            if RUN_JOBS_ONCE:
                # Update the environment variables of the scheduler
                SCHEDULER.env = env.copy() | environ.copy()
                SCHEDULER.setup()

                # Only run jobs once
                if not SCHEDULER.run_once():
                    logger.error("At least one job in run_once() failed")
                else:
                    logger.info("All jobs in run_once() were successful")
2022-10-28 12:01:05 +02:00
2023-09-15 18:10:03 +02:00
if CONFIG_NEED_GENERATION :
content = " "
for k , v in env . items ( ) :
content + = f " { k } = { v } \n "
SCHEDULER_TMP_ENV_PATH . write_text ( content )
# run the generator
proc = subprocess_run (
[
" python3 " ,
join ( sep , " usr " , " share " , " bunkerweb " , " gen " , " main.py " ) ,
" --settings " ,
join ( sep , " usr " , " share " , " bunkerweb " , " settings.json " ) ,
" --templates " ,
join ( sep , " usr " , " share " , " bunkerweb " , " confs " ) ,
" --output " ,
join ( sep , " etc " , " nginx " ) ,
" --variables " ,
str ( SCHEDULER_TMP_ENV_PATH ) ,
] ,
stdin = DEVNULL ,
stderr = STDOUT ,
check = False ,
2023-09-06 12:41:45 +02:00
)
2022-11-14 11:00:00 +01:00
2023-09-15 18:10:03 +02:00
if proc . returncode != 0 :
logger . error (
" Config generator failed, configuration will not work as expected... " ,
)
else :
copy (
str ( nginx_variables_path ) ,
join ( sep , " var " , " tmp " , " bunkerweb " , " variables.env " ) ,
)
if SCHEDULER . apis :
# send nginx configs
thread = Thread ( target = send_nginx_configs )
thread . start ( )
threads . append ( thread )
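
            # Reload nginx: either ask the remote instances to reload over the API, or
            # (local integration) stop the temporary nginx and start the real one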
            try:
                if SCHEDULER.apis:
                    # send cache
                    thread = Thread(target=send_nginx_cache)
                    thread.start()
                    threads.append(thread)

                    for thread in threads:
                        thread.join()

                    if SCHEDULER.send_to_apis("POST", "/reload"):
                        logger.info("Successfully reloaded nginx")
                    else:
                        logger.error("Error while reloading nginx")
                else:
                    # Stop temp nginx
                    logger.info("Stopping temp nginx ...")
                    proc = subprocess_run(
                        [join(sep, "usr", "sbin", "nginx"), "-s", "stop"],
                        stdin=DEVNULL,
                        stderr=STDOUT,
                        env=env.copy(),
                        check=False,
                    )
                    if proc.returncode == 0:
                        logger.info("Successfully sent stop signal to temp nginx")
                        i = 0
                        while i < 20:
                            if not Path(sep, "var", "run", "bunkerweb", "nginx.pid").is_file():
                                break
                            logger.warning("Waiting for temp nginx to stop ...")
                            sleep(1)
                            i += 1

                        if i >= 20:
                            logger.error("Timeout error while waiting for temp nginx to stop")
                        else:
                            # Start nginx
                            logger.info("Starting nginx ...")
                            proc = subprocess_run(
                                [join(sep, "usr", "sbin", "nginx"), "-e", "/var/log/bunkerweb/error.log"],
                                stdin=DEVNULL,
                                stderr=STDOUT,
                                env=env.copy(),
                                check=False,
                            )
                            if proc.returncode == 0:
                                logger.info("Successfully started nginx")
                            else:
                                logger.error(
                                    f"Error while starting nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8') if proc.stderr else 'Missing stderr'}",
                                )
                    else:
                        logger.error(
                            f"Error while sending stop signal to temp nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8') if proc.stderr else 'Missing stderr'}",
                        )
            except:
                logger.error(
                    f"Exception while reloading after running jobs once scheduling : {format_exc()}",
                )
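
            # Reset the per-iteration flags before watching the database for new changes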
            NEED_RELOAD = False
            RUN_JOBS_ONCE = False
            CONFIG_NEED_GENERATION = False
            CONFIGS_NEED_GENERATION = False
            PLUGINS_NEED_GENERATION = False
            INSTANCES_NEED_GENERATION = False

            # infinite schedule for the jobs
            logger.info("Executing job scheduler ...")
            Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").write_text("ok", encoding="utf-8")

            while RUN and not NEED_RELOAD:
                SCHEDULER.run_pending()
                sleep(1)

                changes = db.check_changes()

                if isinstance(changes, str):
                    logger.error(f"An error occurred when checking for changes in the database : {changes}")
                    stop(1)

                # check if the plugins have changed since last time
                if changes["external_plugins_changed"]:
                    logger.info("External plugins changed, generating ...")

                    if FIRST_RUN:
                        # run the config saver to save potential ignored external plugins settings
                        logger.info("Running config saver to save potential ignored external plugins settings ...")
                        proc = subprocess_run(
                            [
                                "python",
                                join(
                                    sep,
                                    "usr",
                                    "share",
                                    "bunkerweb",
                                    "gen",
                                    "save_config.py",
                                ),
                                "--settings",
                                join(sep, "usr", "share", "bunkerweb", "settings.json"),
                            ],
                            stdin=DEVNULL,
                            stderr=STDOUT,
                            check=False,
                        )
                        if proc.returncode != 0:
                            logger.error(
                                "Config saver failed, configuration will not work as expected...",
                            )

                        changes.update(
                            {
                                "custom_configs_changed": True,
                                "config_changed": True,
                            }
                        )

                    PLUGINS_NEED_GENERATION = True
                    CONFIG_NEED_GENERATION = True
                    RUN_JOBS_ONCE = True
                    NEED_RELOAD = True
2023-06-21 21:33:23 +02:00
# check if the custom configs have changed since last time
if changes [ " custom_configs_changed " ] :
logger . info ( " Custom configs changed, generating ... " )
CONFIGS_NEED_GENERATION = True
2023-10-20 11:41:39 +02:00
CONFIG_NEED_GENERATION = True
2023-06-21 21:33:23 +02:00
NEED_RELOAD = True
2023-05-26 20:31:51 +02:00
# check if the config have changed since last time
if changes [ " config_changed " ] :
logger . info ( " Config changed, generating ... " )
2023-06-17 15:15:24 +02:00
CONFIG_NEED_GENERATION = True
2023-10-20 11:41:39 +02:00
RUN_JOBS_ONCE = True
2023-05-30 17:49:14 +02:00
NEED_RELOAD = True
2023-04-25 19:27:30 +02:00
2023-08-02 15:47:33 +02:00
# check if the instances have changed since last time
if changes [ " instances_changed " ] :
logger . info ( " Instances changed, generating ... " )
INSTANCES_NEED_GENERATION = True
2023-09-28 15:28:49 +02:00
CONFIG_NEED_GENERATION = True
2023-10-20 11:41:39 +02:00
RUN_JOBS_ONCE = True
2023-08-02 15:59:23 +02:00
NEED_RELOAD = True
2023-08-02 15:47:33 +02:00
2023-06-21 21:33:23 +02:00
FIRST_RUN = False
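
            # Apply the detected changes and record which kinds were handled in CHANGES
            # so they can be acknowledged with db.checked_changes() at the top of the loop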
2023-05-30 17:49:14 +02:00
if NEED_RELOAD :
2023-06-17 15:15:24 +02:00
CHANGES . clear ( )
2023-05-30 17:49:14 +02:00
if CONFIGS_NEED_GENERATION :
2023-06-17 15:15:24 +02:00
CHANGES . append ( " custom_configs " )
2023-10-03 12:01:24 +02:00
generate_custom_configs ( db . get_custom_configs ( ) , original_path = configs_path )
2023-04-25 19:27:30 +02:00
2023-05-30 17:49:14 +02:00
if PLUGINS_NEED_GENERATION :
2023-06-17 15:15:24 +02:00
CHANGES . append ( " external_plugins " )
2023-04-25 19:27:30 +02:00
generate_external_plugins (
2023-05-30 17:49:14 +02:00
db . get_plugins ( external = True , with_data = True ) ,
2023-05-25 19:00:17 +02:00
original_path = plugins_dir ,
2023-04-25 19:27:30 +02:00
)
2023-06-20 22:52:27 +02:00
SCHEDULER . update_jobs ( )
2023-04-25 19:27:30 +02:00
2023-06-17 15:15:24 +02:00
if CONFIG_NEED_GENERATION :
CHANGES . append ( " config " )
env = db . get_config ( )
2023-08-25 10:27:39 +02:00
env [ " DATABASE_URI " ] = db . database_uri
2023-08-02 15:47:33 +02:00
if INSTANCES_NEED_GENERATION :
CHANGES . append ( " instances " )
SCHEDULER . update_instances ( )
2022-10-19 17:37:13 +02:00
    except:
        logger.error(
            f"Exception while executing scheduler : {format_exc()}",
        )
        stop(1)