2022-10-19 17:37:13 +02:00
#!/usr/bin/python3
from argparse import ArgumentParser
2023-05-25 19:00:17 +02:00
from glob import glob
2023-05-11 01:13:23 +02:00
from hashlib import sha256
from io import BytesIO
from json import load as json_load
2022-11-04 18:14:44 +01:00
from os import (
_exit ,
chmod ,
2023-04-18 15:30:09 +02:00
environ ,
2022-11-04 18:14:44 +01:00
getenv ,
getpid ,
listdir ,
2023-05-11 01:13:23 +02:00
sep ,
2022-11-04 18:14:44 +01:00
walk ,
)
2023-05-25 22:52:02 +02:00
from os . path import basename , dirname , join , normpath
2022-12-14 17:09:57 +01:00
from pathlib import Path
2023-03-21 17:56:31 +01:00
from shutil import copy , rmtree
2022-11-30 09:10:03 +01:00
from signal import SIGINT , SIGTERM , signal , SIGHUP
2023-03-21 17:56:31 +01:00
from stat import S_IEXEC
2022-11-20 17:34:46 +01:00
from subprocess import run as subprocess_run , DEVNULL , STDOUT
2022-10-19 17:37:13 +02:00
from sys import path as sys_path
2023-03-10 13:16:00 +01:00
from tarfile import open as tar_open
2022-10-19 17:37:13 +02:00
from time import sleep
from traceback import format_exc
2023-05-30 17:49:14 +02:00
from typing import Any , Dict , List , Optional , Union
2022-10-19 17:37:13 +02:00
2023-05-25 22:52:02 +02:00
# Make the bundled BunkerWeb libraries importable (deps/python, utils, api, db).
base_dir = join(sep, "usr", "share", "bunkerweb")
for subdirs in (("deps", "python"), ("utils",), ("api",), ("db",)):
    candidate = join(base_dir, *subdirs)
    if candidate not in sys_path:
        sys_path.append(candidate)
2022-10-19 17:37:13 +02:00
from dotenv import dotenv_values
2023-05-25 22:52:02 +02:00
from logger import setup_logger # type: ignore
from Database import Database # type: ignore
2022-10-19 17:37:13 +02:00
from JobScheduler import JobScheduler
2023-05-25 22:52:02 +02:00
from ApiCaller import ApiCaller # type: ignore
2022-10-19 17:37:13 +02:00
2023-05-30 17:49:14 +02:00
# Runtime state shared by the signal handlers and the main loop in __main__.
RUN = True  # consulted by handle_reload and by the inner scheduling loop
SCHEDULER: Optional[JobScheduler] = None  # created in __main__ once the config is loaded
GENERATE = False  # True when the nginx configuration must be (re)generated
INTEGRATION = "Linux"  # deployment mode; re-detected at startup in __main__
CACHE_PATH = join(sep, "var", "cache", "bunkerweb")  # jobs cache pushed to BunkerWeb

# Process-wide logger; verbosity comes from the LOG_LEVEL environment variable.
logger = setup_logger("Scheduler", getenv("LOG_LEVEL", "INFO"))
2022-10-19 17:37:13 +02:00
2022-11-20 17:37:32 +01:00
def handle_stop(signum, frame):
    """SIGINT/SIGTERM handler: drop any scheduled jobs, then exit with status 0."""
    scheduler = SCHEDULER
    if scheduler is not None:
        scheduler.clear()
    stop(0)


# Both termination signals share the same handler.
for _sig in (SIGINT, SIGTERM):
    signal(_sig, handle_stop)
2022-11-11 17:10:16 +01:00
2022-12-02 19:53:41 +01:00
# Function to catch SIGHUP and reload the scheduler
def handle_reload(signum, frame):
    """SIGHUP handler: re-read /etc/bunkerweb/variables.env and reload the scheduler.

    The reload is only attempted when the scheduler exists and the main loop is
    still running; otherwise the request is logged and ignored. Any failure is
    logged instead of propagating out of the signal handler.
    """
    try:
        if SCHEDULER is not None and RUN:
            # Get the env by reading the .env file
            tmp_env = dotenv_values(join(sep, "etc", "bunkerweb", "variables.env"))
            if SCHEDULER.reload(tmp_env):
                logger.info("Reload successful")
            else:
                logger.error("Reload failed")
        else:
            logger.warning(
                "Ignored reload operation because scheduler is not running ...",
            )
    # FIX: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt raised while the handler runs.
    except Exception:
        logger.error(
            f"Exception while reloading scheduler : {format_exc()}",
        )


signal(SIGHUP, handle_reload)
2022-11-11 17:10:16 +01:00
2022-12-02 10:59:18 +01:00
2022-10-19 17:37:13 +02:00
def stop(status):
    """Remove the scheduler's pid and health marker files, then exit immediately."""
    markers = (
        Path(sep, "var", "run", "bunkerweb", "scheduler.pid"),
        Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy"),
    )
    for marker in markers:
        marker.unlink(missing_ok=True)
    # _exit skips interpreter cleanup on purpose: handlers may run mid-signal.
    _exit(status)
2022-11-17 16:18:24 +01:00
def generate_custom_configs(
    configs: List[Dict[str, Any]],
    *,
    original_path: Union[Path, str] = join(sep, "etc", "bunkerweb", "configs"),
):
    """Write the given custom configs to disk and push them to the BunkerWeb APIs.

    The target directory is wiped first so the on-disk tree mirrors *configs*
    exactly. Each entry must provide "type", "service_id" (falsy for global
    configs), "name" and "data" (bytes).

    Args:
        configs: custom config rows, typically from Database.get_custom_configs().
        original_path: directory holding the custom configs tree.
    """
    if not isinstance(original_path, Path):
        original_path = Path(original_path)

    # Remove old custom configs files
    logger.info("Removing old custom configs files ...")
    for entry in map(Path, glob(str(original_path.joinpath("*", "*")))):
        # Entries may be plain files, symlinks or whole per-service directories.
        if entry.is_symlink() or entry.is_file():
            entry.unlink()
        elif entry.is_dir():
            rmtree(str(entry), ignore_errors=True)

    if configs:
        logger.info("Generating new custom configs ...")
        original_path.mkdir(parents=True, exist_ok=True)
        for custom_config in configs:
            # Layout: <type-with-dashes>/<service_id or nothing>/<name>.conf
            tmp_path = original_path.joinpath(
                custom_config["type"].replace("_", "-"),
                custom_config["service_id"] or "",
                f"{custom_config['name']}.conf",
            )
            tmp_path.parent.mkdir(parents=True, exist_ok=True)
            tmp_path.write_bytes(custom_config["data"])

    # FIX: guard against SCHEDULER not being initialized yet — the original
    # dereferenced SCHEDULER.apis unconditionally (AttributeError on None).
    if SCHEDULER is not None and SCHEDULER.apis:
        logger.info("Sending custom configs to BunkerWeb")
        ret = SCHEDULER.send_files(original_path, "/custom_configs")
        if not ret:
            logger.error(
                "Sending custom configs failed, configuration will not work as expected...",
            )
2022-11-17 16:18:24 +01:00
2023-03-10 13:16:00 +01:00
def generate_external_plugins(
    plugins: List[Dict[str, Any]],
    *,
    original_path: Union[Path, str] = join(sep, "etc", "bunkerweb", "plugins"),
):
    """Extract the given external plugins to disk and push them to the BunkerWeb APIs.

    The target directory is wiped first. Each entry must provide "id", "name"
    and "data" (a gzipped tarball as bytes) — the tarball is extracted under
    *original_path* and its job scripts are made executable.

    Args:
        plugins: plugin rows, typically from Database.get_plugins(with_data=True).
        original_path: directory holding the external plugins.
    """
    if not isinstance(original_path, Path):
        original_path = Path(original_path)

    # Remove old external plugins files
    logger.info("Removing old external plugins files ...")
    for entry in map(Path, glob(str(original_path.joinpath("*")))):
        if entry.is_symlink() or entry.is_file():
            entry.unlink()
        elif entry.is_dir():
            rmtree(str(entry), ignore_errors=True)

    if plugins:
        logger.info("Generating new external plugins ...")
        original_path.mkdir(parents=True, exist_ok=True)
        for plugin in plugins:
            tmp_path = original_path.joinpath(plugin["id"], f"{plugin['name']}.tar.gz")
            tmp_path.parent.mkdir(parents=True, exist_ok=True)
            tmp_path.write_bytes(plugin["data"])
            # NOTE(security): extractall() does not sanitize member paths, so a
            # malicious archive stored in the database could escape
            # original_path — consider tarfile's extraction filters.
            with tar_open(str(tmp_path), "r:gz") as tar:
                tar.extractall(original_path)
            tmp_path.unlink()

            # Job scripts shipped with the plugin must be executable.
            for job_file in glob(join(str(tmp_path.parent), "jobs", "*")):
                st = Path(job_file).stat()
                chmod(job_file, st.st_mode | S_IEXEC)

    # FIX: guard against SCHEDULER not being initialized yet — the original
    # dereferenced SCHEDULER.apis unconditionally (AttributeError on None).
    if SCHEDULER is not None and SCHEDULER.apis:
        logger.info("Sending plugins to BunkerWeb")
        ret = SCHEDULER.send_files(original_path, "/plugins")
        if not ret:
            logger.error(
                "Sending plugins failed, configuration will not work as expected...",
            )
2023-03-10 13:16:00 +01:00
2022-10-19 17:37:13 +02:00
if __name__ == "__main__":
    try:
        # Don't execute if pid file exists
        pid_path = Path(sep, "var", "run", "bunkerweb", "scheduler.pid")
        if pid_path.is_file():
            logger.error(
                "Scheduler is already running, skipping execution ...",
            )
            _exit(1)

        # Write pid to file
        pid_path.write_text(str(getpid()), encoding="utf-8")
        del pid_path

        # Parse arguments
        parser = ArgumentParser(description="Job scheduler for BunkerWeb")
        parser.add_argument(
            "--variables",
            type=str,
            help="path to the file containing environment variables",
        )
        args = parser.parse_args()

        # Detect the integration: env vars win, then the INTEGRATION marker
        # file, then an Alpine os-release implies the Docker image.
        integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
        os_release_path = Path(sep, "etc", "os-release")
        if getenv("KUBERNETES_MODE", "no").lower() == "yes":
            INTEGRATION = "Kubernetes"
        elif getenv("SWARM_MODE", "no").lower() == "yes":
            INTEGRATION = "Swarm"
        elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
            INTEGRATION = "Autoconf"
        elif integration_path.is_file():
            INTEGRATION = integration_path.read_text(encoding="utf-8").strip()
        elif os_release_path.is_file() and "Alpine" in os_release_path.read_text(
            encoding="utf-8"
        ):
            INTEGRATION = "Docker"
        del integration_path, os_release_path

        tmp_variables_path = (
            normpath(args.variables)
            if args.variables
            else join(sep, "var", "tmp", "bunkerweb", "variables.env")
        )
        tmp_variables_path = Path(tmp_variables_path)
        nginx_variables_path = Path(sep, "etc", "nginx", "variables.env")
        dotenv_env = dotenv_values(str(tmp_variables_path))

        db = Database(
            logger,
            sqlalchemy_string=dotenv_env.get(
                "DATABASE_URI", getenv("DATABASE_URI", None)
            ),
        )

        if INTEGRATION in (
            "Swarm",
            "Kubernetes",
            "Autoconf",
        ):
            # In orchestrated modes the autoconf service owns the config.
            while not db.is_autoconf_loaded():
                logger.warning(
                    "Autoconf is not loaded yet in the database, retrying in 5s ...",
                )
                sleep(5)
        elif (
            not tmp_variables_path.exists()
            or not nginx_variables_path.exists()
            or (
                tmp_variables_path.read_text(encoding="utf-8")
                != nginx_variables_path.read_text(encoding="utf-8")
            )
            or db.is_initialized()
            and db.get_config() != dotenv_env
        ):
            # run the config saver
            proc = subprocess_run(
                [
                    "python3",
                    join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
                    "--settings",
                    join(sep, "usr", "share", "bunkerweb", "settings.json"),
                ]
                + (["--variables", str(tmp_variables_path)] if args.variables else []),
                stdin=DEVNULL,
                stderr=STDOUT,
                check=False,
            )
            if proc.returncode != 0:
                logger.error(
                    "Config saver failed, configuration will not work as expected...",
                )

        while not db.is_initialized():
            logger.warning(
                "Database is not initialized, retrying in 5s ...",
            )
            sleep(5)

        env = db.get_config()
        while not db.is_first_config_saved() or not env:
            logger.warning(
                "Database doesn't have any config saved yet, retrying in 5s ...",
            )
            sleep(5)
            env = db.get_config()

        env["DATABASE_URI"] = db.database_uri

        # Instantiate scheduler
        SCHEDULER = JobScheduler(env.copy() | environ.copy(), logger, INTEGRATION)

        if INTEGRATION in ("Swarm", "Kubernetes", "Autoconf", "Docker"):
            # Automatically setup the scheduler apis
            SCHEDULER.auto_setup()

        logger.info("Scheduler started ...")

        # Checking if any custom config has been created by the user
        custom_configs = []
        db_configs = db.get_custom_configs()
        configs_path = Path(sep, "etc", "bunkerweb", "configs")
        root_dirs = listdir(str(configs_path))
        for root, dirs, files in walk(str(configs_path)):
            if files or (dirs and basename(root) not in root_dirs):
                path_exploded = root.split("/")
                for file in files:
                    with open(join(root, file), "r", encoding="utf-8") as f:
                        # exploded = (service_id or None, config type, config name)
                        custom_conf = {
                            "value": f.read(),
                            "exploded": (
                                f"{path_exploded.pop()}"
                                if path_exploded[-1] not in root_dirs
                                else None,
                                path_exploded[-1],
                                file.replace(".conf", ""),
                            ),
                        }

                    # Skip configs already owned by a non-manual method in DB.
                    saving = True
                    for db_conf in db_configs:
                        if (
                            db_conf["method"] != "manual"
                            and db_conf["service_id"] == custom_conf["exploded"][0]
                            and db_conf["name"] == custom_conf["exploded"][2]
                        ):
                            saving = False
                            break

                    if saving:
                        custom_configs.append(custom_conf)

        err = db.save_custom_configs(custom_configs, "manual")
        if err:
            logger.error(
                f"Couldn't save some manually created custom configs to database: {err}",
            )

        generate_custom_configs(db.get_custom_configs(), original_path=configs_path)

        # Check if any external plugin has been added by the user
        external_plugins = []
        plugins_dir = Path(sep, "etc", "bunkerweb", "plugins")
        for filename in glob(str(plugins_dir.joinpath("*", "plugin.json"))):
            with open(filename, "r", encoding="utf-8") as f:
                _dir = dirname(filename)
                # Re-pack the plugin directory so it can be stored in the DB.
                plugin_content = BytesIO()
                with tar_open(
                    fileobj=plugin_content, mode="w:gz", compresslevel=9
                ) as tar:
                    tar.add(_dir, arcname=basename(_dir), recursive=True)
                plugin_content.seek(0)
                value = plugin_content.getvalue()

                external_plugins.append(
                    json_load(f)
                    | {
                        "external": True,
                        "page": Path(_dir, "ui").exists(),
                        "method": "manual",
                        "data": value,
                        "checksum": sha256(value).hexdigest(),
                    }
                )

        if external_plugins:
            err = db.update_external_plugins(external_plugins, delete_missing=False)
            if err:
                logger.error(
                    f"Couldn't save some manually added plugins to database: {err}",
                )

        generate_external_plugins(
            db.get_plugins(external=True, with_data=True),
            original_path=plugins_dir,
        )

        logger.info("Executing scheduler ...")

        GENERATE = (
            env != dotenv_env
            or not tmp_variables_path.exists()
            or not nginx_variables_path.exists()
            or (
                tmp_variables_path.read_text(encoding="utf-8")
                != nginx_variables_path.read_text(encoding="utf-8")
            )
        )

        del dotenv_env

        if not GENERATE:
            logger.warning(
                "Looks like BunkerWeb configuration is already generated, will not generate it again ..."
            )

        FIRST_RUN = True
        while True:
            ret = db.checked_changes()
            if ret:
                logger.error(
                    f"An error occurred when setting the changes to checked in the database : {ret}"
                )
                stop(1)

            # Update the environment variables of the scheduler
            SCHEDULER.env = env.copy() | environ.copy()

            # Only run jobs once
            if not SCHEDULER.run_once():
                logger.error("At least one job in run_once() failed")
            else:
                logger.info("All jobs in run_once() were successful")

            changes = db.check_changes()
            if isinstance(changes, str):
                logger.error(
                    f"An error occurred when checking for changes in the database : {changes}"
                )
                stop(1)

            # check if the plugins have changed since last time
            if changes["external_plugins_changed"]:
                logger.info("External plugins changed, generating ...")
                generate_external_plugins(
                    db.get_plugins(external=True, with_data=True),
                    original_path=plugins_dir,
                )

                # run the config saver to save potential plugins settings
                proc = subprocess_run(
                    [
                        # FIX: was "python" — every other invocation in this file
                        # uses "python3"; plain "python" may not exist on target.
                        "python3",
                        join(sep, "usr", "share", "bunkerweb", "gen", "save_config.py"),
                        "--settings",
                        join(sep, "usr", "share", "bunkerweb", "settings.json"),
                    ],
                    stdin=DEVNULL,
                    stderr=STDOUT,
                    check=False,
                )
                if proc.returncode != 0:
                    logger.error(
                        "Config saver failed, configuration will not work as expected...",
                    )

                ret = db.checked_changes()
                if ret:
                    logger.error(
                        f"An error occurred when setting the changes to checked in the database : {ret}"
                    )
                    stop(1)

            if GENERATE:
                # run the generator
                proc = subprocess_run(
                    [
                        "python3",
                        join(sep, "usr", "share", "bunkerweb", "gen", "main.py"),
                        "--settings",
                        join(sep, "usr", "share", "bunkerweb", "settings.json"),
                        "--templates",
                        join(sep, "usr", "share", "bunkerweb", "confs"),
                        "--output",
                        join(sep, "etc", "nginx"),
                    ]
                    + (
                        ["--variables", str(tmp_variables_path)]
                        if args.variables and FIRST_RUN
                        else []
                    ),
                    stdin=DEVNULL,
                    stderr=STDOUT,
                    check=False,
                )

                if proc.returncode != 0:
                    logger.error(
                        "Config generator failed, configuration will not work as expected...",
                    )
                else:
                    copy(str(nginx_variables_path), str(tmp_variables_path))

                    if SCHEDULER.apis:
                        # send nginx configs
                        logger.info(f"Sending {join(sep, 'etc', 'nginx')} folder ...")
                        ret = SCHEDULER.send_files(join(sep, "etc", "nginx"), "/confs")
                        if not ret:
                            logger.error(
                                "Sending nginx configs failed, configuration will not work as expected...",
                            )

            try:
                if SCHEDULER.apis:
                    # send cache
                    logger.info(f"Sending {CACHE_PATH} folder ...")
                    if not SCHEDULER.send_files(CACHE_PATH, "/cache"):
                        logger.error(f"Error while sending {CACHE_PATH} folder")
                    else:
                        logger.info(f"Successfully sent {CACHE_PATH} folder")

                    if SCHEDULER.send_to_apis("POST", "/reload"):
                        logger.info("Successfully reloaded nginx")
                    else:
                        logger.error("Error while reloading nginx")
                else:
                    # Stop temp nginx
                    logger.info("Stopping temp nginx ...")
                    proc = subprocess_run(
                        ["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "stop"],
                        stdin=DEVNULL,
                        stderr=STDOUT,
                        env=env.copy(),
                        check=False,
                    )
                    if proc.returncode == 0:
                        logger.info("Successfully sent stop signal to temp nginx")
                        # Wait up to 20s for the temp nginx pid file to vanish.
                        i = 0
                        while i < 20:
                            if not Path(
                                sep, "var", "run", "bunkerweb", "nginx.pid"
                            ).is_file():
                                break
                            logger.warning("Waiting for temp nginx to stop ...")
                            sleep(1)
                            i += 1

                        if i >= 20:
                            logger.error(
                                "Timeout error while waiting for temp nginx to stop"
                            )
                        else:
                            # Start nginx
                            logger.info("Starting nginx ...")
                            proc = subprocess_run(
                                ["sudo", join(sep, "usr", "sbin", "nginx")],
                                stdin=DEVNULL,
                                stderr=STDOUT,
                                env=env.copy(),
                                check=False,
                            )
                            if proc.returncode == 0:
                                logger.info("Successfully started nginx")
                            else:
                                logger.error(
                                    f"Error while starting nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8') if proc.stderr else 'Missing stderr'}",
                                )
                    else:
                        logger.error(
                            f"Error while sending stop signal to temp nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8') if proc.stderr else 'Missing stderr'}",
                        )
            except:  # deliberate catch-all: a reload failure must not kill the loop
                logger.error(
                    f"Exception while reloading after running jobs once scheduling : {format_exc()}",
                )

            GENERATE = True
            SCHEDULER.setup()
            NEED_RELOAD = False
            CONFIGS_NEED_GENERATION = False
            PLUGINS_NEED_GENERATION = False
            FIRST_RUN = False

            # infinite schedule for the jobs
            logger.info("Executing job scheduler ...")
            Path(sep, "var", "tmp", "bunkerweb", "scheduler.healthy").write_text(
                "ok", encoding="utf-8"
            )
            while RUN and not NEED_RELOAD:
                SCHEDULER.run_pending()
                sleep(1)

                changes = db.check_changes()
                if isinstance(changes, str):
                    logger.error(
                        f"An error occurred when checking for changes in the database : {changes}"
                    )
                    stop(1)

                # check if the custom configs have changed since last time
                if changes["custom_configs_changed"]:
                    logger.info("Custom configs changed, generating ...")
                    CONFIGS_NEED_GENERATION = True
                    NEED_RELOAD = True

                # check if the plugins have changed since last time
                if changes["external_plugins_changed"]:
                    logger.info("External plugins changed, generating ...")
                    PLUGINS_NEED_GENERATION = True
                    NEED_RELOAD = True

                # check if the config have changed since last time
                if changes["config_changed"]:
                    logger.info("Config changed, generating ...")
                    NEED_RELOAD = True

            if NEED_RELOAD:
                if CONFIGS_NEED_GENERATION:
                    generate_custom_configs(
                        db.get_custom_configs(), original_path=configs_path
                    )

                if PLUGINS_NEED_GENERATION:
                    generate_external_plugins(
                        db.get_plugins(external=True, with_data=True),
                        original_path=plugins_dir,
                    )

                env = db.get_config()
    except:  # deliberate catch-all: log the traceback, then exit non-zero
        logger.error(
            f"Exception while executing scheduler : {format_exc()}",
        )
        stop(1)