autoconf - fix missing scheduler in autoconf mode and missing apis list

Author: bunkerity
Date: 2022-06-27 12:08:35 +02:00
Parent: 7bba81b16b
Commit: 7aa6852d3c

6 changed files with 27 additions and 22 deletions


@@ -2,6 +2,7 @@
 ## v1.4.2 -
+- Fix various bugs with jobs scheduler when using autoconf/swarm/k8s
 - Fix wrong env file when running jobs using Linux integration
 - Fix bwcli unban command when using Linux integration
 - Fix permissions check when filename has a space


@@ -120,10 +120,6 @@ class Config(ApiCaller, ConfigCaller) :
         self.stop_scheduler()
         # update values
-        # order here is important :
-        # __get_scheduler needs apis
-        # __get_apis needs __config
-        # __get_full_env needs __instances and __services
         self.__instances = instances
         self.__services = services
         self.__configs = configs
@@ -151,7 +147,7 @@
             i += 1
         if self.__scheduler is None :
             self.__scheduler = JobScheduler(env=env, lock=self.__lock, apis=self._get_apis())
-        ret = self.__scheduler.reload(env)
+        ret = self.__scheduler.reload(env, apis=self._get_apis())
         if not ret :
             success = False
             log("CONFIG", "", "scheduler.reload() failed, configuration will not work as expected...")


@ -69,7 +69,10 @@ class DockerController(Controller, ConfigCaller) :
raise("get_configs is not supported with DockerController") raise("get_configs is not supported with DockerController")
def apply_config(self) : def apply_config(self) :
return self._config.apply(self._instances, self._services) self._config.stop_scheduler()
ret = self._config.apply(self._instances, self._services)
self._config.start_scheduler()
return ret
def process_events(self) : def process_events(self) :
for event in self.__client.events(decode=True, filters={"type": "container"}) : for event in self.__client.events(decode=True, filters={"type": "container"}) :
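apply_config() now stops the job scheduler before applying the new configuration and starts it again afterwards, so no job runs against a half-applied config. A self-contained sketch of the same pattern follows, with a try/finally guard added as an extra safeguard that is not part of this commit; the controller class name is made up.

class DockerControllerSketch :
    def __init__(self, config, instances, services) :
        self._config = config
        self._instances = instances
        self._services = services

    def apply_config(self) :
        # stop the scheduler so no job fires against a half-applied configuration
        self._config.stop_scheduler()
        try :
            ret = self._config.apply(self._instances, self._services)
        finally :
            # try/finally is an extra safeguard, not in the commit: the scheduler
            # restarts even if apply() raises
            self._config.start_scheduler()
        return ret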


@@ -28,7 +28,6 @@ signal.signal(signal.SIGTERM, exit_handler)
 try :

     # Setup /data folder if needed
-    #if swarm or kubernetes :
     proc = subprocess.run(["/opt/bunkerweb/helpers/data.sh", "AUTOCONF"], stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT)
     if proc.returncode != 0 :
         os._exit(1)


@@ -53,16 +53,16 @@ class JobScheduler(ApiCaller) :
             proc = subprocess.run(["/usr/sbin/nginx", "-s", "reload"], stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, env=self.__env)
             reload = proc.returncode != 0
             if reload :
-                log("SCHEDULER", "", "Successfuly reloaded nginx")
+                log("SCHEDULER", "", "Successfuly reloaded nginx (local)")
             else :
-                log("SCHEDULER", "", "Error while reloading nginx")
+                log("SCHEDULER", "", "Error while reloading nginx (local)")
         else :
             log("SCHEDULER", "", "Reloading nginx ...")
             reload = self._send_to_apis("POST", "/reload")
             if reload :
-                log("SCHEDULER", "", "Successfuly reloaded nginx")
+                log("SCHEDULER", "", "Successfuly reloaded nginx (api)")
             else :
-                log("SCHEDULER", "", "Error while reloading nginx")
+                log("SCHEDULER", "", "Error while reloading nginx (api)")
         return reload

     def __gen_conf(self) :
@@ -99,7 +99,7 @@ class JobScheduler(ApiCaller) :
                 if every != "once" :
                     self.__str_to_schedule(every).do(self.__job_wrapper, path, plugin, name, file)
             except :
-                log("SCHEDULER", "⚠️", "Exception while scheduling jobs for plugin " + plugin + " : " + traceback.format_exc())
+                log("SCHEDULER", "", "Exception while scheduling jobs for plugin " + plugin + " : " + traceback.format_exc())

     def run_pending(self) :
         if self.__lock is not None :
@@ -114,12 +114,14 @@ class JobScheduler(ApiCaller) :
             elif ret >= 2 :
                 success = False
         if reload :
-            if not self.__gen_conf() :
-                success = False
-            if not self._send_files("/data", "/data") :
-                success = False
-            if not self.__reload() :
-                success = False
+            try :
+                if not self._send_files("/data", "/data") :
+                    success = False
+                if not self.__reload() :
+                    success = False
+            except :
+                success = False
+                log("SCHEDULER", "", "Exception while reloading after job scheduling : " + traceback.format_exc())
         if self.__lock is not None :
             self.__lock.release()
         return success
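When a job run requests a reload, pushing /data to the instances and reloading nginx is now wrapped in try/except, so one failing step marks the run as unsuccessful and is logged instead of crashing run_pending(); the __gen_conf() call was dropped from this path. A condensed, standalone restatement of the new control flow, with the two operations passed in as callables (function and parameter names are illustrative):

import traceback

def reload_after_jobs(send_files, reload_nginx, logger) :
    # mirror of the hunk above: any exception flips success and gets logged
    # instead of propagating out of the scheduler loop
    success = True
    try :
        if not send_files("/data", "/data") :
            success = False
        if not reload_nginx() :
            success = False
    except :
        success = False
        logger("Exception while reloading after job scheduling : " + traceback.format_exc())
    return success

Since the lock release in run_pending() sits after this block, a failure here now still reaches the release instead of leaving the lock held.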
@@ -135,7 +137,7 @@ class JobScheduler(ApiCaller) :
                 if self.__job_wrapper(path, plugin, name, file) >= 2 :
                     ret = False
             except :
-                log("SCHEDULER", "⚠️", "Exception while running once jobs for plugin " + plugin + " : " + traceback.format_exc())
+                log("SCHEDULER", "", "Exception while running once jobs for plugin " + plugin + " : " + traceback.format_exc())
         return ret

     def clear(self) :
@@ -149,14 +151,12 @@ class JobScheduler(ApiCaller) :
             with open("/tmp/autoconf.env", "w") as f :
                 for k, v in self.__env.items() :
                     f.write(k + "=" + v + "\n")
-            #print(self.__env)
-            #self.__env.update(os.environ)
             self.clear()
             self.__jobs = self.__get_jobs()
             if not self.run_once() :
                 ret = False
             self.setup()
         except :
             log("SCHEDULER", "⚠️", "Exception while reloading scheduler " + traceback.format_exc())
             return False
         return ret
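reload() writes the current environment to /tmp/autoconf.env as KEY=value lines (presumably consumed by the jobs themselves), clears the existing schedules, rebuilds the job list, runs the "once" jobs and re-registers the recurring ones; the leftover debug lines were removed. A quick sketch of what the env-file loop produces, using made-up setting values:

# Sketch: output of the /tmp/autoconf.env writing loop above for an example env dict.
env = {"SERVER_NAME" : "www.example.com", "AUTO_LETS_ENCRYPT" : "yes"}
with open("/tmp/autoconf.env", "w") as f :
    for k, v in env.items() :
        f.write(k + "=" + v + "\n")
# Resulting file content:
#   SERVER_NAME=www.example.com
#   AUTO_LETS_ENCRYPT=yes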


@@ -1,6 +1,9 @@
 from json import loads
 from glob import glob
 from re import match
+import traceback
+from logger import log

 class ConfigCaller :
@@ -9,7 +12,10 @@ class ConfigCaller :
             self._settings = loads(f.read())
         for plugin in glob("/opt/bunkerweb/core/*/plugin.json") + glob("/opt/bunkerweb/plugins/*/plugin.json") :
             with open(plugin) as f :
-                self._settings.update(loads(f.read())["settings"])
+                try :
+                    self._settings.update(loads(f.read())["settings"])
+                except :
+                    log("CONFIG", "⚠️", "Exception while loading plugin metadata file at " + plugin + " : " + traceback.format_exc())

     def _is_setting(self, setting) :
         return setting in self._settings
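Loading a plugin's settings is now guarded, so a single malformed plugin.json logs a warning instead of preventing every other plugin's settings from being registered. A self-contained sketch of that behaviour with a hypothetical plugin.json layout: only the top-level "settings" key comes from the diff above, the setting names and fields are invented.

# Sketch: the guarded merge, with in-memory JSON strings standing in for plugin.json files.
import json
import traceback

plugin_files = {
    "/opt/bunkerweb/plugins/good/plugin.json" : '{"settings": {"MY_SETTING": {"default": "no"}}}',
    "/opt/bunkerweb/plugins/bad/plugin.json" : '{"settings": ',  # truncated on purpose
}

settings = {}
for path, raw in plugin_files.items() :
    try :
        settings.update(json.loads(raw)["settings"])
    except :
        print("CONFIG ⚠️ Exception while loading plugin metadata file at " + path + " : " + traceback.format_exc())

print(sorted(settings.keys()))  # the good plugin's settings survive: ['MY_SETTING']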