autoconf - fix missing scheduler in autoconf mode and missing apis list

This commit is contained in:
bunkerity 2022-06-27 12:08:35 +02:00
parent 7bba81b16b
commit 7aa6852d3c
6 changed files with 27 additions and 22 deletions

View File

@ -2,6 +2,7 @@
## v1.4.2 -
- Fix various bugs with jobs scheduler when using autoconf/swarm/k8s
- Fix wrong env file when running jobs using Linux integration
- Fix bwcli unban command when using Linux integration
- Fix permissions check when filename has a space

View File

@ -120,10 +120,6 @@ class Config(ApiCaller, ConfigCaller) :
self.stop_scheduler()
# update values
# order here is important :
# __get_scheduler needs apis
# __get_apis needs __config
# __get_full_env needs __instances and __services
self.__instances = instances
self.__services = services
self.__configs = configs
@ -151,7 +147,7 @@ class Config(ApiCaller, ConfigCaller) :
i += 1
if self.__scheduler is None :
self.__scheduler = JobScheduler(env=env, lock=self.__lock, apis=self._get_apis())
ret = self.__scheduler.reload(env)
ret = self.__scheduler.reload(env, apis=self._get_apis())
if not ret :
success = False
log("CONFIG", "", "scheduler.reload() failed, configuration will not work as expected...")

View File

@ -69,7 +69,10 @@ class DockerController(Controller, ConfigCaller) :
raise("get_configs is not supported with DockerController")
def apply_config(self) :
return self._config.apply(self._instances, self._services)
self._config.stop_scheduler()
ret = self._config.apply(self._instances, self._services)
self._config.start_scheduler()
return ret
def process_events(self) :
for event in self.__client.events(decode=True, filters={"type": "container"}) :

View File

@ -28,7 +28,6 @@ signal.signal(signal.SIGTERM, exit_handler)
try :
# Setup /data folder if needed
#if swarm or kubernetes :
proc = subprocess.run(["/opt/bunkerweb/helpers/data.sh", "AUTOCONF"], stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT)
if proc.returncode != 0 :
os._exit(1)

View File

@ -53,16 +53,16 @@ class JobScheduler(ApiCaller) :
proc = subprocess.run(["/usr/sbin/nginx", "-s", "reload"], stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, env=self.__env)
reload = proc.returncode != 0
if reload :
log("SCHEDULER", "", "Successfully reloaded nginx")
log("SCHEDULER", "", "Successfully reloaded nginx (local)")
else :
log("SCHEDULER", "", "Error while reloading nginx")
log("SCHEDULER", "", "Error while reloading nginx (local)")
else :
log("SCHEDULER", "", "Reloading nginx ...")
reload = self._send_to_apis("POST", "/reload")
if reload :
log("SCHEDULER", "", "Successfully reloaded nginx")
log("SCHEDULER", "", "Successfully reloaded nginx (api)")
else :
log("SCHEDULER", "", "Error while reloading nginx")
log("SCHEDULER", "", "Error while reloading nginx (api)")
return reload
def __gen_conf(self) :
@ -99,7 +99,7 @@ class JobScheduler(ApiCaller) :
if every != "once" :
self.__str_to_schedule(every).do(self.__job_wrapper, path, plugin, name, file)
except :
log("SCHEDULER", "⚠️", "Exception while scheduling jobs for plugin " + plugin + " : " + traceback.format_exc())
log("SCHEDULER", "", "Exception while scheduling jobs for plugin " + plugin + " : " + traceback.format_exc())
def run_pending(self) :
if self.__lock is not None :
@ -114,12 +114,14 @@ class JobScheduler(ApiCaller) :
elif ret >= 2 :
success = False
if reload :
if not self.__gen_conf() :
success = False
if not self._send_files("/data", "/data") :
success = False
if not self.__reload() :
try :
if not self._send_files("/data", "/data") :
success = False
if not self.__reload() :
success = False
except :
success = False
log("SCHEDULER", "", "Exception while reloading after job scheduling : " + traceback.format_exc())
if self.__lock is not None :
self.__lock.release()
return success
@ -135,7 +137,7 @@ class JobScheduler(ApiCaller) :
if self.__job_wrapper(path, plugin, name, file) >= 2 :
ret = False
except :
log("SCHEDULER", "⚠️", "Exception while running once jobs for plugin " + plugin + " : " + traceback.format_exc())
log("SCHEDULER", "", "Exception while running once jobs for plugin " + plugin + " : " + traceback.format_exc())
return ret
def clear(self) :
@ -149,14 +151,12 @@ class JobScheduler(ApiCaller) :
with open("/tmp/autoconf.env", "w") as f :
for k, v in self.__env.items() :
f.write(k + "=" + v + "\n")
#print(self.__env)
#self.__env.update(os.environ)
self.clear()
self.__jobs = self.__get_jobs()
if not self.run_once() :
ret = False
self.setup()
except :
log("SCHEDULER", "⚠️", "Exception while reloading scheduler " + traceback.format_exc())
log("SCHEDULER", "", "Exception while reloading scheduler " + traceback.format_exc())
return False
return ret

View File

@ -1,6 +1,9 @@
from json import loads
from glob import glob
from re import match
import traceback
from logger import log
class ConfigCaller :
@ -9,7 +12,10 @@ class ConfigCaller :
self._settings = loads(f.read())
for plugin in glob("/opt/bunkerweb/core/*/plugin.json") + glob("/opt/bunkerweb/plugins/*/plugin.json") :
with open(plugin) as f :
self._settings.update(loads(f.read())["settings"])
try :
self._settings.update(loads(f.read())["settings"])
except :
log("CONFIG", "⚠️", "Exception while loading plugin metadata file at " + plugin + " : " + traceback.format_exc())
def _is_setting(self, setting) :
return setting in self._settings