Refactoring and linting of Python and JSON files

Théophile Diot 2023-04-14 17:37:59 +02:00
parent 496edb83ac
commit e6ff51e200
No known key found for this signature in database
GPG Key ID: E752C80DB72BB014
15 changed files with 207 additions and 212 deletions

View File

@ -56,7 +56,7 @@ class Config(ConfigCaller):
for file, data in self.__configs[config_type].items():
site = None
name = file
if "/" in file :
if "/" in file:
exploded = file.split("/")
site = exploded[0]
name = exploded[1]
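
For orientation, a minimal standalone sketch of the key-splitting convention this hunk touches: custom config entries are keyed either by a bare name (global) or by "site/name" (site-specific). The function name and sample keys below are illustrative, not part of the Config class API.

```python
# Illustrative sketch only: how a "site/name" config key is split.
def split_config_key(file):
    site = None
    name = file
    if "/" in file:
        exploded = file.split("/")
        site = exploded[0]
        name = exploded[1]
    return site, name


assert split_config_key("www.example.com/my-snippet") == ("www.example.com", "my-snippet")
assert split_config_key("my-global-snippet") == (None, "my-global-snippet")
```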

View File

@ -104,10 +104,10 @@ class Controller(ABC):
def process_events(self):
pass
def _is_service_present(self, server_name) :
for service in self._services :
if not "SERVER_NAME" in service or service["SERVER_NAME"] == "" :
def _is_service_present(self, server_name):
for service in self._services:
if not "SERVER_NAME" in service or service["SERVER_NAME"] == "":
continue
if server_name == service["SERVER_NAME"].split(" ")[0] :
if server_name == service["SERVER_NAME"].split(" ")[0]:
return True
return False
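
A self-contained sketch of the check reformatted above, with the service list shape assumed from the surrounding code; note that only the first token of a space-separated SERVER_NAME is compared.

```python
# Sketch (assumed shape: services is a list of env dicts).
def is_service_present(services, server_name):
    for service in services:
        if "SERVER_NAME" not in service or service["SERVER_NAME"] == "":
            continue
        # Only the first server name of a space-separated list is compared.
        if server_name == service["SERVER_NAME"].split(" ")[0]:
            return True
    return False


services = [{"SERVER_NAME": "www.example.com app1.example.com"}, {}]
assert is_service_present(services, "www.example.com") is True
assert is_service_present(services, "app1.example.com") is False
```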

View File

@ -197,7 +197,9 @@ class IngressController(Controller, ConfigCaller):
continue
config_site = ""
if "bunkerweb.io/CONFIG_SITE" in configmap.metadata.annotations:
if not self._is_service_present(configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']) :
if not self._is_service_present(
configmap.metadata.annotations["bunkerweb.io/CONFIG_SITE"]
):
self.__logger.warning(
f"Ignoring config {configmap.metadata.name} because {configmap.metadata.annotations['bunkerweb.io/CONFIG_SITE']} doesn't exist",
)
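
For context, a hypothetical ConfigMap (written as a Python dict) showing where the bunkerweb.io/CONFIG_SITE annotation checked above would live; the annotation value must match the first SERVER_NAME of an existing service, otherwise the config is skipped with the warning shown in this hunk. Names and data are placeholders, and any other annotations BunkerWeb may require are omitted.

```python
# Hypothetical manifest, expressed as a Python dict for illustration.
configmap = {
    "apiVersion": "v1",
    "kind": "ConfigMap",
    "metadata": {
        "name": "my-custom-config",
        "annotations": {
            # Checked by _is_service_present() in the hunk above.
            "bunkerweb.io/CONFIG_SITE": "www.example.com",
        },
    },
    "data": {"my-snippet.conf": "# nginx snippet goes here"},
}
```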

View File

@ -36,7 +36,7 @@ class SwarmController(Controller, ConfigCaller):
instance_env[variable] = value
for task in controller_instance.tasks():
if task["DesiredState"] != "running" :
if task["DesiredState"] != "running":
continue
instances.append(
{
@ -112,7 +112,9 @@ class SwarmController(Controller, ConfigCaller):
continue
config_site = ""
if "bunkerweb.CONFIG_SITE" in config.attrs["Spec"]["Labels"]:
if not self._is_service_present(config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']) :
if not self._is_service_present(
config.attrs["Spec"]["Labels"]["bunkerweb.CONFIG_SITE"]
):
self.__logger.warning(
f"Ignoring config {config_name} because {config.attrs['Spec']['Labels']['bunkerweb.CONFIG_SITE']} doesn't exist",
)
@ -131,11 +133,13 @@ class SwarmController(Controller, ConfigCaller):
)
def __event(self, event_type):
while True :
while True:
locked = False
error = False
try :
for _ in self.__client.events(decode=True, filters={"type": event_type}):
try:
for _ in self.__client.events(
decode=True, filters={"type": event_type}
):
self.__internal_lock.acquire()
locked = True
try:
@ -152,7 +156,9 @@ class SwarmController(Controller, ConfigCaller):
f"Catched Swarm event ({event_type}), deploying new configuration ..."
)
if not self.apply_config():
self.__logger.error("Error while deploying new configuration")
self.__logger.error(
"Error while deploying new configuration"
)
else:
self.__logger.info(
"Successfully deployed new configuration 🚀",
@ -163,7 +169,7 @@ class SwarmController(Controller, ConfigCaller):
)
self.__internal_lock.release()
locked = False
except :
except:
self.__logger.error(
f"Exception while reading Swarm event ({event_type}) :\n{format_exc()}",
)
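
The reflowed loop above follows the usual docker SDK event pattern; below is a stripped-down sketch of that pattern with the configuration step stubbed out. apply_config() and the print calls are placeholders, not the controller's real methods or logger.

```python
# Minimal sketch of the Swarm event loop shape (docker SDK for Python).
from threading import Lock
from traceback import format_exc

import docker  # pip install docker

client = docker.from_env()
internal_lock = Lock()


def apply_config():
    return True  # placeholder for the controller's real apply step


def watch(event_type):
    while True:
        try:
            # Blocks and yields one decoded event dict per Docker event.
            for _ in client.events(decode=True, filters={"type": event_type}):
                with internal_lock:
                    if not apply_config():
                        print("Error while deploying new configuration")
        except Exception:
            print(f"Exception while reading Swarm event ({event_type}):\n{format_exc()}")
```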

View File

@ -79,5 +79,3 @@ except:
sys_exit(1)
finally:
Path("/var/tmp/bunkerweb/autoconf.healthy").unlink(missing_ok=True)

View File

@ -167,11 +167,11 @@
"every": "once",
"reload": false
},
{
"name": "update-check",
"file": "update-check.py",
"every": "day",
"reload": false
}
{
"name": "update-check",
"file": "update-check.py",
"every": "day",
"reload": false
}
]
}

View File

@ -1,36 +1,36 @@
{
"id": "reversescan",
"order": 5,
"name": "Reverse scan",
"description": "Scan clients ports to detect proxies or servers.",
"version": "0.1",
"settings": {
"USE_REVERSE_SCAN": {
"context": "multisite",
"default": "no",
"help": "Enable scanning of clients ports and deny access if one is opened.",
"id": "use-reverse-scan",
"label": "Reverse scan",
"regex": "^(no|yes)$",
"type": "check"
},
"REVERSE_SCAN_PORTS": {
"context": "multisite",
"default": "22 80 443 3128 8000 8080",
"help": "List of port to scan when using reverse scan feature.",
"id": "reverse-scan-ports",
"label": "Reverse scan ports",
"regex": "^.*$",
"type": "text"
},
"REVERSE_SCAN_TIMEOUT": {
"context": "multisite",
"default": "500",
"help": "Specify the maximum timeout (in ms) when scanning a port.",
"id": "reverse-scan-timeout",
"label": "Reverse scan timeout",
"regex": "^.*$",
"type": "text"
}
}
}
"id": "reversescan",
"order": 5,
"name": "Reverse scan",
"description": "Scan clients ports to detect proxies or servers.",
"version": "0.1",
"settings": {
"USE_REVERSE_SCAN": {
"context": "multisite",
"default": "no",
"help": "Enable scanning of clients ports and deny access if one is opened.",
"id": "use-reverse-scan",
"label": "Reverse scan",
"regex": "^(no|yes)$",
"type": "check"
},
"REVERSE_SCAN_PORTS": {
"context": "multisite",
"default": "22 80 443 3128 8000 8080",
"help": "List of port to scan when using reverse scan feature.",
"id": "reverse-scan-ports",
"label": "Reverse scan ports",
"regex": "^.*$",
"type": "text"
},
"REVERSE_SCAN_TIMEOUT": {
"context": "multisite",
"default": "500",
"help": "Specify the maximum timeout (in ms) when scanning a port.",
"id": "reverse-scan-timeout",
"label": "Reverse scan timeout",
"regex": "^.*$",
"type": "text"
}
}
}
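
The three settings above describe a connect-back probe against the client; purely as an illustration of what they mean (the plugin's actual implementation is server-side and not part of this diff), a hedged Python equivalent:

```python
# Illustration only: the behaviour described by USE_REVERSE_SCAN,
# REVERSE_SCAN_PORTS and REVERSE_SCAN_TIMEOUT, as a tiny TCP probe.
import socket

REVERSE_SCAN_PORTS = "22 80 443 3128 8000 8080"  # default from plugin.json
REVERSE_SCAN_TIMEOUT = 500  # milliseconds per port, default from plugin.json


def client_has_open_port(client_ip):
    for port in REVERSE_SCAN_PORTS.split(" "):
        try:
            # An open port on the client suggests a proxy or server: deny.
            with socket.create_connection(
                (client_ip, int(port)), timeout=REVERSE_SCAN_TIMEOUT / 1000
            ):
                return True
        except OSError:
            continue
    return False
```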

View File

@ -71,9 +71,7 @@ if __name__ == "__main__":
help="path to the file containing environment variables",
)
parser.add_argument(
"--no-linux-reload",
action="store_true",
help="disable linux reload"
"--no-linux-reload", action="store_true", help="disable linux reload"
)
args = parser.parse_args()

View File

@ -72,11 +72,12 @@ class ApiCaller:
elif var.startswith("API_SERVER_NAME="):
api_server_name = var.replace("API_SERVER_NAME=", "", 1)
for task in instance.tasks() :
for task in instance.tasks():
self.__apis.append(
API(
f"http://{instance.name}.{task['NodeID']}.{task['ID']}:{api_http_port or getenv('API_HTTP_PORT', '5000')}",
host=api_server_name or getenv("API_SERVER_NAME", "bwapi"),
host=api_server_name
or getenv("API_SERVER_NAME", "bwapi"),
)
)
return

View File

@ -380,7 +380,7 @@ if __name__ == "__main__":
if not api_caller._send_files("/data/cache", "/cache"):
logger.error("Error while sending /data/cache folder")
else:
logger.info("Successfuly sent /data/cache folder")
logger.info("Successfully sent /data/cache folder")
# restart nginx
if integration == "Linux":
@ -393,17 +393,19 @@ if __name__ == "__main__":
env=deepcopy(env),
)
if proc.returncode == 0:
logger.info("Successfuly sent stop signal to temp nginx")
logger.info("Successfully sent stop signal to temp nginx")
i = 0
while i < 20 :
if not Path("/var/tmp/bunkerweb/nginx.pid").is_file() :
while i < 20:
if not Path("/var/tmp/bunkerweb/nginx.pid").is_file():
break
logger.warning("Waiting for temp nginx to stop ...")
sleep(1)
i += 1
if i >= 20 :
logger.error("Timeout error while waiting for temp nginx to stop")
else :
if i >= 20:
logger.error(
"Timeout error while waiting for temp nginx to stop"
)
else:
# Start nginx
logger.info("Starting nginx ...")
proc = subprocess_run(
@ -413,8 +415,8 @@ if __name__ == "__main__":
env=deepcopy(env),
)
if proc.returncode == 0:
logger.info("Successfuly started nginx")
else :
logger.info("Successfully started nginx")
else:
logger.error(
f"Error while starting nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
)
@ -424,7 +426,7 @@ if __name__ == "__main__":
)
else:
if api_caller._send_to_apis("POST", "/reload"):
logger.info("Successfuly reloaded nginx")
logger.info("Successfully reloaded nginx")
else:
logger.error("Error while reloading nginx")
except:
@ -475,7 +477,7 @@ if __name__ == "__main__":
env=deepcopy(env),
)
if proc.returncode == 0:
logger.info("Successfuly reloaded nginx")
logger.info("Successfully reloaded nginx")
else:
logger.error(
f"Error while reloading nginx - returncode: {proc.returncode} - error: {proc.stderr.decode('utf-8')}",
@ -483,7 +485,7 @@ if __name__ == "__main__":
else:
need_reload = True
# if api_caller._send_to_apis("POST", "/reload"):
# logger.info("Successfuly reloaded nginx")
# logger.info("Successfully reloaded nginx")
# else:
# logger.error("Error while reloading nginx")

View File

@ -8,6 +8,7 @@ from time import sleep
from logger import log
from yaml import safe_load, dump
class AutoconfTest(Test):
def __init__(self, name, timeout, tests, no_copy_container=False, delay=0):
super().__init__(
@ -19,11 +20,11 @@ class AutoconfTest(Test):
delay=delay,
)
self._domains = {
r"www\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"auth\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"app1\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_3")
r"www\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1')}",
r"auth\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1')}",
r"app1\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_1')}",
r"app2\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"app3\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_3')}",
}
self._check_domains()
@ -56,13 +57,20 @@ class AutoconfTest(Test):
"local/scheduler-tests:latest",
)
Test.replace_in_file(compose, r"\./bw\-data:/", "/tmp/bw-data:/")
with open(compose, "r") as f :
with open(compose, "r") as f:
data = safe_load(f.read())
data["services"]["bunkerweb"]["volumes"] = ["/tmp/www:/var/www/html"]
if not "AUTO_LETS_ENCRYPT=yes" in data["services"]["bunkerweb"]["environment"] :
data["services"]["bunkerweb"]["environment"].append("AUTO_LETS_ENCRYPT=yes")
data["services"]["bunkerweb"]["environment"].append("USE_LETS_ENCRYPT_STAGING=yes")
with open(compose, "w") as f :
if (
not "AUTO_LETS_ENCRYPT=yes"
in data["services"]["bunkerweb"]["environment"]
):
data["services"]["bunkerweb"]["environment"].append(
"AUTO_LETS_ENCRYPT=yes"
)
data["services"]["bunkerweb"]["environment"].append(
"USE_LETS_ENCRYPT_STAGING=yes"
)
with open(compose, "w") as f:
f.write(dump(data))
proc = run(
"docker-compose pull --ignore-pull-failures",
@ -96,7 +104,7 @@ class AutoconfTest(Test):
log(
"AUTOCONF",
"",
"exception while running AutoconfTest.init()\n" + format_exc(),
f"exception while running AutoconfTest.init()\n{format_exc()}",
)
return False
return True
@ -114,7 +122,7 @@ class AutoconfTest(Test):
log(
"AUTOCONF",
"",
"exception while running AutoconfTest.end()\n" + format_exc(),
f"exception while running AutoconfTest.end()\n{format_exc()}",
)
return False
return ret
@ -122,10 +130,10 @@ class AutoconfTest(Test):
def _setup_test(self):
try:
super()._setup_test()
test = "/tmp/tests/" + self._name
compose = "/tmp/tests/" + self._name + "/autoconf.yml"
example_data = "/tmp/tests/" + self._name + "/bw-data"
example_www = "/tmp/tests/" + self._name + "/www"
test = f"/tmp/tests/{self._name}"
compose = f"/tmp/tests/{self._name}/autoconf.yml"
example_data = f"/tmp/tests/{self._name}/bw-data"
example_www = f"/tmp/tests/{self._name}/www"
Test.replace_in_file(
compose, r"bunkerity/bunkerweb:.*$", "local/bunkerweb-tests:latest"
)
@ -146,21 +154,21 @@ class AutoconfTest(Test):
Test.replace_in_files(test, ex_domain, test_domain)
Test.rename(test, ex_domain, test_domain)
Test.replace_in_files(test, "example.com", getenv("ROOT_DOMAIN"))
setup = test + "/setup-autoconf.sh"
setup = f"{test}/setup-autoconf.sh"
if isfile(setup):
proc = run("sudo ./setup-autoconf.sh", cwd=test, shell=True)
if proc.returncode != 0:
raise (Exception("setup-autoconf failed"))
if isdir(example_data) and not self._no_copy_container:
proc = run(
"sudo bash -c 'cp -rp " + example_data + "/* /tmp/bw-data'",
f"sudo bash -c 'cp -rp {example_data}/* /tmp/bw-data'",
shell=True,
)
if proc.returncode != 0:
raise (Exception("cp bw-data failed"))
if isdir(example_www) :
if isdir(example_www):
proc = run(
"sudo bash -c 'cp -rp " + example_www + "/* /tmp/www'",
f"sudo bash -c 'cp -rp {example_www}/* /tmp/www'",
shell=True,
)
if proc.returncode != 0:
@ -179,7 +187,7 @@ class AutoconfTest(Test):
log(
"AUTOCONF",
"",
"exception while running AutoconfTest._setup_test()\n" + format_exc(),
f"exception while running AutoconfTest._setup_test()\n{format_exc()}",
)
self._cleanup_test()
return False
@ -187,7 +195,7 @@ class AutoconfTest(Test):
def _cleanup_test(self):
try:
test = "/tmp/tests/" + self._name
test = f"/tmp/tests/{self._name}"
proc = run("docker-compose -f autoconf.yml down -v", shell=True, cwd=test)
if proc.returncode != 0:
raise (Exception("docker-compose down failed"))
@ -202,7 +210,7 @@ class AutoconfTest(Test):
log(
"AUTOCONF",
"",
"exception while running AutoconfTest._cleanup_test()\n" + format_exc(),
f"exception while running AutoconfTest._cleanup_test()\n{format_exc()}",
)
return False
return True
@ -210,5 +218,5 @@ class AutoconfTest(Test):
def _debug_fail(self):
autoconf = "/tmp/autoconf"
proc = run("docker-compose logs", shell=True, cwd=autoconf)
test = "/tmp/tests/" + self._name
test = f"/tmp/tests/{self._name}"
proc = run("docker-compose -f autoconf.yml logs", shell=True, cwd=test)

View File

@ -8,45 +8,27 @@ from time import sleep
from logger import log
from yaml import safe_load_all, dump_all
class KubernetesTest(Test):
def __init__(self, name, timeout, tests, delay=0):
super().__init__(name, "kubernetes", timeout, tests, delay=delay)
self._domains = {
r"www\.example\.com": Test.random_string(1) + "." + getenv("TEST_DOMAIN1_1"),
r"auth\.example\.com": Test.random_string(1) + "." + getenv("TEST_DOMAIN1_2"),
r"app1\.example\.com": Test.random_string(1) + "." + getenv("TEST_DOMAIN1"),
r"app2\.example\.com": Test.random_string(1) + "." + getenv("TEST_DOMAIN2"),
r"app3\.example\.com": Test.random_string(1) + "." + getenv("TEST_DOMAIN3")
r"www\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"auth\.example\.com": f"{Test.random_string(1)}.{getenv('TEST_DOMAIN1_2')}",
r"app1\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1')}",
r"app2\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN2')}",
r"app3\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN3')}",
}
def init():
try:
if not Test.init():
return False
# proc = run("sudo chown -R root:root /tmp/bw-data", shell=True)
# if proc.returncode != 0 :
# raise(Exception("chown failed (k8s stack)"))
# if isdir("/tmp/kubernetes") :
# rmtree("/tmp/kubernetes")
# copytree("./integrations/kubernetes", "/tmp/kubernetes")
# copy("./tests/utils/k8s.yml", "/tmp/kubernetes")
# deploy = "/tmp/kubernetes/bunkerweb.yml"
# Test.replace_in_file(deploy, r"bunkerity/bunkerweb:.*$", getenv("PRIVATE_REGISTRY") + "/infra/bunkerweb-tests-amd64:latest")
# Test.replace_in_file(deploy, r"bunkerity/bunkerweb-autoconf:.*$", getenv("PRIVATE_REGISTRY") + "/infra/bunkerweb-autoconf-tests-amd64:latest")
# proc = run("kubectl apply -f k8s.yml", cwd="/tmp/kubernetes", shell=True)
# if proc.returncode != 0 :
# raise(Exception("kubectl apply k8s failed (k8s stack)"))
# proc = run("kubectl apply -f rbac.yml", cwd="/tmp/kubernetes", shell=True)
# if proc.returncode != 0 :
# raise(Exception("kubectl apply rbac failed (k8s stack)"))
# proc = run("kubectl apply -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True)
# if proc.returncode != 0 :
# raise(Exception("kubectl apply bunkerweb failed (k8s stack)"))
mkdir("/tmp/kubernetes")
copy("./misc/integrations/k8s.mariadb.yml", "/tmp/kubernetes/bunkerweb.yml")
deploy = "/tmp/kubernetes/bunkerweb.yml"
yamls = []
with open(deploy, "r") as f :
with open(deploy, "r") as f:
data = safe_load_all(f.read())
append_env = {
"AUTO_LETS_ENCRYPT": "yes",
@ -54,43 +36,43 @@ class KubernetesTest(Test):
"USE_REAL_IP": "yes",
"USE_PROXY_PROTOCOL": "yes",
"REAL_IP_FROM": "100.64.0.0/16",
"REAL_IP_HEADER": "proxy_protocol"
"REAL_IP_HEADER": "proxy_protocol",
}
replace_env = {
"API_WHITELIST_IP": "127.0.0.1/8 100.64.0.0/10"
}
for yaml in data :
if yaml["metadata"]["name"] == "bunkerweb" :
for k, v in append_env.items() :
yaml["spec"]["template"]["spec"]["containers"][0]["env"].append({"name": k, "value": v})
for ele in yaml["spec"]["template"]["spec"]["containers"][0]["env"] :
if ele["name"] in replace_env :
replace_env = {"API_WHITELIST_IP": "127.0.0.1/8 100.64.0.0/10"}
for yaml in data:
if yaml["metadata"]["name"] == "bunkerweb":
for k, v in append_env.items():
yaml["spec"]["template"]["spec"]["containers"][0]["env"].append(
{"name": k, "value": v}
)
for ele in yaml["spec"]["template"]["spec"]["containers"][0]["env"]:
if ele["name"] in replace_env:
ele["value"] = replace_env[ele["name"]]
if yaml["metadata"]["name"] in ["bunkerweb", "bunkerweb-controller", "bunkerweb-scheduler"] :
yaml["spec"]["template"]["spec"]["imagePullSecrets"] = [{"name": "secret-registry"}]
if yaml["metadata"]["name"] in [
"bunkerweb",
"bunkerweb-controller",
"bunkerweb-scheduler",
]:
yaml["spec"]["template"]["spec"]["imagePullSecrets"] = [
{"name": "secret-registry"}
]
yamls.append(yaml)
with open(deploy, "w") as f :
with open(deploy, "w") as f:
f.write(dump_all(yamls))
Test.replace_in_file(
deploy,
r"bunkerity/bunkerweb:.*$",
getenv("PRIVATE_REGISTRY")
+ "/infra/bunkerweb-tests:"
+ getenv("IMAGE_TAG"),
f"{getenv('PRIVATE_REGISTRY')}/infra/bunkerweb-tests:{getenv('IMAGE_TAG')}",
)
Test.replace_in_file(
deploy,
r"bunkerity/bunkerweb-autoconf:.*$",
getenv("PRIVATE_REGISTRY")
+ "/infra/autoconf-tests:"
+ getenv("IMAGE_TAG"),
f"{getenv('PRIVATE_REGISTRY')}/infra/autoconf-tests:{getenv('IMAGE_TAG')}",
)
Test.replace_in_file(
deploy,
r"bunkerity/bunkerweb-scheduler:.*$",
getenv("PRIVATE_REGISTRY")
+ "/infra/scheduler-tests:"
+ getenv("IMAGE_TAG"),
f"{getenv('PRIVATE_REGISTRY')}/infra/scheduler-tests:{getenv('IMAGE_TAG')}",
)
proc = run(
"kubectl apply -f bunkerweb.yml", cwd="/tmp/kubernetes", shell=True
@ -162,7 +144,7 @@ class KubernetesTest(Test):
log(
"KUBERNETES",
"",
"exception while running KubernetesTest.init()\n" + format_exc(),
f"exception while running KubernetesTest.init()\n{format_exc()}",
)
return False
return True
@ -182,7 +164,7 @@ class KubernetesTest(Test):
log(
"KUBERNETES",
"",
"exception while running KubernetesTest.end()\n" + format_exc(),
f"exception while running KubernetesTest.end()\n{format_exc()}",
)
return False
return ret
@ -190,22 +172,18 @@ class KubernetesTest(Test):
def _setup_test(self):
try:
super()._setup_test()
test = "/tmp/tests/" + self._name
deploy = "/tmp/tests/" + self._name + "/kubernetes.yml"
example_data = "./examples/" + self._name + "/bw-data"
test = f"/tmp/tests/{self._name}"
deploy = f"/tmp/tests/{self._name}/kubernetes.yml"
example_data = f"./examples/{self._name}/bw-data"
for ex_domain, test_domain in self._domains.items():
Test.replace_in_files(test, ex_domain, test_domain)
Test.rename(test, ex_domain, test_domain)
Test.replace_in_files(test, "example.com", getenv("ROOT_DOMAIN"))
setup = test + "/setup-kubernetes.sh"
setup = f"{test}/setup-kubernetes.sh"
if isfile(setup):
proc = run("./setup-kubernetes.sh", cwd=test, shell=True)
if proc.returncode != 0:
raise (Exception("setup-kubernetes failed"))
# if isdir(example_data) :
# for cp_dir in listdir(example_data) :
# if isdir(join(example_data, cp_dir)) :
# copytree(join(example_data, cp_dir), join("/tmp/bw-data", cp_dir))
proc = run("kubectl apply -f kubernetes.yml", shell=True, cwd=test)
if proc.returncode != 0:
raise (Exception("kubectl apply failed"))
@ -213,7 +191,7 @@ class KubernetesTest(Test):
log(
"KUBERNETES",
"",
"exception while running KubernetesTest._setup_test()\n" + format_exc(),
f"exception while running KubernetesTest._setup_test()\n{format_exc()}",
)
self._cleanup_test()
return False
@ -221,8 +199,8 @@ class KubernetesTest(Test):
def _cleanup_test(self):
try:
test = "/tmp/tests/" + self._name
cleanup = test + "/cleanup-kubernetes.sh"
test = f"/tmp/tests/{self._name}"
cleanup = f"{test}/cleanup-kubernetes.sh"
if isfile(cleanup):
proc = run("./cleanup-kubernetes.sh", cwd=test, shell=True)
if proc.returncode != 0:
@ -235,8 +213,7 @@ class KubernetesTest(Test):
log(
"KUBERNETES",
"",
"exception while running KubernetesTest._cleanup_test()\n"
+ format_exc(),
f"exception while running KubernetesTest._cleanup_test()\n{format_exc()}",
)
return False
return True
@ -248,4 +225,4 @@ class KubernetesTest(Test):
capture_output=True,
)
for pod in proc.stdout.decode().splitlines():
run("kubectl logs " + pod, shell=True)
run(f"kubectl logs {pod}", shell=True)

View File

@ -11,11 +11,11 @@ class LinuxTest(Test):
def __init__(self, name, timeout, tests, distro):
super().__init__(name, "linux", timeout, tests)
self._domains = {
r"www\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"auth\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"app1\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_1"),
r"app2\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_2"),
r"app3\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_3")
r"www\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1')}",
r"auth\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1')}",
r"app1\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_1')}",
r"app2\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"app3\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_3')}",
}
if not distro in ("ubuntu", "debian", "fedora", "centos", "rhel"):
raise Exception(f"unknown distro {distro}")
@ -77,7 +77,7 @@ class LinuxTest(Test):
log(
"LINUX",
"",
"exception while running LinuxTest.init()\n" + format_exc(),
f"exception while running LinuxTest.init()\n{format_exc()}",
)
return False
return True
@ -93,7 +93,7 @@ class LinuxTest(Test):
ret = False
except:
log(
"LINUX", "", "exception while running LinuxTest.end()\n" + format_exc()
"LINUX", "", f"exception while running LinuxTest.end()\n{format_exc()}"
)
return False
return ret
@ -109,7 +109,7 @@ class LinuxTest(Test):
proc = self.docker_cp(self.__distro, test, f"/opt/{self._name}")
if proc.returncode != 0:
raise Exception("docker cp failed (test)")
setup = test + "/setup-linux.sh"
setup = f"{test}/setup-linux.sh"
if isfile(setup):
proc = self.docker_exec(
self.__distro, f"cd /opt/{self._name} && ./setup-linux.sh"
@ -136,7 +136,7 @@ class LinuxTest(Test):
log(
"LINUX",
"",
"exception while running LinuxTest._setup_test()\n" + format_exc(),
f"exception while running LinuxTest._setup_test()\n{format_exc()}",
)
self._debug_fail()
self._cleanup_test()
@ -156,7 +156,7 @@ class LinuxTest(Test):
log(
"DOCKER",
"",
"exception while running LinuxTest._cleanup_test()\n" + format_exc(),
f"exception while running LinuxTest._cleanup_test()\n{format_exc()}",
)
return False
return True

View File

@ -8,15 +8,16 @@ from time import sleep
from logger import log
from yaml import safe_load, dump
class SwarmTest(Test):
def __init__(self, name, timeout, tests, delay=0):
super().__init__(name, "swarm", timeout, tests, delay=delay)
self._domains = {
r"www\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_1"),
r"auth\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1_2"),
r"app1\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN1"),
r"app2\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN2"),
r"app3\.example\.com": Test.random_string(6) + "." + getenv("TEST_DOMAIN3")
r"www\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_1')}",
r"auth\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"app1\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_1')}",
r"app2\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_2')}",
r"app3\.example\.com": f"{Test.random_string(6)}.{getenv('TEST_DOMAIN1_3')}",
}
@staticmethod
@ -32,13 +33,20 @@ class SwarmTest(Test):
mkdir("/tmp/swarm")
copy("./misc/integrations/swarm.mariadb.yml", "/tmp/swarm/stack.yml")
compose = "/tmp/swarm/stack.yml"
with open(compose, "r") as f :
with open(compose, "r") as f:
data = safe_load(f.read())
if not "AUTO_LETS_ENCRYPT=yes" in data["services"]["bunkerweb"]["environment"] :
data["services"]["bunkerweb"]["environment"].append("AUTO_LETS_ENCRYPT=yes")
data["services"]["bunkerweb"]["environment"].append("USE_LETS_ENCRYPT_STAGING=yes")
if (
not "AUTO_LETS_ENCRYPT=yes"
in data["services"]["bunkerweb"]["environment"]
):
data["services"]["bunkerweb"]["environment"].append(
"AUTO_LETS_ENCRYPT=yes"
)
data["services"]["bunkerweb"]["environment"].append(
"USE_LETS_ENCRYPT_STAGING=yes"
)
del data["services"]["bunkerweb"]["deploy"]["placement"]
with open(compose, "w") as f :
with open(compose, "w") as f:
f.write(dump(data))
Test.replace_in_file(
compose,
@ -55,7 +63,7 @@ class SwarmTest(Test):
r"bunkerity/bunkerweb-scheduler:.*$",
"192.168.42.100:5000/scheduler-tests:latest",
)
#Test.replace_in_file(compose, r"bw\-data:/", "/tmp/bw-data:/")
# Test.replace_in_file(compose, r"bw\-data:/", "/tmp/bw-data:/")
proc = run(
"docker stack deploy -c stack.yml bunkerweb",
cwd="/tmp/swarm",
@ -84,15 +92,15 @@ class SwarmTest(Test):
shell=True,
capture_output=True,
)
log("SWARM", "", "stdout logs = " + proc.stdout.decode())
log("SWARM", "", "stderr logs = " + proc.stderr.decode())
log("SWARM", "", f"stdout logs = {proc.stdout.decode()}")
log("SWARM", "", f"stderr logs = {proc.stderr.decode()}")
raise (Exception("swarm stack is not healthy"))
sleep(60)
except:
log(
"SWARM",
"",
"exception while running SwarmTest.init()\n" + format_exc(),
f"exception while running SwarmTest.init()\n{format_exc()}",
)
return False
return True
@ -109,7 +117,7 @@ class SwarmTest(Test):
rmtree("/tmp/swarm")
except:
log(
"SWARM", "", "exception while running SwarmTest.end()\n" + format_exc()
"SWARM", "", f"exception while running SwarmTest.end()\n{format_exc()}"
)
return False
return ret
@ -169,7 +177,7 @@ class SwarmTest(Test):
log(
"SWARM",
"",
"exception while running SwarmTest._setup_test()\n" + format_exc(),
f"exception while running SwarmTest._setup_test()\n{format_exc()}",
)
self._cleanup_test()
return False
@ -204,7 +212,7 @@ class SwarmTest(Test):
log(
"SWARM",
"",
"exception while running SwarmTest._cleanup_test()\n" + format_exc(),
f"exception while running SwarmTest._cleanup_test()\n{format_exc()}",
)
return False
return True

View File

@ -13,6 +13,7 @@ from logger import log
from string import ascii_lowercase, digits
from random import choice
class Test(ABC):
def __init__(self, name, kind, timeout, tests, no_copy_container=False, delay=0):
self._name = name
@ -24,12 +25,7 @@ class Test(ABC):
log(
"TEST",
"",
"instiantiated with "
+ str(len(tests))
+ " tests and timeout of "
+ str(timeout)
+ "s for "
+ self._name,
f"instiantiated with {len(tests)} tests and timeout of {timeout}s for {self._name}",
)
# Class method
@ -42,11 +38,11 @@ class Test(ABC):
rm_dirs = ["configs", "plugins", "www"]
for rm_dir in rm_dirs:
if isdir(rm_dir):
run("sudo rm -rf /tmp/bw-data/" + rm_dir, shell=True)
run(f"sudo rm -rf /tmp/bw-data/{rm_dir}", shell=True)
if not isdir("/tmp/tests"):
mkdir("/tmp/tests")
except:
log("TEST", "", "exception while running Test.init()\n" + format_exc())
log("TEST", "", f"exception while running Test.init()\n{format_exc()}")
return False
return True
@ -59,7 +55,7 @@ class Test(ABC):
def _check_domains(self):
for k, v in self._domains.items():
if v is None:
log("TEST", "⚠️", "env " + k + " is None")
log("TEST", "⚠️", f"env {k} is None")
# called before starting the tests
# must be override if specific actions needs to be done
@ -67,19 +63,19 @@ class Test(ABC):
try:
rm_dirs = ["configs", "plugins", "www"]
for rm_dir in rm_dirs:
if isdir("/tmp/bw-data/" + rm_dir):
if isdir(f"/tmp/bw-data/{rm_dir}"):
run(
"sudo bash -c 'rm -rf /tmp/bw-data/" + rm_dir + "/*'",
f"sudo bash -c 'rm -rf /tmp/bw-data/{rm_dir}/*'",
shell=True,
)
if isdir("/tmp/tests/" + self._name):
run("sudo rm -rf /tmp/tests/" + self._name, shell=True)
copytree("./examples/" + self._name, "/tmp/tests/" + self._name)
if isdir(f"/tmp/tests/{self._name}"):
run(f"sudo rm -rf /tmp/tests/{self._name}", shell=True)
copytree(f"./examples/{self._name}/tmp/tests/{self._name}")
except:
log(
"TEST",
"",
"exception while running Test._setup_test()\n" + format_exc(),
f"exception while running Test._setup_test()\n{format_exc()}",
)
return False
return True
@ -87,12 +83,12 @@ class Test(ABC):
# called after running the tests
def _cleanup_test(self):
try:
run("sudo rm -rf /tmp/tests/" + self._name, shell=True)
run(f"sudo rm -rf /tmp/tests/{self._name}", shell=True)
except:
log(
"TEST",
"",
"exception while running Test._cleanup_test()\n" + format_exc(),
f"exception while running Test._cleanup_test()\n{format_exc()}",
)
return False
return True
@ -103,7 +99,7 @@ class Test(ABC):
self._debug_fail()
return False
if self.__delay != 0:
log("TEST", "", "delay is set, sleeping " + str(self.__delay) + "s")
log("TEST", "", f"delay is set, sleeping {self.__delay}s")
sleep(self.__delay)
start = time()
while time() < start + self._timeout:
@ -119,13 +115,13 @@ class Test(ABC):
log(
"TEST",
"",
"success (" + elapsed + "/" + str(self._timeout) + "s)",
f"success ({elapsed}/{self._timeout}s)",
)
return self._cleanup_test()
log("TEST", "⚠️", "tests not ok, retrying in 1s ...")
self._debug_fail()
self._cleanup_test()
log("TEST", "", "failed (timeout = " + str(self._timeout) + "s)")
log("TEST", "", f"failed (timeout = {self._timeout}s)")
return False
# run a single test
@ -143,9 +139,8 @@ class Test(ABC):
r = get(ex_url, timeout=10, verify=False)
return test["status"] == r.status_code
except:
# log("TEST", "❌", "exception while running test of type " + test["type"] + " on URL " + ex_url + "\n" + format_exc())
return False
raise (Exception("unknow test type " + test["type"]))
raise (Exception(f"unknown test type {test['type']}"))
# called when tests fail : typical case is to show logs
def _debug_fail(self):
@ -159,7 +154,7 @@ class Test(ABC):
with open(path, "w") as f:
f.write(content)
except:
log("TEST", "⚠️", "can't replace file " + path + " : " + format_exc())
log("TEST", "⚠️", f"can't replace file {path} : {format_exc()}")
def replace_in_files(path, old, new):
for root, dirs, files in walk(path):
@ -174,6 +169,6 @@ class Test(ABC):
if full_path != new_path:
rename(full_path, new_path)
def random_string(length) :
def random_string(length):
charset = ascii_lowercase + digits
return ''.join(choice(charset) for i in range(length))
return "".join(choice(charset) for i in range(length))