mirror of https://github.com/bunkerity/bunkerized-nginx
synced 2023-12-13 21:30:18 +01:00

Update the database and the core plugins accordingly

commit 7d743e1981 (parent 9140dc3244)
17 changed files with 475 additions and 51 deletions
@@ -8,8 +8,11 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from requests import get

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash

@@ -48,6 +51,13 @@ def check_line(kind, line):


 logger = setup_logger("BLACKLIST", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -131,12 +141,12 @@ try:
         for url in urls_list:
             try:
                 logger.info(f"Downloading blacklist data from {url} ...")
-                resp = get(url, stream=True)
+                resp = get(url)
                 if resp.status_code != 200:
                     continue
                 i = 0
                 with open(f"/opt/bunkerweb/tmp/blacklist/{kind}.list", "w") as f:
-                    for line in resp.iter_lines(decode_unicode=True):
+                    for line in resp.content.decode("utf-8").splitlines():
                         line = line.strip()
                         if kind != "USER_AGENT":
                             line = line.strip().split(" ")[0]

@@ -168,6 +178,16 @@ try:
             logger.error(f"Error while caching blacklist : {err}")
             status = 2
+        if status != 2:
+            # Update db
+            err = db.update_job_cache(
+                "blacklist-download",
+                None,
+                f"{kind}.list",
+                resp.content,
+                checksum=new_hash,
+            )
+            if err:
+                logger.warning(f"Couldn't update db cache: {err}")
         status = 1
 except:
     status = 2
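All of the list-based jobs in this commit gain the same two additions: a module-level Database handle and, after a successful cache write, a db.update_job_cache() call that mirrors the downloaded artifact into the database. A minimal sketch of the shared pattern, runnable only inside a BunkerWeb image; the job name "example-job" and the source URL are hypothetical:

from os import getenv
from requests import get

from Database import Database
from jobs import file_hash
from logger import setup_logger

logger = setup_logger("EXAMPLE", getenv("LOG_LEVEL", "INFO"))
db = Database(
    logger,
    sqlalchemy_string=getenv("DATABASE_URI", None),
    bw_integration="Kubernetes" if getenv("KUBERNETES_MODE", "no") == "yes" else "Cluster",
)

# Download a list, normalize it line by line, then hash the temp file
resp = get("https://example.com/ips.list")  # hypothetical source URL
with open("/opt/bunkerweb/tmp/example.list", "w") as f:
    for line in resp.content.decode("utf-8").splitlines():
        f.write(line.strip() + "\n")
new_hash = file_hash("/opt/bunkerweb/tmp/example.list")

# New in this commit: mirror the artifact into the Job_cache table so other
# instances can reuse it; service_id=None marks the entry as global
err = db.update_job_cache("example-job", None, "example.list", resp.content, checksum=new_hash)
if err:
    logger.warning(f"Couldn't update db cache: {err}")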
@@ -7,13 +7,22 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")
 sys_path.append("/opt/bunkerweb/core/bunkernet/jobs")

+from bunkernet import data
+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, file_hash, is_cached_file
-from bunkernet import data

 logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -97,6 +106,18 @@ try:
     if not cached:
         logger.error(f"Error while caching BunkerNet data : {err}")
         _exit(2)

+    # Update db
+    err = db.update_job_cache(
+        "bunkernet-data",
+        None,
+        "ip.list",
+        "\n".join(data["data"]).encode("utf-8"),
+        checksum=new_hash,
+    )
+    if err:
+        logger.warning(f"Couldn't update db cache: {err}")
+
     logger.info("Successfully saved BunkerNet data")

     status = 1
@@ -8,12 +8,21 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")
 sys_path.append("/opt/bunkerweb/core/bunkernet/jobs")

-from logger import setup_logger
 from bunkernet import register, ping, get_id
+from Database import Database
+from logger import setup_logger

 logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -110,6 +119,16 @@ try:
         if not isfile("/opt/bunkerweb/cache/bunkernet/instance.id"):
             with open("/opt/bunkerweb/cache/bunkernet/instance.id", "w") as f:
                 f.write(bunkernet_id)
+
+        # Update db
+        err = db.update_job_cache(
+            "bunkernet-register",
+            None,
+            "instance.id",
+            f"{bunkernet_id}".encode("utf-8"),
+        )
+        if err:
+            logger.warning(f"Couldn't update db cache: {err}")
     else:
         logger.error("Connectivity with BunkerWeb failed ...")
         status = 2
@@ -4,7 +4,7 @@ from os import getenv

 def request(method, url, _id=None):
     data = {"integration": get_integration(), "version": get_version()}
-    headers = {"User-Agent": "BunkerWeb/" + get_version()}
+    headers = {"User-Agent": f"BunkerWeb/{get_version()}"}
     if _id is not None:
         data["id"] = _id
     try:
@@ -7,17 +7,26 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

+from Database import Database
 from jobs import file_hash
 from logger import setup_logger

 logger = setup_logger("CUSTOM-CERT", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)


-def check_cert(cert_path):
+def check_cert(cert_path, first_server: str = None):
     try:
         cache_path = (
-            "/opt/bunkerweb/cache/customcert/" + cert_path.replace("/", "_") + ".hash"
+            f"/opt/bunkerweb/cache/customcert/{cert_path.replace('/', '_')}.hash"
         )
         current_hash = file_hash(cert_path)
         if not isfile(cache_path):

@@ -28,6 +37,15 @@ def check_cert(cert_path):
             return False
         with open(cache_path, "w") as f:
             f.write(current_hash)
+        err = db.update_job_cache(
+            "custom-cert",
+            first_server,
+            f"{cert_path.replace('/', '_')}.hash",
+            current_hash.encode("utf-8"),
+            checksum=current_hash,
+        )
+        if err:
+            logger.warning(f"Couldn't update db cache: {err}")
         return True
     except:
         logger.error(

@@ -56,7 +74,7 @@ try:
             logger.info(
                 f"Checking if certificate {cert_path} changed ...",
             )
-            need_reload = check_cert(cert_path)
+            need_reload = check_cert(cert_path, first_server)
             if need_reload:
                 logger.info(
                     f"Detected change for certificate {cert_path}",
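Note the first_server parameter threaded through check_cert(): update_job_cache() keys its rows on (job_name, service_id, file_name), so passing the server name scopes the cache entry to one service, while None keeps it global. A sketch with hypothetical values, assuming a db handle built as above:

data = b"0123abcd"  # hypothetical hash bytes
h = "0123abcd"      # hypothetical checksum

# Global artifact, one row shared by every service
db.update_job_cache("blacklist-download", None, "IP.list", data, checksum=h)

# Service-scoped artifact, one row per server name
db.update_job_cache("custom-cert", "www.example.com", "cert.hash", data, checksum=h)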
@@ -8,8 +8,11 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from requests import get

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash

@@ -48,6 +51,13 @@ def check_line(kind, line):


 logger = setup_logger("GREYLIST", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -81,11 +91,6 @@ try:
         "ASN": True,
         "USER_AGENT": True,
         "URI": True,
-        "IGNORE_IP": True,
-        "IGNORE_RDNS": True,
-        "IGNORE_ASN": True,
-        "IGNORE_USER_AGENT": True,
-        "IGNORE_URI": True,
     }
     all_fresh = True
     for kind in kinds_fresh:

@@ -109,11 +114,6 @@ try:
         "ASN": [],
         "USER_AGENT": [],
         "URI": [],
-        "IGNORE_IP": [],
-        "IGNORE_RDNS": [],
-        "IGNORE_ASN": [],
-        "IGNORE_USER_AGENT": [],
-        "IGNORE_URI": [],
     }
     for kind in urls:
         for url in getenv(f"GREYLIST_{kind}_URLS", "").split(" "):

@@ -128,12 +128,12 @@ try:
         for url in urls_list:
             try:
                 logger.info(f"Downloading greylist data from {url} ...")
-                resp = get(url, stream=True)
+                resp = get(url)
                 if resp.status_code != 200:
                     continue
                 i = 0
                 with open(f"/opt/bunkerweb/tmp/greylist/{kind}.list", "w") as f:
-                    for line in resp.iter_lines(decode_unicode=True):
+                    for line in resp.content.decode("utf-8").splitlines():
                         line = line.strip()
                         if kind != "USER_AGENT":
                             line = line.strip().split(" ")[0]

@@ -165,6 +165,16 @@ try:
             logger.error(f"Error while caching greylist : {err}")
             status = 2
+        if status != 2:
+            # Update db
+            err = db.update_job_cache(
+                "greylist-download",
+                None,
+                f"{kind}.list",
+                resp.content,
+                checksum=new_hash,
+            )
+            if err:
+                logger.warning(f"Couldn't update db cache: {err}")
         status = 1
 except:
     status = 2
@@ -8,13 +8,23 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from maxminddb import open_database
 from requests import get

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, file_hash, is_cached_file

 logger = setup_logger("JOBS", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -59,6 +69,13 @@ try:
         logger.error(f"Error while caching mmdb file : {err}")
         _exit(2)

+    # Update db
+    err = db.update_job_cache(
+        "mmdb-asn", None, "asn.mmdb", resp.content, checksum=new_hash
+    )
+    if err:
+        logger.warning(f"Couldn't update db cache: {err}")
+
     # Success
     logger.info(f"Downloaded new mmdb from {mmdb_url}")
@@ -8,13 +8,23 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from requests import get
 from maxminddb import open_database

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, file_hash, is_cached_file

 logger = setup_logger("JOBS", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -61,6 +71,13 @@ try:
         logger.error(f"Error while caching mmdb file : {err}")
         _exit(2)

+    # Update db
+    err = db.update_job_cache(
+        "mmdb-country", None, "country.mmdb", resp.content, checksum=new_hash
+    )
+    if err:
+        logger.warning(f"Couldn't update db cache: {err}")
+
     # Success
     logger.info(f"Downloaded new mmdb from {mmdb_url}")
@@ -7,8 +7,11 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from requests import get

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, file_hash, is_cached_file

@@ -30,6 +33,13 @@ def check_line(line):


 logger = setup_logger("REALIP", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -39,11 +49,11 @@ try:
     # Multisite case
     if getenv("MULTISITE") == "yes":
         for first_server in getenv("SERVER_NAME").split(" "):
-            if getenv(first_server + "_USE_REALIP", getenv("USE_REALIP")) == "yes":
+            if getenv(first_server + "_USE_REAL_IP", getenv("USE_REAL_IP")) == "yes":
                 blacklist_activated = True
                 break
     # Singlesite case
-    elif getenv("USE_REALIP") == "yes":
+    elif getenv("USE_REAL_IP") == "yes":
         blacklist_activated = True
     if not blacklist_activated:
         logger.info("RealIP is not activated, skipping download...")

@@ -65,7 +75,7 @@ try:

     # Download and write data to temp file
     i = 0
-    f = open("/opt/bunkerweb/tmp/realip-combined.list", "w")
+    content = ""
     for url in urls:
         try:
             logger.info(f"Downloading RealIP list from {url} ...")

@@ -78,14 +88,16 @@ try:
                     continue
                 ok, data = check_line(line)
                 if ok:
-                    f.write(data + "\n")
+                    content += f"{data}\n"
                     i += 1
         except:
             status = 2
             logger.error(
                 f"Exception while getting RealIP list from {url} :\n{format_exc()}"
             )
-    f.close()
+
+    with open("/opt/bunkerweb/tmp/realip-combined.list", "w") as f:
+        f.write(content)

     # Check if file has changed
     new_hash = file_hash("/opt/bunkerweb/tmp/realip-combined.list")

@@ -104,6 +116,17 @@ try:
         logger.error(f"Error while caching list : {err}")
         _exit(2)

+    # Update db
+    err = db.update_job_cache(
+        "realip-download",
+        None,
+        "combined.list",
+        content.encode("utf-8"),
+        checksum=new_hash,
+    )
+    if err:
+        logger.warning(f"Couldn't update db cache: {err}")
+
     logger.info(f"Downloaded {i} trusted IP/net")

     status = 1
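The realip hunks swap incremental f.write() calls for an in-memory content string because the same bytes are now needed twice: once for the temp file and once for the new database mirror. A self-contained illustration with stand-in data:

content = ""
for line in ["1.2.3.0/24", "5.6.7.8"]:  # stand-in for the per-URL download loop
    content += f"{line}\n"

with open("/tmp/realip-combined.list", "w") as f:
    f.write(content)

# The buffered string is then reused for the database copy, as in the hunk above:
# db.update_job_cache("realip-download", None, "combined.list",
#                     content.encode("utf-8"), checksum=new_hash)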
@@ -4,14 +4,25 @@ from os import getenv, makedirs
 from os.path import isfile
 from subprocess import DEVNULL, STDOUT, run
 from sys import exit as sys_exit, path as sys_path
+from tarfile import open as taropen, TarInfo
+from io import BytesIO
 from traceback import format_exc

 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

+from Database import Database
 from logger import setup_logger

 logger = setup_logger("self-signed", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)


 def generate_cert(first_server, days, subj):

@@ -31,6 +42,28 @@ def generate_cert(first_server, days, subj):
     if proc.returncode != 0:
         logger.error(f"Self-signed certificate generation failed for {first_server}")
         return False, 2
+
+    # Update db
+    with open(f"/opt/bunkerweb/cache/selfsigned/{first_server}.key", "r") as f:
+        key_data = f.read().encode("utf-8")
+
+    err = db.update_job_cache(
+        "self-signed", first_server, f"{first_server}.key", key_data
+    )
+
+    if err:
+        logger.warning(f"Couldn't update db cache for {first_server}.key file: {err}")
+
+    with open(f"/opt/bunkerweb/cache/selfsigned/{first_server}.pem", "r") as f:
+        pem_data = f.read().encode("utf-8")
+
+    err = db.update_job_cache(
+        "self-signed", first_server, f"{first_server}.pem", pem_data
+    )
+
+    if err:
+        logger.warning(f"Couldn't update db cache for {first_server}.pem file: {err}")
+
     logger.info(f"Successfully generated self-signed certificate for {first_server}")
     return True, 1
@@ -8,8 +8,11 @@ from traceback import format_exc
 sys_path.append("/opt/bunkerweb/deps/python")
 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

 from requests import get

+from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash

@@ -48,6 +51,13 @@ def check_line(kind, line):


 logger = setup_logger("WHITELIST", getenv("LOG_LEVEL", "INFO"))
+db = Database(
+    logger,
+    sqlalchemy_string=getenv("DATABASE_URI", None),
+    bw_integration="Kubernetes"
+    if getenv("KUBERNETES_MODE", "no") == "yes"
+    else "Cluster",
+)
 status = 0

 try:

@@ -115,12 +125,12 @@ try:
         for url in urls_list:
             try:
                 logger.info(f"Downloading whitelist data from {url} ...")
-                resp = get(url, stream=True)
+                resp = get(url)
                 if resp.status_code != 200:
                     continue
                 i = 0
                 with open(f"/opt/bunkerweb/tmp/whitelist/{kind}.list", "w") as f:
-                    for line in resp.iter_lines(decode_unicode=True):
+                    for line in resp.content.decode("utf-8").splitlines():
                         line = line.strip()
                         if kind != "USER_AGENT":
                             line = line.strip().split(" ")[0]

@@ -152,6 +162,16 @@ try:
             logger.error(f"Error while caching whitelist : {err}")
             status = 2
+        if status != 2:
+            # Update db
+            err = db.update_job_cache(
+                "whitelist-download",
+                None,
+                f"{kind}.list",
+                resp.content,
+                checksum=new_hash,
+            )
+            if err:
+                logger.warning(f"Couldn't update db cache: {err}")
         status = 1
 except:
     status = 2
@@ -120,6 +120,7 @@ class Configurator:
                 "NGINX_VERSION",
                 "NJS_VERSION",
                 "PKG_RELEASE",
+                "DOCKER_HOST",
             ):
                 self.__logger.warning(f"Ignoring variable {variable} : {err}")
         # Expand variables to each sites if MULTISITE=yes and if not present
db/Database.py (159 changed lines)
@@ -1,10 +1,12 @@
 from contextlib import contextmanager
 from copy import deepcopy
+from datetime import datetime
 from logging import INFO, WARNING, Logger, getLogger
-from os import _exit, environ, getenv, path
+from os import _exit, getenv, listdir, path
+from os.path import exists
 from re import search
 from typing import Any, Dict, List, Optional, Tuple
-from sqlalchemy import create_engine, inspect
+from sqlalchemy import create_engine, inspect, text
 from sqlalchemy.exc import OperationalError, ProgrammingError, SQLAlchemyError
 from sqlalchemy.orm import scoped_session, sessionmaker
 from time import sleep

@@ -22,6 +24,9 @@ class Database:
     ) -> None:
         """Initialize the database"""
         self.__logger = logger
         self.__sql_session = None
         self.__sql_engine = None

+        getLogger("sqlalchemy.engine").setLevel(
+            logger.level if logger.level != INFO else WARNING
+        )

@@ -61,9 +66,7 @@ class Database:
                 break

         if not sqlalchemy_string:
-            sqlalchemy_string = environ.get(
-                "DATABASE_URI", "sqlite:////data/db.sqlite3"
-            )
+            sqlalchemy_string = getenv("DATABASE_URI", "sqlite:////data/db.sqlite3")

         if sqlalchemy_string.startswith("sqlite"):
             if not path.exists(sqlalchemy_string.split("///")[1]):

@@ -104,8 +107,11 @@ class Database:

     def __del__(self) -> None:
         """Close the database"""
-        self.__sql_session.remove()
-        self.__sql_engine.dispose()
+        if self.__sql_session:
+            self.__sql_session.remove()
+
+        if self.__sql_engine:
+            self.__sql_engine.dispose()

     @contextmanager
     def __db_session(self):

@@ -173,8 +179,8 @@ class Database:

         Base.metadata.create_all(self.__sql_engine, checkfirst=True)

-        to_put = []
         with self.__db_session() as session:
+            to_put = []
             for plugins in default_settings:
                 if not isinstance(plugins, list):
                     plugins = [plugins]

@@ -218,6 +224,28 @@ class Database:
                 for job in jobs:
                     to_put.append(Jobs(plugin_id=plugin["id"], **job))

+                if exists(f"/opt/bunkerweb/core/{plugin['id']}/ui"):
+                    if {"template.html", "actions.py"}.issubset(
+                        listdir(f"/opt/bunkerweb/core/{plugin['id']}/ui")
+                    ):
+                        with open(
+                            f"/opt/bunkerweb/core/{plugin['id']}/ui/template.html",
+                            "r",
+                        ) as file:
+                            template = file.read().encode("utf-8")
+                        with open(
+                            f"/opt/bunkerweb/core/{plugin['id']}/ui/actions.py", "r"
+                        ) as file:
+                            actions = file.read().encode("utf-8")
+
+                        to_put.append(
+                            Plugin_pages(
+                                plugin_id=plugin["id"],
+                                template_file=template,
+                                actions_file=actions,
+                            )
+                        )
+
             try:
                 session.add_all(to_put)
                 session.commit()

@@ -446,3 +474,118 @@ class Database:
                 }
                 for custom_config in session.query(Custom_configs).all()
             ]
+
+    def update_job(self, plugin_id: str, job_name: str) -> str:
+        """Update the job last_run in the database"""
+        with self.__db_session() as session:
+            job = (
+                session.query(Jobs)
+                .filter_by(plugin_id=plugin_id, name=job_name)
+                .first()
+            )
+
+            if job is None:
+                return "Job not found"
+
+            job.last_run = datetime.now()
+
+            try:
+                session.commit()
+            except BaseException:
+                return format_exc()
+
+        return ""
+
+    def update_job_cache(
+        self,
+        job_name: str,
+        service_id: Optional[str],
+        file_name: str,
+        data: bytes,
+        *,
+        checksum: str = None,
+    ) -> str:
+        """Update the plugin cache in the database"""
+        with self.__db_session() as session:
+            cache = (
+                session.query(Job_cache)
+                .filter_by(
+                    job_name=job_name, service_id=service_id, file_name=file_name
+                )
+                .first()
+            )
+
+            if cache is None:
+                session.add(
+                    Job_cache(
+                        job_name=job_name,
+                        service_id=service_id,
+                        file_name=file_name,
+                        data=data,
+                        last_update=datetime.now(),
+                        checksum=checksum,
+                    )
+                )
+            else:
+                cache.data = data
+                cache.last_update = datetime.now()
+                cache.checksum = checksum
+
+            try:
+                session.commit()
+            except BaseException:
+                return format_exc()
+
+        return ""
+
+    def update_plugins(self, plugins: List[Dict[str, Any]]) -> str:
+        """Add a new plugin to the database"""
+        to_put = []
+        with self.__db_session() as session:
+            # Delete all old plugins
+            session.execute(Plugins.__table__.delete().where(Plugins.id != "default"))
+
+            for plugin in plugins:
+                settings = plugin.pop("settings", {})
+                jobs = plugin.pop("jobs", [])
+                pages = plugin.pop("pages", [])
+
+                to_put.append(Plugins(**plugin))
+
+                for setting, value in settings.items():
+                    value.update(
+                        {
+                            "plugin_id": plugin["id"],
+                            "name": value["id"],
+                            "id": setting,
+                        }
+                    )
+
+                    for select in value.pop("select", []):
+                        to_put.append(Selects(setting_id=value["id"], value=select))
+
+                    to_put.append(
+                        Settings(
+                            **value,
+                        )
+                    )
+
+                for job in jobs:
+                    to_put.append(Jobs(plugin_id=plugin["id"], **job))
+
+                for page in pages:
+                    to_put.append(
+                        Plugin_pages(
+                            plugin_id=plugin["id"],
+                            template_file=page["template_file"],
+                            actions_file=page["actions_file"],
+                        )
+                    )
+
+            try:
+                session.add_all(to_put)
+                session.commit()
+            except BaseException:
+                return format_exc()
+
+        return ""
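The three methods appended to Database.py are the write-side API the jobs above consume: update_job() stamps Jobs.last_run, update_job_cache() upserts one Job_cache row, and update_plugins() replaces all non-default plugins. A usage sketch, runnable only where the bunkerweb db/ and utils/ modules are importable; the plugin/job ids and checksum below are hypothetical:

from os import getenv

from Database import Database
from logger import setup_logger

logger = setup_logger("EXAMPLE", getenv("LOG_LEVEL", "INFO"))
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None))

# Record a successful run; returns "" on success, an error string otherwise
err = db.update_job("blacklist", "blacklist-download")  # hypothetical plugin/job ids

# Upsert a cached artifact; service_id=None means the entry is global
err = db.update_job_cache(
    "blacklist-download",
    None,
    "IP.list",
    b"1.2.3.4\n",
    checksum="0123abcd",  # hypothetical checksum
)
if err:
    logger.warning(f"Couldn't update db cache: {err}")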
db/model.py (68 changed lines)
@@ -11,6 +11,7 @@ from sqlalchemy import (
     String,
 )
 from sqlalchemy.orm import declarative_base, relationship
+from sqlalchemy.schema import UniqueConstraint

 Base = declarative_base()
 CONTEXTS_ENUM = Enum("global", "multisite")

@@ -47,14 +48,14 @@ class Plugins(Base):
     name = Column(String(128), nullable=False)
     description = Column(String(255), nullable=False)
     version = Column(String(32), nullable=False)
-    cache = Column(LargeBinary(length=(2**32) - 1), nullable=True)
-    cache_last_update = Column(DateTime, nullable=True)
-    cache_checksum = Column(String(255), nullable=True)

     settings = relationship(
         "Settings", back_populates="plugin", cascade="all, delete, delete-orphan"
     )
-    jobs = relationship("Jobs", back_populates="plugin", cascade="all, delete")
+    jobs = relationship(
+        "Jobs", back_populates="plugin", cascade="all, delete, delete-orphan"
+    )
+    pages = relationship("Plugin_pages", back_populates="plugin", cascade="all, delete")


 class Settings(Base):

@@ -111,6 +112,9 @@ class Services(Base):
     custom_configs = relationship(
         "Custom_configs", back_populates="service", cascade="all, delete"
     )
+    jobs_cache = relationship(
+        "Job_cache", back_populates="service", cascade="all, delete"
+    )


 class Services_settings(Base):

@@ -136,18 +140,66 @@ class Services_settings(Base):
 class Jobs(Base):
     __tablename__ = "jobs"

-    name = Column(String(128), primary_key=True)
     plugin_id = Column(
         String(64),
         ForeignKey("plugins.id", onupdate="CASCADE", ondelete="CASCADE"),
         primary_key=True,
     )
+    name = Column(String(128), primary_key=True)
     file = Column(String(255), nullable=False)
     every = Column(SCHEDULES_ENUM, nullable=False)
     reload = Column(Boolean, nullable=False)
+    last_run = Column(DateTime, nullable=True)

     plugin = relationship("Plugins", back_populates="jobs")
+    cache = relationship("Job_cache", back_populates="job", cascade="all, delete")
+
+
+class Plugin_pages(Base):
+    __tablename__ = "plugin_pages"
+
+    id = Column(Integer, primary_key=True)
+    plugin_id = Column(
+        String(64),
+        ForeignKey("plugins.id", onupdate="CASCADE", ondelete="CASCADE"),
+        nullable=False,
+    )
+    template_file = Column(LargeBinary(length=(2**32) - 1), nullable=False)
+    actions_file = Column(LargeBinary(length=(2**32) - 1), nullable=False)
+
+    plugin = relationship("Plugins", back_populates="pages")
+
+
+class Job_cache(Base):
+    __tablename__ = "job_cache"
+
+    id = Column(Integer, primary_key=True)
+    job_name = Column(
+        String(128),
+        ForeignKey("jobs.name", onupdate="CASCADE", ondelete="CASCADE"),
+        nullable=False,
+    )
+    service_id = Column(
+        String(64),
+        ForeignKey("services.id", onupdate="CASCADE", ondelete="CASCADE"),
+        nullable=True,
+    )
+    file_name = Column(
+        String(255),
+        nullable=False,
+    )
+    data = Column(LargeBinary(length=(2**32) - 1), nullable=True)
+    last_update = Column(DateTime, nullable=True)
+    checksum = Column(String(255), nullable=True)
+
+    job = relationship("Jobs", back_populates="cache")
+    service = relationship("Services", back_populates="jobs_cache")
+
+    __table_args__ = (
+        UniqueConstraint(
+            "job_name", "service_id", "file_name", name="_job_cache_uniqueness"
+        ),
+    )


 class Custom_configs(Base):

@@ -166,6 +218,12 @@ class Custom_configs(Base):
     service = relationship("Services", back_populates="custom_configs")

+    __table_args__ = (
+        UniqueConstraint(
+            "service_id", "type", "name", name="_custom_configs_uniqueness"
+        ),
+    )
+

 class Selects(Base):
     __tablename__ = "selects"
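A quick way to see the new schema in action is an in-memory SQLite database: the _job_cache_uniqueness constraint added above makes (job_name, service_id, file_name) the logical key of a cache row. A sketch with hypothetical values, assuming db/model.py is importable as model (SQLite does not enforce the foreign keys by default, which keeps the example short):

from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from model import Base, Job_cache

engine = create_engine("sqlite://")  # throwaway in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# One row per (job_name, service_id, file_name); a second row with the same
# triple would violate _job_cache_uniqueness
session.add(
    Job_cache(
        job_name="blacklist-download",
        service_id=None,  # global, not tied to one service
        file_name="IP.list",
        data=b"1.2.3.4\n",
        last_update=datetime.now(),
        checksum="0123abcd",  # hypothetical checksum
    )
)
session.commit()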
@@ -24,7 +24,7 @@ COPY utils /opt/bunkerweb/utils
 COPY VERSION /opt/bunkerweb/VERSION

 # Add scheduler user, install runtime dependencies, create data folders and set permissions
-RUN apk add --no-cache bash git && \
+RUN apk add --no-cache bash libgcc libstdc++ openssl git && \
     ln -s /usr/local/bin/python3 /usr/bin/python3 && \
     addgroup -g 101 scheduler && \
     adduser -h /var/cache/nginx -g scheduler -s /bin/sh -G scheduler -D -H -u 101 scheduler && \
@@ -12,8 +12,10 @@ from schedule import (
 from sys import path as sys_path
 from traceback import format_exc


 sys_path.append("/opt/bunkerweb/utils")
+sys_path.append("/opt/bunkerweb/db")

+from Database import Database
 from logger import setup_logger
 from ApiCaller import ApiCaller

@@ -26,6 +28,7 @@ class JobScheduler(ApiCaller):
         apis=[],
         logger: Logger = setup_logger("Scheduler", environ.get("LOG_LEVEL", "INFO")),
         auto: bool = False,
+        bw_integration: str = "Local",
     ):
         super().__init__(apis)

@@ -33,11 +36,16 @@ class JobScheduler(ApiCaller):
             self.auto_setup()

         self.__logger = logger
+        self.__db = Database(
+            self.__logger,
+            sqlalchemy_string=env.get("DATABASE_URI", None),
+            bw_integration=bw_integration,
+        )
         self.__env = env
         with open("/tmp/autoconf.env", "w") as f:
             for k, v in self.__env.items():
                 f.write(f"{k}={v}\n")
-        # self.__env.update(environ)
+        self.__env.update(environ)
         self.__jobs = self.__get_jobs()
         self.__lock = lock

@@ -130,9 +138,17 @@ class JobScheduler(ApiCaller):
             )
             success = False
         elif success and proc.returncode < 2:
-            self.__logger.info(
-                f"Successfuly executed job {name} from plugin {plugin}",
-            )
+            err = self.__db.update_job(plugin, name)
+
+            if not err:
+                self.__logger.info(
+                    f"Successfuly executed job {name} from plugin {plugin} and updated database",
+                )
+            else:
+                self.__logger.warning(
+                    f"Successfuly executed job {name} from plugin {plugin} but failed to update database: {err}",
+                )

         return success

     def setup(self):
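The new bw_integration parameter lets the caller decide which integration the scheduler's Database handle should assume instead of re-deriving it. The call below mirrors the one the main script makes; the env contents are illustrative, and the constructor only works inside a BunkerWeb container (it writes /tmp/autoconf.env and scans the plugin directories):

from logger import setup_logger
from JobScheduler import JobScheduler

logger = setup_logger("Scheduler", "INFO")
scheduler = JobScheduler(
    env={"DATABASE_URI": "sqlite:////data/db.sqlite3"},  # illustrative env
    apis=[],
    logger=logger,
    auto=False,
    bw_integration="Cluster",  # or "Kubernetes"/"Local", as in the main script below
)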
@@ -1,6 +1,7 @@
 #!/usr/bin/python3

 from argparse import ArgumentParser
+from copy import deepcopy
 from os import _exit, environ, getenv, getpid, path, remove
 from os.path import exists
 from signal import SIGINT, SIGTERM, SIGUSR1, SIGUSR2, signal

@@ -102,6 +103,8 @@ if __name__ == "__main__":

         logger.info("Scheduler started ...")

+        bw_integration = "Local"
+
         if args.variables:
             logger.info(f"Variables : {args.variables}")

@@ -109,12 +112,14 @@ if __name__ == "__main__":
             env = dotenv_values(args.variables)
         else:
             # Read from database
+            bw_integration = (
+                "Kubernetes" if getenv("KUBERNETES_MODE", "no") == "yes" else "Cluster"
+            )
+
             db = Database(
                 logger,
                 sqlalchemy_string=getenv("DATABASE_URI", None),
-                bw_integration="Kubernetes"
-                if getenv("KUBERNETES_MODE", "no") == "yes"
-                else "Cluster",
+                bw_integration=bw_integration,
             )

             while not db.is_initialized():

@@ -125,7 +130,7 @@ if __name__ == "__main__":

         env = db.get_config()
         while not db.is_first_config_saved() or not env:
-            logger.info(
+            logger.warning(
                 "Database doesn't have any config saved yet, retrying in 5s ...",
             )
             sleep(3)

@@ -148,10 +153,11 @@ if __name__ == "__main__":
     while True:
         # Instantiate scheduler
         scheduler = JobScheduler(
-            env=env,
+            env=deepcopy(env),
             apis=[],
             logger=logger,
             auto=not args.variables,
+            bw_integration=bw_integration,
         )

         # Only run jobs once

@@ -178,6 +184,8 @@ if __name__ == "__main__":
             )
             if env != tmp_env:
                 logger.info("Config changed, reloading ...")
+                logger.debug(f"{tmp_env=}")
+                logger.debug(f"{env=}")
                 env = tmp_env
                 run_once = True
                 break
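The switch to env=deepcopy(env) pairs with the JobScheduler change that un-comments self.__env.update(environ): the scheduler now mutates the mapping it receives, so without the copy the later env != tmp_env reload check would compare against a dict already merged with the process environment and could trigger spurious reloads. A self-contained illustration of the aliasing hazard:

from copy import deepcopy
from os import environ

env = {"USE_BLACKLIST": "yes"}
aliased = env            # what passing env directly would do
aliased.update(environ)  # JobScheduler now performs this merge
assert env != {"USE_BLACKLIST": "yes"}  # the caller's dict was mutated too

env = {"USE_BLACKLIST": "yes"}
copied = deepcopy(env)   # what main.py now passes
copied.update(environ)
assert env == {"USE_BLACKLIST": "yes"}  # the caller's dict stays pristine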