fix double .conf suffix in custom confs, migrate /etc/letsencrypt to /var/cache/letsencrypt, fix BunkerNet jobs and Lua code, and fix reload for jobs

This commit is contained in:
florian 2023-05-04 21:26:38 +02:00
parent 773874154d
commit ac94e5072a
No known key found for this signature in database
GPG Key ID: 3D80806F12602A7C
9 changed files with 132 additions and 74 deletions

View File

@@ -218,8 +218,6 @@ function bunkernet:request(method, url, data)
}
})
httpc:close()
self.logger:log(ngx.WARN, cjson.encode(all_data))
self.logger:log(ngx.WARN, "BunkerWeb/" .. self.version)
if not res then
return false, "error while sending request : " .. err, nil, nil
end

View File

@@ -18,12 +18,13 @@ sys_path.extend(
from bunkernet import data
from Database import Database
from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash, is_cached_file
from jobs import cache_file, cache_hash, file_hash, is_cached_file, get_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
status = 0
exit_status = 0
try:
# Check if at least a server has BunkerNet activated
bunkernet_activated = False
# Multisite case
@@ -43,17 +44,27 @@ try:
logger.info("BunkerNet is not activated, skipping download...")
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb").mkdir(parents=True, exist_ok=True)
# Create empty file in case it doesn't exist
if not Path("/var/cache/bunkerweb/bunkernet/ip.list").is_file():
Path("/var/cache/bunkerweb/bunkernet/ip.list").write_text("")
# Get ID from cache
bunkernet_id = None
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
# Create empty file in case it doesn't exist
if not Path("/var/tmp/bunkerweb/bunkernet-ip.list").is_file():
Path("/var/tmp/bunkerweb/bunkernet-ip.list").write_bytes(b"")
if db :
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(bunkernet_id.decode())
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Check if ID is present
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
@@ -63,11 +74,15 @@ try:
_exit(2)
# Don't go further if the cache is fresh
if is_cached_file("/var/cache/bunkerweb/bunkernet/ip.list", "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
if db:
if is_cached_file("/var/cache/bunkerweb/bunkernet/ip.list", "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
exit_status = 1
# Download data
logger.info("Downloading BunkerNet data ...")
@@ -94,13 +109,15 @@ try:
logger.error(
f"Received invalid data from BunkerNet API while sending db request : {data}",
)
_exit(1)
_exit(2)
if data["result"] != "ok":
logger.error(
f"Received error from BunkerNet API while sending db request : {data['data']}, removing instance ID",
)
_exit(2)
logger.info("Successfully downloaded data from BunkerNet API")
# Writing data to file
@@ -130,10 +147,10 @@ try:
logger.info("Successfully saved BunkerNet data")
status = 1
exit_status = 1
except:
status = 2
exit_status = 2
logger.error(f"Exception while running bunkernet-data.py :\n{format_exc()}")
sys_exit(status)
sys_exit(exit_status)
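
The change above introduces a pattern reused by the other jobs in this commit: look the cached file up in the database first, then seed the local cache directory from it. A minimal sketch of that lookup, assuming the job's sys_path setup shown above; the seed_id_from_db name and the dest default are illustrative, and get_file_in_db is the helper added to jobs.py at the end of this diff:

from pathlib import Path

from jobs import get_file_in_db

def seed_id_from_db(db, logger, dest="/var/cache/bunkerweb/bunkernet/instance.id") -> None:
    """Copy the BunkerNet instance ID from the db job cache to a local cache file.
    get_file_in_db() returns the raw bytes, or False when nothing is cached."""
    if not db:
        return
    bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
    if bunkernet_id:
        # the db stores bytes, the local cache file is plain text
        Path(dest).write_text(bunkernet_id.decode())
        logger.info("Successfully retrieved BunkerNet ID from db cache")
    else:
        logger.info("No BunkerNet ID found in db cache")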

View File

@@ -19,9 +19,10 @@ sys_path.extend(
from bunkernet import register, ping, get_id
from Database import Database
from logger import setup_logger
from jobs import get_file_in_db, set_file_in_db, del_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
status = 0
exit_status = 0
try:
# Check if at least a server has BunkerNet activated
@@ -51,11 +52,23 @@ try:
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
# Ask an ID if needed
# Get ID from cache
bunkernet_id = None
if not is_cached_file(
f"/var/cache/bunkerweb/blacklist/{kind}.list", "hour", db
):
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db :
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id.decode())
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Register instance
registered = False
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
logger.info("Registering instance on BunkerNet API ...")
ok, status, data = register()
if not ok:
@@ -91,8 +104,11 @@ try:
logger.error(
f"Received error from BunkerNet API while sending register request : {data.get('data', {})}"
)
_exit(1)
_exit(2)
bunkernet_id = data["data"]
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
registered = True
exit_status = 1
logger.info(
f"Successfully registered on BunkerNet API with instance id {data['data']}"
)
@@ -102,6 +118,20 @@ try:
sleep(1)
# Update cache with new bunkernet ID
if db and registered:
with open("/var/cache/bunkerweb/bunkernet/instance.id", "rb") as f:
cached, err = set_file_in_db(
f"bunkernet-register",
f"instance.id",
f,
db
)
if not cached:
logger.error(f"Error while saving BunkerNet data to db cache : {err}")
else:
logger.info("Successfully saved BunkerNet data to db cache")
# Ping
logger.info("Checking connectivity with BunkerNet API ...")
bunkernet_ping = False
@@ -120,11 +150,14 @@ try:
logger.warning(
"BunkerNet has banned this instance, retrying a register later...",
)
_exit(2)
elif status == 401:
logger.warning(
"Instance ID is not registered, removing it and retrying a register later...",
)
Path("/var/cache/bunkerweb/bunkernet/instance.id").unlink()
if db:
del_file_in_db("bunkernet-register", "instance.id", db)
_exit(2)
try:
@@ -137,7 +170,7 @@ try:
if data.get("result", "ko") != "ok":
logger.error(
f"Received error from BunkerNet API while sending ping request : {data.get('data', {})}, removing instance ID",
f"Received error from BunkerNet API while sending ping request : {data.get('data', {})}",
)
retry = True
if not retry:
@@ -146,35 +179,14 @@ try:
logger.warning("Waiting 1s and trying again ...")
sleep(1)
if bunkernet_ping and status != 403:
logger.info("Connectivity with BunkerWeb is successful !")
status = 1
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(
"bunkernet-register",
None,
"instance.id",
bunkernet_id.encode("utf-8"),
)
if err:
logger.warning(f"Couldn't update db cache: {err}")
if bunkernet_ping:
logger.info("Connectivity with BunkerNet is successful !")
else:
logger.error("Connectivity with BunkerWeb failed ...")
status = 2
logger.error("Connectivity with BunkerNet failed ...")
exit_status = 2
except:
status = 2
exit_status = 2
logger.error(f"Exception while running bunkernet-register.py :\n{format_exc()}")
sys_exit(status)
sys_exit(exit_status)
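
The register flow now keeps the local instance.id file and the db cache in sync: a successful register is mirrored into the db with set_file_in_db, and a 401 from the ping endpoint removes both copies. A condensed sketch of that flow; persist_id and drop_id are illustrative names, and the helpers come from the jobs.py changes at the end of this diff:

from pathlib import Path

from jobs import set_file_in_db, del_file_in_db

ID_FILE = Path("/var/cache/bunkerweb/bunkernet/instance.id")

def persist_id(db, logger) -> None:
    """After a successful register, mirror the freshly written instance.id into the db cache."""
    with ID_FILE.open("rb") as f:
        cached, err = set_file_in_db("bunkernet-register", "instance.id", f, db)
    if not cached:
        logger.error(f"Error while saving BunkerNet data to db cache : {err}")

def drop_id(db) -> None:
    """On a 401 from the ping endpoint the cached ID is stale: drop it locally and in the db."""
    if ID_FILE.is_file():
        ID_FILE.unlink()
    if db:
        del_file_in_db("bunkernet-register", "instance.id", db)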

View File

@@ -47,7 +47,7 @@ try:
tgz = BytesIO()
with tar_open(mode="w:gz", fileobj=tgz) as tf:
tf.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
tf.add("/var/cache/bunkerweb/letsencrypt/etc", arcname="etc")
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
@@ -102,7 +102,7 @@ try:
# Linux case
else:
proc = run(
["/etc/init.d/nginx", "reload"],
["sudo", "/usr/sbin/nginx", "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
)
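
The deploy hook now archives only the etc subtree of the certificate cache before shipping it to the instances, and reloads a locally managed nginx through the binary instead of the init script. A short sketch of both steps, with paths and the reload command taken from the diff; the function names are illustrative, and the sudo call assumes the job's user is allowed to run it:

from io import BytesIO
from subprocess import DEVNULL, STDOUT, run
from tarfile import open as tar_open

def build_cert_archive() -> BytesIO:
    """Pack /var/cache/bunkerweb/letsencrypt/etc into an in-memory .tar.gz,
    rooted at 'etc' so the instance recreates the same layout when extracting."""
    tgz = BytesIO()
    with tar_open(mode="w:gz", fileobj=tgz) as tf:
        tf.add("/var/cache/bunkerweb/letsencrypt/etc", arcname="etc")
    tgz.seek(0, 0)
    return tgz

def reload_nginx() -> int:
    """Reload nginx on a Linux (non-container) setup via the binary itself."""
    return run(["sudo", "/usr/sbin/nginx", "-s", "reload"], stdin=DEVNULL, stderr=STDOUT).returncode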

View File

@@ -20,7 +20,7 @@ sys_path.extend(
from Database import Database
from logger import setup_logger
from jobs import get_file
from jobs import get_file_in_db, set_file_in_db
logger = setup_logger("LETS-ENCRYPT", getenv("LOG_LEVEL", "INFO"))
db = Database(
@@ -71,7 +71,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file("certbot-new", "folder.tgz", db)
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
@@ -158,7 +158,7 @@ try:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = cache_file(
cached, err = set_file_in_db(
f"certbot-new",
f"folder.tgz",
bio,
@@ -168,6 +168,9 @@ try:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
rmtree("/var/cache/bunkerweb/letsencrypt/lib")
rmtree("/var/cache/bunkerweb/letsencrypt/log")
except:
status = 3
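
Both Let's Encrypt jobs follow the same round trip: restore the certbot working directory from the db cache before running certbot, then re-archive it and push it back with set_file_in_db. A condensed sketch of that round trip; the restore side is cut off in the hunk above, so the extraction call is an assumption, and restore_folder / save_folder are illustrative names:

from io import BytesIO
from tarfile import open as tar_open

from jobs import get_file_in_db, set_file_in_db

WORKDIR = "/var/cache/bunkerweb/letsencrypt"

def restore_folder(db) -> None:
    """Extract the cached folder.tgz from the db into the local working directory."""
    tgz = get_file_in_db("certbot-new", "folder.tgz", db)
    if tgz:
        with tar_open(fileobj=BytesIO(tgz), mode="r:gz") as tf:
            tf.extractall(WORKDIR)

def save_folder(db, logger) -> None:
    """Re-archive the working directory and store it back in the db cache."""
    bio = BytesIO()
    with tar_open(fileobj=bio, mode="w:gz") as tgz:
        tgz.add(WORKDIR, arcname=".")
    bio.seek(0)
    cached, err = set_file_in_db("certbot-new", "folder.tgz", bio, db)
    if not cached:
        logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
    else:
        logger.info("Successfully saved Let's Encrypt data to db cache")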

View File

@@ -5,17 +5,21 @@ from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from tarfile import open as tfopen
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
)
)
from logger import setup_logger
from Database import Database
from jobs import get_file_in_db, set_file_in_db
def renew(domain):
environ["PYTHONPATH"] = "/usr/share/bunkerweb/deps/python"
@@ -52,7 +56,7 @@ try:
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file("certbot-new", "folder.tgz", db)
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
@@ -105,7 +109,7 @@ try:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = cache_file(
cached, err = set_file_in_db(
f"certbot-new",
f"folder.tgz",
bio,
@@ -115,6 +119,9 @@ try:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
rmtree("/var/cache/bunkerweb/letsencrypt/lib")
rmtree("/var/cache/bunkerweb/letsencrypt/log")
except:
status = 2
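
As in certbot-new.py, the lib and log subfolders are removed so that only the etc tree stays in the local cache. A tiny helper illustrating that cleanup; the diff calls rmtree on both paths unconditionally, so the is_dir guard here is an added safety rather than part of the commit:

from pathlib import Path
from shutil import rmtree

def drop_certbot_state(base="/var/cache/bunkerweb/letsencrypt") -> None:
    """Remove certbot's lib/ and log/ subfolders to avoid caching and shipping them."""
    for sub in ("lib", "log"):
        path = Path(base) / sub
        if path.is_dir():
            rmtree(str(path))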

View File

@@ -877,6 +877,14 @@ class Database:
return ""
def delete_job_cache(
self,
job_name: str,
file_name: str
):
with self.__db_session() as session:
session.query(Jobs_cache).filter_by(job_name=job_name, file_name=file_name).delete()
def update_job_cache(
self,
job_name: str,
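
delete_job_cache is the database-side counterpart of the del_file_in_db helper added to jobs.py at the end of this diff. A minimal usage sketch; the "EXAMPLE" logger name is made up, and the constructor call mirrors the jobs above:

from os import getenv

from Database import Database
from logger import setup_logger

logger = setup_logger("EXAMPLE", getenv("LOG_LEVEL", "INFO"))
db = Database(logger, sqlalchemy_string=getenv("DATABASE_URI", None))
# remove the cached instance.id entry owned by the bunkernet-register job
db.delete_job_cache("bunkernet-register", "instance.id")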

View File

@@ -49,10 +49,11 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
):
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"exploded": custom_confs_rx.search(splitted[0]).groups(),
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
}
)
else:
@@ -227,11 +228,16 @@ if __name__ == "__main__":
plugins_settings=plugins_settings,
)
config_files = config.get_config()
custom_confs = [
{"value": v, "exploded": custom_confs_rx.search(k).groups()} # type: ignore
for k, v in environ.items()
if custom_confs_rx.match(k)
]
custom_confs = []
for k, v in environ.items():
if custom_confs_rx.match(k):
custom_conf = custom_confs_rx.search(k).groups()
custom_confs.append(
{
"value": v,
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
}
)
root_dirs = listdir("/etc/bunkerweb/configs")
for root, dirs, files in walk("/etc/bunkerweb/configs", topdown=True):
if (
@@ -276,12 +282,11 @@ if __name__ == "__main__":
for var in instance.attrs["Config"]["Env"]:
splitted = var.split("=", 1)
if custom_confs_rx.match(splitted[0]):
custom_conf = custom_confs_rx.search(splitted[0]).groups()
custom_confs.append(
{
"value": splitted[1],
"exploded": custom_confs_rx.search(
splitted[0]
).groups(),
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
}
)
else:
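
This is the "double .conf suffix" fix from the commit title: when the variable name captured by custom_confs_rx already ends in .conf, the old code kept that suffix in the exploded name, so the generated file presumably ended up as name.conf.conf. A small sketch of the stripping logic; the regex below is an illustrative stand-in for the project's custom_confs_rx, and the example variable name is made up:

import re

# illustrative stand-in for custom_confs_rx: <optional site>_CUSTOM_CONF_<TYPE>_<name>
custom_confs_rx = re.compile(
    r"^([0-9a-z\.\-]*)_?CUSTOM_CONF_(HTTP|SERVER_HTTP|DEFAULT_SERVER_HTTP|MODSEC|MODSEC_CRS)_(.+)$"
)

def explode(env_key: str):
    """Split a CUSTOM_CONF_* variable name and drop any .conf suffix from the captured name."""
    site, conf_type, conf_name = custom_confs_rx.search(env_key).groups()
    return site, conf_type, conf_name.replace(".conf", "")

print(explode("CUSTOM_CONF_SERVER_HTTP_hello.conf"))  # ('', 'SERVER_HTTP', 'hello')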

View File

@@ -56,7 +56,7 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
if is_cached and cached_file:
Path(file).write_bytes(cached_file.data)
return is_cached
return is_cached and cached_file
def get_file_in_db(job: str, file: str, db) -> bytes:
cached_file = db.get_job_cache_file(
@@ -67,8 +67,8 @@ def get_file_in_db(job: str, file: str, db) -> bytes:
return False
return cached_file.data
def set_file_in_db(job: str, name: str, bio, db) -> bool:
ret, err = true, "success"
def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
content = bio.read()
bio.seek(0)
@@ -87,6 +87,14 @@ def set_file_in_db(job: str, name: str, bio, db) -> bool:
return False, f"exception :\n{format_exc()}"
return ret, err
def del_file_in_db(job: str, name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
db.delete_job_cache(job, name)
except:
return False, f"exception :\n{format_exc()}"
return ret, err
def file_hash(file: str) -> str:
_sha512 = sha512()
with open(file, "rb") as f:
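
Taken together, get_file_in_db, set_file_in_db and del_file_in_db give jobs one uniform way to keep a file in the db cache. A short end-to-end usage sketch, assuming a hypothetical job called example-job and a db handle built as in the jobs above:

from pathlib import Path

from jobs import get_file_in_db, set_file_in_db, del_file_in_db

def sync_file(db, logger, local="/var/cache/bunkerweb/example/data.bin") -> None:
    """Pull the cached copy from the db when present, otherwise push the local file into the cache."""
    data = get_file_in_db("example-job", "data.bin", db)
    if data:
        Path(local).write_bytes(data)  # raw bytes straight out of the job cache table
        return
    with open(local, "rb") as f:
        ok, err = set_file_in_db("example-job", "data.bin", f, db)
    if not ok:
        logger.error(f"Could not cache data.bin : {err}")

# when the cached copy must be invalidated:
# del_file_in_db("example-job", "data.bin", db)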