Refactor path resolution for core plugins

Théophile Diot 2023-05-26 13:53:20 -04:00
parent 665b110c63
commit bb7dcda48d
No known key found for this signature in database
GPG key ID: E752C80DB72BB014
22 changed files with 855 additions and 664 deletions
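
The pattern repeated across all 22 files: absolute paths that were hard-coded as literal strings are rebuilt from os.sep with os.path.join or pathlib.Path, and sys.path entries are appended only when missing instead of blindly extended. A minimal sketch of the equivalence, assuming a POSIX system where sep is "/":

from os import sep
from os.path import join
from pathlib import Path

# Both forms name the same locations; the new code assembles them from components.
assert join(sep, "usr", "share", "bunkerweb", "utils") == "/usr/share/bunkerweb/utils"
assert str(Path(sep, "var", "cache", "bunkerweb", "blacklist")) == "/var/cache/bunkerweb/blacklist"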

View file

@@ -2,25 +2,25 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, is_cached_file, file_hash
rdns_rx = re_compile(rb"^[^ ]+$", IGNORECASE)
@@ -83,8 +83,10 @@ try:
)
# Create directories if they don't exist
Path("/var/cache/bunkerweb/blacklist").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/blacklist").mkdir(parents=True, exist_ok=True)
blacklist_path = Path(sep, "var", "cache", "bunkerweb", "blacklist")
blacklist_path.mkdir(parents=True, exist_ok=True)
tmp_blacklist_path = Path(sep, "var", "tmp", "bunkerweb", "blacklist")
tmp_blacklist_path.mkdir(parents=True, exist_ok=True)
# Our urls data
urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
@@ -104,9 +106,7 @@ try:
}
all_fresh = True
for kind in kinds_fresh:
if not is_cached_file(
f"/var/cache/bunkerweb/blacklist/{kind}.list", "hour", db
):
if not is_cached_file(blacklist_path.joinpath(f"{kind}.list"), "hour", db):
kinds_fresh[kind] = False
all_fresh = False
logger.info(
@@ -165,12 +165,12 @@ try:
content += data + b"\n"
i += 1
Path(f"/var/tmp/bunkerweb/blacklist/{kind}.list").write_bytes(content)
tmp_blacklist_path.joinpath(f"{kind}.list").write_bytes(content)
logger.info(f"Downloaded {i} bad {kind}")
# Check if file has changed
new_hash = file_hash(f"/var/tmp/bunkerweb/blacklist/{kind}.list")
old_hash = cache_hash(f"/var/cache/bunkerweb/blacklist/{kind}.list", db)
new_hash = file_hash(tmp_blacklist_path.joinpath(f"{kind}.list"))
old_hash = cache_hash(blacklist_path.joinpath(f"{kind}.list"), db)
if new_hash == old_hash:
logger.info(
f"New file {kind}.list is identical to cache file, reload is not needed",
@@ -181,8 +181,8 @@ try:
)
# Put file in cache
cached, err = cache_file(
f"/var/tmp/bunkerweb/blacklist/{kind}.list",
f"/var/cache/bunkerweb/blacklist/{kind}.list",
tmp_blacklist_path.joinpath(f"{kind}.list"),
blacklist_path.joinpath(f"{kind}.list"),
new_hash,
db,
)
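
The blacklist job above, like the greylist and realip jobs further down, funnels every list through the same helpers imported from jobs: download into the tmp directory, hash, compare against the hash recorded in the database, and only promote the tmp file into the cache on change. A condensed sketch of that flow, reusing the helper signatures exactly as they appear in the diff:

from pathlib import Path

from jobs import cache_file, cache_hash, file_hash  # type: ignore

def refresh_cache(tmp_file: Path, cache_path: Path, db) -> bool:
    """Return True when the cache was actually replaced."""
    new_hash = file_hash(tmp_file)  # hash of the fresh download
    if new_hash == cache_hash(cache_path, db):  # hash stored in the db cache
        return False  # identical content, reload is not needed
    cached, err = cache_file(tmp_file, cache_path, new_hash, db)
    if not cached:
        raise RuntimeError(f"Error while caching file : {err}")
    return True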

View file

@@ -1,23 +1,26 @@
#!/usr/bin/python3
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
"/usr/share/bunkerweb/core/bunkernet/jobs",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
("core", "bunkernet", "jobs"),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from bunkernet import data
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, file_hash, is_cached_file, get_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
@@ -44,12 +47,14 @@ try:
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb").mkdir(parents=True, exist_ok=True)
bunkernet_path = Path(sep, "var", "cache", "bunkerweb", "bunkernet")
bunkernet_path.mkdir(parents=True, exist_ok=True)
bunkernet_tmp_path = Path(sep, "var", "tmp", "bunkerweb", "bunkernet")
bunkernet_tmp_path.mkdir(parents=True, exist_ok=True)
# Create empty file in case it doesn't exist
if not Path("/var/cache/bunkerweb/bunkernet/ip.list").is_file():
Path("/var/cache/bunkerweb/bunkernet/ip.list").write_text("")
if not bunkernet_path.joinpath("ip.list").is_file():
bunkernet_path.joinpath("ip.list").write_text("")
# Get ID from cache
bunkernet_id = None
@@ -57,32 +62,28 @@ try:
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
bunkernet_path.joinpath("bunkernet.id").write_bytes(bunkernet_id)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Check if ID is present
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
if not bunkernet_path.joinpath("instance.id").is_file():
logger.error(
"Not downloading BunkerNet data because instance is not registered",
)
_exit(2)
# Don't go further if the cache is fresh
if db:
if is_cached_file("/var/cache/bunkerweb/bunkernet/ip.list", "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
if is_cached_file(bunkernet_path.joinpath("ip.list"), "day", db):
logger.info(
"BunkerNet list is already in cache, skipping download...",
)
_exit(0)
exit_status = 1
exit_status = 1
# Download data
logger.info("Downloading BunkerNet data ...")
@@ -122,11 +123,11 @@ try:
# Writing data to file
logger.info("Saving BunkerNet data ...")
content = "\n".join(data["data"]).encode("utf-8")
Path("/var/tmp/bunkerweb/bunkernet-ip.list").write_bytes(content)
bunkernet_tmp_path.joinpath("ip.list").write_bytes(content)
# Check if file has changed
new_hash = file_hash("/var/tmp/bunkerweb/bunkernet-ip.list")
old_hash = cache_hash("/var/cache/bunkerweb/bunkernet/ip.list", db)
new_hash = file_hash(bunkernet_tmp_path.joinpath("ip.list"))
old_hash = cache_hash(bunkernet_path.joinpath("ip.list"), db)
if new_hash == old_hash:
logger.info(
"New file is identical to cache file, reload is not needed",
@@ -135,8 +136,8 @@ try:
# Put file in cache
cached, err = cache_file(
"/var/tmp/bunkerweb/bunkernet-ip.list",
"/var/cache/bunkerweb/bunkernet/ip.list",
bunkernet_tmp_path.joinpath("ip.list"),
bunkernet_path.joinpath("ip.list"),
new_hash,
db,
)
@@ -147,7 +148,6 @@ try:
logger.info("Successfully saved BunkerNet data")
exit_status = 1
except:
exit_status = 2
logger.error(f"Exception while running bunkernet-data.py :\n{format_exc()}")

View file

@@ -1,24 +1,27 @@
#!/usr/bin/python3
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from time import sleep
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
"/usr/share/bunkerweb/core/bunkernet/jobs",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
("core", "bunkernet", "jobs"),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from bunkernet import register, ping, get_id
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db, del_file_in_db
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
@@ -50,7 +53,8 @@ try:
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)
bunkernet_path = Path(sep, "var", "cache", "bunkerweb", "bunkernet")
bunkernet_path.mkdir(parents=True, exist_ok=True)
# Get ID from cache
bunkernet_id = None
@@ -58,19 +62,17 @@ try:
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
bunkernet_id = get_file_in_db("instance.id", db)
if bunkernet_id:
bunkernet_path.joinpath("bunkernet.id").write_bytes(bunkernet_id)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
# Register instance
registered = False
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
instance_id_path = bunkernet_path.joinpath("instance.id")
if not instance_id_path.is_file():
logger.info("Registering instance on BunkerNet API ...")
ok, status, data = register()
if not ok:
@@ -108,22 +110,24 @@ try:
)
_exit(2)
bunkernet_id = data["data"]
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
instance_id_path.write_text(bunkernet_id)
registered = True
exit_status = 1
logger.info(
f"Successfully registered on BunkerNet API with instance id {data['data']}"
)
else:
bunkernet_id = Path("/var/cache/bunkerweb/bunkernet/instance.id").read_text()
logger.info(f"Already registered on BunkerNet API with instance id {get_id()}")
bunkernet_id = bunkernet_id or instance_id_path.read_bytes()
bunkernet_id = bunkernet_id.decode()
logger.info(
f"Already registered on BunkerNet API with instance id {bunkernet_id}"
)
sleep(1)
# Update cache with new bunkernet ID
if db and registered:
with open("/var/cache/bunkerweb/bunkernet/instance.id", "rb") as f:
cached, err = set_file_in_db(f"instance.id", f, db)
if registered:
cached, err = set_file_in_db("instance.id", bunkernet_id.encode(), db)
if not cached:
logger.error(f"Error while saving BunkerNet data to db cache : {err}")
else:
@@ -152,9 +156,8 @@ try:
logger.warning(
"Instance ID is not registered, removing it and retrying a register later...",
)
Path("/var/cache/bunkerweb/bunkernet/instance.id").unlink()
if db:
del_file_in_db("instance.id", db)
instance_id_path.unlink()
del_file_in_db("instance.id", db)
_exit(2)
try:
@@ -181,7 +184,6 @@ try:
else:
logger.error("Connectivity with BunkerNet failed ...")
exit_status = 2
except:
exit_status = 2
logger.error(f"Exception while running bunkernet-register.py :\n{format_exc()}")

View file

@@ -1,4 +1,6 @@
from os import getenv
#!/usr/bin/python3
from os import getenv, sep
from pathlib import Path
from requests import request as requests_request, ReadTimeout
from typing import Literal, Optional, Tuple, Union
@@ -36,42 +38,43 @@ def request(
return True, status, raw_data
def register():
def register() -> Tuple[bool, Optional[int], Union[str, dict]]:
return request("POST", "/register")
def ping(_id=None):
return request("GET", "/ping", _id=get_id() if _id is None else _id)
def ping(_id: Optional[str] = None) -> Tuple[bool, Optional[int], Union[str, dict]]:
return request("GET", "/ping", _id=_id or get_id())
def data():
def data() -> Tuple[bool, Optional[int], Union[str, dict]]:
return request("GET", "/db", _id=get_id())
def get_id():
with open("/var/cache/bunkerweb/bunkernet/instance.id", "r") as f:
return f.read().strip()
def get_id() -> str:
return (
Path(sep, "var", "cache", "bunkerweb", "bunkernet", "instance.id")
.read_text()
.strip()
)
def get_version():
with open("/usr/share/bunkerweb/VERSION", "r") as f:
return f.read().strip()
def get_version() -> str:
return Path(sep, "usr", "share", "bunkerweb", "VERSION").read_text().strip()
def get_integration():
def get_integration() -> str:
try:
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
os_release_path = Path(sep, "etc", "os-release")
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
return "kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
return "swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
return "autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").is_file():
return Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip().lower()
elif (
Path("/etc/os-release").is_file()
and "Alpine" in Path("/etc/os-release").read_text()
):
elif integration_path.is_file():
return integration_path.read_text().strip().lower()
elif os_release_path.is_file() and "Alpine" in os_release_path.read_text():
return "docker"
return "linux"
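
Every wrapper in bunkernet.py now carries the same annotated return shape, Tuple[bool, Optional[int], Union[str, dict]], which is exactly how the register script unpacks it. A usage sketch, assuming the module is importable and the instance is already registered so get_id() can read instance.id:

from bunkernet import register, ping

ok, status, data = register()  # (sent?, HTTP status, parsed body or error string)
if ok and isinstance(data, dict) and data.get("data"):
    print(f"registered with instance id {data['data']}")
ok, status, data = ping()  # _id defaults to get_id() when not supplied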

View file

@@ -1,8 +1,6 @@
{% set os_path = import("os.path") %}
{% set cert_file_path = "/data/cache/customcert/{}".format(CUSTOM_SSL_CERT.replace("/", "_")) %}
{% set key_file_path = "/data/cache/customcert/{}".format(CUSTOM_SSL_KEY.replace("/", "_")) %}
{% if USE_CUSTOM_SSL == "yes" and os_path.isfile(cert_file_path) and os_path.isfile(key_file_path) +%}
{% if USE_CUSTOM_SSL == "yes" and os_path.isfile("/data/cache/customcert/cert.pem") and os_path.isfile("/data/cache/customcert/key.pem") +%}
# listen
listen 0.0.0.0:{{ LISTEN_STREAM_PORT_SSL }} ssl {% if USE_UDP == "yes" %} udp {% endif %}{% if USE_PROXY_PROTOCOL == "yes" %} proxy_protocol {% endif %};
@@ -11,8 +9,8 @@ listen [::]:{{ LISTEN_STREAM_PORT_SSL }} ssl {% if USE_UDP == "yes" %} udp {% en
{% endif %}
# TLS config
ssl_certificate {{ cert_file_path }};
ssl_certificate_key {{ key_file_path }};
ssl_certificate /data/cache/customcert/cert.pem;
ssl_certificate_key /data/cache/customcert/key.pem;
ssl_protocols {{ SSL_PROTOCOLS }};
ssl_prefer_server_ciphers on;
ssl_session_tickets off;

View file

@@ -1,114 +1,78 @@
#!/usr/bin/python3
from os import getenv
from os.path import basename
from os import getenv, sep
from os.path import join, normpath
from pathlib import Path
from shutil import copy
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
from typing import Optional
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from jobs import file_hash
from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
logger = setup_logger("CUSTOM-CERT", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
db = None
def check_cert(cert_path, key_path, first_server: Optional[str] = None) -> bool:
def check_cert(
cert_path: str, key_path: str, first_server: Optional[str] = None
) -> bool:
try:
if not cert_path or not key_path:
logger.warning(
"Both variables CUSTOM_SSL_CERT and CUSTOM_SSL_KEY have to be set to use custom certificates"
)
return False
elif not Path(cert_path).is_file():
cert_path = Path(normpath(cert_path))
key_path = Path(normpath(key_path))
if not cert_path.is_file():
logger.warning(
f"Certificate file {cert_path} is not a valid file, ignoring the custom certificate"
)
return False
elif not Path(key_path).is_file():
elif not key_path.is_file():
logger.warning(
f"Key file {key_path} is not a valid file, ignoring the custom certificate"
)
return False
cert_cache_path = (
f"/var/cache/bunkerweb/customcert/{cert_path.replace('/', '_')}.hash"
cert_cache_path = Path(
sep, "var", "cache", "bunkerweb", "customcert", "cert.pem"
)
cert_hash = file_hash(cert_path)
if not Path(cert_cache_path).is_file():
Path(cert_cache_path).write_text(cert_hash)
old_hash = file_hash(cert_cache_path)
old_hash = cache_hash(cert_cache_path, db)
if old_hash == cert_hash:
return False
Path(cert_cache_path).write_text(cert_hash)
copy(cert_path, cert_cache_path.replace(".hash", ""))
cached, err = cache_file(cert_path, cert_cache_path, cert_hash, db)
if not cached:
logger.error(f"Error while caching custom-cert cert.pem file : {err}")
if not Path(key_path).is_file():
logger.warning(
f"Key file {key_path} is not a valid file, removing the custom certificate ..."
)
Path(cert_path).unlink()
Path(cert_cache_path).unlink()
return False
key_cache_path = (
f"/var/cache/bunkerweb/customcert/{key_path.replace('/', '_')}.hash"
key_cache_path = Path(
sep, "var", "cache", "bunkerweb", "customcert", "cert.key"
)
key_hash = file_hash(key_path)
if not Path(key_cache_path).is_file():
Path(key_cache_path).write_text(key_hash)
old_hash = file_hash(key_cache_path)
old_hash = cache_hash(key_cache_path, db)
if old_hash != key_hash:
copy(key_path, key_cache_path.replace(".hash", ""))
with lock:
err = db.update_job_cache(
"custom-cert",
first_server,
basename(key_cache_path.replace(".hash", "")),
Path(key_path).read_bytes(),
checksum=key_hash,
)
if err:
logger.warning(
f"Couldn't update db cache for {key_path.replace('/', '_')}.hash: {err}"
)
with lock:
err = db.update_job_cache(
"custom-cert",
first_server,
basename(cert_cache_path.replace(".hash", "")),
Path(cert_path).read_bytes(),
checksum=cert_hash,
)
if err:
logger.warning(
f"Couldn't update db cache for {cert_path.replace('/', '_')}.hash: {err}"
)
cached, err = cache_file(key_path, key_cache_path, key_hash, db)
if not cached:
logger.error(f"Error while caching custom-cert cert.key file : {err}")
return True
except:
@@ -121,7 +85,9 @@ def check_cert(cert_path, key_path, first_server: Optional[str] = None) -> bool:
status = 0
try:
Path("/var/cache/bunkerweb/customcert/").mkdir(parents=True, exist_ok=True)
Path(sep, "var", "cache", "bunkerweb", "customcert").mkdir(
parents=True, exist_ok=True
)
# Multisite case
if getenv("MULTISITE") == "yes":
@@ -137,6 +103,12 @@ try:
):
continue
if not db:
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
cert_path = getenv(
f"{first_server}_CUSTOM_SSL_CERT", getenv("CUSTOM_SSL_CERT", "")
)
@@ -159,6 +131,11 @@ try:
)
# Singlesite case
elif getenv("USE_CUSTOM_SSL") == "yes" and getenv("SERVER_NAME") != "":
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
cert_path = getenv("CUSTOM_SSL_CERT", "")
key_path = getenv("CUSTOM_SSL_KEY", "")
@@ -169,7 +146,6 @@ try:
status = 1
else:
logger.info(f"No change for certificate {cert_path}")
except:
status = 2
logger.error(f"Exception while running custom-cert.py :\n{format_exc()}")

View file

@@ -2,25 +2,25 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, is_cached_file, file_hash
rdns_rx = re_compile(rb"^[^ ]+$", IGNORECASE)
@@ -83,8 +83,10 @@ try:
)
# Create directories if they don't exist
Path("/var/cache/bunkerweb/greylist").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/greylist").mkdir(parents=True, exist_ok=True)
greylist_path = Path(sep, "var", "cache", "bunkerweb", "greylist")
greylist_path.mkdir(parents=True, exist_ok=True)
tmp_greylist_path = Path(sep, "var", "tmp", "bunkerweb", "greylist")
tmp_greylist_path.mkdir(parents=True, exist_ok=True)
# Our urls data
urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
@@ -99,7 +101,7 @@ try:
}
all_fresh = True
for kind in kinds_fresh:
if not is_cached_file(f"/var/cache/bunkerweb/greylist/{kind}.list", "hour", db):
if not is_cached_file(greylist_path.joinpath(f"{kind}.list"), "hour", db):
kinds_fresh[kind] = False
all_fresh = False
logger.info(
@@ -147,12 +149,12 @@ try:
content += data + b"\n"
i += 1
Path(f"/var/tmp/bunkerweb/greylist/{kind}.list").write_bytes(content)
tmp_greylist_path.joinpath(f"{kind}.list").write_bytes(content)
logger.info(f"Downloaded {i} grey {kind}")
logger.info(f"Downloaded {i} bad {kind}")
# Check if file has changed
new_hash = file_hash(f"/var/tmp/bunkerweb/greylist/{kind}.list")
old_hash = cache_hash(f"/var/cache/bunkerweb/greylist/{kind}.list", db)
new_hash = file_hash(tmp_greylist_path.joinpath(f"{kind}.list"))
old_hash = cache_hash(greylist_path.joinpath(f"{kind}.list"), db)
if new_hash == old_hash:
logger.info(
f"New file {kind}.list is identical to cache file, reload is not needed",
@@ -163,8 +165,8 @@ try:
)
# Put file in cache
cached, err = cache_file(
f"/var/tmp/bunkerweb/greylist/{kind}.list",
f"/var/cache/bunkerweb/greylist/{kind}.list",
tmp_greylist_path.joinpath(f"{kind}.list"),
greylist_path.joinpath(f"{kind}.list"),
new_hash,
db,
)

View file

@@ -16,19 +16,22 @@ from tarfile import open as tar_open
from traceback import format_exc
from zipfile import ZipFile
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("api",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
logger = setup_logger("Jobs.download-plugins", getenv("LOG_LEVEL", "INFO"))
@@ -61,12 +64,6 @@ try:
logger.info("No external plugins to download")
_exit(0)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI"),
)
lock = Lock()
plugin_nbr = 0
# Loop on URLs
@@ -149,6 +146,12 @@ try:
external_plugins.append(plugin_file)
external_plugins_ids.append(plugin_file["id"])
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI"),
)
lock = Lock()
for plugin in db.get_plugins(external=True, with_data=True):
if plugin["method"] != "scheduler" and plugin["id"] not in external_plugins_ids:
external_plugins.append(plugin)

View file

@@ -3,25 +3,29 @@
from datetime import date
from gzip import decompress
from hashlib import sha1
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from maxminddb import open_database
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, file_hash, is_cached_file
logger = setup_logger("JOBS.mmdb-asn", getenv("LOG_LEVEL", "INFO"))
@@ -30,17 +34,18 @@ lock = Lock()
try:
dl_mmdb = True
tmp_path = "/var/tmp/bunkerweb/asn.mmdb"
tmp_path = Path(sep, "var", "tmp", "bunkerweb", "asn.mmdb")
cache_path = Path(sep, "var", "cache", "bunkerweb", "asn.mmdb")
new_hash = None
# Don't go further if the cache match the latest version
if Path("/var/tmp/bunkerweb/asn.mmdb").exists():
if tmp_path.exists():
with lock:
response = get("https://db-ip.com/db/download/ip-to-asn-lite")
if response.status_code == 200:
_sha1 = sha1()
with open("/var/tmp/bunkerweb/asn.mmdb", "rb") as f:
with open(str(tmp_path), "rb") as f:
while True:
data = f.read(1024)
if not data:
@@ -52,7 +57,6 @@ try:
"asn.mmdb is already the latest version, skipping download..."
)
dl_mmdb = False
tmp_path = "/var/tmp/bunkerweb/asn.mmdb"
else:
logger.warning(
"Unable to check if asn.mmdb is the latest version, downloading it anyway..."
@@ -65,7 +69,7 @@ try:
if dl_mmdb:
# Don't go further if the cache is fresh
if is_cached_file("/var/cache/bunkerweb/asn.mmdb", "month", db):
if is_cached_file(cache_path, "month", db):
logger.info("asn.mmdb is already in cache, skipping download...")
_exit(0)
@@ -89,23 +93,23 @@ try:
# Decompress it
logger.info("Decompressing mmdb file ...")
Path(tmp_path).write_bytes(decompress(file_content))
tmp_path.write_bytes(decompress(file_content))
# Check if file has changed
new_hash = file_hash(tmp_path)
old_hash = cache_hash("/var/cache/bunkerweb/asn.mmdb", db)
old_hash = cache_hash(cache_path, db)
if new_hash == old_hash:
logger.info("New file is identical to cache file, reload is not needed")
_exit(0)
# Try to load it
logger.info("Checking if mmdb file is valid ...")
with open_database(tmp_path or "/var/cache/bunkerweb/asn.mmdb") as reader:
with open_database(str(tmp_path)) as reader:
pass
# Move it to cache folder
logger.info("Moving mmdb file to cache ...")
cached, err = cache_file(tmp_path, "/var/cache/bunkerweb/asn.mmdb", new_hash, db)
cached, err = cache_file(tmp_path, cache_path, new_hash, db)
if not cached:
logger.error(f"Error while caching mmdb file : {err}")
_exit(2)
@@ -115,7 +119,6 @@ try:
logger.info(f"Downloaded new mmdb from {mmdb_url}")
status = 1
except:
status = 2
logger.error(f"Exception while running mmdb-asn.py :\n{format_exc()}")

View file

@@ -3,25 +3,29 @@
from datetime import date
from gzip import decompress
from hashlib import sha1
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from maxminddb import open_database
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, file_hash, is_cached_file
logger = setup_logger("JOBS.mmdb-country", getenv("LOG_LEVEL", "INFO"))
@@ -30,17 +34,18 @@ lock = Lock()
try:
dl_mmdb = True
tmp_path = "/var/tmp/bunkerweb/country.mmdb"
tmp_path = Path(sep, "var", "tmp", "bunkerweb", "country.mmdb")
cache_path = Path(sep, "var", "cache", "bunkerweb", "country.mmdb")
new_hash = None
# Don't go further if the cache match the latest version
if Path("/var/tmp/bunkerweb/country.mmdb").exists():
if tmp_path.exists():
with lock:
response = get("https://db-ip.com/db/download/ip-to-country-lite")
if response.status_code == 200:
_sha1 = sha1()
with open("/var/tmp/bunkerweb/country.mmdb", "rb") as f:
with open(str(tmp_path), "rb") as f:
while True:
data = f.read(1024)
if not data:
@@ -52,7 +57,6 @@ try:
"country.mmdb is already the latest version, skipping download..."
)
dl_mmdb = False
tmp_path = "/var/tmp/bunkerweb/country.mmdb"
else:
logger.warning(
"Unable to check if country.mmdb is the latest version, downloading it anyway..."
@@ -65,7 +69,7 @@ try:
if dl_mmdb:
# Don't go further if the cache is fresh
if is_cached_file("/var/cache/bunkerweb/country.mmdb", "month", db):
if is_cached_file(cache_path, "month", db):
logger.info("country.mmdb is already in cache, skipping download...")
_exit(0)
@@ -89,25 +93,23 @@ try:
# Decompress it
logger.info("Decompressing mmdb file ...")
Path(tmp_path).write_bytes(decompress(file_content))
tmp_path.write_bytes(decompress(file_content))
# Check if file has changed
new_hash = file_hash(tmp_path)
old_hash = cache_hash("/var/cache/bunkerweb/country.mmdb", db)
old_hash = cache_hash(cache_path, db)
if new_hash == old_hash:
logger.info("New file is identical to cache file, reload is not needed")
_exit(0)
# Try to load it
logger.info("Checking if mmdb file is valid ...")
with open_database(tmp_path or "/var/cache/bunkerweb/country.mmdb") as reader:
with open_database(str(tmp_path)) as reader:
pass
# Move it to cache folder
logger.info("Moving mmdb file to cache ...")
cached, err = cache_file(
tmp_path, "/var/cache/bunkerweb/country.mmdb", new_hash, db
)
cached, err = cache_file(tmp_path, cache_path, new_hash, db)
if not cached:
logger.error(f"Error while caching mmdb file : {err}")
_exit(2)
@@ -117,7 +119,6 @@ try:
logger.info(f"Downloaded new mmdb from {mmdb_url}")
status = 1
except:
status = 2
logger.error(f"Exception while running mmdb-country.py :\n{format_exc()}")

View file

@@ -1,38 +1,43 @@
#!/usr/bin/python3
from os import getenv
from os import getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("api",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from API import API
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from API import API # type: ignore
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
logger = setup_logger("Lets-encrypt.auth", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
# Get env vars
bw_integration = None
bw_integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
if getenv("KUBERNETES_MODE") == "yes":
bw_integration = "Kubernetes"
elif getenv("SWARM_MODE") == "yes":
bw_integration = "Swarm"
elif getenv("AUTOCONF_MODE") == "yes":
bw_integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
bw_integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
elif integration_path.is_file():
integration = integration_path.read_text().strip()
token = getenv("CERTBOT_TOKEN", "")
validation = getenv("CERTBOT_VALIDATION", "")
@@ -48,9 +53,10 @@ try:
instances = db.get_instances()
for instance in instances:
endpoint = f"http://{instance['hostname']}:{instance['port']}"
host = instance["server_name"]
api = API(endpoint, host=host)
api = API(
f"http://{instance['hostname']}:{instance['port']}",
host=instance["server_name"],
)
sent, err, status, resp = api.request(
"POST",
"/lets-encrypt/challenge",
@@ -61,22 +67,29 @@ try:
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/challenge : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
if status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
)
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
)
# Linux case
else:
root_dir = "/var/tmp/bunkerweb/lets-encrypt/.well-known/acme-challenge/"
Path(root_dir).mkdir(parents=True, exist_ok=True)
Path(f"{root_dir}{token}").write_text(validation)
root_dir = Path(
sep,
"var",
"tmp",
"bunkerweb",
"lets-encrypt",
".well-known",
"acme-challenge",
)
root_dir.mkdir(parents=True, exist_ok=True)
root_dir.joinpath(token).write_text(validation)
except:
status = 1
logger.error(f"Exception while running certbot-auth.py :\n{format_exc()}")

View file

@@ -1,38 +1,43 @@
#!/usr/bin/python3
from os import getenv
from os import getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("api",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from API import API
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from API import API # type: ignore
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
logger = setup_logger("Lets-encrypt.cleanup", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
# Get env vars
bw_integration = None
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
bw_integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
if getenv("KUBERNETES_MODE") == "yes":
bw_integration = "Kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
elif getenv("SWARM_MODE") == "yes":
bw_integration = "Swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
elif getenv("AUTOCONF_MODE") == "yes":
bw_integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
bw_integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
elif integration_path.is_file():
integration = integration_path.read_text().strip()
token = getenv("CERTBOT_TOKEN", "")
# Cluster case
@@ -46,9 +51,10 @@ try:
instances = db.get_instances()
for instance in instances:
endpoint = f"http://{instance['hostname']}:{instance['port']}"
host = instance["server_name"]
api = API(endpoint, host=host)
api = API(
f"http://{instance['hostname']}:{instance['port']}",
host=instance["server_name"],
)
sent, err, status, resp = api.request(
"DELETE", "/lets-encrypt/challenge", data={"token": token}
)
@@ -57,23 +63,28 @@ try:
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/challenge : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
if status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/challenge : status = {resp['status']}, msg = {resp['msg']}",
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
)
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/challenge",
)
# Linux case
else:
challenge_path = (
f"/var/tmp/bunkerweb/lets-encrypt/.well-known/acme-challenge/{token}"
challenge_path = Path(
sep,
"var",
"tmp",
"bunkerweb",
"lets-encrypt",
".well-known",
"acme-challenge",
token,
)
if Path(challenge_path).exists():
Path(challenge_path).unlink()
challenge_path.unlink(missing_ok=True)
except:
status = 1
logger.error(f"Exception while running certbot-cleanup.py :\n{format_exc()}")

View file

@@ -1,42 +1,46 @@
#!/usr/bin/python3
from io import BytesIO
from os import getenv
from os import getenv, sep
from os.path import join
from pathlib import Path
from subprocess import run, DEVNULL, STDOUT
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from tarfile import open as tar_open
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/api",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("api",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from API import API
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from API import API # type: ignore
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
logger = setup_logger("Lets-encrypt.deploy", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
# Get env vars
bw_integration = None
if getenv("KUBERNETES_MODE", "no").lower() == "yes":
bw_integration = "Linux"
integration_path = Path(sep, "usr", "share", "bunkerweb", "INTEGRATION")
if getenv("KUBERNETES_MODE") == "yes":
bw_integration = "Kubernetes"
elif getenv("SWARM_MODE", "no").lower() == "yes":
elif getenv("SWARM_MODE") == "yes":
bw_integration = "Swarm"
elif getenv("AUTOCONF_MODE", "no").lower() == "yes":
elif getenv("AUTOCONF_MODE") == "yes":
bw_integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
bw_integration = f.read().strip()
elif integration_path.is_file():
integration = integration_path.read_text().strip()
token = getenv("CERTBOT_TOKEN", "")
logger.info(f"Certificates renewal for {getenv('RENEWED_DOMAINS')} successful")
@@ -46,8 +50,11 @@ try:
# Create tarball of /var/cache/bunkerweb/letsencrypt
tgz = BytesIO()
with tar_open(mode="w:gz", fileobj=tgz) as tf:
tf.add("/var/cache/bunkerweb/letsencrypt/etc", arcname="etc")
with tar_open(mode="w:gz", fileobj=tgz, compresslevel=3) as tf:
tf.add(
join(sep, "var", "cache", "bunkerweb", "letsencrypt", "etc"),
arcname="etc",
)
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
@@ -73,45 +80,44 @@ try:
logger.error(
f"Can't send API request to {api.get_endpoint()}/lets-encrypt/certificates : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}"
)
else:
if status != 200:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/certificates",
)
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/lets-encrypt/certificates : status = {resp['status']}, msg = {resp['msg']}"
f"Can't send API request to {api.get_endpoint()}/reload : {err}"
)
elif status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/reload : status = {resp['status']}, msg = {resp['msg']}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/lets-encrypt/certificates",
f"Successfully sent API request to {api.get_endpoint()}/reload"
)
sent, err, status, resp = api.request("POST", "/reload")
if not sent:
status = 1
logger.error(
f"Can't send API request to {api.get_endpoint()}/reload : {err}"
)
else:
if status != 200:
status = 1
logger.error(
f"Error while sending API request to {api.get_endpoint()}/reload : status = {resp['status']}, msg = {resp['msg']}"
)
else:
logger.info(
f"Successfully sent API request to {api.get_endpoint()}/reload"
)
# Linux case
else:
proc = run(
["sudo", "/usr/sbin/nginx", "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
)
if proc.returncode != 0:
if (
run(
["sudo", join(sep, "usr", "sbin", "nginx"), "-s", "reload"],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
!= 0
):
status = 1
logger.error("Error while reloading nginx")
else:
logger.info("Successfully reloaded nginx")
except:
status = 1
logger.error(f"Exception while running certbot-deploy.py :\n{format_exc()}")

View file

@@ -1,51 +1,53 @@
#!/usr/bin/python3
from os import environ, getenv, listdir
from os import _exit, environ, getenv, sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
from tarfile import open as tfopen
from tarfile import open as tar_open
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db
logger = setup_logger("LETS-ENCRYPT.new", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
def certbot_new(domains, email):
environ["PYTHONPATH"] = "/usr/share/bunkerweb/deps/python"
proc = run(
def certbot_new(
domains: str, email: str, letsencrypt_path: Path, letsencrypt_job_path: Path
) -> int:
return run(
[
"/usr/share/bunkerweb/deps/python/bin/certbot",
join(sep, "usr", "share", "bunkerweb", "deps", "python", "bin", "certbot"),
"certonly",
"--config-dir=/var/cache/bunkerweb/letsencrypt/etc",
"--work-dir=/var/cache/bunkerweb/letsencrypt/lib",
"--logs-dir=/var/cache/bunkerweb/letsencrypt/log",
"--config-dir",
letsencrypt_path.joinpath("etc"),
"--work-dir",
letsencrypt_path.joinpath("lib"),
"--logs-dir",
letsencrypt_path.joinpath("log"),
"--manual",
"--preferred-challenges=http",
"--manual-auth-hook",
"/usr/share/bunkerweb/core/letsencrypt/jobs/certbot-auth.py",
letsencrypt_job_path.joinpath("certbot-auth.py"),
"--manual-cleanup-hook",
"/usr/share/bunkerweb/core/letsencrypt/jobs/certbot-cleanup.py",
letsencrypt_job_path.joinpath("certbot-cleanup.py"),
"-n",
"-d",
domains,
@@ -56,34 +58,56 @@ def certbot_new(domains, email):
+ (["--staging"] if getenv("USE_LETS_ENCRYPT_STAGING", "no") == "yes" else []),
stdin=DEVNULL,
stderr=STDOUT,
env=environ,
)
return proc.returncode
env=environ.copy()
| {"PYTHONPATH": join(sep, "usr", "share", "bunkerweb", "deps", "python")},
).returncode
status = 0
try:
# Check if we're using let's encrypt
use_letsencrypt = False
if getenv("AUTO_LETS_ENCRYPT", "no") == "yes":
use_letsencrypt = True
elif getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
first_server
and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes"
):
use_letsencrypt = True
break
if not use_letsencrypt:
logger.info("Let's Encrypt is not activated, skipping generation...")
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/letsencrypt").mkdir(parents=True, exist_ok=True)
letsencrypt_path = Path(sep, "var", "cache", "bunkerweb", "letsencrypt")
letsencrypt_job_path = Path(
sep, "usr", "share", "bunkerweb", "core", "letsencrypt", "jobs"
)
letsencrypt_path.mkdir(parents=True, exist_ok=True)
# Extract letsencrypt folder if it exists in db
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
rmtree("/var/cache/bunkerweb/letsencrypt", ignore_errors=True)
# Extract it
with tfopen(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall("/var/cache/bunkerweb/letsencrypt")
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if letsencrypt_path.exists():
rmtree(str(letsencrypt_path), ignore_errors=True)
letsencrypt_path.mkdir(parents=True, exist_ok=True)
# Extract it
with tar_open(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall(str(letsencrypt_path))
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
# Multisite case
if getenv("MULTISITE", "no") == "yes":
@@ -102,9 +126,7 @@ try:
" ", ","
)
if Path(
f"/var/cache/bunkerweb/letsencrypt/{first_server}/cert.pem"
).exists():
if letsencrypt_path.joinpath(first_server, "cert.pem").exists():
logger.info(
f"Certificates already exists for domain(s) {domains}",
)
@@ -120,7 +142,10 @@ try:
logger.info(
f"Asking certificates for domains : {domains} (email = {real_email}) ...",
)
if certbot_new(domains, real_email) != 0:
if (
certbot_new(domains, real_email, letsencrypt_path, letsencrypt_job_path)
!= 0
):
status = 2
logger.error(
f"Certificate generation failed for domain(s) {domains} ...",
@@ -136,9 +161,7 @@ try:
first_server = getenv("SERVER_NAME", "").split(" ")[0]
domains = getenv("SERVER_NAME", "").replace(" ", ",")
if Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
).exists():
if letsencrypt_path.joinpath("etc", "live", first_server, "cert.pem").exists():
logger.info(f"Certificates already exists for domain(s) {domains}")
else:
real_email = getenv("EMAIL_LETS_ENCRYPT", f"contact@{first_server}")
@@ -148,7 +171,10 @@ try:
logger.info(
f"Asking certificates for domain(s) : {domains} (email = {real_email}) ...",
)
if certbot_new(domains, real_email) != 0:
if (
certbot_new(domains, real_email, letsencrypt_path, letsencrypt_job_path)
!= 0
):
status = 2
logger.error(f"Certificate generation failed for domain(s) : {domains}")
else:
@@ -158,23 +184,22 @@ try:
)
# Put new folder in cache
if db:
bio = BytesIO()
with tfopen("folder.tgz", mode="w:gz", fileobj=bio) as tgz:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db(f"folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
if Path("/var/cache/bunkerweb/letsencrypt/lib").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/lib", ignore_errors=True)
if Path("/var/cache/bunkerweb/letsencrypt/log").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/log", ignore_errors=True)
bio = BytesIO()
with tar_open("folder.tgz", mode="w:gz", fileobj=bio, compresslevel=9) as tgz:
tgz.add(str(letsencrypt_path), arcname=".")
bio.seek(0, 0)
# Put tgz in cache
cached, err = set_file_in_db(f"folder.tgz", bio.read(), db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
rmtree(str(letsencrypt_path.joinpath("lib")), ignore_errors=True)
rmtree(str(letsencrypt_path.joinpath("log")), ignore_errors=True)
except:
status = 3
logger.error(f"Exception while running certbot-new.py :\n{format_exc()}")

View file

@@ -1,72 +1,105 @@
#!/usr/bin/python3
from os import environ, getenv, listdir
from os import _exit, environ, getenv, listdir, sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from tarfile import open as tfopen
from tarfile import open as tar_open
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from logger import setup_logger
from Database import Database
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import get_file_in_db, set_file_in_db
def renew(domain):
environ["PYTHONPATH"] = "/usr/share/bunkerweb/deps/python"
proc = run(
def renew(domain: str, letsencrypt_path: Path) -> int:
return run(
[
"/usr/share/bunkerweb/deps/python/bin/certbot",
join(sep, "usr", "share", "bunkerweb", "deps", "python", "bin", "certbot"),
"renew",
"--config-dir=/var/cache/bunkerweb/letsencrypt/etc",
"--work-dir=/var/cache/bunkerweb/letsencrypt/lib",
"--logs-dir=/var/cache/bunkerweb/letsencrypt/log",
"--config-dir",
letsencrypt_path.joinpath("etc"),
"--work-dir",
letsencrypt_path.joinpath("lib"),
"--logs-dir",
letsencrypt_path.joinpath("log"),
"--cert-name",
domain,
"--deploy-hook",
"/usr/share/bunkerweb/core/letsencrypt/jobs/certbot-deploy.py",
join(
sep,
"usr",
"share",
"bunkerweb",
"core",
"letsencrypt",
"jobs",
"certbot-deploy.py",
),
],
stdin=DEVNULL,
stderr=STDOUT,
env=environ,
)
return proc.returncode
).returncode
logger = setup_logger("LETS-ENCRYPT.renew", getenv("LOG_LEVEL", "INFO"))
status = 0
try:
# Check if we're using let's encrypt
use_letsencrypt = False
if getenv("AUTO_LETS_ENCRYPT", "no") == "yes":
use_letsencrypt = True
elif getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
if (
first_server
and getenv(f"{first_server}_AUTO_LETS_ENCRYPT", "no") == "yes"
):
use_letsencrypt = True
break
if not use_letsencrypt:
logger.info("Let's Encrypt is not activated, skipping renew...")
_exit(0)
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/letsencrypt").mkdir(parents=True, exist_ok=True)
letsencrypt_path = Path(sep, "var", "cache", "bunkerweb", "letsencrypt")
letsencrypt_path.mkdir(parents=True, exist_ok=True)
# Extract letsencrypt folder if it exists in db
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
rmtree("/var/cache/bunkerweb/letsencrypt", ignore_errors=True)
# Extract it
with tfopen(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall("/var/cache/bunkerweb/letsencrypt")
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
tgz = get_file_in_db("folder.tgz", db)
if tgz:
# Delete folder if needed
if letsencrypt_path.exists():
rmtree(str(letsencrypt_path), ignore_errors=True)
letsencrypt_path.mkdir(parents=True, exist_ok=True)
# Extract it
with tar_open(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall(str(letsencrypt_path))
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
if getenv("MULTISITE") == "yes":
servers = getenv("SERVER_NAME", [])
@@ -82,48 +115,42 @@ try:
getenv("AUTO_LETS_ENCRYPT", "no"),
)
!= "yes"
or not Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
or not letsencrypt_path.joinpath(
"etc", "live", first_server, "cert.pem"
).exists()
):
continue
ret = renew(first_server)
if ret != 0:
if renew(first_server, letsencrypt_path) != 0:
status = 2
logger.error(
f"Certificates renewal for {first_server} failed",
)
elif getenv("AUTO_LETS_ENCRYPT", "no") == "yes" and not getenv("SERVER_NAME", ""):
first_server = getenv("SERVER_NAME", "").split(" ")[0]
if Path(
f"/var/cache/bunkerweb/letsencrypt/etc/live/{first_server}/cert.pem"
).exists():
ret = renew(first_server)
if ret != 0:
if letsencrypt_path.joinpath("etc", "live", first_server, "cert.pem").exists():
if renew(first_server, letsencrypt_path) != 0:
status = 2
logger.error(
f"Certificates renewal for {first_server} failed",
)
# Put new folder in cache
if db:
bio = BytesIO()
with tfopen("folder.tgz", mode="w:gz", fileobj=bio) as tgz:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db("folder.tgz", bio, db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
if Path("/var/cache/bunkerweb/letsencrypt/lib").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/lib", ignore_errors=True)
if Path("/var/cache/bunkerweb/letsencrypt/log").exists():
rmtree("/var/cache/bunkerweb/letsencrypt/log", ignore_errors=True)
bio = BytesIO()
with tar_open("folder.tgz", mode="w:gz", fileobj=bio, compresslevel=9) as tgz:
tgz.add(str(letsencrypt_path), arcname=".")
bio.seek(0, 0)
# Put tgz in cache
cached, err = set_file_in_db("folder.tgz", bio.read(), db)
if not cached:
logger.error(f"Error while saving Let's Encrypt data to db cache : {err}")
else:
logger.info("Successfully saved Let's Encrypt data to db cache")
# Delete lib and log folders to avoid sending them
rmtree(str(letsencrypt_path.joinpath("lib")), ignore_errors=True)
rmtree(str(letsencrypt_path.joinpath("log")), ignore_errors=True)
except:
status = 2
logger.error(f"Exception while running certbot-renew.py :\n{format_exc()}")

View file

@@ -1,19 +1,26 @@
#!/usr/bin/python3
from os import getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, run
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import set_file_in_db
logger = setup_logger("DEFAULT-SERVER-CERT", getenv("LOG_LEVEL", "INFO"))
status = 0
@@ -47,37 +54,86 @@ try:
need_default_cert = True
# Generate the self-signed certificate
if need_default_cert:
Path("/var/cache/bunkerweb/default-server-cert").mkdir(
parents=True, exist_ok=True
)
if not Path("/var/cache/bunkerweb/default-server-cert/cert.pem").is_file():
logger.info("Generating self-signed certificate for default server")
cmd = "openssl req -nodes -x509 -newkey rsa:4096 -keyout /var/cache/bunkerweb/default-server-cert/cert.key -out /var/cache/bunkerweb/default-server-cert/cert.pem -days 3650".split(
" "
)
cmd.extend(["-subj", "/C=AU/ST=Some-State/O=Internet Widgits Pty Ltd/"])
proc = run(cmd, stdin=DEVNULL, stderr=DEVNULL)
if proc.returncode != 0:
logger.error(
"Self-signed certificate generation failed for default server",
)
status = 2
else:
status = 1
logger.info(
"Successfully generated self-signed certificate for default server",
)
else:
logger.info(
"Skipping generation of self-signed certificate for default server (already present)",
)
else:
if not need_default_cert:
logger.info(
"Skipping generation of self-signed certificate for default server (not needed)",
)
_exit(0)
cert_path = Path(sep, "var", "cache", "bunkerweb", "default-server-cert")
cert_path.mkdir(parents=True, exist_ok=True)
if not cert_path.joinpath("cert.pem").is_file():
logger.info("Generating self-signed certificate for default server")
if (
run(
[
"openssl",
"req",
"-nodes",
"-x509",
"-newkey",
"rsa:4096",
"-keyout",
cert_path.joinpath("cert.key"),
"-out",
cert_path.joinpath("cert.pem"),
"-days",
"3650",
"-subj",
"/C=AU/ST=Some-State/O=Internet Widgits Pty Ltd/",
],
stdin=DEVNULL,
stderr=DEVNULL,
).returncode
!= 0
):
logger.error(
"Self-signed certificate generation failed for default server",
)
status = 2
else:
status = 1
logger.info(
"Successfully generated self-signed certificate for default server",
)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
cached, err = set_file_in_db(
"cert.pem",
cert_path.joinpath("cert.pem").read_bytes(),
db,
)
if not cached:
logger.error(
f"Error while saving default-server-cert cert.pem file to db cache : {err}"
)
else:
logger.info(
"Successfully saved default-server-cert cert.pem file to db cache"
)
cached, err = set_file_in_db(
"cert.key",
cert_path.joinpath("cert.key").read_bytes(),
db,
)
if not cached:
logger.error(
f"Error while saving default-server-cert cert.key file to db cache : {err}"
)
else:
logger.info(
"Successfully saved default-server-cert cert.key file to db cache"
)
else:
logger.info(
"Skipping generation of self-signed certificate for default server (already present)",
)
except:
status = 2
logger.error(f"Exception while running default-server-cert.py :\n{format_exc()}")

View file

@@ -1,20 +1,23 @@
#!/usr/bin/python3
from os import getenv
from os.path import basename
from os import getenv, sep
from os.path import basename, join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from logger import setup_logger
from logger import setup_logger # type: ignore
logger = setup_logger("UPDATE-CHECK", getenv("LOG_LEVEL", "INFO"))
status = 0

View file

@@ -2,24 +2,27 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, file_hash, is_cached_file
@@ -65,8 +68,10 @@ try:
_exit(0)
# Create directories if they don't exist
Path("/var/cache/bunkerweb/realip").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/realip").mkdir(parents=True, exist_ok=True)
realip_path = Path(sep, "var", "cache", "bunkerweb", "realip")
realip_path.mkdir(parents=True, exist_ok=True)
tmp_realip_path = Path(sep, "var", "tmp", "bunkerweb", "realip")
tmp_realip_path.mkdir(parents=True, exist_ok=True)
db = Database(
logger,
@@ -74,7 +79,7 @@ try:
)
# Don't go further if the cache is fresh
if is_cached_file("/var/cache/bunkerweb/realip/combined.list", "hour", db):
if is_cached_file(realip_path.joinpath("combined.list"), "hour", db):
logger.info("RealIP list is already in cache, skipping download...")
_exit(0)
@@ -108,19 +113,19 @@ try:
f"Exception while getting RealIP list from {url} :\n{format_exc()}"
)
Path("/var/tmp/bunkerweb/realip/combined.list").write_bytes(content)
tmp_realip_path.joinpath("combined.list").write_bytes(content)
# Check if file has changed
new_hash = file_hash("/var/tmp/bunkerweb/realip/combined.list")
old_hash = cache_hash("/var/cache/bunkerweb/realip/combined.list", db)
new_hash = file_hash(tmp_realip_path.joinpath("combined.list"))
old_hash = cache_hash(realip_path.joinpath("combined.list"), db)
if new_hash == old_hash:
logger.info("New file is identical to cache file, reload is not needed")
_exit(0)
# Put file in cache
cached, err = cache_file(
"/var/tmp/bunkerweb/realip/combined.list",
"/var/cache/bunkerweb/realip/combined.list",
tmp_realip_path.joinpath("combined.list"),
realip_path.joinpath("combined.list"),
new_hash,
db,
)
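The hunks above implement a download-then-promote cache flow; a simplified, self-contained sketch of the hash comparison it relies on (sha512 mirrors what file_hash computes, and the temporary paths are throwaway stand-ins for the realip directories):

from hashlib import sha512
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    tmp_list = Path(tmp, "combined.list")             # freshly downloaded candidate
    cache_list = Path(tmp, "cache", "combined.list")
    cache_list.parent.mkdir(parents=True, exist_ok=True)
    cache_list.write_bytes(b"1.2.3.0/24\n")
    tmp_list.write_bytes(b"1.2.3.0/24\n")
    new_hash = sha512(tmp_list.read_bytes()).hexdigest()
    old_hash = sha512(cache_list.read_bytes()).hexdigest()
    if new_hash == old_hash:
        print("identical, reload is not needed")      # mirrors the early _exit(0)
    else:
        cache_list.write_bytes(tmp_list.read_bytes()) # promote tmp -> cache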

View file

@@ -1,72 +1,103 @@
#!/usr/bin/python3
from os import getenv
from os import getenv, sep
from os.path import join
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
from typing import Tuple
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (
("deps", "python"),
("utils",),
("db",),
)
)
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import set_file_in_db
logger = setup_logger("self-signed", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
db = None
lock = Lock()
status = 0
def generate_cert(first_server, days, subj):
if Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem").is_file():
cmd = f"openssl x509 -checkend 86400 -noout -in /var/cache/bunkerweb/selfsigned/{first_server}.pem"
proc = run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
if proc.returncode == 0:
def generate_cert(
first_server: str, days: str, subj: str, self_signed_path: Path
) -> Tuple[bool, int]:
if self_signed_path.joinpath(f"{first_server}.pem").is_file():
if (
run(
[
"openssl",
"x509",
"-checkend",
"86400",
"-noout",
"-in",
self_signed_path.joinpath(f"{first_server}.pem"),
],
stdin=DEVNULL,
stderr=STDOUT,
).returncode
== 0
):
logger.info(f"Self-signed certificate already present for {first_server}")
return True, 0
logger.info(f"Generating self-signed certificate for {first_server}")
cmd = f"openssl req -nodes -x509 -newkey rsa:4096 -keyout /var/cache/bunkerweb/selfsigned/{first_server}.key -out /var/cache/bunkerweb/selfsigned/{first_server}.pem -days {days} -subj {subj}"
proc = run(cmd.split(" "), stdin=DEVNULL, stderr=DEVNULL)
if proc.returncode != 0:
if (
run(
[
"openssl",
"req",
"-nodes",
"-x509",
"-newkey",
"rsa:4096",
"-keyout",
self_signed_path.joinpath(f"{first_server}.key"),
"-out",
self_signed_path.joinpath(f"{first_server}.pem"),
"-days",
days,
"-subj",
subj,
],
stdin=DEVNULL,
stderr=DEVNULL,
).returncode
!= 0
):
logger.error(f"Self-signed certificate generation failed for {first_server}")
return False, 2
return True, 1
# Update db
with lock:
err = db.update_job_cache(
"self-signed",
first_server,
f"{first_server}.key",
Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.key").read_bytes(),
)
cached, err = set_file_in_db(
f"{first_server}.pem",
self_signed_path.joinpath(f"{first_server}.pem").read_bytes(),
db,
service_id=first_server,
)
if not cached:
logger.error(f"Error while caching self-signed {first_server}.pem file : {err}")
if err:
logger.warning(f"Couldn't update db cache for {first_server}.key file: {err}")
with lock:
err = db.update_job_cache(
"self-signed",
first_server,
f"{first_server}.pem",
Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem").read_bytes(),
)
if err:
logger.warning(f"Couldn't update db cache for {first_server}.pem file: {err}")
cached, err = set_file_in_db(
f"{first_server}.key",
self_signed_path.joinpath(f"{first_server}.key").read_bytes(),
db,
service_id=first_server,
)
if not cached:
logger.error(f"Error while caching self-signed {first_server}.key file : {err}")
logger.info(f"Successfully generated self-signed certificate for {first_server}")
return True, 1
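For reference, openssl x509 -checkend 86400 exits 0 only if the certificate is still valid 24 hours from now, which is why a zero return code above means the existing file can be kept. An illustrative call of the refactored helper (placeholder values; the function also relies on the module-level logger and db, so this is not a standalone script):

ok, ret_status = generate_cert(
    "www.example.com",                  # first_server
    "365",                              # days, kept as str for the openssl argv
    "/CN=www.example.com/",             # subj
    Path(sep, "var", "cache", "bunkerweb", "selfsigned"),
)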
@@ -75,7 +106,8 @@ def generate_cert(first_server, days, subj):
status = 0
try:
Path("/var/cache/bunkerweb/selfsigned/").mkdir(parents=True, exist_ok=True)
self_signed_path = Path(sep, "var", "cache", "bunkerweb", "selfsigned")
self_signed_path.mkdir(parents=True, exist_ok=True)
# Multisite case
if getenv("MULTISITE") == "yes":
@@ -92,10 +124,16 @@ try:
getenv("GENERATE_SELF_SIGNED_SSL", "no"),
)
!= "yes"
or Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem").is_file()
or self_signed_path.joinpath(f"{first_server}.pem").is_file()
):
continue
if not db:
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
ret, ret_status = generate_cert(
first_server,
getenv(
@@ -106,19 +144,25 @@ try:
f"{first_server}_SELF_SIGNED_SSL_SUBJ",
getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
),
self_signed_path,
)
status = ret_status
# Singlesite case
elif getenv("GENERATE_SELF_SIGNED_SSL", "no") == "yes" and getenv("SERVER_NAME"):
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
first_server = getenv("SERVER_NAME", "").split(" ")[0]
ret, ret_status = generate_cert(
first_server,
getenv("SELF_SIGNED_SSL_EXPIRY", "365"),
getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
self_signed_path,
)
status = ret_status
except:
status = 2
logger.error(f"Exception while running self-signed.py :\n{format_exc()}")

View file

@@ -2,25 +2,25 @@
from contextlib import suppress
from ipaddress import ip_address, ip_network
from os import _exit, getenv
from os import _exit, getenv, sep
from os.path import join
from pathlib import Path
from re import IGNORECASE, compile as re_compile
from sys import exit as sys_exit, path as sys_path
from traceback import format_exc
from typing import Tuple
sys_path.extend(
(
"/usr/share/bunkerweb/deps/python",
"/usr/share/bunkerweb/utils",
"/usr/share/bunkerweb/db",
)
)
for deps_path in [
join(sep, "usr", "share", "bunkerweb", *paths)
for paths in (("deps", "python"), ("utils",), ("db",))
]:
if deps_path not in sys_path:
sys_path.append(deps_path)
from requests import get
from Database import Database
from logger import setup_logger
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from jobs import cache_file, cache_hash, is_cached_file, file_hash
rdns_rx = re_compile(rb"^[^ ]+$", IGNORECASE)
@@ -83,8 +83,10 @@ try:
)
# Create directories if they don't exist
Path("/var/cache/bunkerweb/whitelist").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/whitelist").mkdir(parents=True, exist_ok=True)
whitelist_path = Path(sep, "var", "cache", "bunkerweb", "whitelist")
whitelist_path.mkdir(parents=True, exist_ok=True)
tmp_whitelist_path = Path(sep, "var", "tmp", "bunkerweb", "whitelist")
tmp_whitelist_path.mkdir(parents=True, exist_ok=True)
# Our urls data
urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
@@ -99,9 +101,7 @@ try:
}
all_fresh = True
for kind in kinds_fresh:
if not is_cached_file(
f"/var/cache/bunkerweb/whitelist/{kind}.list", "hour", db
):
if not is_cached_file(whitelist_path.joinpath(f"{kind}.list"), "hour", db):
kinds_fresh[kind] = False
all_fresh = False
logger.info(
@@ -149,12 +149,12 @@ try:
content += data + b"\n"
i += 1
Path(f"/var/tmp/bunkerweb/whitelist/{kind}.list").write_bytes(content)
tmp_whitelist_path.joinpath(f"{kind}.list").write_bytes(content)
logger.info(f"Downloaded {i} good {kind}")
logger.info(f"Downloaded {i} bad {kind}")
# Check if file has changed
new_hash = file_hash(f"/var/tmp/bunkerweb/whitelist/{kind}.list")
old_hash = cache_hash(f"/var/cache/bunkerweb/whitelist/{kind}.list", db)
new_hash = file_hash(tmp_whitelist_path.joinpath(f"{kind}.list"))
old_hash = cache_hash(whitelist_path.joinpath(f"{kind}.list"), db)
if new_hash == old_hash:
logger.info(
f"New file {kind}.list is identical to cache file, reload is not needed",
@@ -165,8 +165,8 @@ try:
)
# Put file in cache
cached, err = cache_file(
f"/var/tmp/bunkerweb/whitelist/{kind}.list",
f"/var/cache/bunkerweb/whitelist/{kind}.list",
tmp_whitelist_path.joinpath(f"{kind}.list"),
whitelist_path.joinpath(f"{kind}.list"),
new_hash,
db,
)
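The per-kind freshness loop earlier in this file is what short-circuits downloads; a reduced sketch of that bookkeeping, where is_file() stands in for is_cached_file(path, "hour", db), which additionally honours the one-hour expiry:

from os import sep
from pathlib import Path

cache_dir = Path(sep, "var", "cache", "bunkerweb", "whitelist")
kinds_fresh = {"IP": True, "RDNS": True, "ASN": True, "USER_AGENT": True, "URI": True}
for kind in kinds_fresh:
    if not cache_dir.joinpath(f"{kind}.list").is_file():  # stand-in check only
        kinds_fresh[kind] = False
all_fresh = all(kinds_fresh.values())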

View file

@@ -4,12 +4,13 @@ from contextlib import contextmanager, suppress
from copy import deepcopy
from datetime import datetime
from hashlib import sha256
from inspect import getsourcefile
from logging import Logger
from os import _exit, getenv, listdir, sep
from os.path import dirname, join
from os.path import basename, dirname, join
from pathlib import Path
from re import compile as re_compile
from sys import path as sys_path
from sys import _getframe, path as sys_path
from typing import Any, Dict, List, Optional, Tuple
from time import sleep
from traceback import format_exc
@@ -901,7 +902,8 @@ class Database:
return ""
def delete_job_cache(self, job_name: str, file_name: str):
def delete_job_cache(self, file_name: str, *, job_name: Optional[str] = None):
job_name = job_name or basename(getsourcefile(_getframe(1))).replace(".py", "")
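# _getframe(1) is the calling frame; its source file name minus ".py"
# (e.g. "default-server-cert.py" -> "default-server-cert") doubles as the
# job name, so job scripts no longer need to pass it explicitly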
with self.__db_session() as session:
session.query(Jobs_cache).filter_by(
job_name=job_name, file_name=file_name
@@ -909,14 +911,15 @@
def update_job_cache(
self,
job_name: str,
service_id: Optional[str],
file_name: str,
data: bytes,
*,
job_name: Optional[str] = None,
checksum: Optional[str] = None,
) -> str:
"""Update the plugin cache in the database"""
job_name = job_name or basename(getsourcefile(_getframe(1))).replace(".py", "")
with self.__db_session() as session:
cache = (
session.query(Jobs_cache)

View file

@@ -6,7 +6,7 @@ from hashlib import sha512
from inspect import getsourcefile
from io import BufferedReader
from json import dumps, loads
from os.path import basename
from os.path import basename, normpath
from pathlib import Path
from sys import _getframe
from threading import Lock
@@ -24,13 +24,14 @@ lock = Lock()
def is_cached_file(
file: str,
file: Union[str, Path],
expire: Union[Literal["hour"], Literal["day"], Literal["week"], Literal["month"]],
db=None,
) -> bool:
is_cached = False
cached_file = None
try:
file = normpath(file)
file_path = Path(f"{file}.md")
if not file_path.is_file():
if not db:
@@ -69,27 +70,34 @@ def is_cached_file(
return is_cached and cached_file
def get_file_in_db(file: str, db) -> bytes:
def get_file_in_db(file: Union[str, Path], db) -> bytes:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""), file
basename(getsourcefile(_getframe(1))).replace(".py", ""), normpath(file)
)
if not cached_file:
return False
return cached_file.data
def set_file_in_db(name: str, bio: BufferedReader, db) -> Tuple[bool, str]:
def set_file_in_db(
name: str,
content: bytes,
db,
*,
job_name: Optional[str] = None,
service_id: Optional[str] = None,
checksum: Optional[str] = None,
) -> Tuple[bool, str]:
ret, err = True, "success"
try:
content = bio.read()
bio.seek(0)
with lock:
err = db.update_job_cache(
basename(getsourcefile(_getframe(1))).replace(".py", ""),
None,
service_id,
name,
content,
checksum=bytes_hash(bio),
job_name=job_name
or basename(getsourcefile(_getframe(1))).replace(".py", ""),
checksum=checksum,
)
if err:
@@ -103,16 +111,16 @@ def del_file_in_db(name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
db.delete_job_cache(
basename(getsourcefile(_getframe(1))).replace(".py", ""), name
name, job_name=basename(getsourcefile(_getframe(1))).replace(".py", "")
)
except:
return False, f"exception :\n{format_exc()}"
return ret, err
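With the new signatures, callers hand over raw bytes plus keyword-only metadata instead of an open file object; an illustrative round-trip under the assumption that db is an initialized Database handle and the file names are placeholders:

content = Path("cert.pem").read_bytes()            # raw bytes, no BufferedReader
cached, err = set_file_in_db(
    "cert.pem",
    content,
    db,
    service_id="www.example.com",                  # optional per-service scoping
)
if cached:
    ok, err = del_file_in_db("cert.pem", db)       # job name inferred from the caller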
def file_hash(file: str) -> str:
def file_hash(file: Union[str, Path]) -> str:
_sha512 = sha512()
with open(file, "rb") as f:
with open(normpath(file), "rb") as f:
while True:
data = f.read(1024)
if not data:
@@ -121,7 +129,7 @@ def file_hash(file: str) -> str:
return _sha512.hexdigest()
def bytes_hash(bio: bytes) -> str:
def bytes_hash(bio: BufferedReader) -> str:
_sha512 = sha512()
while True:
data = bio.read(1024)
@@ -132,13 +140,13 @@ def bytes_hash(bio: bytes) -> str:
return _sha512.hexdigest()
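bytes_hash now consumes a reader rather than a bytes object; despite the BufferedReader annotation, anything exposing read() works at runtime, so a BytesIO keeps this sketch self-contained:

from io import BytesIO

digest = bytes_hash(BytesIO(b"example payload"))  # chunked sha512 of the stream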
def cache_hash(cache: str, db=None) -> Optional[str]:
def cache_hash(cache: Union[str, Path], db=None) -> Optional[str]:
with suppress(BaseException):
return loads(Path(f"{cache}.md").read_text()).get("checksum", None)
return loads(Path(normpath(f"{cache}.md")).read_text()).get("checksum", None)
if db:
cached_file = db.get_job_cache_file(
basename(getsourcefile(_getframe(1))).replace(".py", ""),
basename(cache),
basename(normpath(cache)),
with_info=True,
with_data=False,
)
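Local cache metadata lives in a ".md" sidecar next to each cached file, written at the end of cache_file and read back by the suppress() branch above; a minimal sketch of that round-trip with a throwaway path:

from datetime import datetime
from json import dumps, loads
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    cache = Path(tmp, "combined.list")
    cache.write_bytes(b"...")
    Path(f"{cache}.md").write_text(
        dumps(dict(date=datetime.now().timestamp(), checksum="abc"))
    )
    assert loads(Path(f"{cache}.md").read_text()).get("checksum", None) == "abc"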
@@ -149,8 +157,8 @@ def cache_hash(cache: str, db=None) -> Optional[str]:
def cache_file(
file: str,
cache: str,
file: Union[str, Path],
cache: Union[str, Path],
_hash: Optional[str],
db=None,
*,
@@ -158,25 +166,27 @@
) -> Tuple[bool, str]:
ret, err = True, "success"
try:
content = Path(file).read_bytes()
Path(cache).write_bytes(content)
Path(file).unlink()
if not isinstance(file, Path):
file = Path(normpath(file))
if not isinstance(cache, Path):
cache = Path(normpath(cache))
content = file.read_bytes()
cache.write_bytes(content)
file.unlink()
if not _hash:
_hash = file_hash(cache)
_hash = file_hash(str(cache))
if db:
with lock:
err = db.update_job_cache(
basename(getsourcefile(_getframe(1))).replace(".py", ""),
service_id,
basename(cache),
content,
checksum=_hash,
)
if err:
ret = False
return set_file_in_db(
basename(str(cache)),
content,
db,
job_name=basename(getsourcefile(_getframe(1))).replace(".py", ""),
service_id=service_id,
checksum=_hash,
)
else:
Path(f"{cache}.md").write_text(
dumps(dict(date=datetime.now().timestamp(), checksum=_hash))