WIP - fix bunkernet and missing reload for scheduled jobs

florian 2023-05-04 07:07:58 +02:00
parent 0276054522
commit 75ca603b7d
No known key found for this signature in database
GPG Key ID: 3D80806F12602A7C
8 changed files with 136 additions and 46 deletions

View File

@ -168,7 +168,21 @@ utils.ip_is_global = function(ip)
"224.0.0.0/4",
"233.252.0.0/24",
"240.0.0.0/4",
"255.255.255.255/32"
"255.255.255.255/32",
"::/128",
"::1/128",
"::ffff:0:0/96",
"::ffff:0:0:0/96",
"64:ff9b::/96",
"64:ff9b:1::/48",
"100::/64",
"2001:0000::/32",
"2001:20::/28",
"2001:db8::/32",
"2002::/16",
"fc00::/7",
"fe80::/10",
"ff00::/8"
}
-- Instantiate ipmatcher
local ipm, err = ipmatcher.new(reserved_ips)
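For comparison (not part of the commit), Python's ipaddress module encodes essentially the same notion of a "global" address, now covering the IPv6 special-purpose ranges added above; a minimal sketch:

from ipaddress import ip_address

# An address counts as global only if it falls outside reserved/special-purpose ranges
for ip in ("8.8.8.8", "10.0.0.1", "2606:4700::1111", "fe80::1", "2001:db8::1"):
    print(ip, ip_address(ip).is_global)
# -> True, False, True, False, False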

View File

@ -15,6 +15,9 @@ server {
include /etc/bunkerweb/configs/server-http/{{ SERVER_NAME.split(" ")[0] }}/*.conf;
{% endif %}
# reason variable
set $reason '';
# include LUA files
include {{ NGINX_PREFIX }}set-lua.conf;
include {{ NGINX_PREFIX }}access-lua.conf;

View File

@ -15,6 +15,8 @@ function bunkernet:initialize()
local id, err = self.datastore:get("plugin_bunkernet_id")
if id then
self.bunkernet_id = id
self.version = ngx.ctx.bw.version
self.integration = ngx.ctx.bw.integration
else
self.logger:log(ngx.ERR, "can't get BunkerNet ID from datastore : " .. err)
end
@ -58,7 +60,7 @@ function bunkernet:init()
ret = false
else
for line in f:lines() do
if utils.is_ipv4(line) and utils.ip_is_global(line) then
if (utils.is_ipv4(line) or utils.is_ipv6(line)) and utils.ip_is_global(line) then
table.insert(db.ip, line)
i = i + 1
end
@ -77,8 +79,49 @@ function bunkernet:init()
self.variables["BUNKERNET_SERVER"] .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database")
end
function bunkernet:access()
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
end
-- Check if enabled
if self.variables["USE_BUNKERNET"] ~= "yes" then
return self:ret(true, "bunkernet not activated")
end
-- Check if BunkerNet ID is generated
if not self.bunkernet_id then
return self:ret(false, "bunkernet ID is not generated")
end
-- Check if IP is global
if not ngx.ctx.bw.ip_is_global then
return self:ret(true, "IP is not global")
end
-- Check if whitelisted
if ngx.ctx.bw.is_whitelisted == "yes" then
return self:ret(true, "client is whitelisted")
end
-- Extract DB
local db, err = self.datastore:get("plugin_bunkernet_db")
if db then
db = cjson.decode(db)
-- Check if IP is present
if #db.ip > 0 then
local present, err = utils.is_ip_in_networks(ngx.ctx.bw.remote_addr, db.ip)
if present == nil then
return self:ret(false, "can't check if ip is in db : " .. err)
end
if present then
return self:ret(true, "ip is in db", utils.get_deny_status())
end
end
else
return self:ret(false, "can't get bunkernet db " .. err)
end
return self:ret(true, "not in db")
end
function bunkernet:log(bypass_use_bunkernet)
-- Check if not loading is needed
-- Check if not loading
if self.is_loading then
return self:ret(true, "bunkerweb is loading")
end
@ -105,10 +148,8 @@ function bunkernet:log(bypass_use_bunkernet)
return self:ret(true, "IP is not global")
end
-- TODO : check if IP has been reported recently
self.integration = ngx.ctx.bw.integration
self.version = ngx.ctx.bw.version
local function report_callback(premature, obj, ip, reason, method, url, headers) -- TODO : fix this
local ok, err, status, data = obj:report(ip, reason, method, url, headers, obj.ctx.integration, obj.ctx.version)
local function report_callback(premature, obj, ip, reason, method, url, headers)
local ok, err, status, data = obj:report(ip, reason, method, url, headers)
if status == 429 then
obj.logger:log(ngx.WARN, "bunkernet API is rate limiting us")
elseif not ok then
@ -161,9 +202,9 @@ function bunkernet:request(method, url, data)
return false, "can't instantiate http object : " .. err, nil, nil
end
local all_data = {
id = self.id,
integration = self.integration,
version = self.version
id = self.bunkernet_id,
version = self.version,
integration = self.integration
}
for k, v in pairs(data) do
all_data[k] = v
@ -177,6 +218,8 @@ function bunkernet:request(method, url, data)
}
})
httpc:close()
self.logger:log(ngx.WARN, cjson.encode(all_data))
self.logger:log(ngx.WARN, "BunkerWeb/" .. self.version)
if not res then
return false, "error while sending request : " .. err, nil, nil
end
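For illustration (Python sketch, not part of the commit), the body assembled by bunkernet:request() for a report merges the shared instance fields with the per-call data, and the User-Agent matches the string logged above; all values below are placeholders:

# Placeholder values; the real ones come from the datastore and ngx.ctx.bw
bunkernet_id = "0123456789abcdef"
version, integration = "1.5.0", "docker"
common = {"id": bunkernet_id, "version": version, "integration": integration}
report = {"ip": "203.0.113.42", "reason": "bad behavior", "method": "GET", "url": "/", "headers": {}}
payload = {**common, **report}  # same merge as the all_data loop above
headers = {"User-Agent": f"BunkerWeb/{version}"}
print(payload, headers)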

View File

@ -53,7 +53,9 @@ try:
# Ask an ID if needed
bunkernet_id = None
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
if not is_cached_file("/var/cache/bunkerweb/bunkernet/instance.id", "hour", db):
logger.info("Registering instance on BunkerNet API ...")
ok, status, data = register()
if not ok:

View File

@ -74,7 +74,7 @@ function country:access()
if not ok then
return self:ret(false, "error while adding item to cache : " .. err)
end
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")", true, utils.get_deny_status())
return self:ret(true, "client IP " .. ngx.ctx.bw.remote_addr .. " is blacklisted (country = " .. country .. ")", utils.get_deny_status())
end
end
end

View File

@ -1,11 +1,14 @@
#!/usr/bin/python3
from os import environ, getenv
from os import environ, getenv, listdir
from pathlib import Path
from subprocess import DEVNULL, STDOUT, run
from sys import exit as sys_exit, path as sys_path
from threading import Lock
from traceback import format_exc
from tarfile import open as tfopen
from io import BytesIO
from shutil import rmtree
sys_path.extend(
(
@ -17,6 +20,7 @@ sys_path.extend(
from Database import Database
from logger import setup_logger
from jobs import get_file_in_db, set_file_in_db
logger = setup_logger("LETS-ENCRYPT", getenv("LOG_LEVEL", "INFO"))
db = Database(
@ -26,6 +30,8 @@ db = Database(
lock = Lock()
status = 0
def folder_to_tgz():
    # WIP helper, left unused below: pack the letsencrypt cache folder into a tgz (archive name assumed)
    with tfopen("/var/tmp/bunkerweb/folder.tgz", mode="w:gz") as tgz:
        tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
def certbot_new(domains, email):
environ["PYTHONPATH"] = "/usr/share/bunkerweb/deps/python"
@ -53,8 +59,30 @@ def certbot_new(domains, email):
)
return proc.returncode
status = 0
try:
# Create directories if they don't exist
Path("/var/cache/bunkerweb/letsencrypt").mkdir(parents=True, exist_ok=True)
# Extract letsencrypt folder if it exists in db
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db:
tgz = get_file_in_db("certbot-new", "folder.tgz", db)
if tgz:
# Delete folder if needed
if len(listdir("/var/cache/bunkerweb/letsencrypt")) > 0:
rmtree("/var/cache/bunkerweb/letsencrypt")
# Extract it
with tfopen(name="folder.tgz", mode="r:gz", fileobj=BytesIO(tgz)) as tf:
tf.extractall("/var/cache/bunkerweb/letsencrypt")
logger.info("Successfully retrieved Let's Encrypt data from db cache")
else:
logger.info("No Let's Encrypt data found in db cache")
# Multisite case
if getenv("MULTISITE", "no") == "yes":
for first_server in getenv("SERVER_NAME", "").split(" "):
@ -72,7 +100,7 @@ try:
" ", ","
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
if Path(f"/var/cache/bunkerweb/letsencrypt/{first_server}/cert.pem").exists():
logger.info(
f"Certificates already exists for domain(s) {domains}",
)
@ -89,36 +117,22 @@ try:
f"Asking certificates for domains : {domains} (email = {real_email}) ...",
)
if certbot_new(domains, real_email) != 0:
status = 1
status = 2
logger.error(
f"Certificate generation failed for domain(s) {domains} ...",
)
else:
status = 1
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
# Update db
with lock:
err = db.update_job_cache(
"certbot-new",
first_server,
"cert.pem",
Path(
f"/etc/letsencrypt/live/{first_server}/cert.pem"
).read_bytes(),
)
if err:
logger.warning(f"Couldn't update db cache: {err}")
# Singlesite case
elif getenv("AUTO_LETS_ENCRYPT", "no") == "yes" and getenv("SERVER_NAME"):
first_server = getenv("SERVER_NAME", "").split(" ")[0]
domains = getenv("SERVER_NAME", "").replace(" ", ",")
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
if Path(f"/var/cache/bunkerweb/letsencrypt/{first_server}/cert.pem").exists():
logger.info(f"Certificates already exists for domain(s) {domains}")
else:
real_email = getenv("EMAIL_LETS_ENCRYPT", f"contact@{first_server}")
@ -132,26 +146,27 @@ try:
status = 2
logger.error(f"Certificate generation failed for domain(s) : {domains}")
else:
status = 1
logger.info(
f"Certificate generation succeeded for domain(s) : {domains}"
)
if Path(f"/etc/letsencrypt/live/{first_server}/cert.pem").exists():
# Update db
with lock:
err = db.update_job_cache(
"certbot-new",
first_server,
"cert.pem",
Path(
f"/etc/letsencrypt/live/{first_server}/cert.pem"
).read_bytes(),
)
# Put new folder in cache
if db:
bio = BytesIO()
with tfopen(mode="w:gz", fileobj=bio) as tgz:
tgz.add("/var/cache/bunkerweb/letsencrypt", arcname=".")
bio.seek(0)
# Put tgz in cache
cached, err = set_file_in_db(
"certbot-new",
"folder.tgz",
bio.read(),
db,
)
if err:
logger.warning(f"Couldn't update db cache: {err}")
except:
status = 1
status = 3
logger.error(f"Exception while running certbot-new.py :\n{format_exc()}")
sys_exit(status)
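The in-memory tgz round trip used by this job (pack the letsencrypt folder into a BytesIO, cache the bytes in the db, extract them back on the next run) can be exercised on its own; a self-contained sketch (not part of the commit), using temporary directories instead of /var/cache/bunkerweb:

from io import BytesIO
from pathlib import Path
from tarfile import open as tfopen
from tempfile import TemporaryDirectory

with TemporaryDirectory() as src, TemporaryDirectory() as dst:
    Path(src, "cert.pem").write_text("dummy")
    # pack the folder into an in-memory tgz (the bytes that would be cached in the db)
    bio = BytesIO()
    with tfopen(mode="w:gz", fileobj=bio) as tgz:
        tgz.add(src, arcname=".")
    bio.seek(0)
    data = bio.read()
    # later run: restore the folder from the cached bytes
    with tfopen(mode="r:gz", fileobj=BytesIO(data)) as tf:
        tf.extractall(dst)
    print(Path(dst, "cert.pem").read_text())  # -> dummy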

View File

@ -58,6 +58,16 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
return is_cached
def get_file_in_db(job: str, file: str, db) -> bytes:
    cached_file = db.get_job_cache_file(job, file)
    if not cached_file:
        return False
    return cached_file.data

def set_file_in_db(job: str, file: str, data, db):
    # assumed completion of this WIP stub: store the raw bytes in the scheduler db cache
    err = db.update_job_cache(job, None, file, data)
    return not err, err
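A minimal usage sketch of the two helpers above (not part of the commit), with a stub standing in for the real Database object, just to show the interface they rely on: get_job_cache_file() returning an object with a .data attribute, update_job_cache() returning an error string or None:

class _StubCache:
    def __init__(self, data):
        self.data = data

class _StubDb:
    def __init__(self):
        self._files = {}
    def update_job_cache(self, job, service, name, data):
        self._files[(job, name)] = data
        return None  # None/empty means success
    def get_job_cache_file(self, job, name):
        data = self._files.get((job, name))
        return _StubCache(data) if data else None

db = _StubDb()
print(set_file_in_db("certbot-new", "folder.tgz", b"tgz bytes", db))  # -> (True, None)
print(get_file_in_db("certbot-new", "folder.tgz", db))                # -> b'tgz bytes'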
def file_hash(file: str) -> str:
_sha512 = sha512()

View File

@ -105,10 +105,12 @@ class JobScheduler(ApiCaller):
f"Executing job {name} from plugin {plugin} ...",
)
success = True
ret = -1
try:
proc = run(
f"{path}jobs/{file}", stdin=DEVNULL, stderr=STDOUT, env=self.__env
)
ret = proc.returncode
except BaseException:
success = False
self.__logger.error(
@ -136,6 +138,7 @@ class JobScheduler(ApiCaller):
self.__logger.warning(
f"Failed to update database for the job {name} from plugin {plugin}: {err}",
)
return ret
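The point of returning ret here is the reload decision further down: jobs report through their exit code whether they produced new data. A small sketch (not part of the commit) of the convention as this diff uses it, with 0 meaning "ran, nothing new", 1 meaning "ran and produced new data" (forces a reload), 2 and 3 meaning failure, and -1 being the default when the job could not be executed at all:

def summarize(return_codes):
    # mirrors the scheduler logic below: any 1 forces a reload,
    # anything negative or >= 2 marks the run as failed
    reload = any(ret == 1 for ret in return_codes)
    success = all(ret in (0, 1) for ret in return_codes)
    return reload, success

print(summarize([0, 1, 0]))   # (True, True)  -> reload BunkerWeb
print(summarize([0, -1, 3]))  # (False, False) -> no reload, run marked failed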
def setup(self):
for plugin, jobs in self.__jobs.items():
@ -164,7 +167,7 @@ class JobScheduler(ApiCaller):
ret = job.run()
if ret == 1:
reload = True
elif (ret or 2) >= 2:
elif ret < 0 or ret >= 2:
success = False
if reload:
try: