Update jobs
This commit is contained in:
parent 13fe4b6eef
commit f97e056ff2
@@ -5,6 +5,7 @@ from os import _exit, getenv, makedirs
 from re import IGNORECASE, compile as re_compile
 from sys import exit as sys_exit, path as sys_path
 from traceback import format_exc
+from typing import Tuple
 
 sys_path.append("/usr/share/bunkerweb/deps/python")
 sys_path.append("/usr/share/bunkerweb/utils")
@@ -16,42 +17,41 @@ from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash
 
-rdns_rx = re_compile(r"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
-asn_rx = re_compile(r"^\d+$")
-uri_rx = re_compile(r"^/")
+rdns_rx = re_compile(rb"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
+asn_rx = re_compile(rb"^\d+$")
+uri_rx = re_compile(rb"^/")
 
 
-def check_line(kind, line):
+def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
     if kind == "IP":
-        if "/" in line:
+        if b"/" in line:
             try:
-                ip_network(line)
+                ip_network(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
         else:
             try:
-                ip_address(line)
+                ip_address(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
-        return False, ""
     elif kind == "RDNS":
         if rdns_rx.match(line):
             return True, line.lower()
-        return False, ""
     elif kind == "ASN":
-        real_line = line.replace("AS", "").replace("as", "")
+        real_line = line.replace(b"AS", b"").replace(b"as", b"")
         if asn_rx.match(real_line):
             return True, real_line
     elif kind == "USER_AGENT":
-        return True, line.replace("\\ ", " ").replace("\\.", "%.").replace(
-            "\\\\", "\\"
-        ).replace("-", "%-")
+        return True, line.replace(b"\\ ", b" ").replace(b"\\.", b"%.").replace(
+            b"\\\\", b"\\"
+        ).replace(b"-", b"%-")
     elif kind == "URI":
         if uri_rx.match(line):
             return True, line
-    return False, ""
+
+    return False, b""
 
 
 logger = setup_logger("BLACKLIST", getenv("LOG_LEVEL", "INFO"))
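Note on this hunk: check_line now takes the raw bytes that requests' iter_lines() yields, so the patterns become rb"" literals and only the ip_address()/ip_network() calls need a decode back to str. A quick usage sketch (not part of the commit; the sample inputs are invented):

    print(check_line("IP", b"10.0.0.0/8"))     # -> (True, b"10.0.0.0/8")
    print(check_line("RDNS", b"Example.COM"))  # -> (True, b"example.com")
    print(check_line("ASN", b"AS1234"))        # -> (True, b"1234")
    print(check_line("IP", b"not-an-ip"))      # -> (False, b"")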
@@ -66,18 +66,19 @@ try:
     # Check if at least a server has Blacklist activated
     blacklist_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
-        for first_server in getenv("SERVER_NAME").split(" "):
+    if getenv("MULTISITE", "no") == "yes":
+        for first_server in getenv("SERVER_NAME", "").split(" "):
             if (
-                getenv(first_server + "_USE_BLACKLIST", getenv("USE_BLACKLIST"))
+                getenv(f"{first_server}_USE_BLACKLIST", getenv("USE_BLACKLIST", "yes"))
                 == "yes"
             ):
                 blacklist_activated = True
                 break
     # Singlesite case
-    elif getenv("USE_BLACKLIST") == "yes":
+    elif getenv("USE_BLACKLIST", "yes") == "yes":
         blacklist_activated = True
-    if not blacklist_activated:
+
+    if blacklist_activated is False:
         logger.info("Blacklist is not activated, skipping downloads...")
         _exit(0)
 
@@ -113,7 +114,7 @@ try:
             logger.info(
                 f"Blacklist for {kind} is already in cache, skipping downloads...",
             )
-    if all_fresh:
+    if all_fresh is True:
         _exit(0)
 
     # Get URLs
@@ -131,32 +132,40 @@ try:
     }
     for kind in urls:
         for url in getenv(f"BLACKLIST_{kind}_URLS", "").split(" "):
-            if url != "" and url not in urls[kind]:
+            if url and url not in urls[kind]:
                 urls[kind].append(url)
 
     # Loop on kinds
     for kind, urls_list in urls.items():
-        if kinds_fresh[kind]:
+        if kinds_fresh[kind] is True:
             continue
         # Write combined data of the kind to a single temp file
         for url in urls_list:
             try:
                 logger.info(f"Downloading blacklist data from {url} ...")
-                resp = get(url)
+                resp = get(url, stream=True)
+
                 if resp.status_code != 200:
                     continue
+
                 i = 0
-                with open(f"/var/tmp/bunkerweb/blacklist/{kind}.list", "w") as f:
-                    for line in resp.content.decode("utf-8").splitlines():
-                        line = line.strip()
-                        if kind != "USER_AGENT":
-                            line = line.strip().split(" ")[0]
-                        if line == "" or line.startswith("#") or line.startswith(";"):
-                            continue
-                        ok, data = check_line(kind, line)
-                        if ok:
-                            f.write(data + "\n")
-                            i += 1
+                content = b""
+                for line in resp.iter_lines():
+                    line = line.strip()
+
+                    if not line or line.startswith(b"#") or line.startswith(b";"):
+                        continue
+                    elif kind != "USER_AGENT":
+                        line = line.split(b" ")[0]
+
+                    ok, data = check_line(kind, line)
+                    if ok is True:
+                        content += data + b"\n"
+                        i += 1
+
+                with open(f"/var/tmp/bunkerweb/blacklist/{kind}.list", "wb") as f:
+                    f.write(content)
+
                 logger.info(f"Downloaded {i} bad {kind}")
                 # Check if file has changed
                 new_hash = file_hash(f"/var/tmp/bunkerweb/blacklist/{kind}.list")
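The download loop above switches from buffering resp.content and decoding it to streaming: with stream=True the body is fetched lazily, iter_lines() yields each line as bytes, and the validated lines are collected into a single bytes buffer written once in binary mode. The same pattern in isolation (sketch; URL and paths are illustrative):

    from requests import get

    resp = get("https://example.com/ip-list.txt", stream=True)  # hypothetical list
    content = b""
    for line in resp.iter_lines():  # bytes, one line at a time
        line = line.strip()
        if not line or line.startswith(b"#"):
            continue
        content += line + b"\n"

    with open("/tmp/combined.list", "wb") as f:
        f.write(content)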
@@ -175,18 +184,20 @@ try:
                     f"/var/cache/bunkerweb/blacklist/{kind}.list",
                     new_hash,
                 )
+
                 if not cached:
                     logger.error(f"Error while caching blacklist : {err}")
                     status = 2
-                if status != 2:
+                else:
                     # Update db
                     err = db.update_job_cache(
                         "blacklist-download",
                         None,
                         f"{kind}.list",
-                        resp.content,
+                        content,
                         checksum=new_hash,
                     )
+
                     if err:
                         logger.warning(f"Couldn't update db cache: {err}")
                     status = 1
 
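The resp.content -> content swap in the db update also closes a consistency gap: the cache file received the filtered, normalized lines while the database row stored the raw response body. Both now get the same validated bytes. A toy illustration (invented input; check_line as defined above):

    raw = b"# comment\n1.2.3.4\nnot-an-ip\n"
    content = b""
    for line in raw.splitlines():
        ok, data = check_line("IP", line.strip())
        if ok is True:
            content += data + b"\n"
    assert content == b"1.2.3.4\n"  # only the validated line survives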
@@ -27,18 +27,19 @@ try:
     # Check if at least a server has BunkerNet activated
     bunkernet_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
-        for first_server in getenv("SERVER_NAME").split(" "):
+    if getenv("MULTISITE", "no") == "yes":
+        for first_server in getenv("SERVER_NAME", "").split(" "):
             if (
-                getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET"))
+                getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes"))
                 == "yes"
             ):
                 bunkernet_activated = True
                 break
     # Singlesite case
-    elif getenv("USE_BUNKERNET") == "yes":
+    elif getenv("USE_BUNKERNET", "yes") == "yes":
         bunkernet_activated = True
-    if not bunkernet_activated:
+
+    if bunkernet_activated is False:
         logger.info("BunkerNet is not activated, skipping download...")
         _exit(0)
 
@@ -81,9 +82,9 @@ try:
 
     # Writing data to file
     logger.info("Saving BunkerNet data ...")
-    with open("/var/tmp/bunkerweb/bunkernet-ip.list", "w") as f:
-        for ip in data["data"]:
-            f.write(f"{ip}\n")
+    content = "\n".join(data["data"]).encode("utf-8")
+    with open("/var/tmp/bunkerweb/bunkernet-ip.list", "wb") as f:
+        f.write(content)
 
     # Check if file has changed
     new_hash = file_hash("/var/tmp/bunkerweb/bunkernet-ip.list")
@@ -109,7 +110,7 @@ try:
         "bunkernet-data",
         None,
         "ip.list",
-        "\n".join(data["data"]).encode("utf-8"),
+        content,
         checksum=new_hash,
     )
     if err:
 
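Serializing once — join, encode, write in binary — lets the very same `content` bytes feed both the temp file and the db cache, where the old code wrote per-IP in text mode and re-joined later for the db. One subtle behavior change worth noting: the join drops the trailing newline the old f.write(f"{ip}\n") loop produced. Sketch (made-up data):

    data = {"data": ["1.2.3.4", "5.6.7.8"]}
    content = "\n".join(data["data"]).encode("utf-8")
    assert content == b"1.2.3.4\n5.6.7.8"  # no trailing newline

    with open("/tmp/bunkernet-ip.list", "wb") as f:
        f.write(content)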
@@ -27,7 +27,7 @@ try:
     # Check if at least a server has BunkerNet activated
     bunkernet_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
+    if getenv("MULTISITE", "no") == "yes":
         for first_server in getenv("SERVER_NAME").split(" "):
             if (
                 getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes"))
@@ -38,7 +38,8 @@ try:
     # Singlesite case
     elif getenv("USE_BUNKERNET", "yes") == "yes":
         bunkernet_activated = True
-    if not bunkernet_activated:
+
+    if bunkernet_activated is False:
         logger.info("BunkerNet is not activated, skipping registration...")
         _exit(0)
 
@@ -122,7 +123,7 @@ try:
         "bunkernet-register",
         None,
         "instance.id",
-        f"{bunkernet_id}".encode("utf-8"),
+        bunkernet_id.encode("utf-8"),
     )
     if err:
         logger.warning(f"Couldn't update db cache: {err}")
 
@@ -10,7 +10,7 @@ def request(method, url, _id=None):
     try:
         resp = requests.request(
             method,
-            getenv("BUNKERNET_SERVER", "https://api.bunkerweb.io") + url,
+            f"{getenv('BUNKERNET_SERVER', 'https://api.bunkerweb.io')}{url}",
             json=data,
             headers=headers,
             timeout=5,
 
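Both spellings build the same URL; the f-string only replaces the str concatenation. For instance (sketch; holds when BUNKERNET_SERVER is unset, endpoint invented):

    from os import getenv

    url = "/ping"
    assert (
        f"{getenv('BUNKERNET_SERVER', 'https://api.bunkerweb.io')}{url}"
        == "https://api.bunkerweb.io/ping"
    )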
@@ -1,6 +1,6 @@
 #!/usr/bin/python3
 
-from os import environ, getenv, makedirs, remove
+from os import getenv, makedirs, remove
 from os.path import isfile
 from shutil import copy
 from sys import exit as sys_exit, path as sys_path
@@ -33,6 +33,11 @@ def check_cert(cert_path, key_path, first_server: str = None) -> bool:
             f"Certificate file {cert_path} is not a valid file, ignoring the custom certificate"
         )
         return False
+    elif not isfile(key_path):
+        logger.warning(
+            f"Key file {key_path} is not a valid file, ignoring the custom certificate"
+        )
+        return False
 
     cert_cache_path = (
         f"/var/cache/bunkerweb/customcert/{cert_path.replace('/', '_')}.hash"
 
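check_cert now rejects a missing key file with the same guard-clause shape already used for the certificate, so a cert without a key is skipped up front. Distilled (sketch; the real function logs through logger and also takes first_server):

    from os.path import isfile

    def check_cert(cert_path: str, key_path: str) -> bool:
        # bail out early on anything that is not a regular file
        if not isfile(cert_path):
            print(f"Certificate file {cert_path} is not a valid file")
            return False
        elif not isfile(key_path):
            print(f"Key file {key_path} is not a valid file")
            return False
        return True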
@@ -5,6 +5,7 @@ from os import _exit, getenv, makedirs
 from re import IGNORECASE, compile as re_compile
 from sys import exit as sys_exit, path as sys_path
 from traceback import format_exc
+from typing import Tuple
 
 sys_path.append("/usr/share/bunkerweb/deps/python")
 sys_path.append("/usr/share/bunkerweb/utils")
@@ -16,42 +17,41 @@ from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash
 
-rdns_rx = re_compile(r"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
-asn_rx = re_compile(r"^\d+$")
-uri_rx = re_compile(r"^/")
+rdns_rx = re_compile(rb"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
+asn_rx = re_compile(rb"^\d+$")
+uri_rx = re_compile(rb"^/")
 
 
-def check_line(kind, line):
+def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
     if kind == "IP":
-        if "/" in line:
+        if b"/" in line:
             try:
-                ip_network(line)
+                ip_network(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
         else:
             try:
-                ip_address(line)
+                ip_address(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
-        return False, ""
     elif kind == "RDNS":
         if rdns_rx.match(line):
             return True, line.lower()
-        return False, ""
     elif kind == "ASN":
-        real_line = line.replace("AS", "").replace("as", "")
+        real_line = line.replace(b"AS", b"").replace(b"as", b"")
         if asn_rx.match(real_line):
             return True, real_line
     elif kind == "USER_AGENT":
-        return True, line.replace("\\ ", " ").replace("\\.", "%.").replace(
-            "\\\\", "\\"
-        ).replace("-", "%-")
+        return True, line.replace(b"\\ ", b" ").replace(b"\\.", b"%.").replace(
+            b"\\\\", b"\\"
+        ).replace(b"-", b"%-")
     elif kind == "URI":
         if uri_rx.match(line):
             return True, line
-    return False, ""
+
+    return False, b""
 
 
 logger = setup_logger("GREYLIST", getenv("LOG_LEVEL", "INFO"))
@@ -66,15 +66,19 @@ try:
     # Check if at least a server has Greylist activated
     greylist_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
-        for first_server in getenv("SERVER_NAME").split(" "):
-            if getenv(first_server + "_USE_GREYLIST", getenv("USE_GREYLIST")) == "yes":
+    if getenv("MULTISITE", "no") == "yes":
+        for first_server in getenv("SERVER_NAME", "").split(" "):
+            if (
+                getenv(f"{first_server}_USE_GREYLIST", getenv("USE_GREYLIST", "no"))
+                == "yes"
+            ):
                 greylist_activated = True
                 break
     # Singlesite case
-    elif getenv("USE_GREYLIST") == "yes":
+    elif getenv("USE_GREYLIST", "no") == "yes":
         greylist_activated = True
-    if not greylist_activated:
+
+    if greylist_activated is False:
         logger.info("Greylist is not activated, skipping downloads...")
         _exit(0)
 
@@ -105,46 +109,48 @@ try:
             logger.info(
                 f"Greylist for {kind} is already in cache, skipping downloads...",
             )
-    if all_fresh:
+    if all_fresh is True:
         _exit(0)
 
     # Get URLs
-    urls = {
-        "IP": [],
-        "RDNS": [],
-        "ASN": [],
-        "USER_AGENT": [],
-        "URI": [],
-    }
+    urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
     for kind in urls:
         for url in getenv(f"GREYLIST_{kind}_URLS", "").split(" "):
-            if url != "" and url not in urls[kind]:
+            if url and url not in urls[kind]:
                 urls[kind].append(url)
 
     # Loop on kinds
     for kind, urls_list in urls.items():
-        if kinds_fresh[kind]:
+        if kinds_fresh[kind] is True:
             continue
         # Write combined data of the kind to a single temp file
         for url in urls_list:
             try:
                 logger.info(f"Downloading greylist data from {url} ...")
-                resp = get(url)
+                resp = get(url, stream=True)
+
                 if resp.status_code != 200:
                     continue
+
                 i = 0
-                with open(f"/var/tmp/bunkerweb/greylist/{kind}.list", "w") as f:
-                    for line in resp.content.decode("utf-8").splitlines():
-                        line = line.strip()
-                        if kind != "USER_AGENT":
-                            line = line.strip().split(" ")[0]
-                        if line == "" or line.startswith("#") or line.startswith(";"):
-                            continue
-                        ok, data = check_line(kind, line)
-                        if ok:
-                            f.write(data + "\n")
-                            i += 1
-                logger.info(f"Downloaded {i} bad {kind}")
+                content = b""
+                for line in resp.iter_lines():
+                    line = line.strip()
+
+                    if not line or line.startswith(b"#") or line.startswith(b";"):
+                        continue
+                    elif kind != "USER_AGENT":
+                        line = line.split(b" ")[0]
+
+                    ok, data = check_line(kind, line)
+                    if ok is True:
+                        content += data + b"\n"
+                        i += 1
+
+                with open(f"/var/tmp/bunkerweb/greylist/{kind}.list", "wb") as f:
+                    f.write(content)
+
+                logger.info(f"Downloaded {i} grey {kind}")
                 # Check if file has changed
                 new_hash = file_hash(f"/var/tmp/bunkerweb/greylist/{kind}.list")
                 old_hash = cache_hash(f"/var/cache/bunkerweb/greylist/{kind}.list")
@@ -162,18 +168,20 @@ try:
                     f"/var/cache/bunkerweb/greylist/{kind}.list",
                     new_hash,
                 )
+
                 if not cached:
                     logger.error(f"Error while caching greylist : {err}")
                     status = 2
-                if status != 2:
+                else:
                     # Update db
                     err = db.update_job_cache(
                         "greylist-download",
                         None,
                         f"{kind}.list",
-                        resp.content,
+                        content,
                         checksum=new_hash,
                     )
+
                     if err:
                         logger.warning(f"Couldn't update db cache: {err}")
                     status = 1
 
@@ -33,7 +33,7 @@ status = 0
 def install_plugin(plugin_dir):
     # Load plugin.json
     metadata = {}
-    with open(f"{plugin_dir}plugin.json", "r") as f:
+    with open(f"{plugin_dir}plugin.json", "rb") as f:
         metadata = loads(f.read())
     # Don't go further if plugin is already installed
     if isfile(f"/data/plugins/{metadata['id']}/plugin.json"):
@@ -53,47 +53,43 @@ try:
 
     # Check if we have plugins to download
     plugin_urls = getenv("EXTERNAL_PLUGIN_URLS", "")
-    if plugin_urls == "":
+    if not plugin_urls:
         logger.info("No external plugins to download")
         _exit(0)
 
     # Loop on URLs
     for plugin_url in plugin_urls.split(" "):
 
         # Download ZIP file
         try:
             req = get(plugin_url)
         except:
             logger.error(
-                f"Exception while downloading plugin(s) from {plugin_url} :",
+                f"Exception while downloading plugin(s) from {plugin_url} :\n{format_exc()}",
             )
-            print(format_exc())
             status = 2
             continue
 
         # Extract it to tmp folder
-        temp_dir = "/var/tmp/bunkerweb/plugins-" + str(uuid4()) + "/"
+        temp_dir = f"/var/tmp/bunkerweb/plugins-{uuid4()}/"
         try:
             makedirs(temp_dir, exist_ok=True)
             with ZipFile(BytesIO(req.content)) as zf:
                 zf.extractall(path=temp_dir)
         except:
             logger.error(
-                f"Exception while decompressing plugin(s) from {plugin_url} :",
+                f"Exception while decompressing plugin(s) from {plugin_url} :\n{format_exc()}",
             )
-            print(format_exc())
             status = 2
             continue
 
         # Install plugins
         try:
-            for plugin_dir in glob(temp_dir + "**/plugin.json", recursive=True):
-                install_plugin(dirname(plugin_dir) + "/")
+            for plugin_dir in glob(f"{temp_dir}**/plugin.json", recursive=True):
+                install_plugin(f"{dirname(plugin_dir)}/")
         except:
             logger.error(
-                f"Exception while installing plugin(s) from {plugin_url} :",
+                f"Exception while installing plugin(s) from {plugin_url} :\n{format_exc()}",
             )
-            print(format_exc())
             status = 2
             continue
 
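Folding format_exc() into the logger call keeps the traceback inside the same log record instead of a separate print. The download-and-extract step itself is a standard in-memory unzip into a unique scratch directory; a self-contained sketch (URL and path invented):

    from io import BytesIO
    from uuid import uuid4
    from zipfile import ZipFile
    from requests import get

    req = get("https://example.com/plugin.zip")  # hypothetical archive
    temp_dir = f"/tmp/plugins-{uuid4()}/"        # fresh dir per archive
    with ZipFile(BytesIO(req.content)) as zf:    # unzip straight from memory
        zf.extractall(path=temp_dir)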
@@ -31,10 +31,7 @@ try:
         _exit(0)
 
     # Compute the mmdb URL
-    today = date.today()
-    mmdb_url = "https://download.db-ip.com/free/dbip-asn-lite-{}-{}.mmdb.gz".format(
-        today.strftime("%Y"), today.strftime("%m")
-    )
+    mmdb_url = f"https://download.db-ip.com/free/dbip-asn-lite-{date.today().strftime('%Y-%m')}.mmdb.gz"
 
     # Download the mmdb file
     logger.info(f"Downloading mmdb file from url {mmdb_url} ...")
 
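A single strftime("%Y-%m") yields the same zero-padded YYYY-MM pair as the two separate calls, so the generated URL is unchanged. Quick check (any date works; 2022-08-01 is just an example):

    from datetime import date

    d = date(2022, 8, 1)
    assert d.strftime("%Y-%m") == f"{d.strftime('%Y')}-{d.strftime('%m')}" == "2022-08"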
@@ -31,10 +31,7 @@ try:
         _exit(0)
 
     # Compute the mmdb URL
-    today = date.today()
-    mmdb_url = "https://download.db-ip.com/free/dbip-country-lite-{}-{}.mmdb.gz".format(
-        today.strftime("%Y"), today.strftime("%m")
-    )
+    mmdb_url = f"https://download.db-ip.com/free/dbip-country-lite-{date.today().strftime('%Y-%m')}.mmdb.gz"
 
     # Download the mmdb file
     logger.info(f"Downloading mmdb file from url {mmdb_url} ...")
 
@@ -33,8 +33,8 @@ try:
     elif exists("/usr/share/bunkerweb/INTEGRATION"):
         with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
             bw_integration = f.read().strip()
-    token = getenv("CERTBOT_TOKEN")
-    validation = getenv("CERTBOT_VALIDATION")
+    token = getenv("CERTBOT_TOKEN", "")
+    validation = getenv("CERTBOT_VALIDATION", "")
 
     # Cluster case
     if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
 
@@ -33,7 +33,7 @@ try:
     elif exists("/usr/share/bunkerweb/INTEGRATION"):
         with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
             bw_integration = f.read().strip()
-    token = getenv("CERTBOT_TOKEN")
+    token = getenv("CERTBOT_TOKEN", "")
 
     # Cluster case
     if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
 
@@ -1,8 +1,8 @@
 #!/usr/bin/python3
 
 from io import BytesIO
-from os import getenv
-from os.path import exists
+from os import chmod, chown, getenv, walk
+from os.path import exists, join
 from subprocess import run, DEVNULL, STDOUT
 from sys import exit as sys_exit, path as sys_path
 from tarfile import open as tar_open
@@ -36,7 +36,7 @@ try:
     elif exists("/usr/share/bunkerweb/INTEGRATION"):
         with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
             bw_integration = f.read().strip()
-    token = getenv("CERTBOT_TOKEN")
+    token = getenv("CERTBOT_TOKEN", "")
 
     logger.info(f"Certificates renewal for {getenv('RENEWED_DOMAINS')} successful")
 
@@ -53,6 +53,13 @@ try:
             endpoint = f"http://{instance['hostname']}:{instance['port']}"
             host = instance["server_name"]
             api = API(endpoint, host=host)
+
+            # Fix permissions for the certificates
+            for root, dirs, files in walk("/lets-encrypt/certificates", topdown=False):
+                for name in files + dirs:
+                    chown(join(root, name), 101, 101)
+                    chmod(join(root, name), 0o770)
+
             sent, err, status, resp = api.request(
                 "POST", "/lets-encrypt/certificates", files=files
             )
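The bottom-up walk re-owns and re-modes everything under the certificates tree before it is shipped to each instance (101:101 is presumably the unprivileged nginx user inside the container). The idiom in isolation (sketch; path and IDs mirror the diff):

    from os import chmod, chown, walk
    from os.path import join

    def fix_perms(base: str, uid: int = 101, gid: int = 101) -> None:
        # topdown=False yields leaves first, so children are fixed before parents
        for root, dirs, files in walk(base, topdown=False):
            for name in files + dirs:
                chown(join(root, name), uid, gid)
                chmod(join(root, name), 0o770)

One review nit: the added loop reuses the name `files`, shadowing the `files` upload payload passed to api.request() just below; a rename would be safer.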
@@ -90,7 +97,7 @@ try:
     # Linux case
     else:
         proc = run(
-            ["nginx", "-s", "reload"],
+            ["/etc/init.d/nginx", "reload"],
             stdin=DEVNULL,
             stderr=STDOUT,
         )
 
@@ -25,23 +25,20 @@ def certbot_new(domains, email):
     environ["PYTHONPATH"] = "/usr/share/bunkerweb/deps/python"
     proc = run(
         [
-            "/usr/share/bunkerweb/deps/python/bin/certbot certonly",
+            "/usr/share/bunkerweb/deps/python/bin/certbot",
+            "certonly",
             "--manual",
             "--preferred-challenges=http",
             "--manual-auth-hook",
-            f"{getcwd()}/certbot-auth.py",
+            "/usr/share/bunkerweb/core/letsencrypt/jobs/certbot-auth.py",
             "--manual-cleanup-hook",
-            f"{getcwd()}/certbot-cleanup.py",
+            "/usr/share/bunkerweb/core/letsencrypt/jobs/certbot-cleanup.py",
             "-n",
             "-d",
             domains,
             "--email",
             email,
             "--agree-tos",
-            "--logs-dir",
-            "/var/tmp/bunkerweb",
-            "--work-dir",
-            "/var/lib/bunkerweb",
         ]
         + (["--staging"] if getenv("USE_LETS_ENCRYPT_STAGING", "no") == "yes" else []),
         stdin=DEVNULL,
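With the default shell=False, run() wants one list element per argv token, so the old single "…/certbot certonly" element was handed to the OS as one executable path and could never resolve; splitting it fixes that, and the auth/cleanup hooks are now addressed by their installed absolute paths instead of getcwd(). Minimal illustration of the argv rule (hypothetical command):

    from subprocess import run

    run(["echo", "hello", "world"])  # 'echo' receives two arguments
    # run(["echo hello world"]) would search for an executable literally
    # named "echo hello world" and fail with FileNotFoundError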
@@ -96,12 +93,14 @@ try:
             )
 
             if exists(f"/etc/letsencrypt/live/{first_server}/cert.pem"):
-                with open(f"/etc/letsencrypt/live/{first_server}/cert.pem") as f:
+                with open(
+                    f"/etc/letsencrypt/live/{first_server}/cert.pem", "rb"
+                ) as f:
                     cert = f.read()
 
                 # Update db
                 err = db.update_job_cache(
                     "letsencrypt",
                     "certbot-new",
                     first_server,
                     "cert.pem",
                     cert,
@@ -133,12 +132,14 @@ try:
             )
 
             if exists(f"/etc/letsencrypt/live/{first_server}/cert.pem"):
-                with open(f"/etc/letsencrypt/live/{first_server}/cert.pem") as f:
+                with open(
+                    f"/etc/letsencrypt/live/{first_server}/cert.pem", "rb"
+                ) as f:
                     cert = f.read()
 
                 # Update db
                 err = db.update_job_cache(
                     "letsencrypt",
                     "certbot-new",
                     first_server,
                     "cert.pem",
                     cert,
 
@@ -40,19 +40,23 @@ db = Database(
 status = 0
 
 try:
 
-    # Check if at least a server has Blacklist activated
-    blacklist_activated = False
+    # Check if at least a server has Realip activated
+    realip_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
+    if getenv("MULTISITE", "no") == "yes":
         for first_server in getenv("SERVER_NAME").split(" "):
-            if getenv(first_server + "_USE_REAL_IP", getenv("USE_REAL_IP")) == "yes":
-                blacklist_activated = True
+            if (
+                getenv(f"{first_server}_USE_REAL_IP", getenv("USE_REAL_IP", "no"))
+                == "yes"
+            ):
+                realip_activated = True
                 break
+
     # Singlesite case
-    elif getenv("USE_REAL_IP") == "yes":
-        blacklist_activated = True
-    if not blacklist_activated:
+    elif getenv("USE_REAL_IP", "no") == "yes":
+        realip_activated = True
+
+    if realip_activated is False:
         logger.info("RealIP is not activated, skipping download...")
         _exit(0)
 
@@ -72,20 +76,24 @@ try:
 
     # Download and write data to temp file
     i = 0
-    content = ""
+    content = b""
     for url in urls:
         try:
             logger.info(f"Downloading RealIP list from {url} ...")
             resp = get(url, stream=True)
+
             if resp.status_code != 200:
                 continue
-            for line in resp.iter_lines(decode_unicode=True):
-                line = line.strip().split(" ")[0]
-                if line == "" or line.startswith("#") or line.startswith(";"):
+
+            for line in resp.iter_lines():
+                line = line.strip().split(b" ")[0]
+
+                if not line or line.startswith(b"#") or line.startswith(b";"):
                     continue
+
                 ok, data = check_line(line)
                 if ok:
-                    content += f"{data}\n"
+                    content += data + b"\n"
                     i += 1
         except:
             status = 2
@@ -93,7 +101,7 @@ try:
                 f"Exception while getting RealIP list from {url} :\n{format_exc()}"
             )
 
-    with open("/var/tmp/bunkerweb/realip-combined.list", "w") as f:
+    with open("/var/tmp/bunkerweb/realip-combined.list", "wb") as f:
         f.write(content)
 
     # Check if file has changed
@@ -118,7 +126,7 @@ try:
         "realip-download",
         None,
         "combined.list",
-        content.encode("utf-8"),
+        content,
         checksum=new_hash,
     )
     if err:
 
@@ -21,16 +21,13 @@ db = Database(
 
 
 def generate_cert(first_server, days, subj):
-    if isfile("/var/cache/bunkerweb/selfsigned/" + first_server + ".pem"):
-        cmd = (
-            "openssl x509 -checkend 86400 -noout -in /var/cache/bunkerweb/selfsigned/"
-            + first_server
-            + ".pem"
-        )
+    if isfile(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem"):
+        cmd = f"openssl x509 -checkend 86400 -noout -in /var/cache/bunkerweb/selfsigned/{first_server}.pem"
         proc = run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
         if proc.returncode == 0:
             logger.info(f"Self-signed certificate already present for {first_server}")
             return True, 0
 
     logger.info(f"Generating self-signed certificate for {first_server}")
     cmd = f"openssl req -nodes -x509 -newkey rsa:4096 -keyout /var/cache/bunkerweb/selfsigned/{first_server}.key -out /var/cache/bunkerweb/selfsigned/{first_server}.pem -days {days} -subj {subj}"
     proc = run(cmd.split(" "), stdin=DEVNULL, stderr=STDOUT)
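openssl x509 -checkend 86400 exits 0 only when the certificate is still valid 24 hours from now, which is what lets generate_cert() skip regeneration. The same probe from Python (sketch; the .pem path is illustrative):

    from subprocess import DEVNULL, STDOUT, run

    proc = run(
        ["openssl", "x509", "-checkend", "86400", "-noout", "-in", "/tmp/test.pem"],
        stdin=DEVNULL,
        stderr=STDOUT,
    )
    still_valid_tomorrow = proc.returncode == 0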
@@ -73,21 +70,26 @@ try:
     if getenv("MULTISITE") == "yes":
         for first_server in getenv("SERVER_NAME").split(" "):
             if (
-                getenv(
-                    first_server + "_GENERATE_SELF_SIGNED_SSL",
-                    getenv("GENERATE_SELF_SIGNED_SSL"),
+                not first_server
+                or getenv(
+                    f"{first_server}_GENERATE_SELF_SIGNED_SSL",
+                    getenv("GENERATE_SELF_SIGNED_SSL", "no"),
                 )
                 != "yes"
+                or isfile(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem")
             ):
                 continue
-            if first_server == "":
-                continue
-            if isfile("/var/cache/bunkerweb/selfsigned/" + first_server + ".pem"):
-                continue
 
             ret, ret_status = generate_cert(
                 first_server,
-                getenv(first_server + "_SELF_SIGNED_SSL_EXPIRY"),
-                getenv(first_server + "_SELF_SIGNED_SSL_SUBJ"),
+                getenv(
+                    f"{first_server}_SELF_SIGNED_SSL_EXPIRY",
+                    getenv("SELF_SIGNED_SSL_EXPIRY", "365"),
+                ),
+                getenv(
+                    f"{first_server}_SELF_SIGNED_SSL_SUBJ",
+                    getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
+                ),
             )
             if not ret:
                 status = ret_status
@@ -95,12 +97,14 @@ try:
                 status = 1
 
     # Singlesite case
-    elif getenv("GENERATE_SELF_SIGNED_SSL") == "yes" and getenv("SERVER_NAME") != "":
-        first_server = getenv("SERVER_NAME").split(" ")[0]
+    elif getenv("GENERATE_SELF_SIGNED_SSL", "no") == "yes" and getenv(
+        "SERVER_NAME", ""
+    ):
+        first_server = getenv("SERVER_NAME", "").split(" ")[0]
         ret, ret_status = generate_cert(
             first_server,
-            getenv("SELF_SIGNED_SSL_EXPIRY"),
-            getenv("SELF_SIGNED_SSL_SUBJ"),
+            getenv("SELF_SIGNED_SSL_EXPIRY", "365"),
+            getenv("SELF_SIGNED_SSL_SUBJ", "/CN=www.example.com/"),
         )
         if not ret:
             status = ret_status
 
@@ -5,6 +5,7 @@ from os import _exit, getenv, makedirs
 from re import IGNORECASE, compile as re_compile
 from sys import exit as sys_exit, path as sys_path
 from traceback import format_exc
+from typing import Tuple
 
 sys_path.append("/usr/share/bunkerweb/deps/python")
 sys_path.append("/usr/share/bunkerweb/utils")
@@ -16,42 +17,41 @@ from Database import Database
 from logger import setup_logger
 from jobs import cache_file, cache_hash, is_cached_file, file_hash
 
-rdns_rx = re_compile(r"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
-asn_rx = re_compile(r"^\d+$")
-uri_rx = re_compile(r"^/")
+rdns_rx = re_compile(rb"^(\.?[a-z\d\-]+)*\.[a-z]{2,}$", IGNORECASE)
+asn_rx = re_compile(rb"^\d+$")
+uri_rx = re_compile(rb"^/")
 
 
-def check_line(kind, line):
+def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
     if kind == "IP":
-        if "/" in line:
+        if b"/" in line:
             try:
-                ip_network(line)
+                ip_network(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
         else:
             try:
-                ip_address(line)
+                ip_address(line.decode("utf-8"))
                 return True, line
             except ValueError:
                 pass
-        return False, ""
     elif kind == "RDNS":
         if rdns_rx.match(line):
             return True, line.lower()
-        return False, ""
     elif kind == "ASN":
-        real_line = line.replace("AS", "").replace("as", "")
+        real_line = line.replace(b"AS", b"").replace(b"as", b"")
         if asn_rx.match(real_line):
             return True, real_line
     elif kind == "USER_AGENT":
-        return True, line.replace("\\ ", " ").replace("\\.", "%.").replace(
-            "\\\\", "\\"
-        ).replace("-", "%-")
+        return True, line.replace(b"\\ ", b" ").replace(b"\\.", b"%.").replace(
+            b"\\\\", b"\\"
+        ).replace(b"-", b"%-")
     elif kind == "URI":
         if uri_rx.match(line):
             return True, line
-    return False, ""
+
+    return False, b""
 
 
 logger = setup_logger("WHITELIST", getenv("LOG_LEVEL", "INFO"))
@@ -66,18 +66,19 @@ try:
     # Check if at least a server has Whitelist activated
     whitelist_activated = False
     # Multisite case
-    if getenv("MULTISITE") == "yes":
-        for first_server in getenv("SERVER_NAME").split(" "):
+    if getenv("MULTISITE", "no") == "yes":
+        for first_server in getenv("SERVER_NAME", "").split(" "):
             if (
-                getenv(first_server + "_USE_WHITELIST", getenv("USE_WHITELIST"))
+                getenv(f"{first_server}_USE_WHITELIST", getenv("USE_WHITELIST", "no"))
                 == "yes"
             ):
                 whitelist_activated = True
                 break
     # Singlesite case
-    elif getenv("USE_WHITELIST") == "yes":
+    elif getenv("USE_WHITELIST", "no") == "yes":
         whitelist_activated = True
-    if not whitelist_activated:
+
+    if whitelist_activated is False:
         logger.info("Whitelist is not activated, skipping downloads...")
         _exit(0)
 
@@ -102,46 +103,54 @@ try:
             kinds_fresh[kind] = False
             all_fresh = False
             logger.info(
-                f"Whitelist for {kind} is not cached, processing downloads...",
+                f"Whitelist for {kind} is not cached, processing downloads..",
             )
         else:
             logger.info(
                 f"Whitelist for {kind} is already in cache, skipping downloads...",
             )
-    if all_fresh:
+    if all_fresh is True:
         _exit(0)
 
     # Get URLs
     urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
     for kind in urls:
         for url in getenv(f"WHITELIST_{kind}_URLS", "").split(" "):
-            if url != "" and url not in urls[kind]:
+            if url and url not in urls[kind]:
                 urls[kind].append(url)
 
     # Loop on kinds
     for kind, urls_list in urls.items():
-        if kinds_fresh[kind]:
+        if kinds_fresh[kind] is True:
             continue
         # Write combined data of the kind to a single temp file
        for url in urls_list:
             try:
                 logger.info(f"Downloading whitelist data from {url} ...")
-                resp = get(url)
+                resp = get(url, stream=True)
+
                 if resp.status_code != 200:
                     continue
+
                 i = 0
-                with open(f"/var/tmp/bunkerweb/whitelist/{kind}.list", "w") as f:
-                    for line in resp.content.decode("utf-8").splitlines():
-                        line = line.strip()
-                        if kind != "USER_AGENT":
-                            line = line.strip().split(" ")[0]
-                        if line == "" or line.startswith("#") or line.startswith(";"):
-                            continue
-                        ok, data = check_line(kind, line)
-                        if ok:
-                            f.write(data + "\n")
-                            i += 1
-                logger.info(f"Downloaded {i} bad {kind}")
+                content = b""
+                for line in resp.iter_lines():
+                    line = line.strip()
+
+                    if not line or line.startswith(b"#") or line.startswith(b";"):
+                        continue
+                    elif kind != "USER_AGENT":
+                        line = line.split(b" ")[0]
+
+                    ok, data = check_line(kind, line)
+                    if ok is True:
+                        content += data + b"\n"
+                        i += 1
+
+                with open(f"/var/tmp/bunkerweb/whitelist/{kind}.list", "wb") as f:
+                    f.write(content)
+
+                logger.info(f"Downloaded {i} good {kind}")
                 # Check if file has changed
                 new_hash = file_hash(f"/var/tmp/bunkerweb/whitelist/{kind}.list")
                 old_hash = cache_hash(f"/var/cache/bunkerweb/whitelist/{kind}.list")
@@ -159,25 +168,27 @@ try:
                     f"/var/cache/bunkerweb/whitelist/{kind}.list",
                     new_hash,
                 )
+
                 if not cached:
                     logger.error(f"Error while caching whitelist : {err}")
                     status = 2
-                if status != 2:
+                else:
                     # Update db
                     err = db.update_job_cache(
                         "whitelist-download",
                         None,
                         f"{kind}.list",
-                        resp.content,
+                        content,
                         checksum=new_hash,
                     )
+
                     if err:
                         logger.warning(f"Couldn't update db cache: {err}")
                     status = 1
             except:
                 status = 2
                 logger.error(
-                    f"Exception while getting whitelist from {url} :\n{format_exc()}",
+                    f"Exception while getting whitelist from {url} :\n{format_exc()}"
                 )
 
 except: