Merge branch 'dev' of github.com:bunkerity/bunkerweb into dev

florian 2023-04-24 13:21:41 +02:00
commit 4bc0771d95
No known key found for this signature in database
GPG Key ID: 3D80806F12602A7C
7 changed files with 53 additions and 34 deletions

View File

@@ -54,7 +54,7 @@ RUN apk add --no-cache bash && \
chmod 770 /var/log/letsencrypt /var/lib/letsencrypt
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for the image python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx

View File

@@ -73,7 +73,7 @@ RUN apk add --no-cache pcre bash python3 && \
ln -s /proc/1/fd/1 /var/log/nginx/jobs.log
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for the image nginx:1.24.0-alpine at the moment
VOLUME /data /etc/nginx

View File

@@ -1502,20 +1502,24 @@ class Database:
         }
     def get_job_cache_file(
-        self, job_name: str, file_name: str, *, with_data: bool = True
+        self,
+        job_name: str,
+        file_name: str,
+        *,
+        with_info: bool = False,
+        with_data: bool = True,
     ) -> Optional[Any]:
         """Get job cache file."""
+        entities = []
+        if with_info:
+            entities.extend([Jobs_cache.last_update, Jobs_cache.checksum])
+        if with_data:
+            entities.append(Jobs_cache.data)
         with self.__db_session() as session:
-            if with_data:
-                return (
-                    session.query(Jobs_cache)
-                    .with_entities(Jobs_cache.data)
-                    .filter_by(job_name=job_name, file_name=file_name)
-                    .first()
-                )
             return (
                 session.query(Jobs_cache)
-                .with_entities(Jobs_cache.last_update, Jobs_cache.checksum)
+                .with_entities(*entities)
                 .filter_by(job_name=job_name, file_name=file_name)
                 .first()
             )
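Note: the refactor collapses the two query branches into a single entity list, so callers can request metadata, payload, or both in one call via the new with_info flag. A minimal usage sketch, assuming an initialized Database instance named db (the job and file names are illustrative):

    # Metadata only: the row carries .last_update and .checksum, no blob is fetched
    info = db.get_job_cache_file("my-job", "my-file.cache", with_info=True, with_data=False)

    # Metadata plus payload in one query
    row = db.get_job_cache_file("my-job", "my-file.cache", with_info=True)
    if row:
        print(row.checksum, len(row.data))

With both flags left at their defaults (with_info=False, with_data=True) the call behaves like the old data-only default.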

View File

@@ -3,6 +3,7 @@ from datetime import datetime
 from hashlib import sha512
 from inspect import getsourcefile
 from json import dumps, loads
+from os.path import basename
 from pathlib import Path
 from sys import _getframe
 from threading import Lock
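The new basename import backs the .split("/")[-1] replacements in the hunks below; for the plain POSIX paths used here the two spellings are equivalent (the path is illustrative):

    from os.path import basename

    basename("/var/cache/bunkerweb/job/file.list")        # "file.list"
    "/var/cache/bunkerweb/job/file.list".split("/")[-1]   # "file.list"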
@@ -21,34 +22,40 @@ lock = Lock()
 def is_cached_file(file: str, expire: str, db=None) -> bool:
     is_cached = False
+    cached_file = None
     try:
         if not Path(f"{file}.md").is_file():
             if not db:
                 return False
             cached_file = db.get_job_cache_file(
-                getsourcefile(_getframe(1)).replace(".py", "").split("/")[-1],
-                file.split("/")[-1],
-                with_data=False,
+                basename(getsourcefile(_getframe(1))).replace(".py", ""),
+                basename(file),
+                with_info=True,
             )
             if not cached_file:
                 return False
-            cached_time = cached_file.last_update
+            cached_time = cached_file.last_update.timestamp()
         else:
             cached_time = loads(Path(f"{file}.md").read_text())["date"]
         current_time = datetime.now().timestamp()
         if current_time < cached_time:
-            return False
-        diff_time = current_time - cached_time
-        if expire == "hour":
-            is_cached = diff_time < 3600
-        elif expire == "day":
-            is_cached = diff_time < 86400
-        elif expire == "month":
-            is_cached = diff_time < 2592000
+            is_cached = False
+        else:
+            diff_time = current_time - cached_time
+            if expire == "hour":
+                is_cached = diff_time < 3600
+            elif expire == "day":
+                is_cached = diff_time < 86400
+            elif expire == "month":
+                is_cached = diff_time < 2592000
     except:
         is_cached = False
+    if is_cached and cached_file:
+        Path(file).write_bytes(cached_file.data)
     return is_cached
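Note on the reworked freshness check: the early return is gone, the expiry comparison now lives under an else branch, and a fresh database copy is written back to the local path at the end. The expiry windows map to fixed thresholds in seconds; a standalone restatement of that mapping (the helper name is mine, not part of the change):

    # Expiry windows used by is_cached_file, in seconds
    EXPIRE_SECONDS = {
        "hour": 3600,      # 60 * 60
        "day": 86400,      # 24 * 3600
        "month": 2592000,  # 30 * 86400
    }

    def is_fresh(cached_time: float, current_time: float, expire: str) -> bool:
        """Illustrative restatement of the check above."""
        if current_time < cached_time:
            return False  # cache timestamp in the future: treat as not cached
        return (current_time - cached_time) < EXPIRE_SECONDS.get(expire, 0)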
@@ -68,8 +75,9 @@ def cache_hash(cache: str, db=None) -> Optional[str]:
         return loads(Path(f"{cache}.md").read_text()).get("checksum", None)
     if db:
         cached_file = db.get_job_cache_file(
-            getsourcefile(_getframe(1)).replace(".py", "").split("/")[-1],
-            cache.split("/")[-1],
+            basename(getsourcefile(_getframe(1))).replace(".py", ""),
+            basename(cache),
+            with_info=True,
             with_data=False,
         )
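cache_hash now passes with_info=True so the timestamp/checksum columns stay selected; after the Database refactor above, with_data=False on its own would select no columns at all. The hashing helper itself is not part of this diff; given the sha512 import at the top of the file, a compatible digest is presumably produced along these lines (a sketch, not the project's helper):

    from hashlib import sha512

    def sketch_file_hash(path: str) -> str:
        """Hypothetical: sha512 hex digest of a file, read in chunks."""
        h = sha512()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(64 * 1024), b""):
                h.update(chunk)
        return h.hexdigest()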
@@ -86,21 +94,23 @@ def cache_file(
         content = Path(file).read_bytes()
         Path(cache).write_bytes(content)
         Path(file).unlink()
-        md = {"date": datetime.now().timestamp(), "checksum": _hash}
-        Path(f"{cache}.md").write_text(dumps(md))
         if db:
             with lock:
                 err = db.update_job_cache(
-                    getsourcefile(_getframe(1)).replace(".py", "").split("/")[-1],
+                    basename(getsourcefile(_getframe(1))).replace(".py", ""),
                     service_id,
-                    cache.split("/")[-1],
+                    basename(cache),
                     content,
                     checksum=_hash,
                 )
             if err:
                 ret = False
+        else:
+            Path(f"{cache}.md").write_text(
+                dumps(dict(date=datetime.now().timestamp(), checksum=_hash))
+            )
     except:
         return False, f"exception :\n{format_exc()}"
     return ret, err
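cache_file now writes the .md sidecar only in the DB-less branch; with a database, the same timestamp/checksum pair goes through db.update_job_cache instead. The sidecar is plain JSON next to the cache file, and reading it back (as is_cached_file and cache_hash do above) looks roughly like this (the path is illustrative):

    from json import loads
    from pathlib import Path

    cache = "/var/cache/bunkerweb/job/list.txt"   # illustrative path
    md = loads(Path(f"{cache}.md").read_text())
    cached_time = md["date"]        # POSIX timestamp written by cache_file
    checksum = md.get("checksum")   # digest written by cache_file, or None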

View File

@@ -64,7 +64,7 @@ RUN apk add --no-cache bash libgcc libstdc++ openssl && \
chmod 660 /usr/share/bunkerweb/INTEGRATION
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for the image python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx

View File

@@ -106,7 +106,7 @@ def generate_custom_configs(
         Path(dirname(tmp_path)).mkdir(parents=True, exist_ok=True)
         Path(tmp_path).write_bytes(custom_config["data"])
-    if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
+    if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
         logger.info("Sending custom configs to BunkerWeb")
         ret = api_caller._send_files("/data/configs", "/custom_configs")
@@ -137,7 +137,7 @@ def generate_external_plugins(
             st = stat(job_file)
             chmod(job_file, st.st_mode | S_IEXEC)
-    if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
+    if integration in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
         logger.info("Sending plugins to BunkerWeb")
         ret = api_caller._send_files("/data/plugins", "/plugins")
@@ -461,7 +461,12 @@ if __name__ == "__main__":
                 # reload nginx
                 logger.info("Reloading nginx ...")
-                if integration not in ("Autoconf", "Swarm", "Kubernetes", "Docker"):
+                if integration not in (
+                    "Autoconf",
+                    "Swarm",
+                    "Kubernetes",
+                    "Docker",
+                ):
                     # Reloading the nginx server.
                     proc = subprocess_run(
                         # Reload nginx
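Taken together, the three hunks in this file settle on one gate: container-based integrations get the generated files pushed over the API, while other setups reload the local nginx from the scheduler. A condensed sketch of that split (the function names are mine, the reload command is not visible in the hunk and is only a placeholder, and subprocess_run is assumed to be an alias of subprocess.run matching the call site above):

    from subprocess import run as subprocess_run  # assumed alias, matching the call site above

    CONTAINER_INTEGRATIONS = ("Autoconf", "Swarm", "Kubernetes", "Docker")

    def send_generated_files(integration: str, api_caller, logger):
        """Sketch of the gate used in generate_custom_configs / generate_external_plugins."""
        if integration in CONTAINER_INTEGRATIONS:
            logger.info("Sending custom configs to BunkerWeb")
            return api_caller._send_files("/data/configs", "/custom_configs")
        return True  # nothing to push for local integrations

    def reload_local_nginx(integration: str, logger):
        """Sketch of the main-loop gate: only non-container setups reload nginx in place."""
        if integration not in CONTAINER_INTEGRATIONS:
            logger.info("Reloading nginx ...")
            subprocess_run(["nginx", "-s", "reload"])  # placeholder command, not shown in the diff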

View File

@@ -50,7 +50,7 @@ RUN apk add --no-cache bash && \
chmod 660 /usr/share/bunkerweb/INTEGRATION
# Fix CVEs
RUN apk add "libcrypto3>=3.0.8-r4" "libssl3>=3.0.8-r4"
# There are no CVEs for the image python:3.11.3-alpine at the moment
VOLUME /data /etc/nginx