Lint the jobs' Python files

This commit is contained in:
Théophile Diot 2023-05-04 18:25:35 -04:00
parent 97b362bb17
commit 74fe9d5c16
No known key found for this signature in database
GPG Key ID: E752C80DB72BB014
7 changed files with 38 additions and 29 deletions

View File

@ -24,7 +24,6 @@ logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
exit_status = 0
try:
# Check if at least a server has BunkerNet activated
bunkernet_activated = False
# Multisite case
@ -58,10 +57,12 @@ try:
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db :
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(bunkernet_id.decode())
Path("/var/cache/bunkerweb/bunkernet/bunkernet.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
@ -81,7 +82,6 @@ try:
)
_exit(0)
exit_status = 1
# Download data
@ -116,7 +116,6 @@ try:
f"Received error from BunkerNet API while sending db request : {data['data']}, removing instance ID",
)
_exit(2)
logger.info("Successfully downloaded data from BunkerNet API")

View File

@ -58,10 +58,12 @@ try:
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
if db :
if db:
bunkernet_id = get_file_in_db("bunkernet-register", "instance.id", db)
if bunkernet_id:
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id.decode())
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(
bunkernet_id.decode()
)
logger.info("Successfully retrieved BunkerNet ID from db cache")
else:
logger.info("No BunkerNet ID found in db cache")
@ -121,12 +123,7 @@ try:
# Update cache with new bunkernet ID
if db and registered:
with open("/var/cache/bunkerweb/bunkernet/instance.id", "rb") as f:
cached, err = set_file_in_db(
f"bunkernet-register",
f"instance.id",
f,
db
)
cached, err = set_file_in_db(f"bunkernet-register", f"instance.id", f, db)
if not cached:
logger.error(f"Error while saving BunkerNet data to db cache : {err}")
else:

View File

@ -27,6 +27,7 @@ lock = Lock()
status = 0
def generate_cert(first_server, days, subj):
if Path(f"/var/cache/bunkerweb/selfsigned/{first_server}.pem").is_file():
cmd = f"openssl x509 -checkend 86400 -noout -in /var/cache/bunkerweb/selfsigned/{first_server}.pem"
@ -41,7 +42,7 @@ def generate_cert(first_server, days, subj):
if proc.returncode != 0:
logger.error(f"Self-signed certificate generation failed for {first_server}")
return False, 2
return True, 1
# Update db

View File

@ -176,7 +176,7 @@ try:
if not cached:
logger.error(f"Error while caching whitelist : {err}")
status = 2
else :
else:
status = 1
except:
status = 2

View File

@ -877,13 +877,11 @@ class Database:
return ""
def delete_job_cache(
self,
job_name: str,
file_name: str
):
def delete_job_cache(self, job_name: str, file_name: str):
with self.__db_session() as session:
session.query(Jobs_cache).filter_by(job_name=job_name, file_name=file_name).delete()
session.query(Jobs_cache).filter_by(
job_name=job_name, file_name=file_name
).delete()
def update_job_cache(
self,

View File

@ -53,7 +53,11 @@ def get_instance_configs_and_apis(instance: Any, db, _type="Docker"):
custom_confs.append(
{
"value": splitted[1],
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
else:
@ -235,7 +239,11 @@ if __name__ == "__main__":
custom_confs.append(
{
"value": v,
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
root_dirs = listdir("/etc/bunkerweb/configs")
@ -286,7 +294,11 @@ if __name__ == "__main__":
custom_confs.append(
{
"value": splitted[1],
"exploded": (custom_conf[0], custom_conf[1], custom_conf[2].replace(".conf", ""))
"exploded": (
custom_conf[0],
custom_conf[1],
custom_conf[2].replace(".conf", ""),
),
}
)
else:

View File

@ -58,15 +58,14 @@ def is_cached_file(file: str, expire: str, db=None) -> bool:
return is_cached and cached_file
def get_file_in_db(job: str, file: str, db) -> bytes:
cached_file = db.get_job_cache_file(
job,
file
)
cached_file = db.get_job_cache_file(job, file)
if not cached_file:
return False
return cached_file.data
def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
@ -78,7 +77,7 @@ def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
None,
name,
content,
checksum=bytes_hash(bio)
checksum=bytes_hash(bio),
)
if err:
@ -87,6 +86,7 @@ def set_file_in_db(job: str, name: str, bio, db) -> Tuple[bool, str]:
return False, f"exception :\n{format_exc()}"
return ret, err
def del_file_in_db(job: str, name: str, db) -> Tuple[bool, str]:
ret, err = True, "success"
try:
@ -95,6 +95,7 @@ def del_file_in_db(job: str, name: str, db) -> Tuple[bool, str]:
return False, f"exception :\n{format_exc()}"
return ret, err
def file_hash(file: str) -> str:
_sha512 = sha512()
with open(file, "rb") as f:
@ -105,6 +106,7 @@ def file_hash(file: str) -> str:
_sha512.update(data)
return _sha512.hexdigest()
def bytes_hash(bio: bytes) -> str:
_sha512 = sha512()
while True: