Update jobs so they connect to the database only when needed

This commit is contained in:
Théophile Diot 2023-03-13 15:38:00 +01:00
parent aa0eff7491
commit 808b7b2206
No known key found for this signature in database
GPG Key ID: E752C80DB72BB014
12 changed files with 67 additions and 60 deletions

View File

@ -58,11 +58,6 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
logger = setup_logger("BLACKLIST", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -85,6 +80,12 @@ try:
logger.info("Blacklist is not activated, skipping downloads...")
_exit(0)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Create directories if they don't exist
Path("/var/cache/bunkerweb/blacklist").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/blacklist").mkdir(parents=True, exist_ok=True)

View File

@ -21,11 +21,6 @@ from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash, is_cached_file
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -48,6 +43,12 @@ try:
logger.info("BunkerNet is not activated, skipping download...")
_exit(0)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Create directory if it doesn't exist
Path("/var/cache/bunkerweb/bunkernet").mkdir(parents=True, exist_ok=True)

View File

@ -21,11 +21,6 @@ from Database import Database
from logger import setup_logger
logger = setup_logger("BUNKERNET", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -153,6 +148,12 @@ try:
if not Path("/var/cache/bunkerweb/bunkernet/instance.id").is_file():
Path("/var/cache/bunkerweb/bunkernet/instance.id").write_text(bunkernet_id)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(

View File

@ -58,11 +58,6 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
logger = setup_logger("GREYLIST", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -85,6 +80,12 @@ try:
logger.info("Greylist is not activated, skipping downloads...")
_exit(0)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Create directories if they don't exist
Path("/var/cache/bunkerweb/greylist").mkdir(parents=True, exist_ok=True)
Path("/var/tmp/bunkerweb/greylist").mkdir(parents=True, exist_ok=True)

View File

@ -32,7 +32,6 @@ from logger import setup_logger
logger = setup_logger("Jobs.download-plugins", getenv("LOG_LEVEL", "INFO"))
lock = Lock()
status = 0
@ -67,6 +66,7 @@ try:
logger,
sqlalchemy_string=getenv("DATABASE_URI"),
)
lock = Lock()
plugin_nbr = 0

View File

@ -24,11 +24,6 @@ from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash, is_cached_file
logger = setup_logger("JOBS", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -69,6 +64,12 @@ try:
logger.error(f"Error while caching mmdb file : {err}")
_exit(2)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(

View File

@ -24,11 +24,6 @@ from logger import setup_logger
from jobs import cache_file, cache_hash, file_hash, is_cached_file
logger = setup_logger("JOBS", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -71,6 +66,12 @@ try:
logger.error(f"Error while caching mmdb file : {err}")
_exit(2)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(

View File

@ -20,11 +20,6 @@ from logger import setup_logger
from API import API
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -37,13 +32,18 @@ try:
elif getenv("AUTOCONF_MODE") == "yes":
bw_integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
bw_integration = f.read().strip()
bw_integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
token = getenv("CERTBOT_TOKEN", "")
validation = getenv("CERTBOT_VALIDATION", "")
# Cluster case
if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
with lock:
instances = db.get_instances()

View File

@ -21,11 +21,6 @@ from logger import setup_logger
from API import API
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -38,12 +33,16 @@ try:
elif getenv("AUTOCONF_MODE") == "yes":
bw_integration = "Autoconf"
elif Path("/usr/share/bunkerweb/INTEGRATION").exists():
with open("/usr/share/bunkerweb/INTEGRATION", "r") as f:
bw_integration = f.read().strip()
bw_integration = Path("/usr/share/bunkerweb/INTEGRATION").read_text().strip()
token = getenv("CERTBOT_TOKEN", "")
# Cluster case
if bw_integration in ("Docker", "Swarm", "Kubernetes", "Autoconf"):
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
with lock:
instances = db.get_instances()

View File

@ -25,11 +25,6 @@ from logger import setup_logger
from API import API
logger = setup_logger("Lets-encrypt", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -64,6 +59,12 @@ try:
tgz.seek(0, 0)
files = {"archive.tar.gz": tgz}
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
with lock:
instances = db.get_instances()

View File

@ -36,11 +36,6 @@ def check_line(line):
logger = setup_logger("REALIP", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -126,6 +121,12 @@ try:
logger.error(f"Error while caching list : {err}")
_exit(2)
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(

View File

@ -58,11 +58,6 @@ def check_line(kind: str, line: bytes) -> Tuple[bool, bytes]:
logger = setup_logger("WHITELIST", getenv("LOG_LEVEL", "INFO"))
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
status = 0
try:
@ -175,6 +170,11 @@ try:
logger.error(f"Error while caching whitelist : {err}")
status = 2
else:
db = Database(
logger,
sqlalchemy_string=getenv("DATABASE_URI", None),
)
lock = Lock()
# Update db
with lock:
err = db.update_job_cache(