autoconf - various fixes

bunkerity 2020-12-05 11:06:38 +01:00
parent 95153dbc5d
commit 36cbb927c0
GPG Key ID: 654FFF51CEF7CC47 (no known key found for this signature in database)
6 changed files with 65 additions and 37 deletions

View File

@@ -3,7 +3,7 @@
 import docker, datetime, subprocess, shutil, os
 def log(event) :
-	print("[" + datetime.datetime.now().replace(microsecond=0) + "] AUTOCONF - " + event)
+	print("[" + str(datetime.datetime.now().replace(microsecond=0)) + "] AUTOCONF - " + event, flush=True)
 def replace_in_file(file, old_str, new_str) :
 	with open(file) as f :
@@ -20,12 +20,12 @@ def generate(vars) :
 	log("Generated config for " + vars["SERVER_NAME"])
 def activate(vars) :
-	replace_in_file("/etc/nginx/nginx.conf", "}", "include /etc/nginx/" + vars["SERVER_NAME"] + "/server.conf;")
+	replace_in_file("/etc/nginx/nginx.conf", "}", "include /etc/nginx/" + vars["SERVER_NAME"] + "/server.conf;\n}")
 	subprocess.run(["/usr/sbin/nginx", "-s", "reload"])
 	log("Activated config for " + vars["SERVER_NAME"])
 def deactivate(vars) :
-	replace_in_file("/etc/nginx/nginx.conf", "include /etc/nginx/" + vars["SERVER_NAME"] + "/server.conf;", "")
+	replace_in_file("/etc/nginx/nginx.conf", "include /etc/nginx/" + vars["SERVER_NAME"] + "/server.conf;\n", "")
 	subprocess.run(["/usr/sbin/nginx", "-s", "reload"])
 	log("Deactivated config for " + vars["SERVER_NAME"])
@@ -36,7 +36,7 @@ def remove(vars) :
 def process(id, event, vars) :
 	global containers
 	if event == "create" :
-		generate(labels)
+		generate(vars)
 		containers.append(id)
 	elif event == "start" :
 		activate(vars)
@ -55,7 +55,7 @@ for container in client.containers.list(all=True, filters={"label" : "bunkerized
# Extract bunkerized-nginx.* labels
labels = container.labels.copy()
for label in labels :
for label in container.labels :
if not label.startswith("bunkerized-nginx.") :
del labels[label]
# Remove bunkerized-nginx. on labels
@@ -64,7 +64,7 @@ for container in client.containers.list(all=True, filters={"label" : "bunkerized
 	# Container is restarting or running
 	if container.status == "restarting" or container.status == "running" :
 		process(container.id, "create", vars)
-		process(container.id, "activate", vars)
+		process(container.id, "start", vars)
 	# Container is created or exited
 	if container.status == "created" or container.status == "exited" :
@@ -74,28 +74,28 @@ for event in client.events(decode=True) :
 	# Process only container events
 	if event["Type"] != "container" :
-		continue
+		continue
 	# Check if a bunkerized-nginx.* label is present
 	present = False
 	for label in event["Actor"]["Attributes"] :
-		if label.startswith("bunkerized-nginx.") :
-			present = True
-			break
+		if label.startswith("bunkerized-nginx.") :
+			present = True
+			break
 	if not present :
-		continue
+		continue
 	# Only process if we generated a config
 	if not event["id"] in containers and event["Action"] != "create" :
-		continue
+		continue
 	# Extract bunkerized-nginx.* labels
 	labels = event["Actor"]["Attributes"].copy()
-	for label in labels :
-		if not label.startswith("bunkerized-nginx.") :
-			del labels[label]
+	for label in event["Actor"]["Attributes"] :
+		if not label.startswith("bunkerized-nginx.") :
+			del labels[label]
 	# Remove bunkerized-nginx. on labels
 	vars = { k.replace("bunkerized-nginx.", "", 1) : v for k, v in labels.items()}
 	# Process the event
-	process(event["id"], event["Action"], vars
+	process(event["id"], event["Action"], vars)
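
The activate()/deactivate() change above is the subtle one: the old code swapped the closing "}" of nginx.conf for the include directive (dropping the brace) and removed the directive without its trailing newline. A minimal, self-contained sketch of the corrected behaviour, using an in-memory string and a simplified stand-in for the project's replace_in_file helper (the sample nginx.conf content is an assumption, not the real file):

# Simplified sketch (not the project's actual helper): toggle a per-server
# include inside a minimal nginx.conf held as a string.
NGINX_CONF = "http {\n    include /etc/nginx/http.conf;\n}"

def activate(conf, server_name):
    # Replace the closing brace with "include ...;\n}" so the brace survives.
    include = "include /etc/nginx/" + server_name + "/server.conf;"
    return conf.replace("}", include + "\n}")

def deactivate(conf, server_name):
    # Remove the directive together with its trailing newline.
    include = "include /etc/nginx/" + server_name + "/server.conf;"
    return conf.replace(include + "\n", "")

conf = activate(NGINX_CONF, "app1.domain.com")
assert conf.endswith("server.conf;\n}")              # the block is still closed
assert deactivate(conf, "app1.domain.com") == NGINX_CONF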

View File

@@ -67,10 +67,14 @@ crond
 echo "[*] Running nginx ..."
 su -s "/usr/sbin/nginx" nginx
+# list of log files to display
+LOGS="/var/log/access.log /var/log/error.log"
 # start fail2ban
 if [ "$USE_FAIL2BAN" = "yes" ] ; then
 	echo "[*] Running fail2ban ..."
 	fail2ban-server > /dev/null
+	LOGS="$LOGS /var/log/fail2ban.log"
 fi
 # start crowdsec
@@ -91,15 +95,13 @@ if [ "$1" == "test" ] ; then
 fi
 # start the autoconf manager
-if [ -f "/var/run/docker.sock" ] ; then
-	/opt/autoconf/autoconf.py &
+if [ -S "/var/run/docker.sock" ] ; then
+	echo "[*] Running autoconf ..."
+	/opt/autoconf/autoconf.py > /var/log/autoconf.log 2>&1 &
+	LOGS="$LOGS /var/log/autoconf.log"
 fi
 # display logs
-LOGS="/var/log/access.log /var/log/error.log"
-if [ "$USE_FAIL2BAN" = "yes" ] ; then
-	LOGS="$LOGS /var/log/fail2ban.log"
-fi
 tail -F $LOGS &
 wait $!
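
The -f to -S change matters because /var/run/docker.sock is a UNIX domain socket, not a regular file, so the old test never matched and autoconf never started. A rough Python equivalent of the corrected test (the path is assumed to be the standard Docker socket location):

import os
import stat

# Rough equivalent of `[ -S /var/run/docker.sock ]`: true only for a socket.
def docker_socket_present(path="/var/run/docker.sock"):
    try:
        return stat.S_ISSOCK(os.stat(path).st_mode)
    except FileNotFoundError:
        return False

if docker_socket_present():
    print("[*] Running autoconf ...")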

View File

@@ -23,42 +23,30 @@ services:
       - USE_REVERSE_PROXY=yes
   myapp1:
-    image: node
+    build: js-app
     restart: always
-    working_dir: /home/node/app
-    volumes:
-      - ./js-app:/home/node/app
-    environment:
-      - NODE_ENV=production
-    command: bash -c "npm install express && node index.js"
     labels:
       - "bunkerized-nginx.SERVER_NAME=app1.domain.com"
       - "bunkerized-nginx.REVERSE_PROXY_URL=/"
       - "bunkerized-nginx.REVERSE_PROXY_HOST=http://myapp1:3000"
   myapp2:
-    image: node
+    build: js-app
     restart: always
-    working_dir: /home/node/app
-    volumes:
-      - ./js-app:/home/node/app
-    environment:
-      - NODE_ENV=production
-    command: bash -c "npm install express && node index.js"
     labels:
       - "bunkerized-nginx.SERVER_NAME=app2.domain.com"
       - "bunkerized-nginx.REVERSE_PROXY_URL=/"
       - "bunkerized-nginx.REVERSE_PROXY_HOST=http://myapp2:3000"
   myapp3:
-    image: node
+    build: js-app
     restart: always
-    working_dir: /home/node/app
-    volumes:
-      - ./js-app:/home/node/app
-    environment:
-      - NODE_ENV=production
-    command: bash -c "npm install express && node index.js"
     labels:
       - "bunkerized-nginx.SERVER_NAME=app3.domain.com"
       - "bunkerized-nginx.REVERSE_PROXY_URL=/"

View File

@@ -0,0 +1,11 @@
+FROM node
+COPY app/ /home/node/app
+RUN cd /home/node/app && npm install && chown -R root:node /home/node/app && chmod -R 770 /home/node/app
+WORKDIR /home/node/app
+USER node
+CMD ["node", "index.js"]

View File

@@ -0,0 +1,13 @@
+const express = require('express')
+const app = express()
+const port = 3000
+var os = require("os");
+app.get('/', (req, res) => {
+  res.send('Container id = ' + os.hostname())
+})
+app.listen(port, () => {
+  console.log(`Example app listening at http://localhost:${port}`)
+})

View File

@ -0,0 +1,14 @@
{
"name": "js-app",
"version": "1.0.0",
"description": "demo",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
"express": "^4.17.1"
}
}