forked from Disroot/gpg-lacre
Finish migration to SQLAlchemy and automate testing of cron.py
parent 7aff414fb7
commit ffc53b935a
5 changed files with 110 additions and 47 deletions
Makefile (29 lines changed)

@@ -1,5 +1,5 @@
 .POSIX:
-.PHONY: test unittest pre-clean clean
+.PHONY: test e2etest unittest crontest pre-clean clean
 
 #
 # On systems where Python 3.x binary has a different name, just

@@ -12,6 +12,13 @@
 #
 PYTHON = python3
 
+TEST_DB = test/lacre.db
+
+#
+# Main goal to run tests.
+#
+test: e2etest unittest crontest
+
 #
 # Run a set of end-to-end tests.
 #

@@ -19,9 +26,27 @@ PYTHON = python3
 # file. Basically this is just a script that feeds GPG Mailgate with
 # known input and checks whether output meets expectations.
 #
-test: test/tmp test/logs pre-clean
+e2etest: test/tmp test/logs pre-clean
 	$(PYTHON) test/e2e_test.py
 
+#
+# Run a basic cron-job test.
+#
+# We use PYTHONPATH to make sure that cron.py can import the GnuPG
+# package. We also set the GPG_MAILGATE_CONFIG env. variable to make
+# sure it slurps the right config.
+#
+crontest: clean-db $(TEST_DB)
+	GPG_MAILGATE_CONFIG=test/gpg-mailgate-cron-test.conf PYTHONPATH=`pwd` $(PYTHON) gpg-mailgate-web/cron.py
+
+$(TEST_DB):
+	$(PYTHON) test/schema.py $(TEST_DB)
+
+# Before running the crontest goal we need to make sure that the
+# database gets regenerated.
+clean-db:
+	rm -f $(TEST_DB)
+
 #
 # Run unit tests
 #

gpg-mailgate-web/cron.py

@@ -22,8 +22,7 @@
 from configparser import RawConfigParser
 import GnuPG
 import sqlalchemy
-from sqlalchemy.sql import select
-# import MySQLdb
+from sqlalchemy.sql import select, delete, update, and_
 import smtplib
 import markdown
 import syslog

@@ -37,6 +36,7 @@ from email.mime.multipart import MIMEMultipart
 CONFIG_PATH_ENV = "GPG_MAILGATE_CONFIG"
 
 def appendLog(msg):
+    print(msg)
     if 'logging' in cfg and 'file' in cfg['logging']:
         if cfg['logging'].get('file') == "syslog":
             syslog.syslog(syslog.LOG_INFO | syslog.LOG_MAIL, msg)

@@ -110,53 +110,47 @@ if 'database' in cfg and 'enabled' in cfg['database'] and cfg['database']['enabl
     (engine, conn) = setup_db_connection(cfg["database"]["url"])
     (gpgmw_keys) = define_db_schema()
 
-    # Original query: "SELECT publickey, id, email FROM gpgmw_keys WHERE status = 0 AND confirm = '' LIMIT 100"
-    # TODO: add WHERE clause referencing "status" and "confirm" columns.
-    query = select(gpgmw_keys.c.publickey, gpgmw_keys.c.id, gpgmw_keys.c.email).limit(100)
-    result_set = conn.execute(query)
+    selq = select(gpgmw_keys.c.publickey, gpgmw_keys.c.id, gpgmw_keys.c.email)\
+        .where(and_(gpgmw_keys.c.status == 0, gpgmw_keys.c.confirm == ""))\
+        .limit(100)
+    result_set = conn.execute(selq)
 
-    rowno = 0
     for row in result_set:
         # delete any other public keys associated with this confirmed email address
-        # TODO: replace with a delete() query generator.
-        conn.execute("DELETE FROM gpgmw_keys WHERE email = %s AND id != %s", (row[2], row[1],))
+        delq = delete(gpgmw_keys).where(and_(gpgmw_keys.c.email == row[2], gpgmw_keys.c.id != row[1]))
+        conn.execute(delq)
         GnuPG.delete_key(cfg['gpg']['keyhome'], row[2])
         appendLog('Deleted key for <' + row[2] + '> via import request')
 
         if row[0].strip(): # we have this so that user can submit blank key to remove any encryption
             if GnuPG.confirm_key(row[0], row[2]):
                 GnuPG.add_key(cfg['gpg']['keyhome'], row[0]) # import the key to gpg
-                # TODO: replace with an update() query generator.
-                conn.execute("UPDATE gpgmw_keys SET status = 1 WHERE id = %s", (row[1],)) # mark key as accepted
+                modq = gpgmw_keys.update().where(gpgmw_keys.c.id == row[1]).values(status = 1)
+                conn.execute(modq) # mark key as accepted
                 appendLog('Imported key from <' + row[2] + '>')
                 if 'send_email' in cfg['cron'] and cfg['cron']['send_email'] == 'yes':
                     send_msg( "PGP key registration successful", "registrationSuccess.md", row[2] )
             else:
-                # TODO: replace with a delete() query generator.
-                conn.execute("DELETE FROM gpgmw_keys WHERE id = %s", (row[1],)) # delete key
+                delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row[1])
+                conn.execute(delq) # delete key
                 appendLog('Import confirmation failed for <' + row[2] + '>')
                 if 'send_email' in cfg['cron'] and cfg['cron']['send_email'] == 'yes':
                     send_msg( "PGP key registration failed", "registrationError.md", row[2] )
         else:
             # delete key so we don't continue processing it
-            # TODO: replace with a delete() query generator.
-            conn.execute("DELETE FROM gpgmw_keys WHERE id = %s", (row[1],))
+            delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row[1])
+            conn.execute(delq)
            if 'send_email' in cfg['cron'] and cfg['cron']['send_email'] == 'yes':
                send_msg( "PGP key deleted", "keyDeleted.md", row[2])
 
-    # connection.commit()
-    # TODO: see if it's still necessary
-
     # delete keys
-    # TODO: replace with a delete() query generator.
-    result_set = conn.execute("SELECT email, id FROM gpgmw_keys WHERE status = 2 LIMIT 100")
+    stat2q = select(gpgmw_keys.c.email, gpgmw_keys.c.id).where(gpgmw_keys.c.status == 2).limit(100)
+    stat2_result_set = conn.execute(stat2q)
 
-    for row in result_set:
+    for row in stat2_result_set:
         GnuPG.delete_key(cfg['gpg']['keyhome'], row[0])
-        # TODO: replace with a delete() query generator.
-        conn.execute("DELETE FROM gpgmw_keys WHERE id = %s", (row[1],))
+        delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row[1])
+        conn.execute(delq)
         appendLog('Deleted key for <' + row[0] + '>')
-    # connection.commit()
-    # TODO: see if it's still necessary
 else:
     print("Warning: doing nothing since database settings are not configured!")
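All of the rewritten statements above follow the same SQLAlchemy Core pattern: build a select(), delete() or update() object against the gpgmw_keys Table and hand it to conn.execute(), instead of interpolating raw SQL strings. The stand-alone sketch below demonstrates that pattern on a throwaway in-memory SQLite database; the table is trimmed to a few columns and the rows are invented, so treat it as an illustration (it assumes SQLAlchemy 1.4 or newer).

# Stand-alone demonstration of the select()/delete()/update() pattern used in cron.py.
# Columns mirror a subset of gpgmw_keys; the data and the database are throwaway.
import sqlalchemy
from sqlalchemy.sql import select, delete, update, and_

meta = sqlalchemy.MetaData()
keys = sqlalchemy.Table('gpgmw_keys', meta,
                        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
                        sqlalchemy.Column('email', sqlalchemy.String(256)),
                        sqlalchemy.Column('status', sqlalchemy.Integer),
                        sqlalchemy.Column('confirm', sqlalchemy.String(32)))

engine = sqlalchemy.create_engine("sqlite:///:memory:")
meta.create_all(engine)

with engine.begin() as conn:
    conn.execute(keys.insert(), [
        {"id": 1, "email": "alice@example.org", "status": 0, "confirm": ""},
        {"id": 2, "email": "alice@example.org", "status": 2, "confirm": ""},
    ])

    # SELECT ... WHERE status = 0 AND confirm = '' LIMIT 100
    pending = select(keys.c.id, keys.c.email)\
        .where(and_(keys.c.status == 0, keys.c.confirm == ""))\
        .limit(100)
    for row in conn.execute(pending).fetchall():
        # DELETE any other rows registered for the same address
        conn.execute(delete(keys).where(and_(keys.c.email == row.email,
                                             keys.c.id != row.id)))
        # UPDATE ... SET status = 1 WHERE id = ... (mark the key as accepted)
        conn.execute(update(keys).where(keys.c.id == row.id).values(status=1))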

(deleted file)

@@ -1,20 +0,0 @@
-import sqlalchemy
-
-def define_db_schema():
-    meta = sqlalchemy.MetaData()
-
-    gpgmw_keys = sqlalchemy.Table('gpgmw_keys', meta,
-                                  sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
-                                  sqlalchemy.Column('email', sqlalchemy.String(256)),
-                                  sqlalchemy.Column('publickey', sqlalchemy.Text),
-                                  sqlalchemy.Column('confirm', sqlalchemy.String(32)),
-                                  sqlalchemy.Column('status', sqlalchemy.Integer),
-                                  sqlalchemy.Column('time', sqlalchemy.DateTime))
-
-    return (meta, gpgmw_keys)
-
-(meta, gpgmw_keys) = define_db_schema()
-
-test_db = sqlalchemy.create_engine("sqlite:///test.db")
-
-meta.create_all(test_db)

test/gpg-mailgate-cron-test.conf (new file, 27 lines)

@@ -0,0 +1,27 @@
+[logging]
+config = test/gpg-lacre-log.ini
+file = test/logs/gpg-mailgate.log
+format = %(asctime)s %(module)s[%(process)d]: %(message)s
+date_format = ISO
+
+[gpg]
+keyhome = test/keyhome
+
+[smime]
+cert_path = test/certs
+
+[database]
+enabled = yes
+url = sqlite:///test/lacre.db
+
+[relay]
+host = localhost
+port = 2500
+
+[cron]
+send_email = no
+
+[enc_keymap]
+alice@disposlab = 1CD245308F0963D038E88357973CF4D9387C44D7
+bob@disposlab = 19CF4B47ECC9C47AFA84D4BD96F39FDA0E31BB67
+
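cron.py reads this file through RawConfigParser (see the import list above). The snippet below is a minimal sketch of that access pattern, using only sections and options present in this test config; the real script may copy the parser contents into a plain dict, so the exact accessors are illustrative.

# Minimal sketch of reading the test config the way cron.py consumes it.
from configparser import RawConfigParser

parser = RawConfigParser()   # no interpolation, so the %(...)s logging format is left untouched
parser.read("test/gpg-mailgate-cron-test.conf")

if parser.has_section('database') and parser.get('database', 'enabled') == 'yes':
    db_url = parser.get('database', 'url')        # sqlite:///test/lacre.db
    keyhome = parser.get('gpg', 'keyhome')        # test/keyhome
    send_email = parser.get('cron', 'send_email') == 'yes'   # values are strings, hence the 'yes' check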

test/schema.py (new file, 37 lines)

@@ -0,0 +1,37 @@
+import sys
+import sqlalchemy
+from sqlalchemy.sql import insert
+
+def define_db_schema():
+    meta = sqlalchemy.MetaData()
+
+    gpgmw_keys = sqlalchemy.Table('gpgmw_keys', meta,
+                                  sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
+                                  sqlalchemy.Column('email', sqlalchemy.String(256)),
+                                  sqlalchemy.Column('publickey', sqlalchemy.Text),
+                                  sqlalchemy.Column('confirm', sqlalchemy.String(32)),
+                                  sqlalchemy.Column('status', sqlalchemy.Integer),
+                                  sqlalchemy.Column('time', sqlalchemy.DateTime))
+
+    return (meta, gpgmw_keys)
+
+if len(sys.argv) != 2:
+    print("ERROR: output database missing")
+    sys.exit(1)
+
+(meta, gpgmw_keys) = define_db_schema()
+
+dbname = sys.argv[1]
+test_db = sqlalchemy.create_engine(f"sqlite:///{dbname}")
+
+# Initialise the schema
+meta.create_all(test_db)
+
+conn = test_db.connect()
+
+# Populate the database with dummy data
+conn.execute(gpgmw_keys.insert(), [
+    {"id": 1, "email": "alice@lacre.io", "publickey": "PUBLICKEY1", "status": 0, "confirm": "", "time": None},
+    {"id": 2, "email": "bob@lacre.io", "publickey": "PUBLICKEY2", "status": 0, "confirm": "", "time": None},
+    {"id": 3, "email": "cecil@lacre.io", "publickey": "PUBLICKEY3", "status": 0, "confirm": "", "time": None}
+])
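As a quick sanity check (not part of the commit), the freshly seeded fixture can be inspected with the same SQLAlchemy constructs cron.py uses; the three status = 0 rows below are exactly what the cron job's first SELECT picks up. The sketch assumes SQLAlchemy 1.4 or newer for autoload_with.

# Inspect test/lacre.db after running: python3 test/schema.py test/lacre.db
import sqlalchemy
from sqlalchemy.sql import select

engine = sqlalchemy.create_engine("sqlite:///test/lacre.db")
meta = sqlalchemy.MetaData()
gpgmw_keys = sqlalchemy.Table('gpgmw_keys', meta, autoload_with=engine)  # reflect the seeded schema

with engine.connect() as conn:
    pending = select(gpgmw_keys.c.id, gpgmw_keys.c.email).where(gpgmw_keys.c.status == 0)
    for row in conn.execute(pending):
        print(row.id, row.email)   # expected: 1 alice@lacre.io, 2 bob@lacre.io, 3 cecil@lacre.io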