Merge pull request 'Fix unencrypted delivery and key removal' (#130) from 129-key-removal into main

Reviewed-on: #130
This commit is contained in:
pfm 2023-12-02 20:59:13 +00:00
commit b7713207ab
28 changed files with 852 additions and 426 deletions

3
.gitignore vendored
View File

@ -1,3 +1,6 @@
# Generated project files:
test/lacre.db
*.py[cod]
# C extensions

View File

@ -27,15 +27,22 @@ import random
import string
import sys
import logging
import re
import tempfile
from email.utils import parseaddr
LINE_FINGERPRINT = 'fpr'
LINE_USER_ID = 'uid'
LINE_PUBLIC_KEY = 'pub'
POS_FINGERPRINT = 9
POS_UID = 9
LOG = logging.getLogger(__name__)
RX_CONFIRM = re.compile(br'key "([^"]+)" imported')
class EncryptionException(Exception):
"""Represents a failure to encrypt a payload."""
@ -57,25 +64,40 @@ def _build_command(key_home, *args, **kwargs):
return cmd
def public_keys(keyhome):
"""List public keys from keyring KEYHOME."""
def public_keys(keyhome, *, key_id=None):
"""List public keys from keyring KEYHOME.
Returns a dict with fingerprints as keys and email as values."""
cmd = _build_command(keyhome, '--list-keys', '--with-colons')
if key_id is not None:
cmd.append(key_id)
p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
keys = dict()
collected = set()
fingerprint = None
email = None
for line in p.stdout.readlines():
line = line.decode(sys.getdefaultencoding())
if line[0:3] == LINE_FINGERPRINT:
fingerprint = line.split(':')[POS_FINGERPRINT]
if line[0:3] == LINE_PUBLIC_KEY:
# New identity has started, reset state.
fingerprint = None
email = None
if line[0:3] == LINE_FINGERPRINT and not fingerprint:
fingerprint = _extract_fingerprint(line)
if line[0:3] == LINE_USER_ID:
if ('<' not in line or '>' not in line):
continue
email = line.split('<')[1].split('>')[0]
if not (fingerprint is None or email is None):
email = _parse_uid_line(line)
if fingerprint and email and not email in collected:
keys[fingerprint] = email
collected.add(email)
fingerprint = None
email = None
@ -84,6 +106,23 @@ def public_keys(keyhome):
return keys
def _extract_fingerprint(line):
    """Return the fingerprint field of a colon-format 'fpr' line, or None.

    GnuPG's --with-colons output keeps the fingerprint at field index
    POS_FINGERPRINT; lines with fewer fields yield None.
    """
    fields = line.split(':')
    if len(fields) > POS_FINGERPRINT:
        return fields[POS_FINGERPRINT]
    return None
def _parse_uid_line(line: str):
    """Extract the email address from a colon-format 'uid' line, or None.

    Uses email.utils.parseaddr on the user-id field so display names and
    angle brackets are stripped.
    """
    fields = line.split(':')
    if len(fields) <= POS_UID:
        return None
    (_, email) = parseaddr(fields[POS_UID])
    return email
def _to_bytes(s) -> bytes:
if isinstance(s, str):
return bytes(s, sys.getdefaultencoding())
@ -94,32 +133,21 @@ def _to_bytes(s) -> bytes:
# Confirms a key has a given email address by importing it into a temporary
# keyring. If this operation succeeds and produces a message mentioning the
# expected email, a key is confirmed.
def confirm_key(content, email):
def confirm_key(content, email: str):
"""Verify that the key CONTENT is assigned to identity EMAIL."""
tmpkeyhome = ''
content = _to_bytes(content)
expected_email = _to_bytes(email.lower())
expected_email = email.lower()
while True:
tmpkeyhome = '/tmp/' + ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(12))
if not os.path.exists(tmpkeyhome):
break
tmpkeyhome = tempfile.mkdtemp()
# let only the owner access the directory, otherwise gpg would complain
os.mkdir(tmpkeyhome, mode=0o700)
localized_env = os.environ.copy()
localized_env["LANG"] = "C"
p = subprocess.Popen(_build_command(tmpkeyhome, '--import', '--batch'), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=localized_env)
result = p.communicate(input=content)[1]
result = _import_key(tmpkeyhome, content)
confirmed = False
for line in result.split(b"\n"):
if b'imported' in line and b'<' in line and b'>' in line:
if line.split(b'<')[1].split(b'>')[0].lower() == expected_email:
confirmed = True
break
else:
break # confirmation failed
for line in result.splitlines():
found = RX_CONFIRM.search(line)
if found:
(_, extracted_email) = parseaddr(found.group(1).decode())
confirmed = (extracted_email == expected_email)
# cleanup
shutil.rmtree(tmpkeyhome)
@ -127,19 +155,44 @@ def confirm_key(content, email):
return confirmed
def _import_key(keyhome, content):
    """Import key CONTENT into keyring KEYHOME and return GnuPG's stderr output.

    The stderr stream carries the 'key "..." imported' confirmation lines
    that callers match against RX_CONFIRM.
    """
    content = _to_bytes(content)
    # Ensure we get expected output regardless of the system locale.
    localized_env = os.environ.copy()
    localized_env["LANG"] = "C"
    p = subprocess.Popen(_build_command(keyhome, '--import', '--batch'),
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=localized_env)
    # communicate() already waits for process termination, so the extra
    # wait() call the previous version made was redundant.
    (_, stderr_output) = p.communicate(input=content)
    return stderr_output
# adds a key and ensures it has the given email address
def add_key(keyhome, content):
"""Register new key CONTENT in the keyring KEYHOME."""
if isinstance(content, str):
content = bytes(content, sys.getdefaultencoding())
p = subprocess.Popen(_build_command(keyhome, '--import', '--batch'), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate(input=content)
p.wait()
output = _import_key(keyhome, content)
email = None
for line in output.splitlines():
found = RX_CONFIRM.search(line)
if found:
(_, extracted_email) = parseaddr(found.group(1).decode())
email = extracted_email
# Find imported key to get its fingerprint
imported = public_keys(keyhome, key_id=email)
if len(imported.keys()) == 1:
fingerprint = list(imported.keys())[0]
return fingerprint, imported[fingerprint]
else:
return None, None
def delete_key(keyhome, email):
"""Remove key assigned to identity EMAIL from keyring KEYHOME."""
from email.utils import parseaddr
result = parseaddr(email)
if result[1]:
@ -149,6 +202,7 @@ def delete_key(keyhome, email):
p.wait()
return True
LOG.warn('Failed to parse email before deleting key: %s', email)
return False
@ -243,6 +297,8 @@ KEY_EXPIRED = b'KEYEXPIRED'
KEY_REVOKED = b'KEYREVOKED'
NO_RECIPIENTS = b'NO_RECP'
INVALID_RECIPIENT = b'INV_RECP'
KEY_CONSIDERED = b'KEY_CONSIDERED'
NOAVAIL = b'n/a'
# INV_RECP reason code descriptions.
INVALID_RECIPIENT_CAUSES = [
@ -271,7 +327,7 @@ def parse_status(status_buffer: str) -> dict:
def parse_status_lines(lines: list) -> dict:
"""Parse --status-fd output and return important information."""
result = {'issue': 'n/a', 'recipient': 'n/a', 'cause': 'Unknown'}
result = {'issue': NOAVAIL, 'recipient': NOAVAIL, 'cause': 'Unknown', 'key': NOAVAIL}
LOG.debug('Processing stderr lines %s', lines)
@ -286,6 +342,8 @@ def parse_status_lines(lines: list) -> dict:
result['issue'] = KEY_REVOKED
elif line.startswith(NO_RECIPIENTS, STATUS_FD_PREFIX_LEN):
result['issue'] = NO_RECIPIENTS
elif line.startswith(KEY_CONSIDERED, STATUS_FD_PREFIX_LEN):
result['key'] = line.split(b' ')[2]
elif line.startswith(INVALID_RECIPIENT, STATUS_FD_PREFIX_LEN):
words = line.split(b' ')
reason_code = int(words[2])

View File

@ -7,17 +7,23 @@
#
# make test PYTHON=/usr/local/bin/python3.8
#
# This marco is passed via environment to test/e2e_test.py, where it's
# This macro is passed via environment to test/e2e_test.py, where it's
# used to compute further commands.
#
PYTHON = python3
PYTHON = python
#
# SQLite database used during tests
#
# This database stores key queue and identity repository for e2etest,
# daemontest, and crontest.
#
TEST_DB = test/lacre.db
#
# Main goal to run tests.
# Main goal to run all tests.
#
test: e2etest unittest daemontest crontest
test: e2etest daemontest unittest crontest
#
# Run a set of end-to-end tests.
@ -37,7 +43,8 @@ e2etest: test/tmp test/logs pre-clean restore-keyhome
# it slurps the right config.
#
crontest: clean-db $(TEST_DB)
GPG_MAILGATE_CONFIG=test/gpg-mailgate-cron-test.conf PYTHONPATH=`pwd` $(PYTHON) webgate-cron.py
GPG_MAILGATE_CONFIG=test/gpg-mailgate-cron-test.conf PYTHONPATH=`pwd` \
$(PYTHON) webgate-cron.py
$(TEST_DB):
$(PYTHON) test/utils/schema.py $(TEST_DB)
@ -45,7 +52,7 @@ $(TEST_DB):
#
# Run an e2e test of Advanced Content Filter.
#
daemontest:
daemontest: restore-keyhome
$(PYTHON) test/daemon_test.py
# Before running the crontest goal we need to make sure that the
@ -65,6 +72,7 @@ pre-clean:
restore-keyhome:
git restore test/keyhome
git restore test/keyhome.other
test/tmp:
mkdir test/tmp
@ -72,5 +80,5 @@ test/tmp:
test/logs:
mkdir test/logs
clean: pre-clean
clean: pre-clean clean-db
rm -rfv test/tmp test/logs

34
doc/admin.md Normal file
View File

@ -0,0 +1,34 @@
# Lacre administration
## Command-line tool
Lacre ships with a small command-line administration tool. As long as the Lacre Python packages
are available via `PYTHONPATH`, you can invoke it like this:
```sh
python -m lacre.admin -h
```
The `-h` flag prints usage information for the tool and its sub-commands.
## Inspecting key confirmation queue
To find out how many keys are waiting to be confirmed, run:
```sh
python -m lacre.admin queue
```
## Inspecting identities registered
To list all identities, run:
```sh
python -m lacre.admin identities -a
```
To preview a particular identity, run:
```sh
python -m lacre.admin identities -e alice@example.com
```

View File

@ -49,3 +49,11 @@ verifying that the correct key has been used. That's because we don't know
When things go wrong, be sure to study `test/logs/e2e.log` and
`test/logs/gpg-mailgate.log` files -- they contain some useful information.
## Test identities
There are several identities in test/keyhome and in the test database:
* alice@disposlab: 1CD245308F0963D038E88357973CF4D9387C44D7
* bob@disposlab: 19CF4B47ECC9C47AFA84D4BD96F39FDA0E31BB67
* evan@disposlab: 530B1BB2D0CC7971648198BBA4774E507D3AF5BC

View File

@ -36,9 +36,11 @@ import lacre.core as core
LOG = logging.getLogger('gpg-mailgate.py')
missing_params = conf.validate_config()
missing_params = conf.validate_config(additional=conf.SCRIPT_REQUIRED)
config_file = conf.config_source()
if missing_params:
LOG.error(f"Aborting delivery! Following mandatory config parameters are missing: {missing_params!r}")
LOG.error(f"Aborting delivery! Following mandatory config parameters are missing in {config_file!r}: {missing_params}")
sys.exit(lacre.EX_CONFIG)
delivered = False

View File

@ -8,30 +8,30 @@ import logging.config
# be performed. It only sets up a syslog handler, so that the admin has at
# least some basic information.
FAIL_OVER_LOGGING_CONFIG = {
'version': 1,
'formatters': {
'sysfmt': {
'format': '%(asctime)s %(module)s %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'syslog': {
'class': 'logging.handlers.SysLogHandler',
'level': 'INFO',
'formatter': 'sysfmt'
},
'lacrelog': {
'class': 'logging.FileHandler',
'level': 'INFO',
'formatter': 'sysfmt',
'filename': 'lacre.log'
}
},
'root': {
'level': 'INFO',
'handlers': ['syslog', 'lacrelog']
}
'version': 1,
'formatters': {
'sysfmt': {
'format': '%(asctime)s %(module)s %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'syslog': {
'class': 'logging.handlers.SysLogHandler',
'level': 'INFO',
'formatter': 'sysfmt'
},
'lacrelog': {
'class': 'logging.FileHandler',
'level': 'INFO',
'formatter': 'sysfmt',
'filename': 'lacre.log'
}
},
'root': {
'level': 'INFO',
'handlers': ['syslog', 'lacrelog']
}
}
# Exit code taken from <sysexits.h>:
@ -41,8 +41,9 @@ EX_CONFIG = 78
def init_logging(config_filename):
if config_filename is not None:
logging.config.fileConfig(config_filename)
else:
logging.config.dictConfig(FAIL_OVER_LOGGING_CONFIG)
logging.warning('Lacre logging configuration missing, using syslog as default')
if config_filename is not None:
logging.config.fileConfig(config_filename)
logging.info('Configured from %s', config_filename)
else:
logging.config.dictConfig(FAIL_OVER_LOGGING_CONFIG)
logging.warning('Lacre logging configuration missing, using syslog as default')

63
lacre/_keyringcommon.py Normal file
View File

@ -0,0 +1,63 @@
class KeyCache:
    """A static store for OpenPGP keys, mapping fingerprints to emails.

    Key case is sanitised while loading from GnuPG if so
    configured. See mail_case_insensitive parameter in section
    [default].
    """

    def __init__(self, keys: dict = None):
        """Initialise the cache with KEYS (fingerprint -> email).

        With no argument, start from an empty map instead of storing None,
        so lookups, iteration and has_email() work on a fresh cache.
        """
        self._keys = {} if keys is None else keys

    def __getitem__(self, fingerprint):
        """Look up email assigned to the given fingerprint."""
        return self._keys[fingerprint]

    def __setitem__(self, fingerprint, email):
        """Assign an email to a fingerprint, overwriting it if it was already present."""
        self._keys[fingerprint] = email

    def __contains__(self, fingerprint):
        """Check if the given fingerprint is assigned to an email."""
        # This method has to be present for KeyCache to be a dict substitute.
        # See mailgate, function _identify_gpg_recipients.
        return fingerprint in self._keys

    def has_email(self, email):
        """Check if cache contains a key assigned to the given email."""
        return email in self._keys.values()

    def __repr__(self):
        """Return text representation of this object."""
        details = ' '.join(self._keys.keys())
        return '<KeyCache %s>' % (details)

    def __iter__(self):
        """Iterate over the stored fingerprints."""
        return iter(self._keys.keys())

    def emails(self):
        """Return the reverse map: email -> fingerprint."""
        return {email: fingerprint for (fingerprint, email) in self._keys.items()}
class KeyRing:
    """Contract to be implemented by a key-store (a.k.a. keyring)."""

    def freeze_identities(self) -> 'KeyCache':
        """Return a static, async-safe copy of the identity map."""
        # Fixed: the message previously referenced a non-existent
        # 'KeyRing.load' method, which was misleading in tracebacks.
        raise NotImplementedError('KeyRing.freeze_identities not implemented')

    def register_or_update(self, email: str, key_id: str):
        """Add a new (email, key) pair to the keystore."""
        raise NotImplementedError('KeyRing.register_or_update not implemented')

    def post_init_hook(self):
        """Lets the keyring perform additional operations following its initialisation."""
        pass

    def shutdown(self):
        """Lets the keyring perform operations prior to shutting down."""
        pass

122
lacre/admin.py Normal file
View File

@ -0,0 +1,122 @@
"""Lacre administrative tool.
This is a command-line tool expected to be run by a person who knows what they
are doing. Also, please read the docs first.
"""
import sys
import argparse
import logging
import GnuPG
import lacre
import lacre.config as conf
conf.load_config()
lacre.init_logging(conf.get_item('logging', 'config'))
import lacre.repositories as repo
import lacre.dbschema as db
LOG = logging.getLogger('lacre.admin')
def _no_database():
    """Report a missing/misconfigured database and terminate with EX_CONFIG.

    Uses sys.exit rather than the site builtin exit(), which may be absent
    when running with -S or in frozen interpreters.
    """
    print('Database unavailable or not configured properly')
    sys.exit(lacre.EX_CONFIG)
def sub_queue(args):
    """Sub-command to inspect queue contents."""
    LOG.debug('Inspecting queue...')
    db_conn = repo.connect(conf.get_item('database', 'url'))
    pending = repo.KeyConfirmationQueue(db_conn)
    key_count = pending.count_keys()
    if key_count is None:
        _no_database()
    print(f'Keys in the queue: {key_count}')
def sub_identities(args):
    """Sub-command to inspect identity database.

    With --email, print just that identity; otherwise list all of them.
    Unknown addresses are reported instead of raising KeyError.
    """
    LOG.debug('Inspecting identities...')
    conn = repo.connect(conf.get_item('database', 'url'))
    identities = repo.IdentityRepository(conn)
    all_identities = identities.freeze_identities()
    if all_identities is None:
        _no_database()
    if args.email:
        all_rev = all_identities.emails()
        if args.email in all_rev:
            print('-', args.email, all_rev[args.email])
        else:
            # Previously this raised an unhandled KeyError for unknown emails.
            print(f'No identity found for {args.email}')
    else:
        for id_ in all_identities:
            print('-', all_identities[id_], id_)
def sub_import(args):
    """Sub-command to import all identities known to GnuPG into Lacre database."""
    LOG.debug('Importing identities...')
    source_dir = args.homedir or conf.get_item('gpg', 'keyhome')
    identities = repo.IdentityRepository(repo.connect(conf.get_item('database', 'url')))
    total = 0
    for fingerprint, email in GnuPG.public_keys(source_dir).items():
        LOG.debug('Importing %s - %s', email, fingerprint)
        identities.register_or_update(email, fingerprint)
        total += 1
    LOG.debug('Imported %d identities', total)
    print(f'Imported {total} identities')
def main():
    """Entry point: validate config, parse arguments, dispatch to a sub-command.

    Aborts with EX_CONFIG when mandatory parameters are missing, matching
    the behaviour of the delivery scripts (see gpg-mailgate.py).
    """
    missing = conf.validate_config(additional=conf.SCRIPT_REQUIRED)
    general_conf = conf.config_source()
    if missing:
        # Previously the validation result was computed and silently
        # discarded, letting the tool run half-configured.
        print(f'Missing mandatory config parameters in {general_conf!r}: {missing}')
        sys.exit(lacre.EX_CONFIG)
    log_conf = conf.get_item('logging', 'config')

    parser = argparse.ArgumentParser(
        prog='lacre.admin',
        description='Lacre Admin\'s best friend',
        epilog=f'Config read from {general_conf}. For diagnostic info, see {log_conf}'
    )
    sub_commands = parser.add_subparsers(help='Sub-commands', required=True)

    cmd_import = sub_commands.add_parser('import',
        help='Load identities from GnuPG directory to Lacre database'
    )
    cmd_import.add_argument('-d', '--homedir', help='specify GnuPG directory (default: use configured dir.)')
    cmd_import.set_defaults(operation=sub_import)

    cmd_queue = sub_commands.add_parser('queue',
        help='Inspect key queue',
        aliases=['q']
    )
    cmd_queue.set_defaults(operation=sub_queue)

    cmd_identities = sub_commands.add_parser('identities',
        help='Inspect identity database',
        aliases=['id']
    )
    cmd_identities.add_argument('-e', '--email', help='look up a single email')
    cmd_identities.set_defaults(operation=sub_identities)

    user_request = parser.parse_args()
    user_request.operation(user_request)
if __name__ == '__main__':
main()

View File

@ -22,6 +22,13 @@ MANDATORY_CONFIG_ITEMS = [("relay", "host"),
("daemon", "port"),
("gpg", "keyhome")]
SCRIPT_REQUIRED = [('database', 'enabled'),
('database', 'url')]
CRON_REQUIRED = [('database', 'enabled'),
('database', 'url'),
('cron', 'mail_templates')]
# Global dict to keep configuration parameters. It's hidden behind several
# utility functions to make it easy to replace it with ConfigParser object in
# the future.
@ -36,9 +43,9 @@ def load_config() -> dict:
path. Otherwise, the default is taken
('/etc/gpg-mailgate.conf').
"""
configFile = os.getenv(CONFIG_PATH_ENV, '/etc/gpg-mailgate.conf')
config_file = config_source()
parser = _read_config(configFile)
parser = _read_config(config_file)
# XXX: Global variable. It is a left-over from old GPG-Mailgate code. We
# should drop it and probably use ConfigParser instance where configuration
@ -48,6 +55,14 @@ def load_config() -> dict:
return cfg
def config_source() -> str:
    """Return path of configuration file.

    Taken from GPG_MAILGATE_CONFIG environment variable, and if it's not
    set, defaults to /etc/gpg-mailgate.conf."""
    configured = os.getenv(CONFIG_PATH_ENV)
    return configured if configured is not None else '/etc/gpg-mailgate.conf'
def _read_config(fileName) -> RawConfigParser:
cp = RawConfigParser()
cp.read(fileName)
@ -90,16 +105,23 @@ def flag_enabled(section, key) -> bool:
return config_item_equals(section, key, 'yes')
def validate_config():
def validate_config(*, additional=None):
"""Check if configuration is complete.
Returns a list of missing parameters, so an empty list means
configuration is complete.
If 'additional' parameter is specified, it should be a list of
tuples (section, param).
"""
missing = []
for (section, param) in MANDATORY_CONFIG_ITEMS:
if not config_item_set(section, param):
missing.append((section, param))
if additional:
for (section, param) in additional:
if not config_item_set(section, param):
missing.append((section, param))
return missing

View File

@ -3,6 +3,7 @@
import logging
import lacre
from lacre.text import DOUBLE_EOL_BYTES
from lacre.stats import time_logger
import lacre.config as conf
import sys
from aiosmtpd.controller import Controller
@ -10,8 +11,6 @@ from aiosmtpd.smtp import Envelope
import asyncio
import email
from email.policy import SMTPUTF8
import time
from watchdog.observers import Observer
# Load configuration and init logging, in this order. Only then can we load
# the last Lacre module, i.e. lacre.mailgate.
@ -35,50 +34,47 @@ class MailEncryptionProxy:
async def handle_DATA(self, server, session, envelope: Envelope):
"""Accept a message and either encrypt it or forward as-is."""
start = time.process_time()
try:
keys = await self._keyring.freeze_identities()
LOG.debug('Parsing message: %s', self._beginning(envelope))
message = email.message_from_bytes(envelope.original_content, policy=SMTPUTF8)
LOG.debug('Parsed into %s: %s', type(message), repr(message))
with time_logger('Message delivery', LOG):
try:
keys = self._keyring.freeze_identities()
LOG.debug('Parsing message: %s', self._beginning(envelope))
message = email.message_from_bytes(envelope.original_content, policy=SMTPUTF8)
LOG.debug('Parsed into %s: %s', type(message), repr(message))
if message.defects:
# Sometimes a weird message cannot be encoded back and
# delivered, so before bouncing such messages we at least
# record information about the issues. Defects are identified
# by email.* package.
LOG.warning("Issues found: %d; %s", len(message.defects), repr(message.defects))
if message.defects:
# Sometimes a weird message cannot be encoded back and
# delivered, so before bouncing such messages we at least
# record information about the issues. Defects are identified
# by email.* package.
LOG.warning("Issues found: %d; %s", len(message.defects), repr(message.defects))
if conf.flag_enabled('daemon', 'log_headers'):
LOG.info('Message headers: %s', self._extract_headers(message))
if conf.flag_enabled('daemon', 'log_headers'):
LOG.info('Message headers: %s', self._extract_headers(message))
send = xport.SendFrom(envelope.mail_from)
for operation in gate.delivery_plan(envelope.rcpt_tos, message, keys):
LOG.debug(f"Sending mail via {operation!r}")
try:
new_message = operation.perform(message)
send(new_message, operation.recipients())
except EncryptionException:
# If the message can't be encrypted, deliver cleartext.
LOG.exception('Unable to encrypt message, delivering in cleartext')
if not isinstance(operation, KeepIntact):
self._send_unencrypted(operation, message, envelope, send)
else:
LOG.error(f'Cannot perform {operation}')
send = xport.SendFrom(envelope.mail_from)
for operation in gate.delivery_plan(envelope.rcpt_tos, message, keys):
LOG.debug(f"Sending mail via {operation!r}")
try:
new_message = operation.perform(message)
send(new_message, operation.recipients())
except EncryptionException:
# If the message can't be encrypted, deliver cleartext.
LOG.exception('Unable to encrypt message, delivering in cleartext')
if not isinstance(operation, KeepIntact):
self._send_unencrypted(operation, message, envelope, send)
else:
LOG.error(f'Cannot perform {operation}')
except:
LOG.exception('Unexpected exception caught, bouncing message')
return xport.RESULT_ERROR
ellapsed = (time.process_time() - start) * 1000
LOG.info(f'Message delivered in {ellapsed:.2f} ms')
except:
LOG.exception('Unexpected exception caught, bouncing message')
return xport.RESULT_ERROR
return xport.RESULT_OK
def _send_unencrypted(self, operation, message, envelope, send: xport.SendFrom):
keep = KeepIntact(operation.recipients())
new_message = keep.perform(message)
send(new_message, operation.recipients(), envelope.mail_from)
send(new_message, operation.recipients())
def _beginning(self, e: Envelope) -> bytes:
double_eol_pos = e.original_content.find(DOUBLE_EOL_BYTES)
@ -96,6 +92,9 @@ class MailEncryptionProxy:
'cte' : message['Content-Transfer-Encoding']
}
def _seconds_between(self, start_ms, end_ms) -> float:
    # NOTE(review): despite the name, this multiplies the delta by 1000
    # rather than dividing; with millisecond inputs the result would be
    # microseconds, not seconds. Confirm the intended unit with callers.
    return (end_ms - start_ms) * 1000
def _init_controller(keys: kcache.KeyRing, max_body_bytes=None, tout: float = 5):
proxy = MailEncryptionProxy(keys)
@ -106,13 +105,6 @@ def _init_controller(keys: kcache.KeyRing, max_body_bytes=None, tout: float = 5)
data_size_limit=max_body_bytes)
def _init_reloader(keyring_dir: str, reloader) -> kcache.KeyringModificationListener:
listener = kcache.KeyringModificationListener(reloader)
observer = Observer()
observer.schedule(listener, keyring_dir, recursive=False)
return observer
def _validate_config():
missing = conf.validate_config()
if missing:
@ -130,7 +122,7 @@ async def _sleep():
await asyncio.sleep(360)
def _main():
async def _main():
_validate_config()
keyring_path = conf.get_item('gpg', 'keyhome')
@ -138,30 +130,27 @@ def _main():
loop = asyncio.get_event_loop()
keyring = kcache.KeyRing(keyring_path, loop)
controller = _init_controller(keyring, max_data_bytes)
reloader = _init_reloader(keyring_path, keyring)
LOG.info(f'Watching keyring directory {keyring_path}...')
reloader.start()
LOG.info('Starting the daemon...')
controller.start()
try:
loop.run_until_complete(_sleep())
keyring = kcache.init_keyring()
controller = _init_controller(keyring, max_data_bytes)
keyring.post_init_hook()
LOG.info('Starting the daemon...')
controller.start()
await _sleep()
except KeyboardInterrupt:
LOG.info("Finishing...")
except:
LOG.exception('Unexpected exception caught, your system may be unstable')
finally:
LOG.info('Shutting down keyring watcher and the daemon...')
reloader.stop()
reloader.join()
keyring.shutdown()
controller.stop()
LOG.info("Done")
if __name__ == '__main__':
_main()
asyncio.run(_main())

38
lacre/dbschema.py Normal file
View File

@ -0,0 +1,38 @@
"""Database schema for Lacre.
This definition includes:
- 'lacre_keys' -- temporary key storage, used by the frontend to submit keys and
by webgate-cron script to import submitted keys.
- 'lacre_identities' -- identity catalogue, used by encryption logic to match
emails with corresponding keys.
"""
import sqlalchemy
# Values for lacre_keys.status column:
ST_DEFAULT, ST_IMPORTED, ST_TO_BE_DELETED = range(3)
_meta = sqlalchemy.MetaData()
LACRE_KEYS = sqlalchemy.Table('lacre_keys', _meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('email', sqlalchemy.String(256)),
# ASCII-armored key
sqlalchemy.Column('publickey', sqlalchemy.Text),
sqlalchemy.Column('confirm', sqlalchemy.String(32)),
# Status: see ST_* constants at the top of the file.
sqlalchemy.Column('status', sqlalchemy.Integer),
sqlalchemy.Column('time', sqlalchemy.DateTime))
LACRE_IDENTITIES = sqlalchemy.Table('lacre_identities', _meta,
sqlalchemy.Column('email', sqlalchemy.String(256), index=True),
# Key fingerprint
sqlalchemy.Column('fingerprint', sqlalchemy.String(64), index=True))
def init_identities_table() -> sqlalchemy.Table:
    """Return the lacre_identities table definition (email <-> fingerprint)."""
    return LACRE_IDENTITIES
def table_metadata():
    """Return the MetaData object that binds all Lacre table definitions."""
    return _meta

View File

@ -4,172 +4,24 @@ IMPORTANT: This module has to be loaded _after_ initialisation of the logging
module.
"""
import lacre.text as text
import lacre.config as conf
from lacre._keyringcommon import KeyRing, KeyCache
from lacre.repositories import IdentityRepository
import logging
from os import stat
from watchdog.events import FileSystemEventHandler, FileSystemEvent
from asyncio import Semaphore, create_task, get_event_loop, run
import copy
import GnuPG
LOG = logging.getLogger(__name__)
def _sanitize(keys):
sanitize = text.choose_sanitizer(conf.get_item('default', 'mail_case_insensitive'))
return {fingerprint: sanitize(keys[fingerprint]) for fingerprint in keys}
def init_keyring() -> KeyRing:
    """Initialise appropriate type of keyring (database-backed)."""
    return IdentityRepository(db_url=conf.get_item('database', 'url'))
class KeyCacheMisconfiguration(Exception):
"""Exception used to signal that KeyCache is misconfigured."""
class KeyCache:
"""A store for OpenPGP keys.
Key case is sanitised while loading from GnuPG if so
configured. See mail_case_insensitive parameter in section
[default].
"""
def __init__(self, keys: dict = None):
"""Initialise an empty cache.
With keyring_dir given, set location of the directory from which keys should be loaded.
"""
self._keys = keys
def __getitem__(self, fingerpring):
"""Look up email assigned to the given fingerprint."""
return self._keys[fingerpring]
def __setitem__(self, fingerprint, email):
"""Assign an email to a fingerpring, overwriting it if it was already present."""
self._keys[fingerprint] = email
def __contains__(self, fingerprint):
"""Check if the given fingerprint is assigned to an email."""
# This method has to be present for KeyCache to be a dict substitute.
# See mailgate, function _identify_gpg_recipients.
return fingerprint in self._keys
def has_email(self, email):
"""Check if cache contains a key assigned to the given email."""
return email in self._keys.values()
def __repr__(self):
"""Return text representation of this object."""
details = ' '.join(self._keys.keys())
return '<KeyCache %s>' % (details)
class KeyRing:
"""A high-level adapter for GnuPG-maintained keyring directory.
Its role is to keep a cache of keys present in the keyring,
reload it when necessary and produce static copies of
fingerprint=>email maps.
"""
def __init__(self, path: str, loop=None):
"""Initialise the adapter."""
self._path = path
self._keys = self._load_and_sanitize()
self._sema = Semaphore()
self._last_mod = None
self._loop = loop or get_event_loop()
def _load_and_sanitize(self):
keys = self._load_keyring_from(self._path)
return _sanitize(keys)
def _load_keyring_from(self, keyring_dir):
return GnuPG.public_keys(keyring_dir)
async def freeze_identities(self) -> KeyCache:
"""Return a static, async-safe copy of the identity map."""
async with self._sema:
keys = copy.deepcopy(self._keys)
return KeyCache(keys)
def load(self):
"""Load keyring, replacing any previous contents of the cache."""
LOG.debug('Reloading keys...')
tsk = create_task(self._load(), 'LoadTask')
self._loop.run_until_complete(tsk)
async def _load(self):
last_mod = self._read_mod_time()
LOG.debug(f'Keyring was last modified: {last_mod}')
if self._is_modified(last_mod):
LOG.debug('Keyring has been modified')
async with self._sema:
LOG.debug('About to re-load the keyring')
self.replace_keyring(self._load_keyring_from(self._path))
else:
LOG.debug('Keyring not modified recently, continuing')
self._last_mod = self._read_mod_time()
reload = load
def replace_keyring(self, keys: dict):
"""Overwrite previously stored key cache with KEYS."""
keys = _sanitize(keys)
LOG.info(f'Storing {len(keys)} keys')
self._keys = keys
def _read_mod_time(self) -> int:
# (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)
# 0 1 2 3 4 5 6 7 8 9
MTIME = 8
st = stat(self._path)
return st[MTIME]
def _is_modified(self, last_mod):
if self._last_mod is None:
LOG.debug('Keyring not loaded before')
return True
elif self._last_mod != last_mod:
LOG.debug('Keyring directory mtime changed')
return True
else:
LOG.debug('Keyring not modified ')
return False
def __repr__(self) -> str:
"""Return text representation of this keyring."""
return '<KeyRing path=%s last_mod=%d>' % (self._path, self._last_mod)
class KeyringModificationListener(FileSystemEventHandler):
"""A filesystem event listener that triggers key cache reload."""
def __init__(self, keyring: KeyRing):
"""Initialise a listener with a callback to be executed upon each change."""
self._keyring = keyring
def handle(self, event: FileSystemEvent):
"""Reload keys upon FS event."""
LOG.debug('FS event: %s, %s', event.event_type, event.src_path)
if 'pubring.kbx' in event.src_path:
LOG.info('Reloading %s on event: %s', self._keyring, event)
self._keyring.reload()
# All methods should do the same: reload the key cache.
# on_created = handle
# on_deleted = handle
on_modified = handle
def freeze_and_load_keys():
def freeze_and_load_keys() -> KeyCache:
"""Load and return keys.
Doesn't refresh the keys when they change on disk.
'"""
keyring_dir = conf.get_item('gpg', 'keyhome')
keyring = KeyRing(keyring_dir)
return run(keyring.freeze_identities())
"""
keyring = init_keyring()
return keyring.freeze_identities()

51
lacre/notify.py Normal file
View File

@ -0,0 +1,51 @@
"""Lacre notification sender"""
import logging
import lacre
import lacre.config as conf
# Read configuration from /etc/gpg-mailgate.conf
conf.load_config()
LOG = logging.getLogger(__name__)
def _load_file(name):
f = open(name)
data = f.read()
f.close()
return data
def _authenticate_maybe(smtp):
    """Connect and authenticate against the configured SMTP relay, if enabled."""
    if not conf.config_item_equals('smtp', 'enabled', 'true'):
        return
    relay_host = conf.get_item('smtp', 'host')
    relay_port = conf.get_item('smtp', 'port')
    LOG.debug(f"Connecting to {relay_host}:{relay_port}")
    smtp.connect(relay_host, relay_port)
    smtp.ehlo()
    if conf.config_item_equals('smtp', 'starttls', 'true'):
        LOG.debug("StartTLS enabled")
        smtp.starttls()
        smtp.ehlo()
    smtp.login(conf.get_item('smtp', 'username'), conf.get_item('smtp', 'password'))
def notify(mailsubject, messagefile, recipients = None):
    """Send notification email.

    MAILSUBJECT becomes the Subject header; MESSAGEFILE names a Markdown
    template under the configured cron mail_templates directory, attached
    both as plain text and as rendered HTML.

    NOTE(review): recipients defaults to None yet is passed straight to
    both the To header and smtp.sendmail — confirm callers always supply
    a recipient string.
    """
    # Template body is read from <mail_templates>/<messagefile>.
    mailbody = _load_file(conf.get_item('cron', 'mail_templates') + "/" + messagefile)
    msg = MIMEMultipart("alternative")
    msg["From"] = conf.get_item('cron', 'notification_email')
    msg["To"] = recipients
    msg["Subject"] = mailsubject
    # Attach plain text first, rendered Markdown second, so HTML-capable
    # clients prefer the HTML alternative.
    msg.attach(MIMEText(mailbody, 'plain'))
    msg.attach(MIMEText(markdown.markdown(mailbody), 'html'))
    # Only attempt delivery when a relay host and encrypted port are configured.
    if conf.config_item_set('relay', 'host') and conf.config_item_set('relay', 'enc_port'):
        (host, port) = conf.relay_params()
        smtp = smtplib.SMTP(host, port)
        _authenticate_maybe(smtp)
        smtp.sendmail(conf.get_item('cron', 'notification_email'), recipients, msg.as_string())
    else:
        LOG.info("Could not send mail due to wrong configuration")

View File

@ -53,13 +53,13 @@ class GpgRecipient(Recipient):
def __init__(self, left, right):
"""Initialise a tuple-like object that contains GPG recipient data."""
self._left = left
super().__init__(left)
self._right = right
def __getitem__(self, index):
"""Pretend this object is a tuple by returning an indexed tuple element."""
if index == 0:
return self._left
return self.email()
elif index == 1:
return self._right
else:
@ -67,11 +67,9 @@ class GpgRecipient(Recipient):
def __repr__(self):
"""Return textual representation of this GPG Recipient."""
return f"GpgRecipient({self._left!r}, {self._right!r})"
return f"GpgRecipient({self.email()!r}, {self._right!r})"
def email(self) -> str:
"""Return this recipient's email address."""
return self._left
__str__ = __repr__
def key(self):
"""Return this recipient's key ID."""

142
lacre/repositories.py Normal file
View File

@ -0,0 +1,142 @@
"""Lacre identity and key repositories."""
from sqlalchemy import create_engine, select, delete, and_, func
from sqlalchemy.exc import OperationalError
import logging
from lacre._keyringcommon import KeyRing, KeyCache
import lacre.dbschema as db
LOG = logging.getLogger(__name__)
# Internal state
_engine = None
def connect(url):
    """Return a fresh connection from the shared engine.

    The SQLAlchemy engine is created once per process on first use and
    cached in the module-level `_engine` variable; later calls reuse it.
    """
    global _engine
    if _engine is None:
        _engine = create_engine(url)
    return _engine.connect()
class IdentityRepository(KeyRing):
    """Database-backed identity store mapping email addresses to key fingerprints."""

    def __init__(self, /, connection=None, db_url=None):
        """Use an already-open CONNECTION, or connect lazily to DB_URL on first use."""
        self._identities = db.LACRE_IDENTITIES
        self._conn = connection
        self._url = db_url
        self._initialised = connection is not None

    def register_or_update(self, email, fprint):
        """Associate FPRINT with EMAIL, inserting a new row or updating the existing one."""
        assert email, "email is mandatory"
        assert fprint, "fprint is mandatory"

        if self._exists(email):
            self._update(email, fprint)
        else:
            self._insert(email, fprint)

    def _exists(self, email: str) -> bool:
        # True when EMAIL already has a fingerprint registered.
        self._ensure_connected()
        selq = select(self._identities.c.email).where(self._identities.c.email == email)
        emails = [e for e in self._conn.execute(selq)]
        # BUG FIX: the original asserted len(emails) == 1, which raised an
        # AssertionError for every not-yet-registered address. Zero rows
        # simply means "absent".
        assert len(emails) <= 1, f'Duplicate identity rows for {email}'
        return bool(emails)

    def _insert(self, email, fprint):
        # Insert a brand-new (email, fingerprint) row.
        self._ensure_connected()
        insq = self._identities.insert().values(email=email, fingerprint=fprint)
        LOG.debug('Registering identity %s: %s', email, insq)
        self._conn.execute(insq)

    def _update(self, email, fprint):
        # Replace the fingerprint stored for an existing email.
        self._ensure_connected()
        upq = self._identities.update() \
            .values(fingerprint=fprint) \
            .where(self._identities.c.email == email)
        LOG.debug('Updating identity %s: %s', email, upq)
        self._conn.execute(upq)

    def _ensure_connected(self):
        # Open the connection on first use and remember that we did, so we
        # do not open a fresh connection on every call (the original never
        # set the flag after connecting).
        if not self._initialised:
            LOG.debug('Connecting with %s', self._url)
            self._conn = connect(self._url)
            self._initialised = True

    def delete(self, email):
        """Remove any identity rows registered for EMAIL."""
        self._ensure_connected()
        delq = delete(self._identities).where(self._identities.c.email == email)
        LOG.debug('Deleting keys assigned to %s', email)
        # BUG FIX: the original built the DELETE statement but never executed
        # it, so key removal silently left the identity in place.
        self._conn.execute(delq)

    def freeze_identities(self) -> KeyCache:
        """Return a static, async-safe copy of the identity map, or None on DB error."""
        self._ensure_connected()
        try:
            return self._load_identities()
        except OperationalError:
            LOG.exception('Cannot retrieve identities')
            return None

    def _load_identities(self) -> KeyCache:
        # Materialise the whole fingerprint->email map into a KeyCache.
        all_identities = select(self._identities.c.fingerprint, self._identities.c.email)
        result = self._conn.execute(all_identities)
        LOG.debug('Retrieving all keys')
        return KeyCache({key_id: email for key_id, email in result})
class KeyConfirmationQueue:
    """Encapsulates access to lacre_keys table."""

    # Default number of items retrieved from the database.
    keys_read_max = 100

    def __init__(self, connection):
        """Initialise the queue over an already-open database CONNECTION."""
        self._keys = db.LACRE_KEYS
        self._conn = connection

    def fetch_keys(self, /, max_keys=None):
        """Retrieve at most MAX_KEYS (default: keys_read_max) unconfirmed pending keys."""
        max_keys = max_keys or self.keys_read_max

        selq = select(self._keys.c.publickey, self._keys.c.id, self._keys.c.email) \
            .where(and_(self._keys.c.status == db.ST_DEFAULT, self._keys.c.confirm == "")) \
            .limit(max_keys)
        LOG.debug('Retrieving keys to be processed: %s', selq)
        return self._conn.execute(selq)

    def count_keys(self):
        """Return the total number of rows in the keys table, or None on DB error."""
        selq = select(func.count(self._keys.c.id))
        LOG.debug('Counting all keys: %s', selq)
        try:
            c = [cnt for cnt in self._conn.execute(selq)]
            # Result is an iterable of tuples:
            return c[0][0]
        except OperationalError:
            LOG.exception('Cannot count keys')
            return None

    def fetch_keys_to_delete(self):
        """Retrieve (email, id) rows marked for deletion, up to keys_read_max rows."""
        seldel = select(self._keys.c.email, self._keys.c.id) \
            .where(self._keys.c.status == db.ST_TO_BE_DELETED) \
            .limit(self.keys_read_max)
        return self._conn.execute(seldel)

    def delete_keys(self, row_id, /, email=None):
        """Remove key rows from the database.

        With EMAIL given: delete every key registered for EMAIL except row ROW_ID.
        Without EMAIL: delete only the row identified by ROW_ID.
        """
        if email is not None:
            delq = delete(self._keys).where(and_(self._keys.c.email == email, self._keys.c.id != row_id))
        else:
            # BUG FIX: the original used `id != row_id` in this branch too,
            # deleting every key EXCEPT the one requested and wiping the rest
            # of the queue. Callers pass a single row to remove (the previous
            # webgate-cron code used `id == row_id` here).
            delq = delete(self._keys).where(self._keys.c.id == row_id)
        LOG.debug('Deleting public keys associated with confirmed email: %s', delq)
        self._conn.execute(delq)

    def mark_accepted(self, row_id):
        """Flag the key in row ROW_ID as successfully imported."""
        modq = self._keys.update().where(self._keys.c.id == row_id).values(status=db.ST_IMPORTED)
        LOG.debug("Key imported, updating key: %s", modq)
        self._conn.execute(modq)

27
lacre/stats.py Normal file
View File

@ -0,0 +1,27 @@
"""Insights into Lacre's inner workings."""
import time
import logging
class ExecutionTimeLogger:
    """Context-manager that measures how much time some operation took and logs it."""

    def __init__(self, message: str, logger: logging.Logger):
        """Remember the MESSAGE to log and the LOGGER to send it to."""
        self._message = message
        self._log = logger
        # CPU time at context entry; set by __enter__.
        self._start = None

    def __enter__(self):
        """Start the clock and return self so `with ... as timer:` is usable."""
        self._start = time.process_time()
        # BUG FIX: the original returned None, making the `as` target useless.
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        """Log elapsed CPU milliseconds; mention the exception type if one occurred."""
        end = time.process_time()
        elapsed = (end - self._start) * 1000
        if exc_type:
            self._log.error('%s took %d ms, raised exception %s', self._message, elapsed, exc_type)
        else:
            self._log.info('%s took %d ms', self._message, elapsed)
def time_logger(msg: str, logger: logging.Logger) -> 'ExecutionTimeLogger':
    """Return a context manager that logs to LOGGER how long the wrapped block took."""
    return ExecutionTimeLogger(msg, logger)

View File

@ -3,4 +3,3 @@ SQLAlchemy==1.4.32
Markdown==3.4.1
M2Crypto==0.38.0
requests==2.27.1
watchdog==2.1.9

View File

@ -39,6 +39,10 @@ def _build_config(config):
cp.add_section("gpg")
cp.set("gpg", "keyhome", config["gpg_keyhome"])
cp.add_section('database')
cp.set('database', 'enabled', 'yes')
cp.set('database', 'url', 'sqlite:///test/lacre.db')
cp.add_section("smime")
cp.set("smime", "cert_path", config["smime_certpath"])

View File

@ -10,6 +10,10 @@ keyhome = test/keyhome
[smime]
cert_path = test/certs
[daemon]
host = not_used
port = not_used
[database]
enabled = yes
url = sqlite:///test/lacre.db
@ -20,6 +24,7 @@ port = 2500
[cron]
send_email = no
mail_templates = not_used
[enc_keymap]
alice@disposlab = 1CD245308F0963D038E88357973CF4D9387C44D7

View File

@ -6,7 +6,6 @@ date_format = ISO
[gpg]
keyhome = test/keyhome
cache_refresh_minutes = 1
[smime]
cert_path = test/certs

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,8 +1,14 @@
import GnuPG
import logging
import unittest
class GnuPGUtilitiesTest(unittest.TestCase):
def setUp(self):
# Record GnuPG logs:
logging.basicConfig(filename='test/logs/unittest.log', level=logging.DEBUG,
format='%(asctime)s %(pathname)s:%(lineno)d %(levelname)s [%(funcName)s] %(message)s')
def test_build_default_command(self):
cmd = GnuPG._build_command("test/keyhome")
self.assertEqual(cmd, ["gpg", "--homedir", "test/keyhome"])
@ -37,18 +43,28 @@ class GnuPGUtilitiesTest(unittest.TestCase):
self.assertDictEqual(keys, known_identities)
def test_add_delete_key(self):
self.assertDictEqual(GnuPG.public_keys('/tmp'), { })
GnuPG.add_key('/tmp', self._load('test/keys/bob@disposlab.pub'))
self.assertDictEqual(GnuPG.public_keys('/tmp'), {
self.assertDictEqual(GnuPG.public_keys('test/keyhome.other'), { })
GnuPG.add_key('test/keyhome.other', self._load('test/keys/bob@disposlab.pub'))
self.assertDictEqual(GnuPG.public_keys('test/keyhome.other'), {
'19CF4B47ECC9C47AFA84D4BD96F39FDA0E31BB67': 'bob@disposlab',
})
GnuPG.delete_key('/tmp', 'bob@disposlab')
self.assertDictEqual(GnuPG.public_keys('/tmp'), { })
GnuPG.delete_key('test/keyhome.other', 'bob@disposlab')
self.assertDictEqual(GnuPG.public_keys('test/keyhome.other'), { })
def _load(self, filename):
with open(filename) as f:
return f.read()
def test_extract_fingerprint(self):
sample_in = '''fpr:::::::::1CD245308F0963D038E88357973CF4D9387C44D7:'''
fpr = GnuPG._extract_fingerprint(sample_in)
self.assertEqual(fpr, '1CD245308F0963D038E88357973CF4D9387C44D7')
def test_parse_uid_line(self):
sample_in = '''uid:e::::1624794010::C16E259AA1435947C6385B8160BC020B6C05EE18::alice@disposlab::::::::::0:'''
uid = GnuPG._parse_uid_line(sample_in)
self.assertEqual(uid, 'alice@disposlab')
def test_parse_statusfd_key_expired(self):
key_expired = b"""
[GNUPG:] KEYEXPIRED 1668272263
@ -56,10 +72,24 @@ class GnuPGUtilitiesTest(unittest.TestCase):
[GNUPG:] INV_RECP 0 name@domain
[GNUPG:] FAILURE encrypt 1
"""
result = GnuPG.parse_status(key_expired)
self.assertEqual(result['issue'], b'KEYEXPIRED')
self.assertEqual(result['recipient'], b'name@domain')
self.assertEqual(result['cause'], 'No specific reason given')
self.assertEqual(result['key'], b'XXXXXXXXXXXXX')
def test_parse_statusfd_key_absent(self):
non_specific_errors = b"""
[GNUPG:] INV_RECP 0 name@domain
[GNUPG:] FAILURE encrypt 1
"""
result = GnuPG.parse_status(non_specific_errors)
self.assertEqual(result['issue'], b'n/a')
self.assertEqual(result['recipient'], b'name@domain')
self.assertEqual(result['cause'], 'No specific reason given')
self.assertEqual(result['key'], b'n/a')
if __name__ == '__main__':

View File

@ -0,0 +1,14 @@
"""Lacre identity and key repository tests."""
import unittest
import lacre.repositories as r
import lacre.dbschema as s
class IdentityRepositoryTest(unittest.TestCase):
    """Exercises IdentityRepository against the generated test database."""

    def test_freeze_identities_returns_nonempty_cache(self):
        """A populated test DB should produce a truthy identity cache."""
        ir = r.IdentityRepository(db_url='sqlite:///test/lacre.db')
        identities = ir.freeze_identities()
        self.assertTrue(identities)

View File

@ -3,23 +3,27 @@ import sqlalchemy
from sqlalchemy.sql import insert
def define_db_schema():
meta = sqlalchemy.MetaData()
meta = sqlalchemy.MetaData()
gpgmw_keys = sqlalchemy.Table('gpgmw_keys', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('email', sqlalchemy.String(256)),
sqlalchemy.Column('publickey', sqlalchemy.Text),
sqlalchemy.Column('confirm', sqlalchemy.String(32)),
sqlalchemy.Column('status', sqlalchemy.Integer),
sqlalchemy.Column('time', sqlalchemy.DateTime))
lacre_keys = sqlalchemy.Table('lacre_keys', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('email', sqlalchemy.String(256)),
sqlalchemy.Column('publickey', sqlalchemy.Text),
sqlalchemy.Column('confirm', sqlalchemy.String(32)),
sqlalchemy.Column('status', sqlalchemy.Integer),
sqlalchemy.Column('time', sqlalchemy.DateTime))
return (meta, gpgmw_keys)
identities = sqlalchemy.Table('lacre_identities', meta,
sqlalchemy.Column('email', sqlalchemy.String(256), index=True),
sqlalchemy.Column('fingerprint', sqlalchemy.String(64), index=True))
return (meta, lacre_keys, identities)
if len(sys.argv) != 2:
print("ERROR: output database missing")
sys.exit(1)
print("ERROR: output database missing")
sys.exit(1)
(meta, gpgmw_keys) = define_db_schema()
(meta, lacre_keys, identities) = define_db_schema()
dbname = sys.argv[1]
test_db = sqlalchemy.create_engine(f"sqlite:///{dbname}")
@ -30,8 +34,8 @@ meta.create_all(test_db)
conn = test_db.connect()
# Populate the database with dummy data
conn.execute(gpgmw_keys.insert(), [
{"id": 1, "email": "alice@disposlab", "publickey": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\
conn.execute(lacre_keys.insert(), [
{"id": 1, "email": "alice@disposlab", "publickey": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\
\n\
mQGNBGDYY5oBDAC+HAVjA05jsIpHfQ2KQ9m2olo1Qnlk+dkjD+Gagxj1ACezyiGL\n\
cfZfoE/MJYLCH9yPcX1fUIAPwdAyfJKlvkVcz+MhEpgl3aP3NM2L2unSx3v9ZFwT\n\
@ -73,7 +77,7 @@ pw==\n\
=Tbwz\n\
-----END PGP PUBLIC KEY BLOCK-----\
", "status": 0, "confirm": "", "time": None},
{"id": 2, "email": "bob@disposlab", "publickey": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\
{"id": 2, "email": "bob@disposlab", "publickey": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\
\n\
mDMEYdTFkRYJKwYBBAHaRw8BAQdA2tgdP1pMt3cv3XAW7ov5AFn74mMZvyTksp9Q\n\
eO1PkpK0GkJvYiBGb29iYXIgPGJvYkBkaXNwb3NsYWI+iJYEExYIAD4WIQQZz0tH\n\
@ -87,5 +91,11 @@ OjjB6xRD0Q2FN+alsNGCtdutAs18AZ5l33RMzws=\n\
=wWoq\n\
-----END PGP PUBLIC KEY BLOCK-----\
", "status": 0, "confirm": "", "time": None},
{"id": 3, "email": "cecil@lacre.io", "publickey": "RUBBISH", "status": 0, "confirm": "", "time": None}
])
{"id": 3, "email": "cecil@lacre.io", "publickey": "RUBBISH", "status": 0, "confirm": "", "time": None}
])
conn.execute(identities.insert(), [
{'fingerprint': '1CD245308F0963D038E88357973CF4D9387C44D7', 'email': 'alice@disposlab'},
{'fingerprint': '19CF4B47ECC9C47AFA84D4BD96F39FDA0E31BB67', 'email': 'bob@disposlab'},
{'fingerprint': '530B1BB2D0CC7971648198BBA4774E507D3AF5BC', 'email': 'evan@disposlab'}
])

View File

@ -19,9 +19,7 @@
# along with gpg-mailgate source code. If not, see <http://www.gnu.org/licenses/>.
#
import GnuPG
import sqlalchemy
from sqlalchemy.sql import select, delete, and_
import smtplib
import markdown
from email.mime.text import MIMEText
@ -30,65 +28,8 @@ from email.mime.multipart import MIMEMultipart
import logging
import lacre
import lacre.config as conf
def _load_file(name):
f = open(name)
data = f.read()
f.close()
return data
def _authenticate_maybe(smtp):
if conf.config_item_equals('smtp', 'enabled', 'true'):
LOG.debug(f"Connecting to {conf.get_item('smtp', 'host')}:{conf.get_item('smtp', 'port')}")
smtp.connect(conf.get_item('smtp', 'host'), conf.get_item('smtp', 'port'))
smtp.ehlo()
if conf.config_item_equals('smtp', 'starttls', 'true'):
LOG.debug("StartTLS enabled")
smtp.starttls()
smtp.ehlo()
smtp.login(conf.get_item('smtp', 'username'), conf.get_item('smtp', 'password'))
def _send_msg(mailsubject, messagefile, recipients = None):
mailbody = _load_file(conf.get_item('cron', 'mail_templates') + "/" + messagefile)
msg = MIMEMultipart("alternative")
msg["From"] = conf.get_item('cron', 'notification_email')
msg["To"] = recipients
msg["Subject"] = mailsubject
msg.attach(MIMEText(mailbody, 'plain'))
msg.attach(MIMEText(markdown.markdown(mailbody), 'html'))
if conf.config_item_set('relay', 'host') and conf.config_item_set('relay', 'enc_port'):
relay = (conf.get_item('relay', 'host'), int(conf.get_item('relay', 'enc_port')))
smtp = smtplib.SMTP(relay[0], relay[1])
_authenticate_maybe(smtp)
smtp.sendmail(conf.get_item('cron', 'notification_email'), recipients, msg.as_string())
else:
LOG.info("Could not send mail due to wrong configuration")
def _setup_db_connection(url):
engine = sqlalchemy.create_engine(url)
return (engine, engine.connect())
def _define_db_schema():
meta = sqlalchemy.MetaData()
gpgmw_keys = sqlalchemy.Table('gpgmw_keys', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('email', sqlalchemy.String(256)),
sqlalchemy.Column('publickey', sqlalchemy.Text),
sqlalchemy.Column('confirm', sqlalchemy.String(32)),
sqlalchemy.Column('status', sqlalchemy.Integer),
sqlalchemy.Column('time', sqlalchemy.DateTime))
return (gpgmw_keys)
import lacre.dbschema as db
from lacre.notify import notify
# Read configuration from /etc/gpg-mailgate.conf
conf.load_config()
@ -96,59 +37,65 @@ conf.load_config()
lacre.init_logging(conf.get_item('logging', 'config'))
LOG = logging.getLogger('webgate-cron.py')
import GnuPG
from lacre.repositories import KeyConfirmationQueue, IdentityRepository, connect
if conf.config_item_equals('database', 'enabled', 'yes') and conf.config_item_set('database', 'url'):
(engine, conn) = _setup_db_connection(conf.get_item("database", "url"))
(gpgmw_keys) = _define_db_schema()
selq = select(gpgmw_keys.c.publickey, gpgmw_keys.c.id, gpgmw_keys.c.email)\
.where(and_(gpgmw_keys.c.status == 0, gpgmw_keys.c.confirm == ""))\
.limit(100)
LOG.debug(f"Retrieving keys to be processed: {selq}")
result_set = conn.execute(selq)
def _validate_config():
missing = conf.validate_config(additional=conf.CRON_REQUIRED)
if missing:
LOG.error('Missing config parameters: %s', missing)
exit(lacre.EX_CONFIG)
for key_id, row_id, email in result_set:
_validate_config()
if conf.flag_enabled('database', 'enabled') and conf.config_item_set('database', 'url'):
conn = connect(conf.get_item('database', 'url'))
identities = IdentityRepository(conn)
key_queue = KeyConfirmationQueue(conn)
key_dir = conf.get_item('gpg', 'keyhome')
LOG.debug('Using GnuPG with home directory in %s', key_dir)
result_set = key_queue.fetch_keys()
for armored_key, row_id, email in result_set:
# delete any other public keys associated with this confirmed email address
delq = delete(gpgmw_keys).where(and_(gpgmw_keys.c.email == email, gpgmw_keys.c.id != row_id))
LOG.debug(f"Deleting public keys associated with confirmed email: {delq}")
conn.execute(delq)
GnuPG.delete_key(conf.get_item('gpg', 'keyhome'), email)
LOG.info('Deleted key for <' + email + '> via import request')
key_queue.delete_keys(row_id, email=email)
identities.delete(email)
GnuPG.delete_key(key_dir, email)
LOG.info('Deleted key for <%s> via import request', email)
if key_id.strip(): # we have this so that user can submit blank key to remove any encryption
if GnuPG.confirm_key(key_id, email):
GnuPG.add_key(conf.get_item('gpg', 'keyhome'), key_id) # import the key to gpg
modq = gpgmw_keys.update().where(gpgmw_keys.c.id == row_id).values(status=1)
LOG.debug(f"Key imported, updating key: {modq}")
conn.execute(modq) # mark key as accepted
LOG.warning('Imported key from <' + email + '>')
if conf.config_item_equals('cron', 'send_email', 'yes'):
_send_msg("PGP key registration successful", "registrationSuccess.md", email)
if armored_key.strip(): # we have this so that user can submit blank key to remove any encryption
if GnuPG.confirm_key(armored_key, email):
# import the key to gpg
(fingerprint, _) = GnuPG.add_key(key_dir, armored_key)
key_queue.mark_accepted(row_id)
identities.register_or_update(email, fingerprint)
LOG.info('Imported key from <%s>', email)
if conf.flag_enabled('cron', 'send_email'):
notify("PGP key registration successful", "registrationSuccess.md", email)
else:
delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row_id)
LOG.debug(f"Cannot confirm key, deleting it: {delq}")
conn.execute(delq) # delete key
LOG.warning('Import confirmation failed for <' + email + '>')
if conf.config_item_equals('cron', 'send_email', 'yes'):
_send_msg("PGP key registration failed", "registrationError.md", email)
key_queue.delete_keys(row_id)
LOG.warning('Import confirmation failed for <%s>', email)
if conf.flag_enabled('cron', 'send_email'):
notify("PGP key registration failed", "registrationError.md", email)
else:
# delete key so we don't continue processing it
delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row_id)
LOG.debug(f"Deleting key: {delq}")
conn.execute(delq)
if conf.config_item_equals('cron', 'send_email', 'yes'):
_send_msg("PGP key deleted", "keyDeleted.md", email)
# delete keys
stat2q = select(gpgmw_keys.c.email, gpgmw_keys.c.id).where(gpgmw_keys.c.status == 2).limit(100)
stat2_result_set = conn.execute(stat2q)
key_queue.delete_keys(row_id)
if conf.flag_enabled('cron', 'send_email'):
notify("PGP key deleted", "keyDeleted.md", email)
stat2_result_set = key_queue.fetch_keys_to_delete()
for email, row_id in stat2_result_set:
GnuPG.delete_key(conf.get_item('gpg', 'keyhome'), email)
delq = delete(gpgmw_keys).where(gpgmw_keys.c.id == row_id)
LOG.debug(f"Deleting keys that have already been processed: {delq}")
conn.execute(delq)
LOG.info('Deleted key for <' + email + '>')
GnuPG.delete_key(key_dir, email)
key_queue.delete_keys(row_id)
LOG.info('Deleted key for <%s>', email)
else:
print("Warning: doing nothing since database settings are not configured!")
LOG.error("Warning: doing nothing since database settings are not configured!")