new import/export using marshmallow

master
Alexander Graf 4 years ago
parent 1c9abf6e48
commit 68caf50154

@ -4,7 +4,6 @@
import sys
import os
import socket
import json
import logging
import uuid
@ -20,7 +19,7 @@ from flask.cli import FlaskGroup, with_appcontext
from marshmallow.exceptions import ValidationError
from . import models
from .schemas import MailuSchema, get_schema
from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, RenderJSON, HIDDEN
db = models.db
@ -182,7 +181,7 @@ def user_import(localpart, domain_name, password_hash, hash_scheme = None):
db.session.commit()
# TODO: remove this deprecated function
# TODO: remove deprecated config_update function?
@mailu.command()
@click.option('-v', '--verbose')
@click.option('-d', '--delete-objects')
@ -324,17 +323,16 @@ def config_update(verbose=False, delete_objects=False):
db.session.commit()
SECTIONS = {'domains', 'relays', 'users', 'aliases'}
@mailu.command()
@click.option('-v', '--verbose', count=True, help='Increase verbosity')
@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors')
@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config')
@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made')
@click.option('-v', '--verbose', count=True, help='Increase verbosity.')
@click.option('-s', '--secrets', is_flag=True, help='Show secret attributes in messages.')
@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors.')
@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config.')
@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made.')
@click.argument('source', metavar='[FILENAME|-]', type=click.File(mode='r'), default=sys.stdin)
@with_appcontext
def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
def config_import(verbose=0, secrets=False, quiet=False, color=False, update=False, dry_run=False, source=None):
""" Import configuration as YAML or JSON from stdin or file
"""
@ -344,12 +342,19 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# 2 : also show secrets
# 3 : also show input data
# 4 : also show sql queries
# 5 : also show tracebacks
if quiet:
verbose = -1
color_cfg = {
'color': color or sys.stdout.isatty(),
'lexer': 'python',
'strip': True,
}
counter = Counter()
dumper = {}
logger = {}
def format_errors(store, path=None):
@ -387,19 +392,26 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
last = action
changes.append(f'{what}({count})')
else:
changes = 'no changes.'
changes = ['No changes.']
return chain(message, changes)
def log(action, target, message=None):
if message is None:
message = json.dumps(dumper[target.__class__].dump(target), ensure_ascii=False)
print(f'{action} {target.__table__}: {message}')
# TODO: convert nested OrderedDict to dict
# see: flask mailu config-import -nvv yaml/dump4.yaml
try:
message = dict(logger[target.__class__].dump(target))
except KeyError:
message = target
if not isinstance(message, str):
message = repr(message)
print(f'{action} {target.__table__}: {colorize(message, **color_cfg)}')
def listen_insert(mapper, connection, target): # pylint: disable=unused-argument
""" callback function to track import """
counter.update([('Added', target.__table__.name)])
counter.update([('Created', target.__table__.name)])
if verbose >= 1:
log('Added', target)
log('Created', target)
def listen_update(mapper, connection, target): # pylint: disable=unused-argument
""" callback function to track import """
@ -407,32 +419,32 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
changed = {}
inspection = sqlalchemy.inspect(target)
for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs:
if getattr(inspection.attrs, attr.key).history.has_changes():
if sqlalchemy.orm.attributes.get_history(target, attr.key)[2]:
before = sqlalchemy.orm.attributes.get_history(target, attr.key)[2].pop()
after = getattr(target, attr.key)
# only remember changed keys
if before != after and (before or after):
if verbose >= 1:
changed[str(attr.key)] = (before, after)
else:
break
history = getattr(inspection.attrs, attr.key).history
if history.has_changes() and history.deleted:
before = history.deleted[-1]
after = getattr(target, attr.key)
# TODO: remove special handling of "comment" after modifying model
if attr.key == 'comment' and not before and not after:
pass
# only remember changed keys
elif before != after:
if verbose >= 1:
changed[str(attr.key)] = (before, after)
else:
break
if verbose >= 1:
# use schema with dump_context to hide secrets and sort keys
primary = json.dumps(str(target), ensure_ascii=False)
dumped = get_schema(target)(only=changed.keys(), context=dump_context).dump(target)
dumped = get_schema(target)(only=changed.keys(), context=diff_context).dump(target)
for key, value in dumped.items():
before, after = changed[key]
if value == '<hidden>':
before = '<hidden>' if before else before
after = '<hidden>' if after else after
if value == HIDDEN:
before = HIDDEN if before else before
after = HIDDEN if after else after
else:
# TODO: use schema to "convert" before value?
# TODO: need to use schema to "convert" before value?
after = value
before = json.dumps(before, ensure_ascii=False)
after = json.dumps(after, ensure_ascii=False)
log('Modified', target, f'{primary} {key}: {before} -> {after}')
log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}')
if changed:
counter.update([('Modified', target.__table__.name)])
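# A standalone sketch of the attribute-history inspection used above, assuming
# SQLAlchemy 1.4+; the Thing model and in-memory SQLite engine are illustrative
# stand-ins, not part of Mailu.
import sqlalchemy
import sqlalchemy.orm

SketchBase = sqlalchemy.orm.declarative_base()

class Thing(SketchBase):
    __tablename__ = 'thing'
    name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
    comment = sqlalchemy.Column(sqlalchemy.String, default='')

engine = sqlalchemy.create_engine('sqlite://')
SketchBase.metadata.create_all(engine)
session = sqlalchemy.orm.Session(engine)

thing = Thing(name='a', comment='old')
session.add(thing)
session.flush()        # INSERT issued; attributes stay loaded
thing.comment = 'new'  # pending change, not flushed yet
history = sqlalchemy.inspect(thing).attrs.comment.history
assert history.has_changes()
assert history.deleted[-1] == 'old' and history.added[-1] == 'new'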
@ -443,47 +455,60 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
if verbose >= 1:
log('Deleted', target)
# this listener should not be necessary, when:
# dkim keys should be stored in database and it should be possible to store multiple
# keys per domain. the active key would be also stored on disk on commit.
# TODO: this listener will not be necessary once dkim keys are stored in the database
_dedupe_dkim = set()
def listen_dkim(session, flush_context): # pylint: disable=unused-argument
""" callback function to track import """
for target in session.identity_map.values():
if not isinstance(target, models.Domain):
# look at Domains originally loaded from db
if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path:
continue
primary = json.dumps(str(target), ensure_ascii=False)
before = target._dkim_key_on_disk
after = target._dkim_key
if before != after and (before or after):
if verbose >= 2:
if before != after:
if secrets:
before = before.decode('ascii', 'ignore')
after = after.decode('ascii', 'ignore')
else:
before = '<hidden>' if before else ''
after = '<hidden>' if after else ''
before = json.dumps(before, ensure_ascii=False)
after = json.dumps(after, ensure_ascii=False)
log('Modified', target, f'{primary} dkim_key: {before} -> {after}')
counter.update([('Modified', target.__table__.name)])
before = HIDDEN if before else ''
after = HIDDEN if after else ''
# "de-dupe" messages; this event is fired at every flush
if not (target, before, after) in _dedupe_dkim:
_dedupe_dkim.add((target, before, after))
counter.update([('Modified', target.__table__.name)])
if verbose >= 1:
log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
def track_serialize(self, item):
def track_serialize(obj, item):
""" callback function to track import """
log('Handling', self.opts.model, item)
# hide secrets
data = logger[obj.opts.model].hide(item)
if 'hash_password' in data:
data['password'] = HIDDEN
if 'fetches' in data:
for fetch in data['fetches']:
fetch['password'] = HIDDEN
log('Handling', obj.opts.model, data)
# configure contexts
dump_context = {
'secrets': verbose >= 2,
diff_context = {
'full': True,
'secrets': secrets,
}
log_context = {
'secrets': secrets,
}
load_context = {
'callback': track_serialize if verbose >= 3 else None,
'clear': not update,
'import': True,
'update': update,
'clear': not update,
'callback': track_serialize if verbose >= 2 else None,
}
# register listeners
for schema in get_schema():
model = schema.Meta.model
dumper[model] = schema(context=dump_context)
logger[model] = schema(context=log_context)
sqlalchemy.event.listen(model, 'after_insert', listen_insert)
sqlalchemy.event.listen(model, 'after_update', listen_update)
sqlalchemy.event.listen(model, 'after_delete', listen_delete)
@ -491,18 +516,24 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# special listener for dkim_key changes
sqlalchemy.event.listen(db.session, 'after_flush', listen_dkim)
if verbose >= 4:
if verbose >= 3:
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
try:
with models.db.session.no_autoflush:
config = MailuSchema(only=SECTIONS, context=load_context).loads(source)
config = MailuSchema(only=MailuSchema.Meta.order, context=load_context).loads(source)
except ValidationError as exc:
raise click.ClickException(format_errors(exc.messages)) from exc
except Exception as exc:
# (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
raise click.ClickException(f'[{exc.__class__.__name__}] {" ".join(str(exc).split())}') from exc
if verbose >= 5:
raise
else:
# (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
raise click.ClickException(
f'[{exc.__class__.__name__}] '
f'{" ".join(str(exc).split())}'
) from exc
# flush session to show/count all changes
if dry_run or verbose >= 1:
@ -510,53 +541,47 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# check for duplicate domain names
dup = set()
for fqdn in chain(db.session.query(models.Domain.name),
db.session.query(models.Alternative.name),
db.session.query(models.Relay.name)):
for fqdn in chain(
db.session.query(models.Domain.name),
db.session.query(models.Alternative.name),
db.session.query(models.Relay.name)
):
if fqdn in dup:
raise click.ClickException(f'[ValidationError] Duplicate domain name: {fqdn}')
dup.add(fqdn)
# TODO: implement special update "items"
# -pkey: which - remove item "which"
# -key: null or [] or {} - set key to default
# -pkey: null or [] or {} - remove all existing items in this list
# don't commit when running dry
if dry_run:
db.session.rollback()
if not quiet:
print(*format_changes('Dry run. Not committing changes.'))
# TODO: remove debug
print(MailuSchema().dumps(config))
db.session.rollback()
else:
db.session.commit()
if not quiet:
print(*format_changes('Committed changes.'))
print(*format_changes('Committing changes.'))
db.session.commit()
@mailu.command()
@click.option('-f', '--full', is_flag=True, help='Include attributes with default value')
@click.option('-f', '--full', is_flag=True, help='Include attributes with default value.')
@click.option('-s', '--secrets', is_flag=True,
help='Include secret attributes (dkim-key, passwords)')
@click.option('-d', '--dns', is_flag=True, help='Include dns records')
help='Include secret attributes (dkim-key, passwords).')
@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
@click.option('-d', '--dns', is_flag=True, help='Include dns records.')
@click.option('-o', '--output-file', 'output', default=sys.stdout, type=click.File(mode='w'),
help='save yaml to file')
@click.option('-j', '--json', 'as_json', is_flag=True, help='Dump in josn format')
@click.argument('sections', nargs=-1)
help='Save configuration to file.')
@click.option('-j', '--json', 'as_json', is_flag=True, help='Export configuration in json format.')
@click.argument('only', metavar='[FILTER]...', nargs=-1)
@with_appcontext
def config_export(full=False, secrets=False, dns=False, output=None, as_json=False, sections=None):
def config_export(full=False, secrets=False, color=False, dns=False, output=None, as_json=False, only=None):
""" Export configuration as YAML or JSON to stdout or file
"""
if sections:
for section in sections:
if section not in SECTIONS:
print(f'[ERROR] Unknown section: {section}')
raise click.exceptions.Exit(1)
sections = set(sections)
if only:
for spec in only:
if spec.split('.', 1)[0] not in MailuSchema.Meta.order:
raise click.ClickException(f'[ERROR] Unknown section: {spec}')
else:
sections = SECTIONS
only = MailuSchema.Meta.order
context = {
'full': full,
@ -564,13 +589,20 @@ def config_export(full=False, secrets=False, dns=False, output=None, as_json=Fal
'dns': dns,
}
if as_json:
schema = MailuSchema(only=sections, context=context)
schema.opts.render_module = json
print(schema.dumps(models.MailuConfig(), separators=(',',':')), file=output)
schema = MailuSchema(only=only, context=context)
color_cfg = {'color': color or output.isatty()}
else:
MailuSchema(only=sections, context=context).dumps(models.MailuConfig(), output)
if as_json:
schema.opts.render_module = RenderJSON
color_cfg['lexer'] = 'json'
color_cfg['strip'] = True
try:
print(colorize(schema.dumps(models.MailuConfig()), **color_cfg), file=output)
except ValueError as exc:
if spec := get_fieldspec(exc):
raise click.ClickException(f'[ERROR] Invalid filter: {spec}') from exc
raise
@mailu.command()

@ -19,6 +19,7 @@ import dns
from flask import current_app as app
from sqlalchemy.ext import declarative
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.inspection import inspect
from werkzeug.utils import cached_property
@ -121,6 +122,36 @@ class Base(db.Model):
updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
comment = db.Column(db.String(255), nullable=True, default='')
def __str__(self):
pkey = self.__table__.primary_key.columns.values()[0].name
if pkey == 'email':
# ugly hack for email declared attr. _email is not always up2date
return str(f'{self.localpart}@{self.domain_name}')
elif pkey in {'name', 'email'}:
return str(getattr(self, pkey, None))
else:
return self.__repr__()
return str(getattr(self, self.__table__.primary_key.columns.values()[0].name))
def __repr__(self):
return f'<{self.__class__.__name__} {str(self)!r}>'
def __eq__(self, other):
if isinstance(other, self.__class__):
pkey = self.__table__.primary_key.columns.values()[0].name
this = getattr(self, pkey, None)
other = getattr(other, pkey, None)
return this is not None and other is not None and str(this) == str(other)
else:
return NotImplemented
def __hash__(self):
primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
if primary is None:
return NotImplemented
else:
return hash(primary)
# Many-to-many association table for domain managers
managers = db.Table('manager', Base.metadata,
@ -261,19 +292,6 @@ class Domain(Base):
except dns.exception.DNSException:
return False
def __str__(self):
return str(self.name)
def __eq__(self, other):
if isinstance(other, self.__class__):
return str(self.name) == str(other.name)
else:
return NotImplemented
def __hash__(self):
return hash(str(self.name))
class Alternative(Base):
""" Alternative name for a served domain.
@ -287,9 +305,6 @@ class Alternative(Base):
domain = db.relationship(Domain,
backref=db.backref('alternatives', cascade='all, delete-orphan'))
def __str__(self):
return str(self.name)
class Relay(Base):
""" Relayed mail domain.
@ -302,9 +317,6 @@ class Relay(Base):
# TODO: String(80) is too small?
smtp = db.Column(db.String(80), nullable=True)
def __str__(self):
return str(self.name)
class Email(object):
""" Abstraction for an email address (localpart and domain).
@ -312,11 +324,11 @@ class Email(object):
# TODO: validate max. total length of address (<=254)
# TODO: String(80) is too large (>64)?
# TODO: String(80) is too large (64)?
localpart = db.Column(db.String(80), nullable=False)
@declarative.declared_attr
def domain_name(self):
def domain_name(cls):
""" the domain part of the email address """
return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
nullable=False, default=IdnaDomain)
@ -325,13 +337,33 @@ class Email(object):
# It is however very useful for quick lookups without joining tables,
# especially when the mail server is reading the database.
@declarative.declared_attr
def email(self):
def _email(cls):
""" the complete email address (localpart@domain) """
updater = lambda ctx: '{localpart}@{domain_name}'.format(**ctx.current_parameters)
return db.Column(IdnaEmail,
primary_key=True, nullable=False,
default=updater
)
def updater(ctx):
key = f'{cls.__tablename__}_email'
if key in ctx.current_parameters:
return ctx.current_parameters[key]
return '{localpart}@{domain_name}'.format(**ctx.current_parameters)
return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater)
# We need to keep email, localpart and domain_name in sync.
# But IMHO using email as primary key was not a good idea in the first place.
@hybrid_property
def email(self):
""" getter for email - gets _email """
return self._email
@email.setter
def email(self, value):
""" setter for email - sets _email, localpart and domain_name at once """
self.localpart, self.domain_name = value.rsplit('@', 1)
self._email = value
# hack for email declared attr - when _email is not updated yet
def __str__(self):
return str(f'{self.localpart}@{self.domain_name}')
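# Minimal sketch of the hybrid getter/setter pattern above in isolation,
# assuming SQLAlchemy 1.4+; the Address class is an illustrative stand-in
# for the Email mixin.
import sqlalchemy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import declarative_base

AddressBase = declarative_base()

class Address(AddressBase):
    __tablename__ = 'address'
    _email = sqlalchemy.Column('email', sqlalchemy.String, primary_key=True)
    localpart = sqlalchemy.Column(sqlalchemy.String, nullable=False)
    domain_name = sqlalchemy.Column(sqlalchemy.String, nullable=False)

    @hybrid_property
    def email(self):
        """ getter returns the stored column value """
        return self._email

    @email.setter
    def email(self, value):
        """ setter keeps localpart and domain_name in sync """
        self.localpart, self.domain_name = value.rsplit('@', 1)
        self._email = value

addr = Address()
addr.email = 'foo@example.com'
assert (addr.localpart, addr.domain_name, addr._email) == ('foo', 'example.com', 'foo@example.com')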
def sendmail(self, subject, body):
""" send an email to the address """
@ -391,9 +423,6 @@ class Email(object):
return None
def __str__(self):
return str(self.email)
class User(Base, Email):
""" A user is an email address that has a password to access a mailbox.
@ -435,12 +464,10 @@ class User(Base, Email):
is_active = True
is_anonymous = False
# TODO: remove unused user.get_id()
def get_id(self):
""" return users email address """
return self.email
# TODO: remove unused user.destination
@property
def destination(self):
""" returns comma separated string of destinations """
@ -471,17 +498,20 @@ class User(Base, Email):
'CRYPT': 'des_crypt',
}
def _get_password_context(self):
@classmethod
def get_password_context(cls):
""" Create password context for hashing and verification
"""
return passlib.context.CryptContext(
schemes=self.scheme_dict.values(),
default=self.scheme_dict[app.config['PASSWORD_SCHEME']],
schemes=cls.scheme_dict.values(),
default=cls.scheme_dict[app.config['PASSWORD_SCHEME']],
)
def check_password(self, plain):
""" Check password against stored hash
Update hash when default scheme has changed
"""
context = self._get_password_context()
context = self.get_password_context()
hashed = re.match('^({[^}]+})?(.*)$', self.password).group(2)
result = context.verify(plain, hashed)
if result and context.identify(hashed) != context.default_scheme():
@ -490,8 +520,6 @@ class User(Base, Email):
db.session.commit()
return result
# TODO: remove kwarg hash_scheme - there is no point in setting a scheme,
# when the next check updates the password to the default scheme.
def set_password(self, new, hash_scheme=None, raw=False):
""" Set password for user with specified encryption scheme
@new: plain text password to encrypt (or, if raw is True: the hash itself)
@ -500,7 +528,7 @@ class User(Base, Email):
if hash_scheme is None:
hash_scheme = app.config['PASSWORD_SCHEME']
if not raw:
new = self._get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
new = self.get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
self.password = f'{{{hash_scheme}}}{new}'
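# Standalone sketch of the passlib context behind get_password_context() and
# check_password(); the scheme list is shortened and purely illustrative.
import passlib.context

ctx = passlib.context.CryptContext(
    schemes=['sha512_crypt', 'sha256_crypt'],
    default='sha512_crypt',
)
hashed = ctx.hash('correct horse battery staple')
assert ctx.verify('correct horse battery staple', hashed)
# comparing identify() with default_scheme() is how check_password() decides
# whether a stored hash needs re-hashing with the current default scheme
assert ctx.identify(hashed) == ctx.default_scheme()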
def get_managed_domains(self):
@ -593,7 +621,7 @@ class Alias(Base, Email):
return None
# TODO: what about API tokens?
class Token(Base):
""" A token is an application password for a given user.
"""
@ -606,20 +634,19 @@ class Token(Base):
user = db.relationship(User,
backref=db.backref('tokens', cascade='all, delete-orphan'))
password = db.Column(db.String(255), nullable=False)
# TODO: String(80) is too large?
# TODO: String(255) is too large? (43 should be sufficient)
ip = db.Column(db.String(255))
def check_password(self, password):
""" verifies password against stored hash """
return passlib.hash.sha256_crypt.verify(password, self.password)
# TODO: use crypt context and default scheme from config?
def set_password(self, password):
""" sets password using sha256_crypt(rounds=1000) """
self.password = passlib.hash.sha256_crypt.using(rounds=1000).hash(password)
def __str__(self):
return str(self.comment or self.ip)
def __repr__(self):
return f'<Token #{self.id}: {self.comment or self.ip or self.password}>'
class Fetch(Base):
@ -644,8 +671,11 @@ class Fetch(Base):
last_check = db.Column(db.DateTime, nullable=True)
error = db.Column(db.String(1023), nullable=True)
def __str__(self):
return f'{self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}'
def __repr__(self):
return (
f'<Fetch #{self.id}: {self.protocol}{"s" if self.tls else ""}:'
f'//{self.username}@{self.host}:{self.port}>'
)
class MailuConfig:
@ -661,7 +691,7 @@ class MailuConfig:
def __init__(self, model : db.Model):
self.model = model
def __str__(self):
def __repr__(self):
return f'<{self.model.__name__}-Collection>'
@cached_property
@ -837,8 +867,8 @@ class MailuConfig:
if models is None or model in models:
db.session.query(model).delete()
domains = MailuCollection(Domain)
relays = MailuCollection(Relay)
users = MailuCollection(User)
aliases = MailuCollection(Alias)
domain = MailuCollection(Domain)
user = MailuCollection(User)
alias = MailuCollection(Alias)
relay = MailuCollection(Relay)
config = MailuCollection(Config)

@ -1,27 +1,66 @@
""" Mailu marshmallow fields and schema
"""
import re
from copy import deepcopy
from collections import OrderedDict
from textwrap import wrap
import re
import json
import yaml
import sqlalchemy
from marshmallow import pre_load, post_load, post_dump, fields, Schema
from marshmallow.utils import ensure_text_type
from marshmallow.exceptions import ValidationError
from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts
from flask_marshmallow import Marshmallow
from OpenSSL import crypto
try:
from pygments import highlight
from pygments.token import Token
from pygments.lexers import get_lexer_by_name
from pygments.lexers.data import YamlLexer
from pygments.formatters import get_formatter_by_name
except ModuleNotFoundError:
COLOR_SUPPORTED = False
else:
COLOR_SUPPORTED = True
from . import models, dkim
ma = Marshmallow()
# TODO: how and where to mark keys as "required" while unserializing (on commandline, in api)?
# - fields without default => required
# - fields which are the primary key => unchangeable when updating
# TODO: how and where to mark keys as "required" while unserializing in api?
# - when modifying, nothing is required (only the primary key, but this key is in the uri)
# - the primary key from post data must not differ from the key in the uri
# - when creating all fields without default or auto-increment are required
# TODO: what about deleting list items and pruning lists?
# - domain.alternatives, user.forward_destination, user.manager_of, aliases.destination
# TODO: validate everything!
### class for hidden values ###
class _Hidden:
def __bool__(self):
return False
def __copy__(self):
return self
def __deepcopy__(self, _):
return self
def __eq__(self, other):
return str(other) == '<hidden>'
def __repr__(self):
return '<hidden>'
__str__ = __repr__
HIDDEN = _Hidden()
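# Illustrative checks of how the sentinel behaves:
assert not HIDDEN                       # falsy, so emptiness checks still work
assert str(HIDDEN) == repr(HIDDEN) == '<hidden>'
assert HIDDEN == '<hidden>'             # compares equal to its string form
assert deepcopy(HIDDEN) is HIDDEN       # copying never multiplies the sentinel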
### map model to schema ###
@ -41,13 +80,90 @@ def mapped(cls):
return cls
### yaml render module ###
### helper functions ###
def get_fieldspec(exc):
""" walk traceback to extract spec of invalid field from marshmallow """
path = []
tbck = exc.__traceback__
while tbck:
if tbck.tb_frame.f_code.co_name == '_serialize':
if 'attr' in tbck.tb_frame.f_locals:
path.append(tbck.tb_frame.f_locals['attr'])
elif tbck.tb_frame.f_code.co_name == '_init_fields':
path = '.'.join(path)
spec = ', '.join([f'{path}.{key}' for key in tbck.tb_frame.f_locals['invalid_fields']])
return spec
tbck = tbck.tb_next
return None
def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
""" add ANSI color to data """
if color is None:
# autodetect colorize
color = COLOR_SUPPORTED
if not color:
# no color wanted
return data
if not COLOR_SUPPORTED:
# want color, but not supported
raise ValueError('Please install pygments to colorize output')
scheme = {
Token: ('', ''),
Token.Name.Tag: ('cyan', 'brightcyan'),
Token.Literal.Scalar: ('green', 'green'),
Token.Literal.String: ('green', 'green'),
Token.Keyword.Constant: ('magenta', 'brightmagenta'),
Token.Literal.Number: ('magenta', 'brightmagenta'),
Token.Error: ('red', 'brightred'),
Token.Name: ('red', 'brightred'),
Token.Operator: ('red', 'brightred'),
}
class MyYamlLexer(YamlLexer):
""" colorize yaml constants and integers """
def get_tokens(self, text, unfiltered=False):
for typ, value in super().get_tokens(text, unfiltered):
if typ is Token.Literal.Scalar.Plain:
if value in {'true', 'false', 'null'}:
typ = Token.Keyword.Constant
elif value == HIDDEN:
typ = Token.Error
else:
try:
int(value, 10)
except ValueError:
try:
float(value)
except ValueError:
pass
else:
typ = Token.Literal.Number.Float
else:
typ = Token.Literal.Number.Integer
yield typ, value
res = highlight(
data,
MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer),
get_formatter_by_name(formatter, colorscheme=scheme)
)
return res.rstrip('\n') if strip else res
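# Illustrative use of colorize(); requires pygments, and color=True forces ANSI
# output even when stdout is not a terminal.
sample = 'user:\n  - email: foo@example.com\n    enable_imap: true\n'
print(colorize(sample, color=True))                                   # YAML lexer
print(colorize('{"spam_threshold": 80}', lexer='json', color=True, strip=True))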
### render modules ###
# allow yaml module to dump OrderedDict
yaml.add_representer(
OrderedDict,
lambda cls, data: cls.represent_mapping('tag:yaml.org,2002:map', data.items())
)
yaml.add_representer(
_Hidden,
lambda cls, data: cls.represent_data(str(data))
)
class RenderYAML:
""" Marshmallow YAML Render Module
@ -67,19 +183,19 @@ class RenderYAML:
return super().increase_indent(flow, False)
@staticmethod
def _update_items(dict1, dict2):
""" sets missing keys in dict1 to values of dict2
def _augment(kwargs, defaults):
""" add default kv's to kwargs if missing
"""
for key, value in dict2.items():
if key not in dict1:
dict1[key] = value
for key, value in defaults.items():
if key not in kwargs:
kwargs[key] = value
_load_defaults = {}
@classmethod
def loads(cls, *args, **kwargs):
""" load yaml data from string
"""
cls._update_items(kwargs, cls._load_defaults)
cls._augment(kwargs, cls._load_defaults)
return yaml.safe_load(*args, **kwargs)
_dump_defaults = {
@ -90,13 +206,52 @@ class RenderYAML:
}
@classmethod
def dumps(cls, *args, **kwargs):
""" dump yaml data to string
""" dump data to yaml string
"""
cls._update_items(kwargs, cls._dump_defaults)
cls._augment(kwargs, cls._dump_defaults)
return yaml.dump(*args, **kwargs)
class JSONEncoder(json.JSONEncoder):
""" JSONEncoder supporting serialization of HIDDEN """
def default(self, o):
""" serialize HIDDEN """
if isinstance(o, _Hidden):
return str(o)
return json.JSONEncoder.default(self, o)
### field definitions ###
class RenderJSON:
""" Marshmallow JSON Render Module
"""
@staticmethod
def _augment(kwargs, defaults):
""" add default kv's to kwargs if missing
"""
for key, value in defaults.items():
if key not in kwargs:
kwargs[key] = value
_load_defaults = {}
@classmethod
def loads(cls, *args, **kwargs):
""" load json data from string
"""
cls._augment(kwargs, cls._load_defaults)
return json.loads(*args, **kwargs)
_dump_defaults = {
'separators': (',',':'),
'cls': JSONEncoder,
}
@classmethod
def dumps(cls, *args, **kwargs):
""" dump data to json string
"""
cls._augment(kwargs, cls._dump_defaults)
return json.dumps(*args, **kwargs)
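# Quick illustrative checks of the render modules defined above:
assert RenderJSON.loads('{"wildcard": false}') == {'wildcard': False}
assert RenderJSON.dumps({'active': True, 'password': HIDDEN}) == '{"active":true,"password":"<hidden>"}'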
### custom fields ###
class LazyStringField(fields.String):
""" Field that serializes a "false" value to the empty string
@ -107,9 +262,8 @@ class LazyStringField(fields.String):
"""
return value if value else ''
class CommaSeparatedListField(fields.Raw):
""" Field that deserializes a string containing comma-separated values to
""" Deserialize a string containing comma-separated values to
a list of strings
"""
@ -129,10 +283,15 @@ class CommaSeparatedListField(fields.Raw):
class DkimKeyField(fields.String):
""" Field that serializes a dkim key to a list of strings (lines) and
deserializes a string or list of strings.
""" Serialize a dkim key to a list of strings (lines) and
Deserialize a string or list of strings to a valid dkim key
"""
default_error_messages = {
"invalid": "Not a valid string or list.",
"invalid_utf8": "Not a valid utf-8 string or list.",
}
_clean_re = re.compile(
r'(^-----BEGIN (RSA )?PRIVATE KEY-----|-----END (RSA )?PRIVATE KEY-----$|\s+)',
flags=re.UNICODE
@ -156,11 +315,19 @@ class DkimKeyField(fields.String):
# convert list to str
if isinstance(value, list):
value = ''.join(value)
try:
value = ''.join([ensure_text_type(item) for item in value])
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
# only strings are allowed
if not isinstance(value, str):
raise ValidationError(f'invalid type {type(value).__name__!r}')
# only text is allowed
else:
if not isinstance(value, (str, bytes)):
raise self.make_error("invalid")
try:
value = ensure_text_type(value)
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
# clean value (remove whitespace and header/footer)
value = self._clean_re.sub('', value.strip())
@ -189,28 +356,53 @@ class DkimKeyField(fields.String):
else:
return value
### base definitions ###
def handle_email(data):
""" merge separate localpart and domain to email
class PasswordField(fields.Str):
""" Serialize a hashed password hash by stripping the obsolete {SCHEME}
Deserialize a plain password or hashed password into a hashed password
"""
localpart = 'localpart' in data
domain = 'domain' in data
_hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}
if 'email' in data:
if localpart or domain:
raise ValidationError('duplicate email and localpart/domain')
data['localpart'], data['domain_name'] = data['email'].rsplit('@', 1)
elif localpart and domain:
data['domain_name'] = data['domain']
del data['domain']
data['email'] = f'{data["localpart"]}@{data["domain_name"]}'
elif localpart or domain:
raise ValidationError('incomplete localpart/domain')
def _serialize(self, value, attr, obj, **kwargs):
""" strip obsolete {password-hash} when serializing """
# strip scheme spec if in database - it's obsolete
if value.startswith('{') and (end := value.find('}', 1)) >= 0:
if value[1:end] in self._hashes:
return value[end+1:]
return value
return data
def _deserialize(self, value, attr, data, **kwargs):
""" hashes plain password or checks hashed password
also strips obsolete {password-hash} when deserializing
"""
# when hashing is requested: use model instance to hash plain password
if data.get('hash_password'):
# hash password using model instance
inst = self.metadata['model']()
inst.set_password(value)
value = inst.password
del inst
# strip scheme spec when specified - it's obsolete
if value.startswith('{') and (end := value.find('}', 1)) >= 0:
if value[1:end] in self._hashes:
value = value[end+1:]
# check if algorithm is supported
inst = self.metadata['model'](password=value)
try:
# just check against empty string to see if hash is valid
inst.check_password('')
except ValueError as exc:
# ValueError: hash could not be identified
raise ValidationError(f'invalid password hash {value!r}') from exc
del inst
return value
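# The {SCHEME} prefix handling above in isolation (the hash value is made up):
stored = '{BLF-CRYPT}$2b$12$abcdefghijklmnopqrstuv'
if stored.startswith('{') and (end := stored.find('}', 1)) >= 0:
    if stored[1:end] in PasswordField._hashes:
        stored = stored[end + 1:]
assert stored == '$2b$12$abcdefghijklmnopqrstuv'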
### base schema ###
class BaseOpts(SQLAlchemyAutoSchemaOpts):
""" Option class with sqla session
@ -220,6 +412,8 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
meta.sqla_session = models.db.session
if not hasattr(meta, 'ordered'):
meta.ordered = True
if not hasattr(meta, 'sibling'):
meta.sibling = False
super(BaseOpts, self).__init__(meta, ordered=ordered)
class BaseSchema(ma.SQLAlchemyAutoSchema):
@ -231,10 +425,15 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
class Meta:
""" Schema config """
include_by_context = {}
exclude_by_value = {}
hide_by_context = {}
order = []
sibling = False
def __init__(self, *args, **kwargs):
# context?
# get context
context = kwargs.get('context', {})
flags = {key for key, value in context.items() if value is True}
@ -261,7 +460,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# exclude default values
if not context.get('full'):
for column in getattr(self.opts, 'model').__table__.columns:
for column in self.opts.model.__table__.columns:
if column.name not in exclude:
self._exclude_by_value.setdefault(column.name, []).append(
None if column.default is None else column.default.arg
@ -274,45 +473,239 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if not flags & set(need):
self._hide_by_context |= set(what)
# remember primary keys
self._primary = self.opts.model.__table__.primary_key.columns.values()[0].name
# initialize attribute order
if hasattr(self.Meta, 'order'):
# use user-defined order
self._order = list(reversed(getattr(self.Meta, 'order')))
self._order = list(reversed(self.Meta.order))
else:
# default order is: primary_key + other keys alphabetically
self._order = list(sorted(self.fields.keys()))
primary = self.opts.model.__table__.primary_key.columns.values()[0].name
if primary in self._order:
self._order.remove(primary)
if self._primary in self._order:
self._order.remove(self._primary)
self._order.reverse()
self._order.append(primary)
self._order.append(self._primary)
# move pre_load hook "_track_import" to the front
hooks = self._hooks[('pre_load', False)]
if '_track_import' in hooks:
hooks.remove('_track_import')
hooks.insert(0, '_track_import')
# and post_load hook "_fooo" to the end
hooks.remove('_track_import')
hooks.insert(0, '_track_import')
# move pre_load hook "_add_required" to the end
hooks.remove('_add_required')
hooks.append('_add_required')
# move post_load hook "_add_instance" to the end
hooks = self._hooks[('post_load', False)]
if '_add_instance' in hooks:
hooks.remove('_add_instance')
hooks.append('_add_instance')
hooks.remove('_add_instance')
hooks.append('_add_instance')
def hide(self, data):
""" helper method to hide input data for logging """
# always returns a copy of data
return {
key: HIDDEN if key in self._hide_by_context else deepcopy(value)
for key, value in data.items()
}
def _call_and_store(self, *args, **kwargs):
""" track curent parent field for pruning """
self.context['parent_field'] = kwargs['field_name']
return super()._call_and_store(*args, **kwargs)
# this is only needed to work around the declared attr "email" primary key in model
def get_instance(self, data):
""" lookup item by defined primary key instead of key(s) from model """
if self.transient:
return None
if keys := getattr(self.Meta, 'primary_keys', None):
filters = {key: data.get(key) for key in keys}
if None not in filters.values():
return self.session.query(self.opts.model).filter_by(**filters).first()
return super().get_instance(data)
@pre_load(pass_many=True)
def _patch_input(self, items, many, **kwargs): # pylint: disable=unused-argument
""" - flush sqla session before serializing a section when requested
(make sure all objects that could be referred to later are created)
- when in update mode: patch input data before deserialization
- handle "prune" and "delete" items
- replace values in keys starting with '-' with default
"""
# flush sqla session
if not self.Meta.sibling:
self.opts.sqla_session.flush()
# stop early when not updating
if not self.context.get('update'):
return items
# patch "delete", "prune" and "default"
want_prune = []
def patch(count, data, prune):
# don't allow __delete__ coming from input
if '__delete__' in data:
raise ValidationError('Unknown field.', f'{count}.__delete__')
# handle "prune list" and "delete item" (-pkey: none and -pkey: id)
for key in data:
if key.startswith('-'):
if key[1:] == self._primary:
# delete or prune
if data[key] is None:
# prune
prune.append(True)
return None
# mark item for deletion
return {key[1:]: data[key], '__delete__': True}
# handle "set to default value" (-key: none)
def set_default(key, value):
if not key.startswith('-'):
return (key, value)
key = key[1:]
if not key in self.opts.model.__table__.columns:
return (key, None)
if value is not None:
raise ValidationError(
'When resetting to default value must be null.',
f'{count}.{key}'
)
value = self.opts.model.__table__.columns[key].default
if value is None:
raise ValidationError(
'Field has no default value.',
f'{count}.{key}'
)
return (key, value.arg)
return dict([set_default(key, value) for key, value in data.items()])
# convert items to "delete" and filter "prune" item
items = [
item for item in [
patch(count, item, want_prune) for count, item in enumerate(items)
] if item
]
# prune: determine if existing items in db need to be added or marked for deletion
add_items = False
del_items = False
if self.Meta.sibling:
# parent prunes automatically
if not want_prune:
# no prune requested => add old items
add_items = True
else:
# parent does not prune automatically
if want_prune:
# prune requested => mark old items for deletion
del_items = True
if add_items or del_items:
existing = {item[self._primary] for item in items if self._primary in item}
for item in getattr(self.context['parent'], self.context['parent_field']):
key = getattr(item, self._primary)
if key not in existing:
if add_items:
items.append({self._primary: key})
else:
items.append({self._primary: key, '__delete__': True})
return items
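# Concretely, in update mode the patch step above maps (illustrative values):
#   {'-name': 'example.com'}  ->  {'name': 'example.com', '__delete__': True}  (delete item)
#   {'-name': None}           ->  item dropped and the surrounding list marked for pruning
#   {'-comment': None}        ->  {'comment': <column default>}  (reset attribute)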
@pre_load
def _track_import(self, data, many, **kwargs): # pylint: disable=unused-argument
# TODO: also handle reset, prune and delete in pre_load / post_load hooks!
# print('!!!', repr(data))
""" call callback function to track import
"""
# callback
if callback := self.context.get('callback'):
callback(self, data)
return data
@post_load
def _add_instance(self, item, many, **kwargs): # pylint: disable=unused-argument
self.opts.sqla_session.add(item)
@pre_load
def _add_required(self, data, many, **kwargs): # pylint: disable=unused-argument
""" when updating:
allow modification of existing items having required attributes
by loading existing value from db
"""
if not self.opts.load_instance or not self.context.get('update'):
return data
# stabilize import of auto-increment primary keys (not required),
# by matching import data to existing items and setting primary key
if not self._primary in data:
for item in getattr(self.context['parent'], self.context['parent_field']):
existing = self.dump(item, many=False)
this = existing.pop(self._primary)
if data == existing:
instance = item
data[self._primary] = this
break
# try to load instance
instance = self.instance or self.get_instance(data)
if instance is None:
if '__delete__' in data:
# deletion of non-existent item requested
raise ValidationError(
f'item not found: {data[self._primary]!r}',
field_name=f'?.{self._primary}',
)
else:
if self.context.get('update'):
# remember instance as parent for pruning siblings
if not self.Meta.sibling:
self.context['parent'] = instance
# delete instance when marked
if '__delete__' in data:
self.opts.sqla_session.delete(instance)
# add attributes required for validation from db
# TODO: this will cause validation errors if value from database does not validate
for attr_name, field_obj in self.load_fields.items():
if field_obj.required and attr_name not in data:
data[attr_name] = getattr(instance, attr_name)
return data
@post_load(pass_original=True)
def _add_instance(self, item, original, many, **kwargs): # pylint: disable=unused-argument
""" add new instances to sqla session """
if item in self.opts.sqla_session:
# item was modified
if 'hash_password' in original:
# stabilize import of passwords to be hashed,
# by not re-hashing an unchanged password
if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
if attr.history.has_changes() and attr.history.deleted:
try:
# reset password hash, if password was not changed
inst = type(item)(password=attr.history.deleted[-1])
if inst.check_password(original['password']):
item.password = inst.password
except ValueError:
# hash in db is invalid
pass
else:
del inst
else:
# new item
self.opts.sqla_session.add(item)
return item
@post_dump
def _hide_and_order(self, data, many, **kwargs): # pylint: disable=unused-argument
""" hide secrets and order output """
# order output
for key in self._order:
@ -325,15 +718,18 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if not self._exclude_by_value and not self._hide_by_context:
return data
# exclude items or hide values
# exclude or hide values
full = self.context.get('full')
return type(data)([
(key, '<hidden>' if key in self._hide_by_context else value)
(key, HIDDEN if key in self._hide_by_context else value)
for key, value in data.items()
if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key]
])
# TODO: remove LazyStringField and change model (IMHO comment should not be nullable)
# this field is used to mark items for deletion
mark_delete = fields.Boolean(data_key='__delete__', load_only=True)
# TODO: remove LazyStringField (when model was changed - IMHO comment should not be nullable)
comment = LazyStringField()
@ -381,6 +777,11 @@ class TokenSchema(BaseSchema):
model = models.Token
load_instance = True
sibling = True
password = PasswordField(required=True, metadata={'model': models.User})
hash_password = fields.Boolean(load_only=True, missing=False)
@mapped
class FetchSchema(BaseSchema):
@ -389,6 +790,8 @@ class FetchSchema(BaseSchema):
""" Schema config """
model = models.Fetch
load_instance = True
sibling = True
include_by_context = {
('full', 'import'): {'last_check', 'error'},
}
@ -405,52 +808,25 @@ class UserSchema(BaseSchema):
model = models.User
load_instance = True
include_relationships = True
exclude = ['domain', 'quota_bytes_used']
exclude = ['_email', 'domain', 'localpart', 'domain_name', 'quota_bytes_used']
primary_keys = ['email']
exclude_by_value = {
'forward_destination': [[]],
'tokens': [[]],
'fetches': [[]],
'manager_of': [[]],
'reply_enddate': ['2999-12-31'],
'reply_startdate': ['1900-01-01'],
'tokens': [[]],
'fetches': [[]],
'manager_of': [[]],
'reply_enddate': ['2999-12-31'],
'reply_startdate': ['1900-01-01'],
}
@pre_load
def _handle_email_and_password(self, data, many, **kwargs): # pylint: disable=unused-argument
data = handle_email(data)
if 'password' in data:
if 'password_hash' in data or 'hash_scheme' in data:
raise ValidationError('ambigous key password and password_hash/hash_scheme')
# check (hashed) password
password = data['password']
if password.startswith('{') and '}' in password:
scheme = password[1:password.index('}')]
if scheme not in self.Meta.model.scheme_dict:
raise ValidationError(f'invalid password scheme {scheme!r}')
else:
raise ValidationError(f'invalid hashed password {password!r}')
elif 'password_hash' in data and 'hash_scheme' in data:
if data['hash_scheme'] not in self.Meta.model.scheme_dict:
raise ValidationError(f'invalid password scheme {data["hash_scheme"]!r}')
data['password'] = f'{{{data["hash_scheme"]}}}{data["password_hash"]}'
del data['hash_scheme']
del data['password_hash']
return data
# TODO: verify password (should this be done in model?)
# scheme, hashed = re.match('^(?:{([^}]+)})?(.*)$', self.password).groups()
# if not scheme...
# ctx = passlib.context.CryptContext(schemes=[scheme], default=scheme)
# try:
# ctx.verify('', hashed)
# =>? ValueError: hash could not be identified
localpart = fields.Str(load_only=True)
domain_name = fields.Str(load_only=True)
email = fields.String(required=True)
tokens = fields.Nested(TokenSchema, many=True)
fetches = fields.Nested(FetchSchema, many=True)
password = PasswordField(required=True, metadata={'model': models.User})
hash_password = fields.Boolean(load_only=True, missing=False)
@mapped
class AliasSchema(BaseSchema):
@ -459,18 +835,14 @@ class AliasSchema(BaseSchema):
""" Schema config """
model = models.Alias
load_instance = True
exclude = ['domain']
exclude = ['_email', 'domain', 'localpart', 'domain_name']
primary_keys = ['email']
exclude_by_value = {
'destination': [[]],
}
@pre_load
def _handle_email(self, data, many, **kwargs): # pylint: disable=unused-argument
return handle_email(data)
localpart = fields.Str(load_only=True)
domain_name = fields.Str(load_only=True)
email = fields.String(required=True)
destination = CommaSeparatedListField()
@ -499,7 +871,7 @@ class MailuSchema(Schema):
render_module = RenderYAML
ordered = True
order = ['config', 'domains', 'users', 'aliases', 'relays']
order = ['domain', 'user', 'alias', 'relay'] # 'config'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -511,6 +883,14 @@ class MailuSchema(Schema):
except KeyError:
pass
def _call_and_store(self, *args, **kwargs):
""" track current parent and field for pruning """
self.context.update({
'parent': self.context.get('config'),
'parent_field': kwargs['field_name'],
})
return super()._call_and_store(*args, **kwargs)
@pre_load
def _clear_config(self, data, many, **kwargs): # pylint: disable=unused-argument
""" create config object in context if missing
@ -534,8 +914,8 @@ class MailuSchema(Schema):
return config
config = fields.Nested(ConfigSchema, many=True)
domains = fields.Nested(DomainSchema, many=True)
users = fields.Nested(UserSchema, many=True)
aliases = fields.Nested(AliasSchema, many=True)
relays = fields.Nested(RelaySchema, many=True)
domain = fields.Nested(DomainSchema, many=True)
user = fields.Nested(UserSchema, many=True)
alias = fields.Nested(AliasSchema, many=True)
relay = fields.Nested(RelaySchema, many=True)
# config = fields.Nested(ConfigSchema, many=True)

@ -10,8 +10,9 @@ Managing users and aliases can be done from CLI using commands:
* user
* user-import
* user-delete
* config-dump
* config-update
* config-export
* config-import
alias
-----
@ -69,104 +70,160 @@ user-delete
docker-compose exec admin flask mailu user-delete foo@example.net
config-dump
-----------
The purpose of this command is to dump domain-, relay-, alias- and user-configuration to a YAML template.
.. code-block:: bash
# docker-compose exec admin flask mailu config-dump --help
Usage: flask mailu config-dump [OPTIONS] [SECTIONS]...
dump configuration as YAML-formatted data to stdout
SECTIONS can be: domains, relays, users, aliases
Options:
-f, --full Include default attributes
-s, --secrets Include secrets (dkim-key, plain-text / not hashed)
-d, --dns Include dns records
--help Show this message and exit.
If you want to export secrets (dkim-keys, plain-text / not hashed) you have to add the ``--secrets`` option.
Only non-default attributes are dumped. If you want to dump all attributes use ``--full``.
To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
Unless you specify some sections all sections are dumped by default.
.. code-block:: bash
docker-compose exec admin flask mailu config-dump > mail-config.yml
docker-compose exec admin flask mailu config-dump --dns domains
config-update
-------------
The purpose of this command is for importing domain-, relay-, alias- and user-configuration in bulk and synchronizing DB entries with an external YAML template.
The sole purpose of this command is for importing users/aliases in bulk and synchronizing DB entries with external YAML template:
.. code-block:: bash
# docker-compose exec admin flask mailu config-update --help
cat mail-config.yml | docker-compose exec -T admin flask mailu config-update --delete-objects
Usage: flask mailu config-update [OPTIONS]
where mail-config.yml looks like:
sync configuration with data from YAML-formatted stdin
.. code-block:: bash
Options:
-v, --verbose Increase verbosity
-d, --delete-objects Remove objects not included in yaml
-n, --dry-run Perform a trial run with no changes made
--help Show this message and exit.
users:
- localpart: foo
domain: example.com
password_hash: klkjhumnzxcjkajahsdqweqqwr
hash_scheme: MD5-CRYPT
aliases:
- localpart: alias1
domain: example.com
destination: "user1@example.com,user2@example.com"
without ``--delete-object`` option config-update will only add/update new values but will *not* remove any entries missing in provided YAML input.
Users
-----
following are additional parameters that could be defined for users:
* comment
* quota_bytes
* global_admin
* enable_imap
* enable_pop
* forward_enabled
* forward_destination
* reply_enabled
* reply_subject
* reply_body
* displayed_name
* spam_enabled
* spam_threshold
Alias
-----
additional fields:
* wildcard
config-export
-------------
The purpose of this command is to export domain-, relay-, alias- and user-configuration in YAML or JSON format.
.. code-block:: bash
# docker-compose exec admin flask mailu config-export --help
Usage: flask mailu config-export [OPTIONS] [FILTER]...
Export configuration as YAML or JSON to stdout or file
Options:
-f, --full Include attributes with default value.
-s, --secrets Include secret attributes (dkim-key, passwords).
-c, --color Force colorized output.
-d, --dns Include dns records.
-o, --output-file FILENAME Save configuration to file.
-j, --json Export configuration in json format.
-?, -h, --help Show this message and exit.
Only non-default attributes are exported. If you want to export all attributes use ``--full``.
If you want to export plain-text secrets (dkim-keys, passwords) you have to add the ``--secrets`` option.
To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
By default all configuration objects are exported (domain, user, alias, relay). You can specify
filters to export only some objects or attributes (try: ``user`` or ``domain.name``).
.. code-block:: bash
docker-compose exec admin flask mailu config-export -o mail-config.yml
docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
config-import
-------------
The purpose of this command is to import domain-, relay-, alias- and user-configuration in bulk and to synchronize DB entries with an external YAML/JSON source.
.. code-block:: bash
# docker-compose exec admin flask mailu config-import --help
Usage: flask mailu config-import [OPTIONS] [FILENAME|-]
Import configuration as YAML or JSON from stdin or file
Options:
-v, --verbose Increase verbosity.
-s, --secrets Show secret attributes in messages.
-q, --quiet Quiet mode - only show errors.
-c, --color Force colorized output.
-u, --update Update mode - merge input with existing config.
-n, --dry-run Perform a trial run with no changes made.
-?, -h, --help Show this message and exit.
The current version of docker-compose exec does not pass stdin correctly, so you have to use docker exec instead:
.. code-block:: bash
docker exec -i $(docker-compose ps -q admin) flask mailu config-update -nvd < mail-config.yml
docker exec -i $(docker-compose ps -q admin) flask mailu config-import -nv < mail-config.yml
mail-config.yml looks like this:
mail-config.yml contains the configuration and looks like this:
.. code-block:: yaml
domains:
domain:
- name: example.com
alternatives:
- alternative.example.com
users:
user:
- email: foo@example.com
password_hash: klkjhumnzxcjkajahsdqweqqwr
password_hash: '$2b$12$...'
hash_scheme: MD5-CRYPT
aliases:
alias:
- email: alias1@example.com
destination: "user1@example.com,user2@example.com"
destination:
- user1@example.com
- user2@example.com
relays:
relay:
- name: relay.example.com
comment: test
smtp: mx.example.com
You can use ``--dry-run`` to test your YAML without committing any changes to the database.
With ``--verbose`` config-update will show exactly what it changes in the database.
Without ``--delete-object`` option config-update will only add/update changed values but will *not* remove any entries missing in provided YAML input.
config-import shows the number of created/modified/deleted objects after import.
To suppress all messages except error messages use ``--quiet``.
By adding the ``--verbose`` switch (one or more times) the import gets more detailed and shows exactly which attributes changed.
In all messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to show them.
If you want to test what would be done when importing use ``--dry-run``.
By default config-import replaces the whole configuration. You can use ``--update`` to merge the input with the existing configuration instead.
When updating you can add new objects and modify existing ones.
To delete an object, prefix its primary key with a dash: ``-key: value`` (for example, ``-name: example.com`` deletes the domain example.com).
To reset an attribute to its default, use ``-key: null`` (for example, ``-enable_imap: null``).
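An update run combining these forms, assuming the domain ``example.org``, the user ``foo@example.com`` and some aliases already exist, might look like this (all values are illustrative):
.. code-block:: yaml
domain:
  - -name: example.org          # delete the domain example.org
user:
  - email: foo@example.com
    -enable_imap: null          # reset enable_imap to its default value
alias:
  - -email: null                # remove all aliases not listed in this import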
This is a complete YAML template with all additional parameters that could be defined:
This is a complete YAML template with all additional parameters that can be defined:
.. code-block:: yaml
aliases:
- email: email@example.com
comment: ''
destination:
- address@example.com
wildcard: false
domains:
domain:
- name: example.com
alternatives:
- alternative.tld
@ -176,13 +233,8 @@ This is a complete YAML template with all additional parameters that could be de
max_quota_bytes: 0
max_users: -1
signup_enabled: false
relays:
- name: relay.example.com
comment: ''
smtp: mx.example.com
users:
user:
- email: postmaster@example.com
comment: ''
displayed_name: 'Postmaster'
@ -192,13 +244,16 @@ This is a complete YAML template with all additional parameters that could be de
fetches:
- id: 1
comment: 'test fetch'
username: fetch-user
error: null
host: other.example.com
keep: true
last_check: '2020-12-29T17:09:48.200179'
password: 'secret'
hash_password: true
port: 993
protocol: imap
tls: true
keep: true
username: fetch-user
forward_destination:
- address@remote.example.com
forward_enabled: true
@ -206,12 +261,13 @@ This is a complete YAML template with all additional parameters that could be de
global_admin: true
manager_of:
- example.com
password: '{BLF-CRYPT}$2b$12$...'
password: '$2b$12$...'
hash_password: true
quota_bytes: 1000000000
reply_body: ''
reply_enabled: false
reply_enddate: 2999-12-31
reply_startdate: 1900-01-01
reply_enddate: '2999-12-31'
reply_startdate: '1900-01-01'
reply_subject: ''
spam_enabled: true
spam_threshold: 80
@ -219,5 +275,16 @@ This is a complete YAML template with all additional parameters that could be de
- id: 1
comment: email-client
ip: 192.168.1.1
password: '$5$rounds=1000$...'
password: '$5$rounds=1$...'
aliases:
- email: email@example.com
comment: ''
destination:
- address@example.com
wildcard: false
relay:
- name: relay.example.com
comment: ''
smtp: mx.example.com

@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: forwardinguser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@ -10,7 +10,7 @@ EOF
python3 tests/forward_test.py
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: forwardinguser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"

@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases:
- localpart: alltheusers
domain: mailu.io
@ -7,6 +7,6 @@ EOF
python3 tests/alias_test.py
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases: []
EOF

@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: replyuser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@ -11,7 +11,7 @@ EOF
python3 tests/reply_test.py
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: replyuser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"

@ -1 +1 @@
Added cli command config-dump and enhanced config-update
Add cli commands config-import and config-export
