@@ -1,27 +1,66 @@
""" Mailu marshmallow fields and schema
"""

from copy import deepcopy
from collections import OrderedDict
from textwrap import wrap

import re
import json
import yaml

import sqlalchemy

from marshmallow import pre_load, post_load, post_dump, fields, Schema
from marshmallow.utils import ensure_text_type
from marshmallow.exceptions import ValidationError
from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts
from flask_marshmallow import Marshmallow

from OpenSSL import crypto

try:
    from pygments import highlight
    from pygments.token import Token
    from pygments.lexers import get_lexer_by_name
    from pygments.lexers.data import YamlLexer
    from pygments.formatters import get_formatter_by_name
except ModuleNotFoundError:
    COLOR_SUPPORTED = False
else:
    COLOR_SUPPORTED = True

from . import models, dkim


ma = Marshmallow()
# TODO: how and where to mark keys as "required" while deserializing in the api?
# - fields without default => required
# - when modifying, nothing is required (only the primary key, but this key is in the uri)
# - fields which are the primary key => unchangeable when updating
# - the primary key from post data must not differ from the key in the uri
# - when creating, all fields without default or auto-increment are required
# TODO: what about deleting list items and pruning lists?
# - domain.alternatives, user.forward_destination, user.manager_of, aliases.destination
# TODO: validate everything!

### class for hidden values ###

class _Hidden:
    def __bool__(self):
        return False
    def __copy__(self):
        return self
    def __deepcopy__(self, _):
        return self
    def __eq__(self, other):
        return str(other) == '<hidden>'
    def __repr__(self):
        return '<hidden>'
    __str__ = __repr__

HIDDEN = _Hidden()
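
# A short note on HIDDEN (illustrative expectations, nothing is executed here):
# it masks secrets in dumped output while still round-tripping cleanly, because it
# is falsy, survives deepcopy unchanged and compares equal to its rendered form.
#   bool(HIDDEN) is False
#   HIDDEN == '<hidden>'
#   deepcopy(HIDDEN) is HIDDEN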

### map model to schema ###

@@ -41,13 +80,90 @@ def mapped(cls):
    return cls


### helper functions ###

def get_fieldspec(exc):
    """ walk traceback to extract spec of invalid field from marshmallow """
    path = []
    tbck = exc.__traceback__
    while tbck:
        if tbck.tb_frame.f_code.co_name == '_serialize':
            if 'attr' in tbck.tb_frame.f_locals:
                path.append(tbck.tb_frame.f_locals['attr'])
        elif tbck.tb_frame.f_code.co_name == '_init_fields':
            path = '.'.join(path)
            spec = ', '.join([f'{path}.{key}' for key in tbck.tb_frame.f_locals['invalid_fields']])
            return spec
        tbck = tbck.tb_next
    return None

def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
    """ add ANSI color to data """

    if color is None:
        # autodetect colorize
        color = COLOR_SUPPORTED

    if not color:
        # no color wanted
        return data

    if not COLOR_SUPPORTED:
        # want color, but not supported
        raise ValueError('Please install pygments to colorize output')

    scheme = {
        Token: ('', ''),
        Token.Name.Tag: ('cyan', 'brightcyan'),
        Token.Literal.Scalar: ('green', 'green'),
        Token.Literal.String: ('green', 'green'),
        Token.Keyword.Constant: ('magenta', 'brightmagenta'),
        Token.Literal.Number: ('magenta', 'brightmagenta'),
        Token.Error: ('red', 'brightred'),
        Token.Name: ('red', 'brightred'),
        Token.Operator: ('red', 'brightred'),
    }

    class MyYamlLexer(YamlLexer):
        """ colorize yaml constants and integers """
        def get_tokens(self, text, unfiltered=False):
            for typ, value in super().get_tokens(text, unfiltered):
                if typ is Token.Literal.Scalar.Plain:
                    if value in {'true', 'false', 'null'}:
                        typ = Token.Keyword.Constant
                    elif value == HIDDEN:
                        typ = Token.Error
                    else:
                        try:
                            int(value, 10)
                        except ValueError:
                            try:
                                float(value)
                            except ValueError:
                                pass
                            else:
                                typ = Token.Literal.Number.Float
                        else:
                            typ = Token.Literal.Number.Integer
                yield typ, value

    res = highlight(
        data,
        MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer),
        get_formatter_by_name(formatter, colorscheme=scheme)
    )

    return res.rstrip('\n') if strip else res
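
# Usage sketch for colorize() (illustrative, assumes pygments is installed):
#   colorize('key: value\n')     -> the YAML text wrapped in ANSI escape sequences
#   colorize(data, color=False)  -> data unchanged
#   colorize(data, color=True)   -> raises ValueError when pygments is missing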

### render modules ###

# allow yaml module to dump OrderedDict
yaml.add_representer(
    OrderedDict,
    lambda cls, data: cls.represent_mapping('tag:yaml.org,2002:map', data.items())
)

yaml.add_representer(
    _Hidden,
    lambda cls, data: cls.represent_data(str(data))
)

class RenderYAML:
    """ Marshmallow YAML Render Module
@@ -67,19 +183,19 @@ class RenderYAML:
            return super().increase_indent(flow, False)

    @staticmethod
    def _augment(kwargs, defaults):
        """ add default kv's to kwargs if missing
        """
        for key, value in defaults.items():
            if key not in kwargs:
                kwargs[key] = value

    _load_defaults = {}
    @classmethod
    def loads(cls, *args, **kwargs):
        """ load yaml data from string
        """
        cls._augment(kwargs, cls._load_defaults)
        return yaml.safe_load(*args, **kwargs)

    _dump_defaults = {
@@ -90,13 +206,52 @@ class RenderYAML:
    }
    @classmethod
    def dumps(cls, *args, **kwargs):
        """ dump data to yaml string
        """
        cls._augment(kwargs, cls._dump_defaults)
        return yaml.dump(*args, **kwargs)


class JSONEncoder(json.JSONEncoder):
    """ JSONEncoder supporting serialization of HIDDEN """
    def default(self, o):
        """ serialize HIDDEN """
        if isinstance(o, _Hidden):
            return str(o)
        return json.JSONEncoder.default(self, o)


class RenderJSON:
    """ Marshmallow JSON Render Module
    """

    @staticmethod
    def _augment(kwargs, defaults):
        """ add default kv's to kwargs if missing
        """
        for key, value in defaults.items():
            if key not in kwargs:
                kwargs[key] = value

    _load_defaults = {}
    @classmethod
    def loads(cls, *args, **kwargs):
        """ load json data from string
        """
        cls._augment(kwargs, cls._load_defaults)
        return json.loads(*args, **kwargs)

    _dump_defaults = {
        'separators': (',', ':'),
        'cls': JSONEncoder,
    }
    @classmethod
    def dumps(cls, *args, **kwargs):
        """ dump data to json string
        """
        cls._augment(kwargs, cls._dump_defaults)
        return json.dumps(*args, **kwargs)
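
# Render sketch using the defaults above (illustrative, not executed at import time):
#   RenderJSON.dumps({'comment': HIDDEN})  ->  '{"comment":"<hidden>"}'
#   RenderYAML.dumps({'comment': HIDDEN})  ->  'comment: <hidden>\n'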

### custom fields ###

class LazyStringField(fields.String):
    """ Field that serializes a "false" value to the empty string
@@ -107,9 +262,8 @@ class LazyStringField(fields.String):
        """
        return value if value else ''


class CommaSeparatedListField(fields.Raw):
    """ Deserialize a string containing comma-separated values to
        a list of strings
    """
@@ -129,10 +283,15 @@ class CommaSeparatedListField(fields.Raw):

class DkimKeyField(fields.String):
    """ Serialize a dkim key to a list of strings (lines) and
        deserialize a string or list of strings to a valid dkim key
    """

    default_error_messages = {
        "invalid": "Not a valid string or list.",
        "invalid_utf8": "Not a valid utf-8 string or list.",
    }

    _clean_re = re.compile(
        r'(^-----BEGIN (RSA )?PRIVATE KEY-----|-----END (RSA )?PRIVATE KEY-----$|\s+)',
        flags=re.UNICODE
@@ -156,11 +315,19 @@ class DkimKeyField(fields.String):
        # convert list to str
        if isinstance(value, list):
            try:
                value = ''.join([ensure_text_type(item) for item in value])
            except UnicodeDecodeError as exc:
                raise self.make_error("invalid_utf8") from exc

        # only text is allowed
        else:
            if not isinstance(value, (str, bytes)):
                raise self.make_error("invalid")
            try:
                value = ensure_text_type(value)
            except UnicodeDecodeError as exc:
                raise self.make_error("invalid_utf8") from exc

        # clean value (remove whitespace and header/footer)
        value = self._clean_re.sub('', value.strip())
@@ -189,28 +356,53 @@ class DkimKeyField(fields.String):
        else:
            return value


class PasswordField(fields.Str):
    """ Serialize a hashed password by stripping the obsolete {SCHEME} prefix
        Deserialize a plain password or hashed password into a hashed password
    """

    _hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}

    def _serialize(self, value, attr, obj, **kwargs):
        """ strip obsolete {password-hash} when serializing """
        # strip scheme spec if in database - it's obsolete
        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
            if value[1:end] in self._hashes:
                return value[end+1:]
        return value

    def _deserialize(self, value, attr, data, **kwargs):
        """ hashes plain password or checks hashed password
            also strips obsolete {password-hash} when deserializing
        """

        # when hashing is requested: use model instance to hash plain password
        if data.get('hash_password'):
            # hash password using model instance
            inst = self.metadata['model']()
            inst.set_password(value)
            value = inst.password
            del inst

        # strip scheme spec when specified - it's obsolete
        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
            if value[1:end] in self._hashes:
                value = value[end+1:]

        # check if algorithm is supported
        inst = self.metadata['model'](password=value)
        try:
            # just check against empty string to see if the hash is valid
            inst.check_password('')
        except ValueError as exc:
            # ValueError: hash could not be identified
            raise ValidationError(f'invalid password hash {value!r}') from exc
        del inst

        return value
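
# Illustrative behaviour of PasswordField (example values are hypothetical):
#   dumping '{SHA512-CRYPT}$6$abc...' -> '$6$abc...' (obsolete scheme prefix stripped)
#   loading with hash_password=True hashes the plain text via the model's set_password();
#   otherwise the given value must already be a hash that check_password() can identify.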

### base schema ###

class BaseOpts(SQLAlchemyAutoSchemaOpts):
    """ Option class with sqla session
@@ -220,6 +412,8 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
            meta.sqla_session = models.db.session
        if not hasattr(meta, 'ordered'):
            meta.ordered = True
        if not hasattr(meta, 'sibling'):
            meta.sibling = False
        super(BaseOpts, self).__init__(meta, ordered=ordered)


class BaseSchema(ma.SQLAlchemyAutoSchema):
@@ -231,10 +425,15 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
    class Meta:
        """ Schema config """
        include_by_context = {}
        exclude_by_value = {}
        hide_by_context = {}
        order = []
        sibling = False

    def __init__(self, *args, **kwargs):

        # get context
        context = kwargs.get('context', {})
        flags = {key for key, value in context.items() if value is True}
@@ -261,7 +460,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
        # exclude default values
        if not context.get('full'):
            for column in self.opts.model.__table__.columns:
                if column.name not in exclude:
                    self._exclude_by_value.setdefault(column.name, []).append(
                        None if column.default is None else column.default.arg
@@ -274,45 +473,239 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
            if not flags & set(need):
                self._hide_by_context |= set(what)

        # remember primary keys
        self._primary = self.opts.model.__table__.primary_key.columns.values()[0].name

        # initialize attribute order
        if hasattr(self.Meta, 'order'):
            # use user-defined order
            self._order = list(reversed(self.Meta.order))
        else:
            # default order is: primary_key + other keys alphabetically
            self._order = list(sorted(self.fields.keys()))
            if self._primary in self._order:
                self._order.remove(self._primary)
            self._order.reverse()
            self._order.append(self._primary)

        # move pre_load hook "_track_import" to the front
        hooks = self._hooks[('pre_load', False)]
        hooks.remove('_track_import')
        hooks.insert(0, '_track_import')
        # and pre_load hook "_add_required" to the end
        hooks.remove('_add_required')
        hooks.append('_add_required')

        # move post_load hook "_add_instance" to the end
        hooks = self._hooks[('post_load', False)]
        hooks.remove('_add_instance')
        hooks.append('_add_instance')

    def hide(self, data):
        """ helper method to hide input data for logging """
        # always returns a copy of data
        return {
            key: HIDDEN if key in self._hide_by_context else deepcopy(value)
            for key, value in data.items()
        }

    def _call_and_store(self, *args, **kwargs):
        """ track current parent field for pruning """
        self.context['parent_field'] = kwargs['field_name']
        return super()._call_and_store(*args, **kwargs)

    # this is only needed to work around the declared attr "email" primary key in the model
    def get_instance(self, data):
        """ lookup item by defined primary key instead of key(s) from model """
        if self.transient:
            return None
        if keys := getattr(self.Meta, 'primary_keys', None):
            filters = {key: data.get(key) for key in keys}
            if None not in filters.values():
                return self.session.query(self.opts.model).filter_by(**filters).first()
        return super().get_instance(data)
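
    # Note: schemas that set Meta.primary_keys (e.g. ['email'] on UserSchema and
    # AliasSchema below) are looked up by those columns here instead of the model's
    # declared primary key.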

    @pre_load(pass_many=True)
    def _patch_input(self, items, many, **kwargs): # pylint: disable=unused-argument
        """ - flush sqla session before deserializing a section when requested
              (make sure all objects that could be referred to later are created)
            - when in update mode: patch input data before deserialization
              - handle "prune" and "delete" items
              - replace values in keys starting with '-' with default
        """

        # flush sqla session
        if not self.Meta.sibling:
            self.opts.sqla_session.flush()

        # stop early when not updating
        if not self.context.get('update'):
            return items

        # patch "delete", "prune" and "default"
        want_prune = []
        def patch(count, data, prune):

            # don't allow __delete__ coming from input
            if '__delete__' in data:
                raise ValidationError('Unknown field.', f'{count}.__delete__')

            # handle "prune list" and "delete item" (-pkey: none and -pkey: id)
            for key in data:
                if key.startswith('-'):
                    if key[1:] == self._primary:
                        # delete or prune
                        if data[key] is None:
                            # prune
                            prune.append(True)
                            return None
                        # mark item for deletion
                        return {key[1:]: data[key], '__delete__': True}

            # handle "set to default value" (-key: none)
            def set_default(key, value):
                if not key.startswith('-'):
                    return (key, value)
                key = key[1:]
                if key not in self.opts.model.__table__.columns:
                    return (key, None)
                if value is not None:
                    raise ValidationError(
                        'When resetting to the default, the value must be null.',
                        f'{count}.{key}'
                    )
                value = self.opts.model.__table__.columns[key].default
                if value is None:
                    raise ValidationError(
                        'Field has no default value.',
                        f'{count}.{key}'
                    )
                return (key, value.arg)

            return dict([set_default(key, value) for key, value in data.items()])

        # convert items to "delete" and filter "prune" items
        items = [
            item for item in [
                patch(count, item, want_prune) for count, item in enumerate(items)
            ] if item
        ]

        # prune: determine if existing items in db need to be added or marked for deletion
        add_items = False
        del_items = False
        if self.Meta.sibling:
            # parent prunes automatically
            if not want_prune:
                # no prune requested => add old items
                add_items = True
        else:
            # parent does not prune automatically
            if want_prune:
                # prune requested => mark old items for deletion
                del_items = True

        if add_items or del_items:
            existing = {item[self._primary] for item in items if self._primary in item}
            for item in getattr(self.context['parent'], self.context['parent_field']):
                key = getattr(item, self._primary)
                if key not in existing:
                    if add_items:
                        items.append({self._primary: key})
                    else:
                        items.append({self._primary: key, '__delete__': True})

        return items
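
    # Illustrative update-mode input handled by _patch_input (assuming a schema whose
    # primary key is "name"; the keys shown are examples only):
    #   - name: a          # update or create item "a"
    #   - -name: b         # delete item "b"
    #   - -name: null      # prune: remove existing items not listed in this import
    #   - name: c
    #     -comment: null   # reset column "comment" to its default value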

    @pre_load
    def _track_import(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ call callback function to track import
        """
        if callback := self.context.get('callback'):
            callback(self, data)
        return data

    @pre_load
    def _add_required(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ when updating:
            allow modification of existing items having required attributes
            by loading existing value from db
        """

        if not self.opts.load_instance or not self.context.get('update'):
            return data

        # stabilize import of auto-increment primary keys (not required),
        # by matching import data to existing items and setting primary key
        if self._primary not in data:
            for item in getattr(self.context['parent'], self.context['parent_field']):
                existing = self.dump(item, many=False)
                this = existing.pop(self._primary)
                if data == existing:
                    instance = item
                    data[self._primary] = this
                    break

        # try to load instance
        instance = self.instance or self.get_instance(data)
        if instance is None:

            if '__delete__' in data:
                # deletion of non-existent item requested
                raise ValidationError(
                    f'item not found: {data[self._primary]!r}',
                    field_name=f'?.{self._primary}',
                )

        else:
            if self.context.get('update'):
                # remember instance as parent for pruning siblings
                if not self.Meta.sibling:
                    self.context['parent'] = instance
                # delete instance when marked
                if '__delete__' in data:
                    self.opts.sqla_session.delete(instance)

            # add attributes required for validation from db
            # TODO: this will cause validation errors if value from database does not validate
            for attr_name, field_obj in self.load_fields.items():
                if field_obj.required and attr_name not in data:
                    data[attr_name] = getattr(instance, attr_name)

        return data
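
    # For example (illustrative): an update entry containing only {'email': 'a@b.c'}
    # for an existing user gets its required 'password' attribute copied from the
    # database row here, so required-field validation passes without re-supplying it.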

    @post_load(pass_original=True)
    def _add_instance(self, item, original, many, **kwargs): # pylint: disable=unused-argument
        """ add new instances to sqla session """

        if item in self.opts.sqla_session:
            # item was modified
            if 'hash_password' in original:
                # stabilize import of passwords to be hashed,
                # by not re-hashing an unchanged password
                if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
                    if attr.history.has_changes() and attr.history.deleted:
                        try:
                            # reset password hash, if password was not changed
                            inst = type(item)(password=attr.history.deleted[-1])
                            if inst.check_password(original['password']):
                                item.password = inst.password
                        except ValueError:
                            # hash in db is invalid
                            pass
                        else:
                            del inst
        else:
            # new item
            self.opts.sqla_session.add(item)

        return item

    @post_dump
    def _hide_and_order(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ hide secrets and order output """

        # order output
        for key in self._order:
@@ -325,15 +718,18 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
        if not self._exclude_by_value and not self._hide_by_context:
            return data

        # exclude or hide values
        full = self.context.get('full')
        return type(data)([
            (key, HIDDEN if key in self._hide_by_context else value)
            for key, value in data.items()
            if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key]
        ])

    # this field is used to mark items for deletion
    mark_delete = fields.Boolean(data_key='__delete__', load_only=True)

    # TODO: remove LazyStringField (when the model is changed - IMHO comment should not be nullable)
    comment = LazyStringField()
@@ -381,6 +777,11 @@ class TokenSchema(BaseSchema):
        model = models.Token
        load_instance = True
        sibling = True

    password = PasswordField(required=True, metadata={'model': models.User})
    hash_password = fields.Boolean(load_only=True, missing=False)
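
    # With "hash_password: true" in the input data the plain token password is hashed
    # on import via PasswordField; otherwise an already-hashed value is expected.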

@mapped
class FetchSchema(BaseSchema):
@@ -389,6 +790,8 @@ class FetchSchema(BaseSchema):
        """ Schema config """
        model = models.Fetch
        load_instance = True
        sibling = True
        include_by_context = {
            ('full', 'import'): {'last_check', 'error'},
        }
@@ -405,8 +808,9 @@ class UserSchema(BaseSchema):
        model = models.User
        load_instance = True
        include_relationships = True
        exclude = ['_email', 'domain', 'localpart', 'domain_name', 'quota_bytes_used']
        primary_keys = ['email']
        exclude_by_value = {
            'forward_destination': [[]],
            'tokens': [[]],
@@ -416,41 +820,13 @@ class UserSchema(BaseSchema):
            'reply_startdate': ['1900-01-01'],
        }

    email = fields.String(required=True)
    tokens = fields.Nested(TokenSchema, many=True)
    fetches = fields.Nested(FetchSchema, many=True)
    password = PasswordField(required=True, metadata={'model': models.User})
    hash_password = fields.Boolean(load_only=True, missing=False)


@mapped
class AliasSchema(BaseSchema):
@@ -459,18 +835,14 @@ class AliasSchema(BaseSchema):
        """ Schema config """
        model = models.Alias
        load_instance = True
        exclude = ['_email', 'domain', 'localpart', 'domain_name']
        primary_keys = ['email']
        exclude_by_value = {
            'destination': [[]],
        }

    email = fields.String(required=True)
    destination = CommaSeparatedListField()
@@ -499,7 +871,7 @@ class MailuSchema(Schema):
        render_module = RenderYAML
        ordered = True
        order = ['domain', 'user', 'alias', 'relay'] # 'config'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -511,6 +883,14 @@ class MailuSchema(Schema):
            except KeyError:
                pass

    def _call_and_store(self, *args, **kwargs):
        """ track current parent and field for pruning """
        self.context.update({
            'parent': self.context.get('config'),
            'parent_field': kwargs['field_name'],
        })
        return super()._call_and_store(*args, **kwargs)

    @pre_load
    def _clear_config(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ create config object in context if missing
@@ -534,8 +914,8 @@ class MailuSchema(Schema):
        return config

    domain = fields.Nested(DomainSchema, many=True)
    user = fields.Nested(UserSchema, many=True)
    alias = fields.Nested(AliasSchema, many=True)
    relay = fields.Nested(RelaySchema, many=True)
    # config = fields.Nested(ConfigSchema, many=True)
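
# Illustrative top-level YAML handled by MailuSchema (values and the "name" key are
# examples only; the section names are the fields defined above):
#   domain:
#     - name: example.com
#   user:
#     - email: admin@example.com
#   alias:
#     - email: postmaster@example.com
#       destination: admin@example.com
#   relay: []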