Merge branch 'master' into hibp

Alexander Graf 2 years ago
commit 4b179d9008

@@ -76,7 +76,7 @@ jobs:
- derive-variables
uses: ./.github/workflows/build_test_deploy.yml
with:
architecture: 'linux/arm64,linux/arm/v7'
architecture: 'linux/arm64/v8,linux/arm/v7'
mailu_version: ${{needs.derive-variables.outputs.MAILU_VERSION}}-arm
pinned_mailu_version: ${{needs.derive-variables.outputs.PINNED_MAILU_VERSION}}-arm
docker_org: ${{needs.derive-variables.outputs.DOCKER_ORG}}

@@ -89,9 +89,10 @@ jobs:
run: |
echo ${{ steps.targets.outputs.matrix }}
## This job buils the base image. The base image is used by all other images.
build-base-image:
name: Build base image
## This job builds the base image. The base image is used by all other images.
build-base-image-x64:
name: Build base image x64
if: inputs.architecture == 'linux/amd64'
needs:
- targets
runs-on: ubuntu-latest
@@ -118,6 +119,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
@@ -139,6 +145,62 @@ jobs:
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }},mode=max
*.platform=${{ inputs.architecture }}
## This job builds the base image. The base image is used by all other images.
build-base-image-arm:
name: Build base image arm
if: inputs.architecture != 'linux/amd64'
needs:
- targets
runs-on: self-hosted
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v3
- name: Retrieve global variables
shell: bash
run: |
echo "BRANCH=${{ inputs.branch }}" >> $GITHUB_ENV
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
string: ${{ github.repository_owner }}
- name: Build all docker images
env:
DOCKER_ORG: ghcr.io/${{ steps.string.outputs.lowercase }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
with:
files: ${{env.HCL_FILE}}
targets: base
load: false
push: false
set: |
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm,mode=max
*.platform=${{ inputs.architecture }}
# This job builds all the images. The build cache is stored in the github actions cache.
# In further jobs, this cache is used to quickly rebuild the images.
build:
@@ -146,7 +208,7 @@ jobs:
if: inputs.architecture == 'linux/amd64'
needs:
- targets
- build-base-image
- build-base-image-x64
strategy:
fail-fast: false
matrix:
@@ -175,6 +237,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
@@ -204,7 +271,7 @@ jobs:
if: inputs.architecture != 'linux/amd64'
needs:
- targets
- build-base-image
- build-base-image-arm
strategy:
fail-fast: false
matrix:
@@ -233,6 +300,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
@@ -248,12 +320,11 @@ jobs:
files: ${{env.HCL_FILE}}
targets: ${{ matrix.target }}
load: false
push: false
push: true
set: |
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache-arm
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache-arm,mode=max
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm,mode=max
*.platform=${{ inputs.architecture }}
# This job runs all the tests.
@@ -265,7 +336,6 @@ jobs:
contents: read
packages: read
needs:
- targets
- build
strategy:
fail-fast: false
@@ -320,7 +390,6 @@ jobs:
if: inputs.deploy == 'true'
runs-on: ubuntu-latest
needs:
- build
- tests
strategy:
fail-fast: false
@@ -357,6 +426,7 @@ jobs:
docker buildx imagetools create \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:${{ env.MAILU_VERSION }} \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:$pinned_mailu_version \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:latest \
ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:${{ env.MAILU_VERSION }}
deploy-arm:
@@ -401,6 +471,7 @@ jobs:
docker buildx imagetools create \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:${{ env.MAILU_VERSION }} \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:$pinned_mailu_version \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:latest \
ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:${{ env.MAILU_VERSION }}
#This job creates a tagged release. A tag is created for the pinned version x.y.z. The GH release refers to this tag.

@@ -2,3 +2,4 @@
lib64
.vscode
tags
dev

@@ -4,13 +4,19 @@ FROM node:16-alpine3.16
WORKDIR /work
COPY content/ ./
COPY package.json ./
COPY webpack.config.js ./
RUN set -euxo pipefail \
&& npm config set update-notifier false \
&& npm install --no-audit --no-fund \
&& sed -i 's/#007bff/#55a5d9/' node_modules/admin-lte/build/scss/_bootstrap-variables.scss \
&& for l in ca da de:de-DE en:en-GB es:es-ES eu fr:fr-FR he hu is it:it-IT ja nb_NO:no-NB nl:nl-NL pl pt:pt-PT ru sv:sv-SE zh; do \
; npm config set update-notifier false \
; npm install --no-audit --no-fund \
; sed -i 's/#007bff/#55a5d9/' node_modules/admin-lte/build/scss/_bootstrap-variables.scss \
; mkdir assets \
; for l in ca da de:de-DE en:en-GB es:es-ES eu fr:fr-FR he hu is it:it-IT ja nb_NO:no-NB nl:nl-NL pl pt:pt-PT ru sv:sv-SE zh; do \
cp node_modules/datatables.net-plugins/i18n/${l#*:}.json assets/${l%:*}.json; \
done \
&& node_modules/.bin/webpack-cli --color
done
COPY assets/ ./assets/
RUN set -euxo pipefail \
; node_modules/.bin/webpack-cli --color

[binary image changed: 4.8 KiB before, 4.8 KiB after]

@@ -44,8 +44,10 @@ def create_app_from_config(config):
# Initialize debugging tools
if app.config.get("DEBUG"):
debug.toolbar.init_app(app)
# TODO: add a specific configuration variable for profiling
# debug.profiler.init_app(app)
if app.config.get("DEBUG_PROFILER"):
debug.profiler.init_app(app)
if assets := app.config.get('DEBUG_ASSETS'):
app.static_folder = assets
# Inject the default variables in the Jinja parser
# TODO: move this to blueprints when needed

@@ -11,8 +11,9 @@ DEFAULT_CONFIG = {
'BABEL_DEFAULT_TIMEZONE': 'UTC',
'BOOTSTRAP_SERVE_LOCAL': True,
'RATELIMIT_STORAGE_URL': '',
'QUOTA_STORAGE_URL': '',
'DEBUG': False,
'DEBUG_PROFILER': False,
'DEBUG_ASSETS': '',
'DOMAIN_REGISTRATION': False,
'TEMPLATES_AUTO_RELOAD': True,
'MEMORY_SESSIONS': False,
@@ -149,8 +150,9 @@ class ConfigManager:
template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]
self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)
if not self.config.get('RATELIMIT_STORAGE_URL'):
self.config['RATELIMIT_STORAGE_URL'] = f'redis://{self.config["REDIS_ADDRESS"]}/2'
self.config['QUOTA_STORAGE_URL'] = f'redis://{self.config["REDIS_ADDRESS"]}/1'
self.config['SESSION_STORAGE_URL'] = f'redis://{self.config["REDIS_ADDRESS"]}/3'
self.config['SESSION_COOKIE_SAMESITE'] = 'Strict'
self.config['SESSION_COOKIE_HTTPONLY'] = True
@@ -159,9 +161,9 @@ class ConfigManager:
self.config['PERMANENT_SESSION_LIFETIME'] = int(self.config['PERMANENT_SESSION_LIFETIME'])
self.config['AUTH_RATELIMIT_IP_V4_MASK'] = int(self.config['AUTH_RATELIMIT_IP_V4_MASK'])
self.config['AUTH_RATELIMIT_IP_V6_MASK'] = int(self.config['AUTH_RATELIMIT_IP_V6_MASK'])
hostnames = [host.strip() for host in self.config['HOSTNAMES'].split(',')]
self.config['AUTH_RATELIMIT_EXEMPTION'] = set(ipaddress.ip_network(cidr, False) for cidr in (cidr.strip() for cidr in self.config['AUTH_RATELIMIT_EXEMPTION'].split(',')) if cidr)
self.config['MESSAGE_RATELIMIT_EXEMPTION'] = set([s for s in self.config['MESSAGE_RATELIMIT_EXEMPTION'].lower().replace(' ', '').split(',') if s])
hostnames = [host.strip() for host in self.config['HOSTNAMES'].split(',')]
self.config['HOSTNAMES'] = ','.join(hostnames)
self.config['HOSTNAME'] = hostnames[0]
self.config['DEFAULT_SPAM_THRESHOLD'] = int(self.config['DEFAULT_SPAM_THRESHOLD'])

@@ -1,12 +1,11 @@
from mailu import models, utils
from flask import current_app as app
from socrate import system
import re
import urllib
import ipaddress
import socket
import sqlalchemy.exc
import tenacity
SUPPORTED_AUTH_METHODS = ["none", "plain"]
@@ -146,13 +145,5 @@ def get_server(protocol, authenticated=False):
ipaddress.ip_address(hostname)
except:
# hostname is not an ip address - so we need to resolve it
hostname = resolve_hostname(hostname)
hostname = system.resolve_hostname(hostname)
return hostname, port
@tenacity.retry(stop=tenacity.stop_after_attempt(100),
wait=tenacity.wait_random(min=2, max=5))
def resolve_hostname(hostname):
""" This function uses system DNS to resolve a hostname.
It is capable of retrying in case the host is not immediately available
"""
return socket.gethostbyname(hostname)

@@ -5,6 +5,7 @@ from flask import current_app as app
import flask
import socket
import os
import sqlalchemy.exc
@internal.route("/dovecot/passdb/<path:user_email>")
def dovecot_passdb_dict(user_email):
@@ -19,12 +20,20 @@ def dovecot_passdb_dict(user_email):
"allow_nets": ",".join(allow_nets)
})
@internal.route("/dovecot/userdb/")
def dovecot_userdb_dict_list():
return flask.jsonify([
user[0] for user in models.User.query.filter(models.User.enabled.is_(True)).with_entities(models.User.email).all()
])
@internal.route("/dovecot/userdb/<path:user_email>")
def dovecot_userdb_dict(user_email):
user = models.User.query.get(user_email) or flask.abort(404)
try:
quota = models.User.query.filter(models.User.email==user_email).with_entities(models.User.quota_bytes).one_or_none() or flask.abort(404)
except sqlalchemy.exc.StatementError as exc:
flask.abort(404)
return flask.jsonify({
"quota_rule": "*:bytes={}".format(user.quota_bytes)
"quota_rule": f"*:bytes={quota[0]}"
})
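
For reference, a quick manual check of the new userdb routes could look like the sketch below. Only the route paths and the JSON shapes come from the code above; the host name and the /internal prefix are assumptions about how the admin service is usually reached.

# hypothetical host and URL prefix, adjust to your deployment
curl http://admin/internal/dovecot/userdb/
# -> ["admin@example.com", "user@example.com"]   (emails of all enabled users)
curl http://admin/internal/dovecot/userdb/admin@example.com
# -> {"quota_rule": "*:bytes=1000000000"}        (404 if the user does not exist)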

@@ -546,8 +546,8 @@ class User(Base, Email):
now = date.today()
return (
self.reply_enabled and
self.reply_startdate < now and
self.reply_enddate > now
self.reply_startdate <= now and
self.reply_enddate >= now
)
@property

@@ -0,0 +1,138 @@
#!/usr/bin/env bash
set -euo pipefail
### CONFIG
DEV_NAME="${DEV_NAME:-mailu-dev}"
DEV_DB="${DEV_DB:-}"
DEV_PROFILER="${DEV_PROFILER:-false}"
DEV_LISTEN="${DEV_LISTEN:-127.0.0.1:8080}"
[[ "${DEV_LISTEN}" == *:* ]] || DEV_LISTEN="127.0.0.1:${DEV_LISTEN}"
DEV_ADMIN="${DEV_ADMIN:-admin@example.com}"
DEV_PASSWORD="${DEV_PASSWORD:-letmein}"
### MAIN
[[ -n "${DEV_DB}" ]] && {
[[ -f "${DEV_DB}" ]] || {
echo "Sorry, can't find DEV_DB: '${DEV_DB}'"
exit 1
}
DEV_DB="$(realpath "${DEV_DB}")"
}
docker="$(command -v podman || command -v docker || echo false)"
[[ "${docker}" == "false" ]] && {
echo "Sorry, you'll need podman or docker to run this."
exit 1
}
tmp="$(mktemp -d)"
[[ -n "${tmp}" && -d "${tmp}" ]] || {
echo "Sorry, can't create temporary folder."
exit 1
}
trap "rm -rf '${tmp}'" INT TERM EXIT
admin="$(realpath "$(pwd)/${0%/*}")"
base="${admin}/../base"
assets="${admin}/assets"
cd "${tmp}"
# base
cp "${base}"/requirements-* .
cp -r "${base}"/libs .
sed -E '/^#/d;s:^FROM system$:FROM system AS base:' "${base}/Dockerfile" >Dockerfile
# assets
cp "${assets}/package.json" .
cp -r "${assets}/assets/" .
awk '/new compress/{f=1}!f{print}/}),/{f=0}' <"${assets}/webpack.config.js" >webpack.config.js
sed -E '/^#/d;s:^(FROM [^ ]+$):\1 AS assets:' "${assets}/Dockerfile" >>Dockerfile
# admin
sed -E '/^#/d;/^(COPY|EXPOSE|HEALTHCHECK|VOLUME|CMD) /d; s:^(.* )[^ ]*pybabel[^\\]*(.*):\1true \2:' "${admin}/Dockerfile" >>Dockerfile
# development
cat >>Dockerfile <<EOF
COPY --from=assets /work/static/ ./static/
RUN set -euxo pipefail \
; mkdir /data \
; ln -s /app/audit.py / \
; ln -s /app/start.py /
ENV \
FLASK_ENV="development" \
MEMORY_SESSIONS="true" \
RATELIMIT_STORAGE_URL="memory://" \
SESSION_COOKIE_SECURE="false" \
\
DEBUG="true" \
DEBUG_PROFILER="${DEV_PROFILER}" \
DEBUG_ASSETS="/app/static" \
DEBUG_TB_ENABLED="true" \
\
IMAP_ADDRESS="127.0.0.1" \
POP3_ADDRESS="127.0.0.1" \
AUTHSMTP_ADDRESS="127.0.0.1" \
SMTP_ADDRESS="127.0.0.1" \
REDIS_ADDRESS="127.0.0.1" \
WEBMAIL_ADDRESS="127.0.0.1"
CMD ["/bin/bash", "-c", "flask db upgrade &>/dev/null && flask mailu admin '${DEV_ADMIN/@*}' '${DEV_ADMIN#*@}' '${DEV_PASSWORD}' --mode ifmissing >/dev/null && flask run --host=0.0.0.0 --port=8080"]
EOF
# build
chmod -R u+rwX,go+rX .
"${docker}" build --tag "${DEV_NAME}:latest" .
# gather volumes to map into container
volumes=()
[[ -n "${DEV_DB}" ]] && volumes+=( --volume "${DEV_DB}:/data/main.db" )
for vol in audit.py start.py mailu/ migrations/; do
volumes+=( --volume "${admin}/${vol}:/app/${vol}" )
done
for file in "${assets}/assets"/*; do
[[ ! -f "${file}" || "${file}" == */vendor.js ]] && continue
volumes+=( --volume "${file}:/app/static/${file/*\//}" )
done
# show configuration
cat <<EOF
=============================================================================
The "${DEV_NAME}" container was built using this configuration:
DEV_NAME="${DEV_NAME}"
DEV_DB="${DEV_DB}"
DEV_PROFILER="${DEV_PROFILER}"
DEV_LISTEN="${DEV_LISTEN}"
DEV_ADMIN="${DEV_ADMIN}"
DEV_PASSWORD="${DEV_PASSWORD}"
=============================================================================
=============================================================================
You can start the container later using this commandline:
${docker/*\/} run --rm -it --name "${DEV_NAME}" --publish ${DEV_LISTEN}:8080$(printf " %q" "${volumes[@]}") "${DEV_NAME}"
=============================================================================
=============================================================================
The Mailu UI can be found here: http://${DEV_LISTEN}/sso/login
EOF
[[ -z "${DEV_DB}" ]] && echo "You can log in with user ${DEV_ADMIN} and password ${DEV_PASSWORD}"
cat <<EOF
=============================================================================
Starting mailu dev environment...
EOF
# run
"${docker}" run --rm -it --name "${DEV_NAME}" --publish "${DEV_LISTEN}:8080" "${volumes[@]}" "${DEV_NAME}"

@@ -22,7 +22,7 @@ CMD /bin/bash
# build virtual env (intermediate)
FROM system as build
ARG MAILU_ENV=prod
ARG MAILU_DEPS=prod
ENV VIRTUAL_ENV=/app/venv
@@ -32,21 +32,30 @@ RUN set -euxo pipefail \
; apk add --no-cache py3-pip \
; python3 -m venv ${VIRTUAL_ENV} \
; ${VIRTUAL_ENV}/bin/pip install --no-cache-dir -r requirements-build.txt \
; apk del -r py3-pip
; apk del -r py3-pip \
; rm -f /tmp/*.pem
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
COPY requirements-${MAILU_ENV}.txt ./
COPY requirements-${MAILU_DEPS}.txt ./
COPY libs/ libs/
RUN set -euxo pipefail \
; machine="$(uname -m)"; deps="" \
; [[ "${machine}" == arm* || "${machine}" == aarch64 ]] && deps="${deps} build-base gcc libffi-dev python3-dev" \
; [[ "${machine}" == armv7* ]] && deps="${deps} cargo libressl-dev mariadb-connector-c-dev postgresql-dev" \
; [[ "${deps}" ]] && apk add --virtual .build-deps ${deps} \
; pip install -r requirements-${MAILU_ENV}.txt \
; apk -e info -q .build-deps && apk del -r .build-deps \
; rm -rf /root/.cache /tmp/*.pem
; pip install -r requirements-${MAILU_DEPS}.txt || \
{ \
machine="$(uname -m)" \
; deps="build-base gcc libffi-dev python3-dev" \
; [[ "${machine}" != x86_64 ]] && \
deps="${deps} cargo git libressl-dev mariadb-connector-c-dev postgresql-dev" \
; apk add --virtual .build-deps ${deps} \
; [[ "${machine}" == armv7* ]] && \
mkdir -p /root/.cargo/registry/index && \
git clone --bare https://github.com/rust-lang/crates.io-index.git /root/.cargo/registry/index/github.com-1285ae84e5963aae \
; pip install -r requirements-${MAILU_DEPS}.txt \
; apk del -r .build-deps \
; rm -rf /root/.cargo /tmp/*.pem \
; } \
; rm -rf /root/.cache
# base mailu image

@@ -40,6 +40,7 @@ class DictProtocol(asyncio.Protocol):
def connection_made(self, transport):
logging.info('Connect {}'.format(transport.get_extra_info('peername')))
self.transport = transport
self.transport_lock = asyncio.Lock()
def data_received(self, data):
logging.debug("Received {}".format(data))
@@ -77,10 +78,11 @@ class DictProtocol(asyncio.Protocol):
logging.debug("Client {}.{} type {}, user {}, dict {}".format(
self.major, self.minor, self.value_type, self.user, dict_name))
async def process_lookup(self, key, user=None):
async def process_lookup(self, key, user=None, is_iter=False):
""" Process a dict lookup message
"""
logging.debug("Looking up {} for {}".format(key, user))
orig_key = key
# Priv and shared keys are handled slightly differently
key_type, key = key.decode("utf8").split("/", 1)
try:
@@ -93,9 +95,38 @@ class DictProtocol(asyncio.Protocol):
response = result
else:
response = json.dumps(result).encode("ascii")
return self.reply(b"O", response)
return await (self.reply(b"O", orig_key, response) if is_iter else self.reply(b"O", response))
except KeyError:
return self.reply(b"N")
return await self.reply(b"N")
async def process_iterate(self, flags, max_rows, path, user=None):
""" Process an iterate command
"""
logging.debug("Iterate flags {} max_rows {} on {} for {}".format(flags, max_rows, path, user))
# Priv and shared keys are handled slightly differently
key_type, key = path.decode("utf8").split("/", 1)
max_rows = int(max_rows.decode("utf-8"))
flags = int(flags.decode("utf-8"))
if flags != 0: # not implemented
return await self.reply(b"F")
rows = []
try:
result = await self.dict.iter(key)
logging.debug("Found {} entries: {}".format(len(result), result))
for i,k in enumerate(result):
if max_rows > 0 and i >= max_rows:
break
rows.append(self.process_lookup((path.decode("utf8")+k).encode("utf8"), user, is_iter=True))
await asyncio.gather(*rows)
async with self.transport_lock:
self.transport.write(b"\n") # ITER_FINISHED
return
except KeyError:
return await self.reply(b"F")
except Exception as e:
for task in rows:
task.cancel()
raise e
def process_begin(self, transaction_id, user=None):
""" Process a dict begin message
@@ -124,9 +155,10 @@ class DictProtocol(asyncio.Protocol):
# Remove stored transaction
del self.transactions[transaction_id]
del self.transactions_user[transaction_id]
return self.reply(b"O", transaction_id)
return await self.reply(b"O", transaction_id)
def reply(self, command, *args):
async def reply(self, command, *args):
async with self.transport_lock:
logging.debug("Replying {} with {}".format(command, args))
self.transport.write(command)
self.transport.write(b"\t".join(map(tabescape, args)))
@@ -141,6 +173,7 @@ class DictProtocol(asyncio.Protocol):
COMMANDS = {
ord("H"): process_hello,
ord("L"): process_lookup,
ord("I"): process_iterate,
ord("B"): process_begin,
ord("C"): process_commit,
ord("S"): process_set

@@ -1,5 +1,6 @@
uri = proxy:/tmp/podop.socket:auth
iterate_disable = yes
iterate_prefix = 'userdb/'
default_pass_scheme = plain
password_key = passdb/%u
user_key = userdb/%u
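
With userdb iteration now wired through the dict proxy (the iterate_prefix above plus the new "I" command handled by Podop), Dovecot tools that enumerate all accounts should work. A minimal sketch, assuming a stock doveadm inside the dovecot container:

doveadm user '*'        # list every account via userdb iteration
doveadm -A quota get    # run a command once per user; this is the "doveadm -A" case from the changelog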

@@ -313,6 +313,48 @@ If git opens an editor for a commit message just save and exit as-is. If you have
see above and do the complete procedure from ``git fetch`` onward again.
Web administration development
------------------------------
The administration web interface requires a proper dev environment that can easily
be set up using the ``run_dev.sh`` shell script. You need ``docker`` or ``podman``
to run it. It will create a local web server listening on port 8080:
.. code-block:: bash
cd core/admin
./run_dev.sh
pip install -r requirements.txt
[...]
=============================================================================
The "mailu-dev" container was built using this configuration:
DEV_NAME="mailu-dev"
DEV_DB=""
DEV_PROFILER="false"
DEV_LISTEN="127.0.0.1:8080"
DEV_ADMIN="admin@example.com"
DEV_PASSWORD="letmein"
=============================================================================
[...]
=============================================================================
The Mailu UI can be found here: http://127.0.0.1:8080/sso/login
You can log in with user admin@example.com and password letmein
=============================================================================
The container will use an empty database and a default user/password unless you
specify a database file to use by setting ``$DEV_DB``.
.. code-block:: bash
DEV_DB="/path/to/dev.db" ./run_dev.sh
Any change to the files will automatically restart the web server and reload them.
When using the development environment, a debugging toolbar is displayed on the right
side of the screen, where you can access query details, internal variables, etc.
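Request profiling can be enabled in the same manner; this maps to the new ``DEBUG_PROFILER`` setting inside the container:
.. code-block:: bash
DEV_PROFILER="true" ./run_dev.sh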
Documentation
-------------

@@ -0,0 +1 @@
Implement the required glue to make "doveadm -A" work

@@ -0,0 +1 @@
Fix: include start and end dates in the auto-reply period