diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml new file mode 100644 index 00000000..e2a535dd --- /dev/null +++ b/.github/workflows/CI.yml @@ -0,0 +1,431 @@ +name: CI +on: + push: + branches: + - staging + - testing + - '1.5' + - '1.6' + - '1.7' + - '1.8' + - master + # version tags, e.g. 1.7.1 + - '[1-9].[0-9].[0-9]' + # pre-releases, e.g. 1.8-pre1 + - 1.8-pre[0-9] + # test branches, e.g. test-debian + - test-* + +############################################### +# REQUIRED secrets +# DOCKER_UN: ${{ secrets.Docker_Login }} +# Username of docker login for pushing the images to repo $DOCKER_ORG +# DOCKER_PW: ${{ secrets.Docker_Password }} +# Password of docker login for pushing the images to repo $DOCKER_ORG +# DOCKER_ORG: ${{ secrets.DOCKER_ORG }} +# The docker repository where the images are pushed to. +# DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }} +# The docker repository for test images. Only used for the branch TESTING (BORS try). +# Add the above secrets to your github repo to determine where the images will be pushed. +################################################ + +jobs: + build: + name: Build images + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + #For branch TESTING, we set the image tag to PR-xxxx + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Check docker-compose version + run: docker-compose -v + - name: Login docker + env: + DOCKER_UN: ${{ secrets.Docker_Login }} + DOCKER_PW: ${{ secrets.Docker_Password }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin + - name: Build all docker images + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + run: docker-compose -f tests/build.yml build + - name: Save all docker images + run: docker save ${{ secrets.DOCKER_ORG }}/admin ${{ secrets.DOCKER_ORG }}/clamav ${{ secrets.DOCKER_ORG }}/docs ${{ secrets.DOCKER_ORG }}/dovecot ${{ secrets.DOCKER_ORG }}/fetchmail ${{ secrets.DOCKER_ORG }}/nginx ${{ secrets.DOCKER_ORG }}/none ${{ secrets.DOCKER_ORG }}/postfix ${{ secrets.DOCKER_ORG }}/postgresql ${{ secrets.DOCKER_ORG }}/radicale ${{ secrets.DOCKER_ORG }}/rainloop ${{ secrets.DOCKER_ORG }}/roundcube ${{ secrets.DOCKER_ORG }}/rspamd ${{ secrets.DOCKER_ORG }}/setup ${{ secrets.DOCKER_ORG }}/traefik-certdumper ${{ secrets.DOCKER_ORG }}/unbound -o /images/images.tar.gz + + test-core: + name: Perform core tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 
'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test core suite + run: python tests/compose/test.py core 2 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + test-fetchmail: + name: Perform fetchmail tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test fetch + run: python tests/compose/test.py fetchmail 2 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + test-filters: + name: Perform filter tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ 
github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test clamvav + run: python tests/compose/test.py filters 3 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + test-rainloop: + name: Perform rainloop tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test rainloop + run: python tests/compose/test.py rainloop 2 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + test-roundcube: + name: Perform roundcube tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test roundcube + run: python tests/compose/test.py roundcube 2 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + test-webdav: + name: Perform webdav tests + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + - name: 
Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + run: docker load -i /images/images.tar.gz + - name: Install python packages + run: python3 -m pip install -r tests/requirements.txt + - name: Copy all certs + run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' + - name: Test webdav + run: python tests/compose/test.py webdav 2 + env: + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + + deploy: + name: Deploy images + runs-on: ubuntu-latest + needs: + - build + - test-core + - test-fetchmail + - test-filters + - test-rainloop + - test-roundcube + - test-webdav + steps: + - uses: actions/checkout@v2 + - name: Extract branch name + shell: bash + run: | + echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV + #For branch TESTING, we set the image tag to PR-xxxx + - name: Derive MAILU_VERSION for branch testing + if: ${{ env.BRANCH == 'testing' }} + shell: bash + env: + COMMIT_MESSAGE: ${{ github.event.head_commit.message }} + run: | + echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV + - name: Derive MAILU_VERSION for other branches than testing + if: ${{ env.BRANCH != 'testing' }} + shell: bash + env: + MAILU_BRANCH: ${{ env.BRANCH }} + run: | + echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV + - name: Create folder for storing images + run: | + sudo mkdir -p /images + sudo chmod 777 /images + - name: Configure images folder for caching + # For staging we do not deploy images. So we do not have to load them from cache. + if: ${{ env.BRANCH != 'staging' }} + uses: actions/cache@v2 + with: + path: /images + key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }} + - name: Load docker images + if: ${{ env.BRANCH != 'staging' }} + run: docker load -i /images/images.tar.gz + - name: Deploy built docker images + env: + DOCKER_UN: ${{ secrets.Docker_Login }} + DOCKER_PW: ${{ secrets.Docker_Password }} + DOCKER_ORG: ${{ secrets.DOCKER_ORG }} + DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }} + MAILU_VERSION: ${{ env.MAILU_VERSION }} + TRAVIS_BRANCH: ${{ env.BRANCH }} + run: bash tests/deploy.sh + + # This job is watched by bors. It only complets if building,testing and deploy worked. + ci-success: + name: CI-Done + #Returns true when none of the **previous** steps have failed or been canceled. + if: ${{ success() }} + needs: + - deploy + runs-on: ubuntu-latest + steps: + - name: CI/CD succeeded. 
+ run: exit 0 diff --git a/.mergify.yml b/.mergify.yml index 2af387ed..6cd6a5a3 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -27,7 +27,7 @@ pull_request_rules: - name: Trusted author and 1 approved review; trigger bors r+ conditions: - - author~=^(mergify|kaiyou|muhlemmer|mildred|HorayNarea|adi90x|hoellen|ofthesun9|Nebukadneza|micw|lub|Diman0)$ + - author~=^(mergify|kaiyou|muhlemmer|mildred|HorayNarea|hoellen|ofthesun9|Nebukadneza|micw|lub|Diman0|3-w-c|decentral1se|ghostwheel42|nextgens|parisni)$ - -title~=(WIP|wip) - -label~=^(status/wip|status/blocked|review/need2)$ - "#approved-reviews-by>=1" diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f2a85630..00000000 --- a/.travis.yml +++ /dev/null @@ -1,56 +0,0 @@ -branches: - only: - - staging - - testing - - '1.5' - - '1.6' - - '1.7' - - '1.8' - - master - # version tags, e.g. 1.7.1 - - /^1\.[5678]\.\d+$/ - # pre-releases, e.g. 1.8-pre1 - - /^1\.8-pre\d+$/ - # test branches, e.g. test-debian - - /^test-[\w\-\.]+$/ - -sudo: required -services: docker -addons: - apt: - packages: - - docker-ce - -env: - - MAILU_VERSION=${TRAVIS_BRANCH////-} - -language: python -python: - - "3.6" -install: - - pip install -r tests/requirements.txt - - sudo curl -L https://github.com/docker/compose/releases/download/1.23.0-rc3/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose - - sudo chmod +x /usr/local/bin/docker-compose - -before_script: - - docker-compose -v - - echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin - - docker-compose -f tests/build.yml build - - sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*' - - -script: -# test.py, test name and timeout between start and tests. - - python tests/compose/test.py core 1 - - python tests/compose/test.py fetchmail 1 - - travis_wait python tests/compose/test.py filters 10 - - python tests/compose/test.py rainloop 1 - - python tests/compose/test.py roundcube 1 - - python tests/compose/test.py webdav 1 - -deploy: - provider: script - script: bash tests/deploy.sh - on: - all_branches: true - condition: -n $DOCKER_UN diff --git a/CHANGELOG.md b/CHANGELOG.md index 579f3e82..da945c72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,18 +4,49 @@ Changelog Upgrade should run fine as long as you generate a new compose or stack configuration and upgrade your mailu.env. -Please note that the current 1.8 is what we call a "soft release": It’s there for everyone to see and use, but to limit possible user-impact of this very big release, it’s not yet the default in the setup-utility for new users. When upgrading, please treat it with some care, and be sure to always have backups! - There are some changes to the configuration overrides. Override files are now mounted read-only into the containers. The Dovecot and Postfix overrides are moved in their own sub-directory. If there are local override files, they will need to be moved from overrides/ to overrides/dovecot and overrides/postfix/. See https://mailu.io/1.8/faq.html#how-can-i-override-settings for all the mappings. -Please note that the shipped image for PostgreSQL database is deprecated. -We advise to switch to an external database server. +One major change for the docker compose file is that the antispam container needs a fixed hostname [#1837](https://github.com/Mailu/Mailu/issues/1837). +This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history. +This is also handled in the helm-chart repo. 
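+
+For illustration, a compose service entry with the required fixed hostname looks roughly like the sketch below (the image tag and service name follow a typical generated file and may differ in your setup):
+
+```yaml
+  antispam:
+    image: mailu/rspamd:1.8
+    hostname: antispam
+    restart: always
+    env_file: mailu.env
+```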
-
-v1.8.0 - 2020-09-28
+Improvements have been made to protect against session-fixation attacks.
+To be fully protected, you must change your SECRET_KEY in mailu.env after upgrading.
+A new SECRET_KEY is generated when you recreate your docker-compose.yml & mailu.env file via setup.mailu.io.
+
+The SECRET_KEY is an uppercase alphanumeric string of length 16. You can manually create such a string via
+```cat /dev/urandom | tr -dc 'A-Z0-9' | fold -w ${1:-16} | head -n 1```
+
+After changing mailu.env, all containers must be recreated for the change to take effect.
+
+Please note that the shipped image for the PostgreSQL database is deprecated.
+We advise switching to an external PostgreSQL database server.
+
+
+1.8.0 - 2021-08-06
+--------------------
+
+- Features: Update version of roundcube webmail and carddav plugin. This is a security update. ([#1841](https://github.com/Mailu/Mailu/issues/1841))
+- Features: Update version of rainloop webmail to 1.16.0. This is a security update. ([#1845](https://github.com/Mailu/Mailu/issues/1845))
+- Features: Changed default value of AUTH_RATELIMIT_SUBNET to false. Increased the default value of the rate limit (AUTH_RATELIMIT) in the setup utility. ([#1867](https://github.com/Mailu/Mailu/issues/1867))
+- Features: Update jquery used in setup. Set pinned versions in requirements.txt for setup. This is a security update. ([#1880](https://github.com/Mailu/Mailu/issues/1880))
+- Bugfixes: Replace PUBLIC_HOSTNAME and PUBLIC_IP in "Received" headers to ensure that no undue spam points are attributed ([#191](https://github.com/Mailu/Mailu/issues/191))
+- Bugfixes: Don't replace nested headers (typically in attached emails) ([#1660](https://github.com/Mailu/Mailu/issues/1660))
+- Bugfixes: Fix letsencrypt access to certbot for the mail-letsencrypt flavour ([#1686](https://github.com/Mailu/Mailu/issues/1686))
+- Bugfixes: Fix CVE-2020-25275 and CVE-2020-24386 by upgrading alpine for
+  dovecot which contains a fixed dovecot version. ([#1720](https://github.com/Mailu/Mailu/issues/1720))
+- Bugfixes: Antispam service now uses a static hostname. Rspamd history is only retained when the service has a fixed hostname. ([#1837](https://github.com/Mailu/Mailu/issues/1837))
+- Bugfixes: Fix a bug preventing colons from being used in passwords when using radicale/webdav. ([#1861](https://github.com/Mailu/Mailu/issues/1861))
+- Bugfixes: Remove dot in blueprint name to prevent critical flask startup error in setup. ([#1874](https://github.com/Mailu/Mailu/issues/1874))
+- Bugfixes: Fix punycode encoding of domain names ([#1891](https://github.com/Mailu/Mailu/issues/1891))
+- Improved Documentation: Update fail2ban documentation to use systemd backend instead of filepath for journald ([#1857](https://github.com/Mailu/Mailu/issues/1857))
+- Misc: Switch from client-side (cookie) sessions to server-side sessions and protect against session-fixation attacks. We recommend that you change your SECRET_KEY after upgrading.
([#1783](https://github.com/Mailu/Mailu/issues/1783)) + + +v1.8.0rc - 2020-09-28 -------------------- - Features: Add support for backward-forwarding using SRS ([#328](https://github.com/Mailu/Mailu/issues/328)) diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 059318fc..8fb3265d 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -13,4 +13,4 @@ Before we can consider review and merge, please make sure the following list is If an entry in not applicable, you can check it or remove it from the list. - [ ] In case of feature or enhancement: documentation updated accordingly -- [ ] Unless it's docs or a minor change: add [changelog](https://mailu.io/master/contributors/guide.html#changelog) entry file. +- [ ] Unless it's docs or a minor change: add [changelog](https://mailu.io/master/contributors/workflow.html#changelog) entry file. diff --git a/bors.toml b/bors.toml index 5279fe72..272a6047 100644 --- a/bors.toml +++ b/bors.toml @@ -1,3 +1,4 @@ status = [ - "continuous-integration/travis-ci/push" + "CI-Done" ] + diff --git a/core/admin/Dockerfile b/core/admin/Dockerfile index 81ffd147..86796978 100644 --- a/core/admin/Dockerfile +++ b/core/admin/Dockerfile @@ -1,8 +1,9 @@ # First stage to build assets -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 ARG ARCH="" -FROM ${ARCH}node:15 as assets -COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static + +FROM ${ARCH}node:8 as assets +COPY --from=balenalib/rpi-alpine:3.14 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static COPY package.json ./ RUN npm install @@ -24,9 +25,9 @@ RUN mkdir -p /app WORKDIR /app COPY requirements-prod.txt requirements.txt -RUN apk add --no-cache libressl curl postgresql-libs mariadb-connector-c \ +RUN apk add --no-cache openssl curl postgresql-libs mariadb-connector-c \ && apk add --no-cache --virtual build-dep \ - libressl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev \ + openssl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev cargo \ && pip3 install -r requirements.txt \ && apk del --no-cache build-dep diff --git a/core/admin/mailu/__init__.py b/core/admin/mailu/__init__.py index f9ca2466..8ab8ed0e 100644 --- a/core/admin/mailu/__init__.py +++ b/core/admin/mailu/__init__.py @@ -1,11 +1,12 @@ +""" Mailu admin app +""" + import flask import flask_bootstrap -import redis -from flask_kvsession import KVSessionExtension -from simplekv.memory.redisstore import RedisStore from mailu import utils, debug, models, manage, configuration +import hmac def create_app_from_config(config): """ Create a new application based on the given configuration @@ -20,7 +21,7 @@ def create_app_from_config(config): # Initialize application extensions config.init_app(app) models.db.init_app(app) - KVSessionExtension(RedisStore(redis.StrictRedis().from_url('redis://{0}/3'.format(config['REDIS_ADDRESS']))), app).cleanup_sessions(app) + utils.session.init_app(app) utils.limiter.init_app(app) utils.babel.init_app(app) utils.login.init_app(app) @@ -28,6 +29,8 @@ def create_app_from_config(config): utils.proxy.init_app(app) utils.migrate.init_app(app, models.db) + app.temp_token_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('WEBMAIL_TEMP_TOKEN_KEY', 'utf-8'), 'sha256').digest() + # Initialize debugging tools if app.config.get("DEBUG"): debug.toolbar.init_app(app) @@ -53,8 +56,7 @@ def create_app_from_config(config): def create_app(): - """ Create a new application based on the config module + """ Create a 
new application based on the config module """ config = configuration.ConfigManager() return create_app_from_config(config) - diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py index 6f65d17d..d2d34d88 100644 --- a/core/admin/mailu/configuration.py +++ b/core/admin/mailu/configuration.py @@ -14,6 +14,7 @@ DEFAULT_CONFIG = { 'DEBUG': False, 'DOMAIN_REGISTRATION': False, 'TEMPLATES_AUTO_RELOAD': True, + 'MEMORY_SESSIONS': False, # Database settings 'DB_FLAVOR': None, 'DB_USER': 'mailu', @@ -33,8 +34,8 @@ DEFAULT_CONFIG = { 'POSTMASTER': 'postmaster', 'TLS_FLAVOR': 'cert', 'INBOUND_TLS_ENFORCE': False, - 'AUTH_RATELIMIT': '10/minute;1000/hour', - 'AUTH_RATELIMIT_SUBNET': True, + 'AUTH_RATELIMIT': '1000/minute;10000/hour', + 'AUTH_RATELIMIT_SUBNET': False, 'DISABLE_STATISTICS': False, # Mail settings 'DMARC_RUA': None, @@ -55,6 +56,7 @@ DEFAULT_CONFIG = { 'RECAPTCHA_PRIVATE_KEY': '', # Advanced settings 'LOG_LEVEL': 'WARNING', + 'SESSION_KEY_BITS': 128, 'SESSION_LIFETIME': 24, 'SESSION_COOKIE_SECURE': True, 'CREDENTIAL_ROUNDS': 12, @@ -65,7 +67,6 @@ DEFAULT_CONFIG = { 'HOST_SMTP': 'smtp', 'HOST_AUTHSMTP': 'smtp', 'HOST_ADMIN': 'admin', - 'WEBMAIL': 'none', 'HOST_WEBMAIL': 'webmail', 'HOST_WEBDAV': 'webdav:5232', 'HOST_REDIS': 'redis', @@ -136,9 +137,9 @@ class ConfigManager(dict): self.config['RATELIMIT_STORAGE_URL'] = 'redis://{0}/2'.format(self.config['REDIS_ADDRESS']) self.config['QUOTA_STORAGE_URL'] = 'redis://{0}/1'.format(self.config['REDIS_ADDRESS']) + self.config['SESSION_STORAGE_URL'] = 'redis://{0}/3'.format(self.config['REDIS_ADDRESS']) self.config['SESSION_COOKIE_SAMESITE'] = 'Strict' self.config['SESSION_COOKIE_HTTPONLY'] = True - self.config['SESSION_KEY_BITS'] = 128 self.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=int(self.config['SESSION_LIFETIME'])) # update the app config itself app.config = self diff --git a/core/admin/mailu/internal/nginx.py b/core/admin/mailu/internal/nginx.py index 600e438b..3f5582cc 100644 --- a/core/admin/mailu/internal/nginx.py +++ b/core/admin/mailu/internal/nginx.py @@ -7,7 +7,6 @@ import ipaddress import socket import tenacity - SUPPORTED_AUTH_METHODS = ["none", "plain"] @@ -26,8 +25,12 @@ def check_credentials(user, password, ip, protocol=None): if not user or not user.enabled or (protocol == "imap" and not user.enable_imap) or (protocol == "pop3" and not user.enable_pop): return False is_ok = False + # webmails + if len(password) == 64 and ip == app.config['WEBMAIL_ADDRESS']: + if user.verify_temp_token(password): + is_ok = True # All tokens are 32 characters hex lowercase - if len(password) == 32: + if not is_ok and len(password) == 32: for token in user.tokens: if (token.check_password(password) and (not token.ip or token.ip == ip)): diff --git a/core/admin/mailu/internal/views/auth.py b/core/admin/mailu/internal/views/auth.py index edd62e37..2baeddce 100644 --- a/core/admin/mailu/internal/views/auth.py +++ b/core/admin/mailu/internal/views/auth.py @@ -43,6 +43,18 @@ def admin_authentication(): return "" return flask.abort(403) +@internal.route("/auth/user") +def user_authentication(): + """ Fails if the user is not authenticated. 
+ """ + if (not flask_login.current_user.is_anonymous + and flask_login.current_user.enabled): + response = flask.Response() + response.headers["X-User"] = flask_login.current_user.get_id() + response.headers["X-User-Token"] = models.User.get_temp_token(flask_login.current_user.get_id()) + return response + return flask.abort(403) + @internal.route("/auth/basic") def basic_authentication(): @@ -51,7 +63,7 @@ def basic_authentication(): authorization = flask.request.headers.get("Authorization") if authorization and authorization.startswith("Basic "): encoded = authorization.replace("Basic ", "") - user_email, password = base64.b64decode(encoded).split(b":") + user_email, password = base64.b64decode(encoded).split(b":", 1) user = models.User.query.get(user_email.decode("utf8")) if nginx.check_credentials(user, password.decode('utf-8'), flask.request.remote_addr, "web"): response = flask.Response() diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py index a5507830..c358c37f 100644 --- a/core/admin/mailu/internal/views/postfix.py +++ b/core/admin/mailu/internal/views/postfix.py @@ -2,6 +2,7 @@ from mailu import models from mailu.internal import internal import flask +import idna import re import srslib @@ -35,13 +36,67 @@ def postfix_alias_map(alias): def postfix_transport(email): if email == '*' or re.match("(^|.*@)\[.*\]$", email): return flask.abort(404) - localpart, domain_name = models.Email.resolve_domain(email) + _, domain_name = models.Email.resolve_domain(email) relay = models.Relay.query.get(domain_name) or flask.abort(404) - ret = "smtp:[{0}]".format(relay.smtp) - if ":" in relay.smtp: - split = relay.smtp.split(':') - ret = "smtp:[{0}]:{1}".format(split[0], split[1]) - return flask.jsonify(ret) + target = relay.smtp.lower() + port = None + use_lmtp = False + use_mx = False + # strip prefixes mx: and lmtp: + if target.startswith('mx:'): + target = target[3:] + use_mx = True + elif target.startswith('lmtp:'): + target = target[5:] + use_lmtp = True + # split host:port or [host]:port + if target.startswith('['): + if use_mx or ']' not in target: + # invalid target (mx: and [] or missing ]) + flask.abort(400) + host, rest = target[1:].split(']', 1) + if rest.startswith(':'): + port = rest[1:] + elif rest: + # invalid target (rest should be :port) + flask.abort(400) + else: + if ':' in target: + host, port = target.rsplit(':', 1) + else: + host = target + # default for empty host part is mx:domain + if not host: + if not use_lmtp: + host = relay.name.lower() + use_mx = True + else: + # lmtp: needs a host part + flask.abort(400) + # detect ipv6 address or encode host + if ':' in host: + host = f'ipv6:{host}' + else: + try: + host = idna.encode(host).decode('ascii') + except idna.IDNAError: + # invalid host (fqdn not encodable) + flask.abort(400) + # validate port + if port is not None: + try: + port = int(port, 10) + except ValueError: + # invalid port (should be numeric) + flask.abort(400) + # create transport + transport = 'lmtp' if use_lmtp else 'smtp' + # use [] when not using MX lookups or host is an ipv6 address + if host.startswith('ipv6:') or (not use_lmtp and not use_mx): + host = f'[{host}]' + # create port suffix + port = '' if port is None else f':{port}' + return flask.jsonify(f'{transport}:{host}{port}') @internal.route("/postfix/recipient/map/") diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py index 9c576404..5708327e 100644 --- a/core/admin/mailu/manage.py +++ b/core/admin/mailu/manage.py @@ -1,40 +1,46 
@@ -from mailu import models +""" Mailu command line interface +""" -from flask import current_app as app -from flask import cli as flask_cli - -import flask +import sys import os import socket import uuid + import click +import yaml + +from flask import current_app as app +from flask.cli import FlaskGroup, with_appcontext + +from mailu import models +from mailu.schemas import MailuSchema, Logger, RenderJSON db = models.db -@click.group() -def mailu(cls=flask_cli.FlaskGroup): +@click.group(cls=FlaskGroup, context_settings={'help_option_names': ['-?', '-h', '--help']}) +def mailu(): """ Mailu command line """ @mailu.command() -@flask_cli.with_appcontext +@with_appcontext def advertise(): """ Advertise this server against statistic services. """ - if os.path.isfile(app.config["INSTANCE_ID_PATH"]): - with open(app.config["INSTANCE_ID_PATH"], "r") as handle: + if os.path.isfile(app.config['INSTANCE_ID_PATH']): + with open(app.config['INSTANCE_ID_PATH'], 'r') as handle: instance_id = handle.read() else: instance_id = str(uuid.uuid4()) - with open(app.config["INSTANCE_ID_PATH"], "w") as handle: + with open(app.config['INSTANCE_ID_PATH'], 'w') as handle: handle.write(instance_id) - if not app.config["DISABLE_STATISTICS"]: + if not app.config['DISABLE_STATISTICS']: try: - socket.gethostbyname(app.config["STATS_ENDPOINT"].format(instance_id)) - except: + socket.gethostbyname(app.config['STATS_ENDPOINT'].format(instance_id)) + except OSError: pass @@ -43,7 +49,7 @@ def advertise(): @click.argument('domain_name') @click.argument('password') @click.option('-m', '--mode') -@flask_cli.with_appcontext +@with_appcontext def admin(localpart, domain_name, password, mode='create'): """ Create an admin user 'mode' can be: @@ -58,7 +64,7 @@ def admin(localpart, domain_name, password, mode='create'): user = None if mode == 'ifmissing' or mode == 'update': - email = '{}@{}'.format(localpart, domain_name) + email = f'{localpart}@{domain_name}' user = models.User.query.get(email) if user and mode == 'ifmissing': @@ -86,7 +92,7 @@ def admin(localpart, domain_name, password, mode='create'): @click.argument('localpart') @click.argument('domain_name') @click.argument('password') -@flask_cli.with_appcontext +@with_appcontext def user(localpart, domain_name, password): """ Create a user """ @@ -108,16 +114,16 @@ def user(localpart, domain_name, password): @click.argument('localpart') @click.argument('domain_name') @click.argument('password') -@flask_cli.with_appcontext +@with_appcontext def password(localpart, domain_name, password): """ Change the password of an user """ - email = '{0}@{1}'.format(localpart, domain_name) + email = f'{localpart}@{domain_name}' user = models.User.query.get(email) if user: user.set_password(password) else: - print("User " + email + " not found.") + print(f'User {email} not found.') db.session.commit() @@ -126,7 +132,7 @@ def password(localpart, domain_name, password): @click.option('-u', '--max-users') @click.option('-a', '--max-aliases') @click.option('-q', '--max-quota-bytes') -@flask_cli.with_appcontext +@with_appcontext def domain(domain_name, max_users=-1, max_aliases=-1, max_quota_bytes=0): """ Create a domain """ @@ -142,9 +148,9 @@ def domain(domain_name, max_users=-1, max_aliases=-1, max_quota_bytes=0): @click.argument('localpart') @click.argument('domain_name') @click.argument('password_hash') -@flask_cli.with_appcontext +@with_appcontext def user_import(localpart, domain_name, password_hash): - """ Import a user along with password hash. 
+ """ Import a user along with password hash """ domain = models.Domain.query.get(domain_name) if not domain: @@ -160,14 +166,14 @@ def user_import(localpart, domain_name, password_hash): db.session.commit() +# TODO: remove deprecated config_update function? @mailu.command() @click.option('-v', '--verbose') @click.option('-d', '--delete-objects') -@flask_cli.with_appcontext +@with_appcontext def config_update(verbose=False, delete_objects=False): - """sync configuration with data from YAML-formatted stdin""" - import yaml - import sys + """ Sync configuration with data from YAML (deprecated) + """ new_config = yaml.safe_load(sys.stdin) # print new_config domains = new_config.get('domains', []) @@ -187,13 +193,13 @@ def config_update(verbose=False, delete_objects=False): max_aliases=max_aliases, max_quota_bytes=max_quota_bytes) db.session.add(domain) - print("Added " + str(domain_config)) + print(f'Added {domain_config}') else: domain.max_users = max_users domain.max_aliases = max_aliases domain.max_quota_bytes = max_quota_bytes db.session.add(domain) - print("Updated " + str(domain_config)) + print(f'Updated {domain_config}') users = new_config.get('users', []) tracked_users = set() @@ -209,7 +215,7 @@ def config_update(verbose=False, delete_objects=False): domain_name = user_config['domain'] password_hash = user_config.get('password_hash', None) domain = models.Domain.query.get(domain_name) - email = '{0}@{1}'.format(localpart, domain_name) + email = f'{localpart}@{domain_name}' optional_params = {} for k in user_optional_params: if k in user_config: @@ -239,13 +245,13 @@ def config_update(verbose=False, delete_objects=False): print(str(alias_config)) localpart = alias_config['localpart'] domain_name = alias_config['domain'] - if type(alias_config['destination']) is str: + if isinstance(alias_config['destination'], str): destination = alias_config['destination'].split(',') else: destination = alias_config['destination'] wildcard = alias_config.get('wildcard', False) domain = models.Domain.query.get(domain_name) - email = '{0}@{1}'.format(localpart, domain_name) + email = f'{localpart}@{domain_name}' if not domain: domain = models.Domain(name=domain_name) db.session.add(domain) @@ -275,7 +281,7 @@ def config_update(verbose=False, delete_objects=False): domain_name = manager_config['domain'] user_name = manager_config['user'] domain = models.Domain.query.get(domain_name) - manageruser = models.User.query.get(user_name + '@' + domain_name) + manageruser = models.User.query.get(f'{user_name}@{domain_name}') if manageruser not in domain.managers: domain.managers.append(manageruser) db.session.add(domain) @@ -284,26 +290,117 @@ def config_update(verbose=False, delete_objects=False): if delete_objects: for user in db.session.query(models.User).all(): - if not (user.email in tracked_users): + if not user.email in tracked_users: if verbose: - print("Deleting user: " + str(user.email)) + print(f'Deleting user: {user.email}') db.session.delete(user) for alias in db.session.query(models.Alias).all(): - if not (alias.email in tracked_aliases): + if not alias.email in tracked_aliases: if verbose: - print("Deleting alias: " + str(alias.email)) + print(f'Deleting alias: {alias.email}') db.session.delete(alias) for domain in db.session.query(models.Domain).all(): - if not (domain.name in tracked_domains): + if not domain.name in tracked_domains: if verbose: - print("Deleting domain: " + str(domain.name)) + print(f'Deleting domain: {domain.name}') db.session.delete(domain) db.session.commit() 
+@mailu.command() +@click.option('-v', '--verbose', count=True, help='Increase verbosity.') +@click.option('-s', '--secrets', is_flag=True, help='Show secret attributes in messages.') +@click.option('-d', '--debug', is_flag=True, help='Enable debug output.') +@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors.') +@click.option('-c', '--color', is_flag=True, help='Force colorized output.') +@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config.') +@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made.') +@click.argument('source', metavar='[FILENAME|-]', type=click.File(mode='r'), default=sys.stdin) +@with_appcontext +def config_import(verbose=0, secrets=False, debug=False, quiet=False, color=False, + update=False, dry_run=False, source=None): + """ Import configuration as YAML or JSON from stdin or file + """ + + log = Logger(want_color=color or None, can_color=sys.stdout.isatty(), secrets=secrets, debug=debug) + log.lexer = 'python' + log.strip = True + log.verbose = 0 if quiet else verbose + log.quiet = quiet + + context = { + 'import': True, + 'update': update, + 'clear': not update, + 'callback': log.track_serialize, + } + + schema = MailuSchema(only=MailuSchema.Meta.order, context=context) + + try: + # import source + with models.db.session.no_autoflush: + config = schema.loads(source) + # flush session to show/count all changes + if not quiet and (dry_run or verbose): + db.session.flush() + # check for duplicate domain names + config.check() + except Exception as exc: + if msg := log.format_exception(exc): + raise click.ClickException(msg) from exc + raise + + # don't commit when running dry + if dry_run: + log.changes('Dry run. Not committing changes.') + db.session.rollback() + else: + log.changes('Committing changes.') + db.session.commit() + + +@mailu.command() +@click.option('-f', '--full', is_flag=True, help='Include attributes with default value.') +@click.option('-s', '--secrets', is_flag=True, + help='Include secret attributes (dkim-key, passwords).') +@click.option('-d', '--dns', is_flag=True, help='Include dns records.') +@click.option('-c', '--color', is_flag=True, help='Force colorized output.') +@click.option('-o', '--output-file', 'output', default=sys.stdout, type=click.File(mode='w'), + help='Save configuration to file.') +@click.option('-j', '--json', 'as_json', is_flag=True, help='Export configuration in json format.') +@click.argument('only', metavar='[FILTER]...', nargs=-1) +@with_appcontext +def config_export(full=False, secrets=False, color=False, dns=False, output=None, as_json=False, only=None): + """ Export configuration as YAML or JSON to stdout or file + """ + + log = Logger(want_color=color or None, can_color=output.isatty()) + + only = only or MailuSchema.Meta.order + + context = { + 'full': full, + 'secrets': secrets, + 'dns': dns, + } + + try: + schema = MailuSchema(only=only, context=context) + if as_json: + schema.opts.render_module = RenderJSON + log.lexer = 'json' + log.strip = True + print(log.colorize(schema.dumps(models.MailuConfig())), file=output) + except Exception as exc: + if msg := log.format_exception(exc): + raise click.ClickException(msg) from exc + raise + + @mailu.command() @click.argument('email') -@flask_cli.with_appcontext +@with_appcontext def user_delete(email): """delete user""" user = models.User.query.get(email) @@ -314,7 +411,7 @@ def user_delete(email): @mailu.command() @click.argument('email') 
-@flask_cli.with_appcontext +@with_appcontext def alias_delete(email): """delete alias""" alias = models.Alias.query.get(email) @@ -328,7 +425,7 @@ def alias_delete(email): @click.argument('domain_name') @click.argument('destination') @click.option('-w', '--wildcard', is_flag=True) -@flask_cli.with_appcontext +@with_appcontext def alias(localpart, domain_name, destination, wildcard=False): """ Create an alias """ @@ -341,7 +438,7 @@ def alias(localpart, domain_name, destination, wildcard=False): domain=domain, wildcard=wildcard, destination=destination.split(','), - email="%s@%s" % (localpart, domain_name) + email=f'{localpart}@{domain_name}' ) db.session.add(alias) db.session.commit() @@ -352,7 +449,7 @@ def alias(localpart, domain_name, destination, wildcard=False): @click.argument('max_users') @click.argument('max_aliases') @click.argument('max_quota_bytes') -@flask_cli.with_appcontext +@with_appcontext def setlimits(domain_name, max_users, max_aliases, max_quota_bytes): """ Set domain limits """ @@ -367,16 +464,12 @@ def setlimits(domain_name, max_users, max_aliases, max_quota_bytes): @mailu.command() @click.argument('domain_name') @click.argument('user_name') -@flask_cli.with_appcontext +@with_appcontext def setmanager(domain_name, user_name='manager'): """ Make a user manager of a domain """ domain = models.Domain.query.get(domain_name) - manageruser = models.User.query.get(user_name + '@' + domain_name) + manageruser = models.User.query.get(f'{user_name}@{domain_name}') domain.managers.append(manageruser) db.session.add(domain) db.session.commit() - - -if __name__ == '__main__': - cli() diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py index a63c33a5..3a299786 100644 --- a/core/admin/mailu/models.py +++ b/core/admin/mailu/models.py @@ -1,20 +1,34 @@ -from mailu import dkim +""" Mailu config storage model +""" -from sqlalchemy.ext import declarative -from passlib import context, hash, registry -from datetime import datetime, date +import os +import smtplib +import json + +from datetime import date from email.mime import text -from flask import current_app as app +from itertools import chain import flask_sqlalchemy import sqlalchemy +import passlib.context +import passlib.hash +import passlib.registry import time import os -import glob +import hmac import smtplib import idna import dns +from flask import current_app as app +from sqlalchemy.ext import declarative +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.inspection import inspect +from werkzeug.utils import cached_property + +from mailu import dkim + db = flask_sqlalchemy.SQLAlchemy() @@ -26,11 +40,14 @@ class IdnaDomain(db.TypeDecorator): impl = db.String(80) def process_bind_param(self, value, dialect): - return idna.encode(value).decode("ascii").lower() + """ encode unicode domain name to punycode """ + return idna.encode(value.lower()).decode('ascii') def process_result_value(self, value, dialect): + """ decode punycode domain name to unicode """ return idna.decode(value) + python_type = str class IdnaEmail(db.TypeDecorator): """ Stores a Unicode string in it's IDNA representation (ASCII only) @@ -39,22 +56,18 @@ class IdnaEmail(db.TypeDecorator): impl = db.String(255) def process_bind_param(self, value, dialect): - try: - localpart, domain_name = value.split('@') - return "{0}@{1}".format( - localpart, - idna.encode(domain_name).decode('ascii'), - ).lower() - except ValueError: - pass + """ encode unicode domain part of email address to punycode """ + localpart, domain_name = 
value.lower().rsplit('@', 1) + if '@' in localpart: + raise ValueError('email local part must not contain "@"') + return f'{localpart}@{idna.encode(domain_name).decode("ascii")}' def process_result_value(self, value, dialect): - localpart, domain_name = value.split('@') - return "{0}@{1}".format( - localpart, - idna.decode(domain_name), - ) + """ decode punycode domain part of email to unicode """ + localpart, domain_name = value.rsplit('@', 1) + return f'{localpart}@{idna.decode(domain_name)}' + python_type = str class CommaSeparatedList(db.TypeDecorator): """ Stores a list as a comma-separated string, compatible with Postfix. @@ -63,29 +76,35 @@ class CommaSeparatedList(db.TypeDecorator): impl = db.String def process_bind_param(self, value, dialect): - if type(value) is not list: - raise TypeError("Shoud be a list") + """ join list of items to comma separated string """ + if not isinstance(value, (list, tuple, set)): + raise TypeError('Must be a list of strings') for item in value: - if "," in item: - raise ValueError("No item should contain a comma") - return ",".join(value) + if ',' in item: + raise ValueError('list item must not contain ","') + return ','.join(sorted(set(value))) def process_result_value(self, value, dialect): - return list(filter(bool, value.split(","))) if value else [] + """ split comma separated string to list """ + return list(filter(bool, (item.strip() for item in value.split(',')))) if value else [] + python_type = list class JSONEncoded(db.TypeDecorator): - """Represents an immutable structure as a json-encoded string. + """ Represents an immutable structure as a json-encoded string. """ impl = db.String def process_bind_param(self, value, dialect): + """ encode data as json """ return json.dumps(value) if value else None def process_result_value(self, value, dialect): + """ decode json to data """ return json.loads(value) if value else None + python_type = str class Base(db.Model): """ Base class for all models @@ -95,14 +114,43 @@ class Base(db.Model): metadata = sqlalchemy.schema.MetaData( naming_convention={ - "fk": "%(table_name)s_%(column_0_name)s_fkey", - "pk": "%(table_name)s_pkey" + 'fk': '%(table_name)s_%(column_0_name)s_fkey', + 'pk': '%(table_name)s_pkey' } ) created_at = db.Column(db.Date, nullable=False, default=date.today) updated_at = db.Column(db.Date, nullable=True, onupdate=date.today) - comment = db.Column(db.String(255), nullable=True) + comment = db.Column(db.String(255), nullable=True, default='') + + def __str__(self): + pkey = self.__table__.primary_key.columns.values()[0].name + if pkey == 'email': + # ugly hack for email declared attr. 
_email is not always up2date + return str(f'{self.localpart}@{self.domain_name}') + return str(getattr(self, pkey)) + + def __repr__(self): + return f'<{self.__class__.__name__} {str(self)!r}>' + + def __eq__(self, other): + if isinstance(other, self.__class__): + pkey = self.__table__.primary_key.columns.values()[0].name + this = getattr(self, pkey, None) + other = getattr(other, pkey, None) + return this is not None and other is not None and str(this) == str(other) + else: + return NotImplemented + + # we need hashable instances here for sqlalchemy to update collections + # in collections.bulk_replace, but auto-incrementing don't always have + # a valid primary key, in this case we use the object's id + __hashed = None + def __hash__(self): + if self.__hashed is None: + primary = getattr(self, self.__table__.primary_key.columns.values()[0].name) + self.__hashed = id(self) if primary is None else hash(primary) + return self.__hashed # Many-to-many association table for domain managers @@ -120,99 +168,155 @@ class Config(Base): value = db.Column(JSONEncoded) +def _save_dkim_keys(session): + """ store DKIM keys after commit """ + for obj in session.identity_map.values(): + if isinstance(obj, Domain): + obj.save_dkim_key() + class Domain(Base): """ A DNS domain that has mail addresses associated to it. """ - __tablename__ = "domain" + + __tablename__ = 'domain' name = db.Column(IdnaDomain, primary_key=True, nullable=False) managers = db.relationship('User', secondary=managers, backref=db.backref('manager_of'), lazy='dynamic') max_users = db.Column(db.Integer, nullable=False, default=-1) max_aliases = db.Column(db.Integer, nullable=False, default=-1) - max_quota_bytes = db.Column(db.BigInteger(), nullable=False, default=0) - signup_enabled = db.Column(db.Boolean(), nullable=False, default=False) + max_quota_bytes = db.Column(db.BigInteger, nullable=False, default=0) + signup_enabled = db.Column(db.Boolean, nullable=False, default=False) + + _dkim_key = None + _dkim_key_on_disk = None + + def _dkim_file(self): + """ return filename for active DKIM key """ + return app.config['DKIM_PATH'].format( + domain=self.name, + selector=app.config['DKIM_SELECTOR'] + ) + + def save_dkim_key(self): + """ save changed DKIM key to disk """ + if self._dkim_key != self._dkim_key_on_disk: + file_path = self._dkim_file() + if self._dkim_key: + with open(file_path, 'wb') as handle: + handle.write(self._dkim_key) + elif os.path.exists(file_path): + os.unlink(file_path) + self._dkim_key_on_disk = self._dkim_key + + @property + def dns_mx(self): + """ return MX record for domain """ + hostname = app.config['HOSTNAMES'].split(',', 1)[0] + return f'{self.name}. 600 IN MX 10 {hostname}.' + + @property + def dns_spf(self): + """ return SPF record for domain """ + hostname = app.config['HOSTNAMES'].split(',', 1)[0] + return f'{self.name}. 600 IN TXT "v=spf1 mx a:{hostname} ~all"' + + @property + def dns_dkim(self): + """ return DKIM record for domain """ + if self.dkim_key: + selector = app.config['DKIM_SELECTOR'] + return ( + f'{selector}._domainkey.{self.name}. 600 IN TXT' + f'"v=DKIM1; k=rsa; p={self.dkim_publickey}"' + ) + + @property + def dns_dmarc(self): + """ return DMARC record for domain """ + if self.dkim_key: + domain = app.config['DOMAIN'] + rua = app.config['DMARC_RUA'] + rua = f' rua=mailto:{rua}@{domain};' if rua else '' + ruf = app.config['DMARC_RUF'] + ruf = f' ruf=mailto:{ruf}@{domain};' if ruf else '' + return f'_dmarc.{self.name}. 
600 IN TXT "v=DMARC1; p=reject;{rua}{ruf} adkim=s; aspf=s"' @property def dkim_key(self): - file_path = app.config["DKIM_PATH"].format( - domain=self.name, selector=app.config["DKIM_SELECTOR"]) - if os.path.exists(file_path): - with open(file_path, "rb") as handle: - return handle.read() + """ return private DKIM key """ + if self._dkim_key is None: + file_path = self._dkim_file() + if os.path.exists(file_path): + with open(file_path, 'rb') as handle: + self._dkim_key = self._dkim_key_on_disk = handle.read() + else: + self._dkim_key = self._dkim_key_on_disk = b'' + return self._dkim_key if self._dkim_key else None @dkim_key.setter def dkim_key(self, value): - file_path = app.config["DKIM_PATH"].format( - domain=self.name, selector=app.config["DKIM_SELECTOR"]) - with open(file_path, "wb") as handle: - handle.write(value) + """ set private DKIM key """ + old_key = self.dkim_key + self._dkim_key = value if value is not None else b'' + if self._dkim_key != old_key: + if not sqlalchemy.event.contains(db.session, 'after_commit', _save_dkim_keys): + sqlalchemy.event.listen(db.session, 'after_commit', _save_dkim_keys) @property def dkim_publickey(self): + """ return public part of DKIM key """ dkim_key = self.dkim_key if dkim_key: - return dkim.strip_key(self.dkim_key).decode("utf8") + return dkim.strip_key(dkim_key).decode('utf8') def generate_dkim_key(self): + """ generate new DKIM key """ self.dkim_key = dkim.gen_key() def has_email(self, localpart): - for email in self.users + self.aliases: + """ checks if localpart is configured for domain """ + localpart = localpart.lower() + for email in chain(self.users, self.aliases): if email.localpart == localpart: return True - else: - return False + return False def check_mx(self): + """ checks if MX record for domain points to mailu host """ try: - hostnames = app.config['HOSTNAMES'].split(',') + hostnames = set(app.config['HOSTNAMES'].split(',')) return any( - str(rset).split()[-1][:-1] in hostnames + rset.exchange.to_text().rstrip('.') in hostnames for rset in dns.resolver.query(self.name, 'MX') ) - except Exception as e: - return False - - def __str__(self): - return self.name - - def __eq__(self, other): - try: - return self.name == other.name - except AttributeError: + except dns.exception.DNSException: return False class Alternative(Base): """ Alternative name for a served domain. - The name "domain alias" was avoided to prevent some confusion. + The name "domain alias" was avoided to prevent some confusion. """ - __tablename__ = "alternative" + __tablename__ = 'alternative' name = db.Column(IdnaDomain, primary_key=True, nullable=False) domain_name = db.Column(IdnaDomain, db.ForeignKey(Domain.name)) domain = db.relationship(Domain, backref=db.backref('alternatives', cascade='all, delete-orphan')) - def __str__(self): - return self.name - class Relay(Base): """ Relayed mail domain. The domain is either relayed publicly or through a specified SMTP host. """ - __tablename__ = "relay" + __tablename__ = 'relay' name = db.Column(IdnaDomain, primary_key=True, nullable=False) smtp = db.Column(db.String(80), nullable=True) - def __str__(self): - return self.name - class Email(object): """ Abstraction for an email address (localpart and domain). 
@@ -222,6 +326,7 @@ class Email(object): @declarative.declared_attr def domain_name(cls): + """ the domain part of the email address """ return db.Column(IdnaDomain, db.ForeignKey(Domain.name), nullable=False, default=IdnaDomain) @@ -229,54 +334,82 @@ class Email(object): # It is however very useful for quick lookups without joining tables, # especially when the mail server is reading the database. @declarative.declared_attr - def email(cls): - updater = lambda context: "{0}@{1}".format( - context.current_parameters["localpart"], - context.current_parameters["domain_name"], - ) - return db.Column(IdnaEmail, - primary_key=True, nullable=False, - default=updater) + def _email(cls): + """ the complete email address (localpart@domain) """ + + def updater(ctx): + key = f'{cls.__tablename__}_email' + if key in ctx.current_parameters: + return ctx.current_parameters[key] + return '{localpart}@{domain_name}'.format_map(ctx.current_parameters) + + return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater) + + # We need to keep email, localpart and domain_name in sync. + # But IMHO using email as primary key was not a good idea in the first place. + @hybrid_property + def email(self): + """ getter for email - gets _email """ + return self._email + + @email.setter + def email(self, value): + """ setter for email - sets _email, localpart and domain_name at once """ + self._email = value.lower() + self.localpart, self.domain_name = self._email.rsplit('@', 1) + + @staticmethod + def _update_localpart(target, value, *_): + if target.domain_name: + target._email = f'{value}@{target.domain_name}' + + @staticmethod + def _update_domain_name(target, value, *_): + if target.localpart: + target._email = f'{target.localpart}@{value}' + + @classmethod + def __declare_last__(cls): + # gets called after mappings are completed + sqlalchemy.event.listen(cls.localpart, 'set', cls._update_localpart, propagate=True) + sqlalchemy.event.listen(cls.domain_name, 'set', cls._update_domain_name, propagate=True) def sendmail(self, subject, body): - """ Send an email to the address. 
- """ - from_address = "{0}@{1}".format( - app.config['POSTMASTER'], - idna.encode(app.config['DOMAIN']).decode('ascii'), - ) + """ send an email to the address """ + f_addr = f'{app.config["POSTMASTER"]}@{idna.encode(app.config["DOMAIN"]).decode("ascii")}' with smtplib.SMTP(app.config['HOST_AUTHSMTP'], port=10025) as smtp: - to_address = "{0}@{1}".format( - self.localpart, - idna.encode(self.domain_name).decode('ascii'), - ) + to_address = f'{self.localpart}@{idna.encode(self.domain_name).decode("ascii")}' msg = text.MIMEText(body) msg['Subject'] = subject - msg['From'] = from_address + msg['From'] = f_addr msg['To'] = to_address - smtp.sendmail(from_address, [to_address], msg.as_string()) + smtp.sendmail(f_addr, [to_address], msg.as_string()) @classmethod def resolve_domain(cls, email): - localpart, domain_name = email.split('@', 1) if '@' in email else (None, email) - alternative = Alternative.query.get(domain_name) - if alternative: + """ resolves domain alternative to real domain """ + localpart, domain_name = email.rsplit('@', 1) if '@' in email else (None, email) + if alternative := Alternative.query.get(domain_name): domain_name = alternative.domain_name return (localpart, domain_name) @classmethod def resolve_destination(cls, localpart, domain_name, ignore_forward_keep=False): + """ return destination for email address localpart@domain_name """ + localpart_stripped = None stripped_alias = None - if os.environ.get('RECIPIENT_DELIMITER') in localpart: - localpart_stripped = localpart.rsplit(os.environ.get('RECIPIENT_DELIMITER'), 1)[0] + delim = os.environ.get('RECIPIENT_DELIMITER') + if delim in localpart: + localpart_stripped = localpart.rsplit(delim, 1)[0] - user = User.query.get('{}@{}'.format(localpart, domain_name)) + user = User.query.get(f'{localpart}@{domain_name}') if not user and localpart_stripped: - user = User.query.get('{}@{}'.format(localpart_stripped, domain_name)) + user = User.query.get(f'{localpart_stripped}@{domain_name}') + if user: - email = '{}@{}'.format(localpart, domain_name) + email = f'{localpart}@{domain_name}' if user.forward_enabled: destination = user.forward_destination @@ -284,56 +417,59 @@ class Email(object): destination.append(email) else: destination = [email] + return destination pure_alias = Alias.resolve(localpart, domain_name) - stripped_alias = Alias.resolve(localpart_stripped, domain_name) if pure_alias and not pure_alias.wildcard: return pure_alias.destination - elif stripped_alias: + + if stripped_alias := Alias.resolve(localpart_stripped, domain_name): return stripped_alias.destination - elif pure_alias: + + if pure_alias: return pure_alias.destination - def __str__(self): - return self.email + return None class User(Base, Email): """ A user is an email address that has a password to access a mailbox. 
""" - __tablename__ = "user" + + __tablename__ = 'user' _ctx = None + _credential_cache = {} domain = db.relationship(Domain, backref=db.backref('users', cascade='all, delete-orphan')) password = db.Column(db.String(255), nullable=False) - quota_bytes = db.Column(db.BigInteger(), nullable=False, default=10**9) - quota_bytes_used = db.Column(db.BigInteger(), nullable=False, default=0) - global_admin = db.Column(db.Boolean(), nullable=False, default=False) - enabled = db.Column(db.Boolean(), nullable=False, default=True) + quota_bytes = db.Column(db.BigInteger, nullable=False, default=10**9) + quota_bytes_used = db.Column(db.BigInteger, nullable=False, default=0) + global_admin = db.Column(db.Boolean, nullable=False, default=False) + enabled = db.Column(db.Boolean, nullable=False, default=True) # Features - enable_imap = db.Column(db.Boolean(), nullable=False, default=True) - enable_pop = db.Column(db.Boolean(), nullable=False, default=True) + enable_imap = db.Column(db.Boolean, nullable=False, default=True) + enable_pop = db.Column(db.Boolean, nullable=False, default=True) # Filters - forward_enabled = db.Column(db.Boolean(), nullable=False, default=False) - forward_destination = db.Column(CommaSeparatedList(), nullable=True, default=[]) - forward_keep = db.Column(db.Boolean(), nullable=False, default=True) - reply_enabled = db.Column(db.Boolean(), nullable=False, default=False) + forward_enabled = db.Column(db.Boolean, nullable=False, default=False) + forward_destination = db.Column(CommaSeparatedList, nullable=True, default=list) + forward_keep = db.Column(db.Boolean, nullable=False, default=True) + reply_enabled = db.Column(db.Boolean, nullable=False, default=False) reply_subject = db.Column(db.String(255), nullable=True, default=None) - reply_body = db.Column(db.Text(), nullable=True, default=None) + reply_body = db.Column(db.Text, nullable=True, default=None) reply_startdate = db.Column(db.Date, nullable=False, default=date(1900, 1, 1)) reply_enddate = db.Column(db.Date, nullable=False, default=date(2999, 12, 31)) # Settings - displayed_name = db.Column(db.String(160), nullable=False, default="") - spam_enabled = db.Column(db.Boolean(), nullable=False, default=True) - spam_threshold = db.Column(db.Integer(), nullable=False, default=80) + displayed_name = db.Column(db.String(160), nullable=False, default='') + spam_enabled = db.Column(db.Boolean, nullable=False, default=True) + spam_threshold = db.Column(db.Integer, nullable=False, default=80) # Flask-login attributes is_authenticated = True @@ -341,20 +477,23 @@ class User(Base, Email): is_anonymous = False def get_id(self): + """ return users email address """ return self.email @property def destination(self): + """ returns comma separated string of destinations """ if self.forward_enabled: - result = self.forward_destination + result = list(self.forward_destination) if self.forward_keep: - result += ',' + self.email - return result + result.append(self.email) + return ','.join(result) else: return self.email @property def reply_active(self): + """ returns status of autoreply function """ now = date.today() return ( self.reply_enabled and @@ -362,58 +501,82 @@ class User(Base, Email): self.reply_enddate > now ) - def get_password_context(): - if User._ctx: - return User._ctx + @classmethod + def get_password_context(cls): + """ create password context for hashing and verification + """ + if cls._ctx: + return cls._ctx - schemes = registry.list_crypt_handlers() + schemes = passlib.registry.list_crypt_handlers() # scrypt throws a 
warning if the native wheels aren't found schemes.remove('scrypt') # we can't leave plaintext schemes as they will be misidentified for scheme in schemes: if scheme.endswith('plaintext'): schemes.remove(scheme) - User._ctx = context.CryptContext( + cls._ctx = passlib.context.CryptContext( schemes=schemes, default='bcrypt_sha256', bcrypt_sha256__rounds=app.config['CREDENTIAL_ROUNDS'], deprecated='auto' ) - return User._ctx + return cls._ctx def check_password(self, password): + """ verifies password against stored hash + and updates hash if outdated + """ + cache_result = self._credential_cache.get(self.get_id()) + current_salt = self.password.split('$')[3] if len(self.password.split('$')) == 5 else None + if cache_result and current_salt: + cache_salt, cache_hash = cache_result + if cache_salt == current_salt: + return passlib.hash.pbkdf2_sha256.verify(password, cache_hash) + else: + # the cache is local per gunicorn; the password has changed + # so the local cache can be invalidated + del self._credential_cache[self.get_id()] reference = self.password # strip {scheme} if that's something mailu has added # passlib will identify *crypt based hashes just fine # on its own - if self.password.startswith("{"): - scheme = self.password.split('}')[0][1:] - if scheme in ['PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT']: - reference = reference[len(scheme)+2:] + if reference.startswith(('{PBKDF2}', '{BLF-CRYPT}', '{SHA512-CRYPT}', '{SHA256-CRYPT}', '{MD5-CRYPT}', '{CRYPT}')): + reference = reference.split('}', 1)[1] result, new_hash = User.get_password_context().verify_and_update(password, reference) if new_hash: self.password = new_hash db.session.add(self) db.session.commit() + + if result: + """The credential cache uses a low number of rounds to be fast. +While it's not meant to be persisted to cold-storage, no additional measures +are taken to ensure it isn't (mlock(), encrypted swap, ...) on the basis that +we have little control over GC and string interning anyways. + + An attacker that can dump the process' memory is likely to find credentials +in clear-text regardless of the presence of the cache. 
+ """ + self._credential_cache[self.get_id()] = (self.password.split('$')[3], passlib.hash.pbkdf2_sha256.using(rounds=1).hash(password)) return result - def set_password(self, password, hash_scheme=None, raw=False): - """Set password for user with specified encryption scheme - @password: plain text password to encrypt (if raw == True the hash itself) + def set_password(self, password, raw=False): + """ Set password for user + @password: plain text password to encrypt (or, if raw is True: the hash itself) """ - if raw: - self.password = password - else: - self.password = User.get_password_context().hash(password) + self.password = password if raw else User.get_password_context().hash(password) def get_managed_domains(self): + """ return list of domains this user can manage """ if self.global_admin: return Domain.query.all() else: return self.manager_of def get_managed_emails(self, include_aliases=True): + """ returns list of email addresses this user can manage """ emails = [] for domain in self.get_managed_domains(): emails.extend(domain.users) @@ -422,32 +585,46 @@ class User(Base, Email): return emails def send_welcome(self): - if app.config["WELCOME"]: - self.sendmail(app.config["WELCOME_SUBJECT"], - app.config["WELCOME_BODY"]) + """ send welcome email to user """ + if app.config['WELCOME']: + self.sendmail(app.config['WELCOME_SUBJECT'], app.config['WELCOME_BODY']) @classmethod def get(cls, email): + """ find user object for email address """ return cls.query.get(email) @classmethod def login(cls, email, password): + """ login user when enabled and password is valid """ user = cls.query.get(email) return user if (user and user.enabled and user.check_password(password)) else None + @classmethod + def get_temp_token(cls, email): + user = cls.query.get(email) + return hmac.new(app.temp_token_key, bytearray("{}|{}".format(time.strftime('%Y%m%d'), email), 'utf-8'), 'sha256').hexdigest() if (user and user.enabled) else None + + def verify_temp_token(self, token): + return hmac.compare_digest(self.get_temp_token(self.email), token) + + class Alias(Base, Email): """ An alias is an email address that redirects to some destination. 
""" - __tablename__ = "alias" + + __tablename__ = 'alias' domain = db.relationship(Domain, backref=db.backref('aliases', cascade='all, delete-orphan')) - wildcard = db.Column(db.Boolean(), nullable=False, default=False) - destination = db.Column(CommaSeparatedList, nullable=False, default=[]) + wildcard = db.Column(db.Boolean, nullable=False, default=False) + destination = db.Column(CommaSeparatedList, nullable=False, default=list) @classmethod def resolve(cls, localpart, domain_name): + """ find aliases matching email address localpart@domain_name """ + alias_preserve_case = cls.query.filter( sqlalchemy.and_(cls.domain_name == domain_name, sqlalchemy.or_( @@ -456,7 +633,7 @@ class Alias(Base, Email): cls.localpart == localpart ), sqlalchemy.and_( cls.wildcard == True, - sqlalchemy.bindparam("l", localpart).like(cls.localpart) + sqlalchemy.bindparam('l', localpart).like(cls.localpart) ) ) ) @@ -471,30 +648,33 @@ class Alias(Base, Email): sqlalchemy.func.lower(cls.localpart) == localpart_lower ), sqlalchemy.and_( cls.wildcard == True, - sqlalchemy.bindparam("l", localpart_lower).like(sqlalchemy.func.lower(cls.localpart)) + sqlalchemy.bindparam('l', localpart_lower).like( + sqlalchemy.func.lower(cls.localpart)) ) ) ) - ).order_by(cls.wildcard, sqlalchemy.func.char_length(sqlalchemy.func.lower(cls.localpart)).desc()).first() + ).order_by(cls.wildcard, sqlalchemy.func.char_length( + sqlalchemy.func.lower(cls.localpart)).desc()).first() if alias_preserve_case and alias_lower_case: - if alias_preserve_case.wildcard: - return alias_lower_case - else: - return alias_preserve_case - elif alias_preserve_case and not alias_lower_case: + return alias_lower_case if alias_preserve_case.wildcard else alias_preserve_case + + if alias_preserve_case and not alias_lower_case: return alias_preserve_case - elif alias_lower_case and not alias_preserve_case: + + if alias_lower_case and not alias_preserve_case: return alias_lower_case - else: - return None + + return None + class Token(Base): """ A token is an application password for a given user. """ - __tablename__ = "token" - id = db.Column(db.Integer(), primary_key=True) + __tablename__ = 'token' + + id = db.Column(db.Integer, primary_key=True) user_email = db.Column(db.String(255), db.ForeignKey(User.email), nullable=False) user = db.relationship(User, @@ -503,40 +683,259 @@ class Token(Base): ip = db.Column(db.String(255)) def check_password(self, password): + """ verifies password against stored hash + and updates hash if outdated + """ if self.password.startswith("$5$"): - if hash.sha256_crypt.verify(password, self.password): + if passlib.hash.sha256_crypt.verify(password, self.password): self.set_password(password) db.session.add(self) db.session.commit() return True return False - return hash.pbkdf2_sha256.verify(password, self.password) + return passlib.hash.pbkdf2_sha256.verify(password, self.password) def set_password(self, password): + """ sets password using pbkdf2_sha256 (1 round) """ # tokens have 128bits of entropy, they are not bruteforceable - self.password = hash.pbkdf2_sha256.using(rounds=1).hash(password) + self.password = passlib.hash.pbkdf2_sha256.using(rounds=1).hash(password) - def __str__(self): - return self.comment + def __repr__(self): + return f'' class Fetch(Base): - """ A fetched account is a repote POP/IMAP account fetched into a local + """ A fetched account is a remote POP/IMAP account fetched into a local account. 
""" - __tablename__ = "fetch" - id = db.Column(db.Integer(), primary_key=True) + __tablename__ = 'fetch' + + id = db.Column(db.Integer, primary_key=True) user_email = db.Column(db.String(255), db.ForeignKey(User.email), nullable=False) user = db.relationship(User, backref=db.backref('fetches', cascade='all, delete-orphan')) protocol = db.Column(db.Enum('imap', 'pop3'), nullable=False) host = db.Column(db.String(255), nullable=False) - port = db.Column(db.Integer(), nullable=False) - tls = db.Column(db.Boolean(), nullable=False) + port = db.Column(db.Integer, nullable=False) + tls = db.Column(db.Boolean, nullable=False, default=False) username = db.Column(db.String(255), nullable=False) password = db.Column(db.String(255), nullable=False) - keep = db.Column(db.Boolean(), nullable=False) + keep = db.Column(db.Boolean, nullable=False, default=False) last_check = db.Column(db.DateTime, nullable=True) error = db.Column(db.String(1023), nullable=True) + + def __repr__(self): + return ( + f'' + ) + + +class MailuConfig: + """ Class which joins whole Mailu config for dumping + and loading + """ + + class MailuCollection: + """ Provides dict- and list-like access to instances + of a sqlalchemy model + """ + + def __init__(self, model : db.Model): + self.model = model + + def __repr__(self): + return f'<{self.model.__name__}-Collection>' + + @cached_property + def _items(self): + return { + inspect(item).identity: item + for item in self.model.query.all() + } + + def __len__(self): + return len(self._items) + + def __iter__(self): + return iter(self._items.values()) + + def __getitem__(self, key): + return self._items[key] + + def __setitem__(self, key, item): + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + if key != inspect(item).identity: + raise ValueError(f'item identity != key {key!r}') + self._items[key] = item + + def __delitem__(self, key): + del self._items[key] + + def append(self, item, update=False): + """ list-like append """ + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + key = inspect(item).identity + if key in self._items: + if not update: + raise ValueError(f'item {key!r} already present in collection') + self._items[key] = item + + def extend(self, items, update=False): + """ list-like extend """ + add = {} + for item in items: + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + key = inspect(item).identity + if not update and key in self._items: + raise ValueError(f'item {key!r} already present in collection') + add[key] = item + self._items.update(add) + + def pop(self, *args): + """ list-like (no args) and dict-like (1 or 2 args) pop """ + if args: + if len(args) > 2: + raise TypeError(f'pop expected at most 2 arguments, got {len(args)}') + return self._items.pop(*args) + else: + return self._items.popitem()[1] + + def popitem(self): + """ dict-like popitem """ + return self._items.popitem() + + def remove(self, item): + """ list-like remove """ + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + key = inspect(item).identity + if not key in self._items: + raise ValueError(f'item {key!r} not found in collection') + del self._items[key] + + def clear(self): + """ dict-like clear """ + while True: + try: + self.pop() + except IndexError: + break + + def update(self, items): + """ dict-like update """ + for key, item in items: + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + if key != 
inspect(item).identity: + raise ValueError(f'item identity != key {key!r}') + self._items.update(items) + + def setdefault(self, key, item=None): + """ dict-like setdefault """ + if key in self._items: + return self._items[key] + if item is None: + return None + if not isinstance(item, self.model): + raise TypeError(f'expected {self.model.name}') + if key != inspect(item).identity: + raise ValueError(f'item identity != key {key!r}') + self._items[key] = item + return item + + def __init__(self): + + # section-name -> attr + self._sections = { + name: getattr(self, name) + for name in dir(self) + if isinstance(getattr(self, name), self.MailuCollection) + } + + # known models + self._models = tuple(section.model for section in self._sections.values()) + + # model -> attr + self._sections.update({ + section.model: section for section in self._sections.values() + }) + + def _get_model(self, section): + if section is None: + return None + model = self._sections.get(section) + if model is None: + raise ValueError(f'Invalid section: {section!r}') + if isinstance(model, self.MailuCollection): + return model.model + return model + + def _add(self, items, section, update): + + model = self._get_model(section) + if isinstance(items, self._models): + items = [items] + elif not hasattr(items, '__iter__'): + raise ValueError(f'{items!r} is not iterable') + + for item in items: + if model is not None and not isinstance(item, model): + what = item.__class__.__name__.capitalize() + raise ValueError(f'{what} can not be added to section {section!r}') + self._sections[type(item)].append(item, update=update) + + def add(self, items, section=None): + """ add item to config """ + self._add(items, section, update=False) + + def update(self, items, section=None): + """ add or replace item in config """ + self._add(items, section, update=True) + + def remove(self, items, section=None): + """ remove item from config """ + model = self._get_model(section) + if isinstance(items, self._models): + items = [items] + elif not hasattr(items, '__iter__'): + raise ValueError(f'{items!r} is not iterable') + + for item in items: + if isinstance(item, str): + if section is None: + raise ValueError(f'Cannot remove key {item!r} without section') + del self._sections[model][item] + elif model is not None and not isinstance(item, model): + what = item.__class__.__name__.capitalize() + raise ValueError(f'{what} can not be removed from section {section!r}') + self._sections[type(item)].remove(item,) + + def clear(self, models=None): + """ remove complete configuration """ + for model in self._models: + if models is None or model in models: + db.session.query(model).delete() + + def check(self): + """ check for duplicate domain names """ + dup = set() + for fqdn in chain( + db.session.query(Domain.name), + db.session.query(Alternative.name), + db.session.query(Relay.name) + ): + if fqdn in dup: + raise ValueError(f'Duplicate domain name: {fqdn}') + dup.add(fqdn) + + domain = MailuCollection(Domain) + user = MailuCollection(User) + alias = MailuCollection(Alias) + relay = MailuCollection(Relay) + config = MailuCollection(Config) diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py new file mode 100644 index 00000000..191d01ac --- /dev/null +++ b/core/admin/mailu/schemas.py @@ -0,0 +1,1269 @@ +""" Mailu marshmallow fields and schema +""" + +from copy import deepcopy +from collections import Counter +from datetime import timezone + +import json +import logging +import yaml + +import sqlalchemy + +from marshmallow import 
pre_load, post_load, post_dump, fields, Schema +from marshmallow.utils import ensure_text_type +from marshmallow.exceptions import ValidationError +from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts +from marshmallow_sqlalchemy.fields import RelatedList + +from flask_marshmallow import Marshmallow + +from OpenSSL import crypto + +from pygments import highlight +from pygments.token import Token +from pygments.lexers import get_lexer_by_name +from pygments.lexers.data import YamlLexer +from pygments.formatters import get_formatter_by_name + +from mailu import models, dkim + + +ma = Marshmallow() + + +### import logging and schema colorization ### + +_model2schema = {} + +def get_schema(cls=None): + """ return schema class for model """ + if cls is None: + return _model2schema.values() + return _model2schema.get(cls) + +def mapped(cls): + """ register schema in model2schema map """ + _model2schema[cls.Meta.model] = cls + return cls + +class Logger: + """ helps with counting and colorizing + imported and exported data + """ + + class MyYamlLexer(YamlLexer): + """ colorize yaml constants and integers """ + def get_tokens(self, text, unfiltered=False): + for typ, value in super().get_tokens(text, unfiltered): + if typ is Token.Literal.Scalar.Plain: + if value in {'true', 'false', 'null'}: + typ = Token.Keyword.Constant + elif value == HIDDEN: + typ = Token.Error + else: + try: + int(value, 10) + except ValueError: + try: + float(value) + except ValueError: + pass + else: + typ = Token.Literal.Number.Float + else: + typ = Token.Literal.Number.Integer + yield typ, value + + def __init__(self, want_color=None, can_color=False, debug=False, secrets=False): + + self.lexer = 'yaml' + self.formatter = 'terminal' + self.strip = False + self.verbose = 0 + self.quiet = False + self.secrets = secrets + self.debug = debug + self.print = print + + self.color = want_color or can_color + + self._counter = Counter() + self._schemas = {} + + # log contexts + self._diff_context = { + 'full': True, + 'secrets': secrets, + } + log_context = { + 'secrets': secrets, + } + + # register listeners + for schema in get_schema(): + model = schema.Meta.model + self._schemas[model] = schema(context=log_context) + sqlalchemy.event.listen(model, 'after_insert', self._listen_insert) + sqlalchemy.event.listen(model, 'after_update', self._listen_update) + sqlalchemy.event.listen(model, 'after_delete', self._listen_delete) + + # special listener for dkim_key changes + # TODO: _listen_dkim can be removed when dkim keys are stored in database + self._dedupe_dkim = set() + sqlalchemy.event.listen(models.db.session, 'after_flush', self._listen_dkim) + + # register debug logger for sqlalchemy + if self.debug: + logging.basicConfig() + logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) + + def _log(self, action, target, message=None): + if message is None: + try: + message = self._schemas[target.__class__].dump(target) + except KeyError: + message = target + if not isinstance(message, str): + message = repr(message) + self.print(f'{action} {target.__table__}: {self.colorize(message)}') + + def _listen_insert(self, mapper, connection, target): # pylint: disable=unused-argument + """ callback method to track import """ + self._counter.update([('Created', target.__table__.name)]) + if self.verbose: + self._log('Created', target) + + def _listen_update(self, mapper, connection, target): # pylint: disable=unused-argument + """ callback method to track import """ + + changes = {} + inspection = sqlalchemy.inspect(target) + 
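+        # sqlalchemy.inspect(target).attrs.<key>.history is a History tuple of
+        # (added, unchanged, deleted); deleted[-1] is the value the attribute
+        # had before this flush, i.e. the "before" side of the change log.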
for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs: + history = getattr(inspection.attrs, attr.key).history + if history.has_changes() and history.deleted: + before = history.deleted[-1] + after = getattr(target, attr.key) + # TODO: this can be removed when comment is not nullable in model + if attr.key == 'comment' and not before and not after: + pass + # only remember changed keys + elif before != after: + if self.verbose: + changes[str(attr.key)] = (before, after) + else: + break + + if self.verbose: + # use schema to log changed attributes + schema = get_schema(target.__class__) + only = set(changes.keys()) & set(schema().fields.keys()) + if only: + for key, value in schema( + only=only, + context=self._diff_context + ).dump(target).items(): + before, after = changes[key] + if value == HIDDEN: + before = HIDDEN if before else before + after = HIDDEN if after else after + else: + # also hide this + after = value + self._log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}') + + if changes: + self._counter.update([('Modified', target.__table__.name)]) + + def _listen_delete(self, mapper, connection, target): # pylint: disable=unused-argument + """ callback method to track import """ + self._counter.update([('Deleted', target.__table__.name)]) + if self.verbose: + self._log('Deleted', target) + + # TODO: _listen_dkim can be removed when dkim keys are stored in database + def _listen_dkim(self, session, flush_context): # pylint: disable=unused-argument + """ callback method to track import """ + for target in session.identity_map.values(): + # look at Domains originally loaded from db + if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path: + continue + before = target._dkim_key_on_disk + after = target._dkim_key + # "de-dupe" messages; this event is fired at every flush + if before == after or (target, before, after) in self._dedupe_dkim: + continue + self._dedupe_dkim.add((target, before, after)) + self._counter.update([('Modified', target.__table__.name)]) + if self.verbose: + if self.secrets: + before = before.decode('ascii', 'ignore') + after = after.decode('ascii', 'ignore') + else: + before = HIDDEN if before else '' + after = HIDDEN if after else '' + self._log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}') + + def track_serialize(self, obj, item, backref=None): + """ callback method to track import """ + # called for backref modification? + if backref is not None: + self._log( + 'Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format_map(backref)) + return + # show input data? 
+ if self.verbose < 2: + return + # hide secrets in data + if not self.secrets: + item = self._schemas[obj.opts.model].hide(item) + if 'hash_password' in item: + item['password'] = HIDDEN + if 'fetches' in item: + for fetch in item['fetches']: + fetch['password'] = HIDDEN + self._log('Handling', obj.opts.model, item) + + def changes(self, *messages, **kwargs): + """ show changes gathered in counter """ + if self.quiet: + return + if self._counter: + changes = [] + last = None + for (action, what), count in sorted(self._counter.items()): + if action != last: + if last: + changes.append('/') + changes.append(f'{action}:') + last = action + changes.append(f'{what}({count})') + else: + changes = ['No changes.'] + self.print(*messages, *changes, **kwargs) + + def _format_errors(self, store, path=None): + + res = [] + if path is None: + path = [] + for key in sorted(store): + location = path + [str(key)] + value = store[key] + if isinstance(value, dict): + res.extend(self._format_errors(value, location)) + else: + for message in value: + res.append((".".join(location), message)) + + if path: + return res + + maxlen = max(len(loc) for loc, msg in res) + res = [f' - {loc.ljust(maxlen)} : {msg}' for loc, msg in res] + errors = f'{len(res)} error{["s",""][len(res)==1]}' + res.insert(0, f'[ValidationError] {errors} occurred during input validation') + + return '\n'.join(res) + + def _is_validation_error(self, exc): + """ walk traceback to extract invalid field from marshmallow """ + path = [] + trace = exc.__traceback__ + while trace: + if trace.tb_frame.f_code.co_name == '_serialize': + if 'attr' in trace.tb_frame.f_locals: + path.append(trace.tb_frame.f_locals['attr']) + elif trace.tb_frame.f_code.co_name == '_init_fields': + spec = ', '.join( + '.'.join(path + [key]) + for key in trace.tb_frame.f_locals['invalid_fields']) + return f'Invalid filter: {spec}' + trace = trace.tb_next + return None + + def format_exception(self, exc): + """ format ValidationErrors and other exceptions when not debugging """ + if isinstance(exc, ValidationError): + return self._format_errors(exc.messages) + if isinstance(exc, ValueError): + if msg := self._is_validation_error(exc): + return msg + if self.debug: + return None + msg = ' '.join(str(exc).split()) + return f'[{exc.__class__.__name__}] {msg}' + + colorscheme = { + Token: ('', ''), + Token.Name.Tag: ('cyan', 'cyan'), + Token.Literal.Scalar: ('green', 'green'), + Token.Literal.String: ('green', 'green'), + Token.Name.Constant: ('green', 'green'), # multiline strings + Token.Keyword.Constant: ('magenta', 'magenta'), + Token.Literal.Number: ('magenta', 'magenta'), + Token.Error: ('red', 'red'), + Token.Name: ('red', 'red'), + Token.Operator: ('red', 'red'), + } + + def colorize(self, data, lexer=None, formatter=None, color=None, strip=None): + """ add ANSI color to data """ + + if color is False or not self.color: + return data + + lexer = lexer or self.lexer + lexer = Logger.MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer) + formatter = get_formatter_by_name(formatter or self.formatter, colorscheme=self.colorscheme) + if strip is None: + strip = self.strip + + res = highlight(data, lexer, formatter) + if strip: + return res.rstrip('\n') + return res + + +### marshmallow render modules ### + +# hidden attributes +class _Hidden: + def __bool__(self): + return False + def __copy__(self): + return self + def __deepcopy__(self, _): + return self + def __eq__(self, other): + return str(other) == '' + def __repr__(self): + return '' + __str__ = __repr__ + 
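
A minimal sketch of what the two representers registered just below do with these helper types (the dict keys are illustrative and the snippet assumes this module's context):

    data = {
        'dkim_key': _Multiline('-----BEGIN PRIVATE KEY-----\nMIIE...\n-----END PRIVATE KEY-----'),
        'password': HIDDEN,
    }
    print(yaml.dump(data, default_flow_style=False))
    # the _Multiline value is emitted as a literal block scalar ("dkim_key: |-" plus
    # the indented key material), while HIDDEN is dumped via str() and therefore
    # appears only as its placeholder text instead of the real secret
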
+yaml.add_representer( + _Hidden, + lambda dumper, data: dumper.represent_data(str(data)) +) + +HIDDEN = _Hidden() + +# multiline attributes +class _Multiline(str): + pass + +yaml.add_representer( + _Multiline, + lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str', data, style='|') + +) + +# yaml render module +class RenderYAML: + """ Marshmallow YAML Render Module + """ + + class SpacedDumper(yaml.Dumper): + """ YAML Dumper to add a newline between main sections + and double the indent used + """ + + def write_line_break(self, data=None): + super().write_line_break(data) + if len(self.indents) == 1: + super().write_line_break() + + def increase_indent(self, flow=False, indentless=False): + return super().increase_indent(flow, False) + + @staticmethod + def _augment(kwargs, defaults): + """ add defaults to kwargs if missing + """ + for key, value in defaults.items(): + if key not in kwargs: + kwargs[key] = value + + _load_defaults = {} + @classmethod + def loads(cls, *args, **kwargs): + """ load yaml data from string + """ + cls._augment(kwargs, cls._load_defaults) + return yaml.safe_load(*args, **kwargs) + + _dump_defaults = { + 'Dumper': SpacedDumper, + 'default_flow_style': False, + 'allow_unicode': True, + 'sort_keys': False, + } + @classmethod + def dumps(cls, *args, **kwargs): + """ dump data to yaml string + """ + cls._augment(kwargs, cls._dump_defaults) + return yaml.dump(*args, **kwargs) + +# json encoder +class JSONEncoder(json.JSONEncoder): + """ JSONEncoder supporting serialization of HIDDEN """ + def default(self, o): + """ serialize HIDDEN """ + if isinstance(o, _Hidden): + return str(o) + return json.JSONEncoder.default(self, o) + +# json render module +class RenderJSON: + """ Marshmallow JSON Render Module + """ + + @staticmethod + def _augment(kwargs, defaults): + """ add defaults to kwargs if missing + """ + for key, value in defaults.items(): + if key not in kwargs: + kwargs[key] = value + + _load_defaults = {} + @classmethod + def loads(cls, *args, **kwargs): + """ load json data from string + """ + cls._augment(kwargs, cls._load_defaults) + return json.loads(*args, **kwargs) + + _dump_defaults = { + 'separators': (',',':'), + 'cls': JSONEncoder, + } + @classmethod + def dumps(cls, *args, **kwargs): + """ dump data to json string + """ + cls._augment(kwargs, cls._dump_defaults) + return json.dumps(*args, **kwargs) + + +### marshmallow: custom fields ### + +def _rfc3339(datetime): + """ dump datetime according to rfc3339 """ + if datetime.tzinfo is None: + datetime = datetime.astimezone(timezone.utc) + res = datetime.isoformat() + if res.endswith('+00:00'): + return f'{res[:-6]}Z' + return res + +fields.DateTime.SERIALIZATION_FUNCS['rfc3339'] = _rfc3339 +fields.DateTime.DESERIALIZATION_FUNCS['rfc3339'] = fields.DateTime.DESERIALIZATION_FUNCS['iso'] +fields.DateTime.DEFAULT_FORMAT = 'rfc3339' + +class LazyStringField(fields.String): + """ Field that serializes a "false" value to the empty string + """ + + def _serialize(self, value, attr, obj, **kwargs): + """ serialize None to the empty string + """ + return value if value else '' + +class CommaSeparatedListField(fields.Raw): + """ Deserialize a string containing comma-separated values to + a list of strings + """ + + default_error_messages = { + "invalid": "Not a valid string or list.", + "invalid_utf8": "Not a valid utf-8 string or list.", + } + + def _deserialize(self, value, attr, data, **kwargs): + """ deserialize comma separated string to list of strings + """ + + # empty + if not value: + return 
[] + + # handle list + if isinstance(value, list): + try: + value = [ensure_text_type(item) for item in value] + except UnicodeDecodeError as exc: + raise self.make_error("invalid_utf8") from exc + + # handle text + else: + if not isinstance(value, (str, bytes)): + raise self.make_error("invalid") + try: + value = ensure_text_type(value) + except UnicodeDecodeError as exc: + raise self.make_error("invalid_utf8") from exc + else: + value = filter(bool, (item.strip() for item in value.split(','))) + + return list(value) + + +class DkimKeyField(fields.String): + """ Serialize a dkim key to a multiline string and + deserialize a dkim key data as string or list of strings + to a valid dkim key + """ + + default_error_messages = { + "invalid": "Not a valid string or list.", + "invalid_utf8": "Not a valid utf-8 string or list.", + } + + def _serialize(self, value, attr, obj, **kwargs): + """ serialize dkim key as multiline string + """ + + # map empty string and None to None + if not value: + return '' + + # return multiline string + return _Multiline(value.decode('utf-8')) + + def _wrap_key(self, begin, data, end): + """ generator to wrap key into RFC 7468 format """ + yield begin + pos = 0 + while pos < len(data): + yield data[pos:pos+64] + pos += 64 + yield end + yield '' + + def _deserialize(self, value, attr, data, **kwargs): + """ deserialize a string or list of strings to dkim key data + with verification + """ + + # convert list to str + if isinstance(value, list): + try: + value = ''.join(ensure_text_type(item) for item in value).strip() + except UnicodeDecodeError as exc: + raise self.make_error("invalid_utf8") from exc + + # only text is allowed + else: + if not isinstance(value, (str, bytes)): + raise self.make_error("invalid") + try: + value = ensure_text_type(value).strip() + except UnicodeDecodeError as exc: + raise self.make_error("invalid_utf8") from exc + + # generate new key? + if value.lower() == '-generate-': + return dkim.gen_key() + + # no key? 
+ if not value: + return None + + # remember part of value for ValidationError + bad_key = value + + # strip header and footer, clean whitespace and wrap to 64 characters + try: + if value.startswith('-----BEGIN '): + end = value.index('-----', 11) + 5 + header = value[:end] + value = value[end:] + else: + header = '-----BEGIN PRIVATE KEY-----' + + if (pos := value.find('-----END ')) >= 0: + end = value.index('-----', pos+9) + 5 + footer = value[pos:end] + value = value[:pos] + else: + footer = '-----END PRIVATE KEY-----' + except ValueError as exc: + raise ValidationError(f'invalid dkim key {bad_key!r}') from exc + + # remove whitespace from key data + value = ''.join(value.split()) + + # remember part of value for ValidationError + bad_key = f'{value[:25]}...{value[-10:]}' if len(value) > 40 else value + + # wrap key according to RFC 7468 + value = ('\n'.join(self._wrap_key(header, value, footer))).encode('ascii') + + # check key validity + try: + crypto.load_privatekey(crypto.FILETYPE_PEM, value) + except crypto.Error as exc: + raise ValidationError(f'invalid dkim key {bad_key!r}') from exc + else: + return value + +class PasswordField(fields.Str): + """ Serialize a hashed password hash by stripping the obsolete {SCHEME} + Deserialize a plain password or hashed password into a hashed password + """ + + _hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'} + + def _serialize(self, value, attr, obj, **kwargs): + """ strip obsolete {password-hash} when serializing """ + # strip scheme spec if in database - it's obsolete + if value.startswith('{') and (end := value.find('}', 1)) >= 0: + if value[1:end] in self._hashes: + return value[end+1:] + return value + + def _deserialize(self, value, attr, data, **kwargs): + """ hashes plain password or checks hashed password + also strips obsolete {password-hash} when deserializing + """ + + # when hashing is requested: use model instance to hash plain password + if data.get('hash_password'): + # hash password using model instance + inst = self.metadata['model']() + inst.set_password(value) + value = inst.password + del inst + + # strip scheme spec when specified - it's obsolete + if value.startswith('{') and (end := value.find('}', 1)) >= 0: + if value[1:end] in self._hashes: + value = value[end+1:] + + # check if algorithm is supported + inst = self.metadata['model'](password=value) + try: + # just check against empty string to see if hash is valid + inst.check_password('') + except ValueError as exc: + # ValueError: hash could not be identified + raise ValidationError(f'invalid password hash {value!r}') from exc + del inst + + return value + + +### base schema ### + +class Storage: + """ Storage class to save information in context + """ + + context = {} + + def _bind(self, key, bind): + if bind is True: + return (self.__class__, key) + if isinstance(bind, str): + return (get_schema(self.recall(bind).__class__), key) + return (bind, key) + + def store(self, key, value, bind=None): + """ store value under key """ + self.context.setdefault('_track', {})[self._bind(key, bind)]= value + + def recall(self, key, bind=None): + """ recall value from key """ + return self.context['_track'][self._bind(key, bind)] + +class BaseOpts(SQLAlchemyAutoSchemaOpts): + """ Option class with sqla session + """ + def __init__(self, meta, ordered=False): + if not hasattr(meta, 'sqla_session'): + meta.sqla_session = models.db.session + if not hasattr(meta, 'sibling'): + meta.sibling = False + super(BaseOpts, self).__init__(meta, 
ordered=ordered) + +class BaseSchema(ma.SQLAlchemyAutoSchema, Storage): + """ Marshmallow base schema with custom exclude logic + and option to hide sqla defaults + """ + + OPTIONS_CLASS = BaseOpts + + class Meta: + """ Schema config """ + include_by_context = {} + exclude_by_value = {} + hide_by_context = {} + order = [] + sibling = False + + def __init__(self, *args, **kwargs): + + # prepare only to auto-include explicitly specified attributes + only = set(kwargs.get('only') or []) + + # get context + context = kwargs.get('context', {}) + flags = {key for key, value in context.items() if value is True} + + # compile excludes + exclude = set(kwargs.get('exclude', [])) + + # always exclude + exclude.update({'created_at', 'updated_at'} - only) + + # add include_by_context + if context is not None: + for need, what in getattr(self.Meta, 'include_by_context', {}).items(): + if not flags & set(need): + exclude |= what - only + + # update excludes + kwargs['exclude'] = exclude + + # init SQLAlchemyAutoSchema + super().__init__(*args, **kwargs) + + # exclude_by_value + self._exclude_by_value = { + key: values for key, values in getattr(self.Meta, 'exclude_by_value', {}).items() + if key not in only + } + + # exclude default values + if not context.get('full'): + for column in self.opts.model.__table__.columns: + if column.name not in exclude and column.name not in only: + self._exclude_by_value.setdefault(column.name, []).append( + None if column.default is None else column.default.arg + ) + + # hide by context + self._hide_by_context = set() + if context is not None: + for need, what in getattr(self.Meta, 'hide_by_context', {}).items(): + if not flags & set(need): + self._hide_by_context |= what - only + + # remember primary keys + self._primary = str(self.opts.model.__table__.primary_key.columns.values()[0].name) + + # determine attribute order + if hasattr(self.Meta, 'order'): + # use user-defined order + order = self.Meta.order + else: + # default order is: primary_key + other keys alphabetically + order = list(sorted(self.fields.keys())) + if self._primary in order: + order.remove(self._primary) + order.insert(0, self._primary) + + # order fieldlists + for fieldlist in (self.fields, self.load_fields, self.dump_fields): + for field in order: + if field in fieldlist: + fieldlist[field] = fieldlist.pop(field) + + # move post_load hook "_add_instance" to the end (after load_instance mixin) + hooks = self._hooks[('post_load', False)] + hooks.remove('_add_instance') + hooks.append('_add_instance') + + def hide(self, data): + """ helper method to hide input data for logging """ + # always returns a copy of data + return { + key: HIDDEN if key in self._hide_by_context else deepcopy(value) + for key, value in data.items() + } + + def _call_and_store(self, *args, **kwargs): + """ track current parent field for pruning """ + self.store('field', kwargs['field_name'], True) + return super()._call_and_store(*args, **kwargs) + + # this is only needed to work around the declared attr "email" primary key in model + def get_instance(self, data): + """ lookup item by defined primary key instead of key(s) from model """ + if self.transient: + return None + if keys := getattr(self.Meta, 'primary_keys', None): + filters = {key: data.get(key) for key in keys} + if None not in filters.values(): + res= self.session.query(self.opts.model).filter_by(**filters).first() + return res + res= super().get_instance(data) + return res + + @pre_load(pass_many=True) + def _patch_many(self, items, many, **kwargs): # pylint: 
disable=unused-argument + """ - flush sqla session before serializing a section when requested + (make sure all objects that could be referred to later are created) + - when in update mode: patch input data before deserialization + - handle "prune" and "delete" items + - replace values in keys starting with '-' with default + """ + + # flush sqla session + if not self.Meta.sibling: + self.opts.sqla_session.flush() + + # stop early when not updating + if not self.context.get('update'): + return items + + # patch "delete", "prune" and "default" + want_prune = [] + def patch(count, data): + + # don't allow __delete__ coming from input + if '__delete__' in data: + raise ValidationError('Unknown field.', f'{count}.__delete__') + + # fail when hash_password is specified without password + if 'hash_password' in data and not 'password' in data: + raise ValidationError( + 'Nothing to hash. Field "password" is missing.', + field_name = f'{count}.hash_password', + ) + + # handle "prune list" and "delete item" (-pkey: none and -pkey: id) + for key in data: + if key.startswith('-'): + if key[1:] == self._primary: + # delete or prune + if data[key] is None: + # prune + want_prune.append(True) + return None + # mark item for deletion + return {key[1:]: data[key], '__delete__': count} + + # handle "set to default value" (-key: none) + def set_default(key, value): + if not key.startswith('-'): + return (key, value) + key = key[1:] + if not key in self.opts.model.__table__.columns: + return (key, None) + if value is not None: + raise ValidationError( + 'Value must be "null" when resetting to default.', + f'{count}.{key}' + ) + value = self.opts.model.__table__.columns[key].default + if value is None: + raise ValidationError( + 'Field has no default value.', + f'{count}.{key}' + ) + return (key, value.arg) + + return dict(set_default(key, value) for key, value in data.items()) + + # convert items to "delete" and filter "prune" item + items = [ + item for item in [ + patch(count, item) for count, item in enumerate(items) + ] if item + ] + + # remember if prune was requested for _prune_items@post_load + self.store('prune', bool(want_prune), True) + + # remember original items to stabilize password-changes in _add_instance@post_load + self.store('original', items, True) + + return items + + @pre_load + def _patch_item(self, data, many, **kwargs): # pylint: disable=unused-argument + """ - call callback function to track import + - stabilize import of items with auto-increment primary key + - delete items + - delete/prune list attributes + - add missing required attributes + """ + + # callback + if callback := self.context.get('callback'): + callback(self, data) + + # stop early when not updating + if not self.opts.load_instance or not self.context.get('update'): + return data + + # stabilize import of auto-increment primary keys (not required), + # by matching import data to existing items and setting primary key + if not self._primary in data: + for item in getattr(self.recall('parent'), self.recall('field', 'parent')): + existing = self.dump(item, many=False) + this = existing.pop(self._primary) + if data == existing: + instance = item + data[self._primary] = this + break + + # try to load instance + instance = self.instance or self.get_instance(data) + if instance is None: + + if '__delete__' in data: + # deletion of non-existent item requested + raise ValidationError( + f'Item to delete not found: {data[self._primary]!r}.', + field_name = f'{data["__delete__"]}.{self._primary}', + ) + + else: + + if 
self.context.get('update'): + # remember instance as parent for pruning siblings + if not self.Meta.sibling: + self.store('parent', instance) + # delete instance from session when marked + if '__delete__' in data: + self.opts.sqla_session.delete(instance) + # delete item from lists or prune lists + # currently: domain.alternatives, user.forward_destination, + # user.manager_of, aliases.destination + for key, value in data.items(): + if not isinstance(self.fields.get(key), ( + RelatedList, CommaSeparatedListField, fields.Raw) + ) or not isinstance(value, list): + continue + # deduplicate new value + new_value = set(value) + # handle list pruning + if '-prune-' in value: + value.remove('-prune-') + new_value.remove('-prune-') + else: + for old in getattr(instance, key): + # using str() is okay for now (see above) + new_value.add(str(old)) + # handle item deletion + for item in value: + if item.startswith('-'): + new_value.remove(item) + try: + new_value.remove(item[1:]) + except KeyError as exc: + raise ValidationError( + f'Item to delete not found: {item[1:]!r}.', + field_name=f'?.{key}', + ) from exc + # sort list of new values + data[key] = sorted(new_value) + # log backref modification not catched by modify hook + if isinstance(self.fields[key], RelatedList): + if callback := self.context.get('callback'): + before = {str(v) for v in getattr(instance, key)} + after = set(data[key]) + if before != after: + callback(self, instance, { + 'key': key, + 'target': str(instance), + 'before': before, + 'after': after, + }) + + # add attributes required for validation from db + for attr_name, field_obj in self.load_fields.items(): + if field_obj.required and attr_name not in data: + data[attr_name] = getattr(instance, attr_name) + + return data + + @post_load(pass_many=True) + def _prune_items(self, items, many, **kwargs): # pylint: disable=unused-argument + """ handle list pruning """ + + # stop early when not updating + if not self.context.get('update'): + return items + + # get prune flag from _patch_many@pre_load + want_prune = self.recall('prune', True) + + # prune: determine if existing items in db need to be added or marked for deletion + add_items = False + del_items = False + if self.Meta.sibling: + # parent prunes automatically + if not want_prune: + # no prune requested => add old items + add_items = True + else: + # parent does not prune automatically + if want_prune: + # prune requested => mark old items for deletion + del_items = True + + if add_items or del_items: + existing = {item[self._primary] for item in items if self._primary in item} + for item in getattr(self.recall('parent'), self.recall('field', 'parent')): + key = getattr(item, self._primary) + if key not in existing: + if add_items: + items.append({self._primary: key}) + else: + items.append({self._primary: key, '__delete__': '?'}) + + return items + + @post_load + def _add_instance(self, item, many, **kwargs): # pylint: disable=unused-argument + """ - undo password change in existing instances when plain password did not change + - add new instances to sqla session + """ + + if not item in self.opts.sqla_session: + self.opts.sqla_session.add(item) + return item + + # stop early when not updating or item has no password attribute + if not self.context.get('update') or not hasattr(item, 'password'): + return item + + # did we hash a new plaintext password? 
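+        # if so, the original input item remembered by _patch_many@pre_load still
+        # carries the plain-text password; it is used below to undo the re-hash
+        # when that plain password already matches the hash stored in the db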
+ original = None + pkey = getattr(item, self._primary) + for data in self.recall('original', True): + if 'hash_password' in data and data.get(self._primary) == pkey: + original = data['password'] + break + if original is None: + # password was hashed by us + return item + + # reset hash if plain password matches hash from db + if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None): + if attr.history.has_changes() and attr.history.deleted: + try: + # reset password hash + inst = type(item)(password=attr.history.deleted[-1]) + if inst.check_password(original): + item.password = inst.password + except ValueError: + # hash in db is invalid + pass + else: + del inst + + return item + + @post_dump + def _hide_values(self, data, many, **kwargs): # pylint: disable=unused-argument + """ hide secrets """ + + # stop early when not excluding/hiding + if not self._exclude_by_value and not self._hide_by_context: + return data + + # exclude or hide values + full = self.context.get('full') + return type(data)( + (key, HIDDEN if key in self._hide_by_context else value) + for key, value in data.items() + if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key] + ) + + # this field is used to mark items for deletion + mark_delete = fields.Boolean(data_key='__delete__', load_only=True) + + # TODO: this can be removed when comment is not nullable in model + comment = LazyStringField() + + +### schema definitions ### + +@mapped +class DomainSchema(BaseSchema): + """ Marshmallow schema for Domain model """ + class Meta: + """ Schema config """ + model = models.Domain + load_instance = True + include_relationships = True + exclude = ['users', 'managers', 'aliases'] + + include_by_context = { + ('dns',): {'dkim_publickey', 'dns_mx', 'dns_spf', 'dns_dkim', 'dns_dmarc'}, + } + hide_by_context = { + ('secrets',): {'dkim_key'}, + } + exclude_by_value = { + 'alternatives': [[]], + 'dkim_key': [None], + 'dkim_publickey': [None], + 'dns_mx': [None], + 'dns_spf': [None], + 'dns_dkim': [None], + 'dns_dmarc': [None], + } + + dkim_key = DkimKeyField(allow_none=True) + dkim_publickey = fields.String(dump_only=True) + dns_mx = fields.String(dump_only=True) + dns_spf = fields.String(dump_only=True) + dns_dkim = fields.String(dump_only=True) + dns_dmarc = fields.String(dump_only=True) + + +@mapped +class TokenSchema(BaseSchema): + """ Marshmallow schema for Token model """ + class Meta: + """ Schema config """ + model = models.Token + load_instance = True + + sibling = True + + password = PasswordField(required=True, metadata={'model': models.User}) + hash_password = fields.Boolean(load_only=True, missing=False) + + +@mapped +class FetchSchema(BaseSchema): + """ Marshmallow schema for Fetch model """ + class Meta: + """ Schema config """ + model = models.Fetch + load_instance = True + + sibling = True + include_by_context = { + ('full', 'import'): {'last_check', 'error'}, + } + hide_by_context = { + ('secrets',): {'password'}, + } + + +@mapped +class UserSchema(BaseSchema): + """ Marshmallow schema for User model """ + class Meta: + """ Schema config """ + model = models.User + load_instance = True + include_relationships = True + exclude = ['_email', 'domain', 'localpart', 'domain_name', 'quota_bytes_used'] + + primary_keys = ['email'] + exclude_by_value = { + 'forward_destination': [[]], + 'tokens': [[]], + 'fetches': [[]], + 'manager_of': [[]], + 'reply_enddate': ['2999-12-31'], + 'reply_startdate': ['1900-01-01'], + } + + email = fields.String(required=True) + tokens = 
fields.Nested(TokenSchema, many=True) + fetches = fields.Nested(FetchSchema, many=True) + + password = PasswordField(required=True, metadata={'model': models.User}) + hash_password = fields.Boolean(load_only=True, missing=False) + + +@mapped +class AliasSchema(BaseSchema): + """ Marshmallow schema for Alias model """ + class Meta: + """ Schema config """ + model = models.Alias + load_instance = True + exclude = ['_email', 'domain', 'localpart', 'domain_name'] + + primary_keys = ['email'] + exclude_by_value = { + 'destination': [[]], + } + + email = fields.String(required=True) + destination = CommaSeparatedListField() + + +@mapped +class ConfigSchema(BaseSchema): + """ Marshmallow schema for Config model """ + class Meta: + """ Schema config """ + model = models.Config + load_instance = True + + +@mapped +class RelaySchema(BaseSchema): + """ Marshmallow schema for Relay model """ + class Meta: + """ Schema config """ + model = models.Relay + load_instance = True + + +@mapped +class MailuSchema(Schema, Storage): + """ Marshmallow schema for complete Mailu config """ + class Meta: + """ Schema config """ + model = models.MailuConfig + render_module = RenderYAML + + order = ['domain', 'user', 'alias', 'relay'] # 'config' + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # order fieldlists + for fieldlist in (self.fields, self.load_fields, self.dump_fields): + for field in self.Meta.order: + if field in fieldlist: + fieldlist[field] = fieldlist.pop(field) + + def _call_and_store(self, *args, **kwargs): + """ track current parent and field for pruning """ + self.store('field', kwargs['field_name'], True) + self.store('parent', self.context.get('config')) + return super()._call_and_store(*args, **kwargs) + + @pre_load + def _clear_config(self, data, many, **kwargs): # pylint: disable=unused-argument + """ create config object in context if missing + and clear it if requested + """ + if 'config' not in self.context: + self.context['config'] = models.MailuConfig() + if self.context.get('clear'): + self.context['config'].clear( + models = {field.nested.opts.model for field in self.fields.values()} + ) + return data + + @post_load + def _make_config(self, data, many, **kwargs): # pylint: disable=unused-argument + """ update and return config object """ + config = self.context['config'] + for section in self.Meta.order: + if section in data: + config.update(data[section], section) + + return config + + domain = fields.Nested(DomainSchema, many=True) + user = fields.Nested(UserSchema, many=True) + alias = fields.Nested(AliasSchema, many=True) + relay = fields.Nested(RelaySchema, many=True) +# config = fields.Nested(ConfigSchema, many=True) diff --git a/core/admin/mailu/ui/templates/domain/details.html b/core/admin/mailu/ui/templates/domain/details.html index 29db5f6d..0560d5e0 100644 --- a/core/admin/mailu/ui/templates/domain/details.html +++ b/core/admin/mailu/ui/templates/domain/details.html @@ -50,5 +50,17 @@
_dmarc.{{ domain.name }}. 600 IN TXT "v=DMARC1; p=reject;{% if config["DMARC_RUA"] %} rua=mailto:{{ config["DMARC_RUA"] }}@{{ config["DOMAIN"] }};{% endif %}{% if config["DMARC_RUF"] %} ruf=mailto:{{ config["DMARC_RUF"] }}@{{ config["DOMAIN"] }};{% endif %} adkim=s; aspf=s"
{% endif %}
+
+{% trans %}DNS client auto-configuration (RFC6186) entries{% endtrans %}
+
+_submission._tcp.{{ domain.name }}. 600 IN SRV 1 1 587 {{ config["HOSTNAMES"].split(',')[0] }}.
+_imap._tcp.{{ domain.name }}. 600 IN SRV 100 1 143 {{ config["HOSTNAMES"].split(',')[0] }}.
+_pop3._tcp.{{ domain.name }}. 600 IN SRV 100 1 110 {{ config["HOSTNAMES"].split(',')[0] }}.
+{% if config["TLS_FLAVOR"] != "notls" %}
+_submissions._tcp.{{ domain.name }}. 600 IN SRV 10 1 465 {{ config["HOSTNAMES"].split(',')[0] }}.
+_imaps._tcp.{{ domain.name }}. 600 IN SRV 10 1 993 {{ config["HOSTNAMES"].split(',')[0] }}.
+_pop3s._tcp.{{ domain.name }}. 600 IN SRV 10 1 995 {{ config["HOSTNAMES"].split(',')[0] }}.
+{% endif %} + {% endcall %} {% endblock %} diff --git a/core/admin/mailu/ui/templates/user/reply.html b/core/admin/mailu/ui/templates/user/reply.html index 7a6b7fa8..74c604fa 100644 --- a/core/admin/mailu/ui/templates/user/reply.html +++ b/core/admin/mailu/ui/templates/user/reply.html @@ -14,7 +14,7 @@ {{ form.hidden_tag() }} {{ macros.form_field(form.reply_enabled, onchange="if(this.checked){$('#reply_subject,#reply_body,#reply_enddate,#reply_startdate').removeAttr('readonly')} - else{$('#reply_subject,#reply_body,#reply_enddate').attr('readonly', '')}") }} + else{$('#reply_subject,#reply_body,#reply_enddate,#reply_startdate').attr('readonly', '')}") }} {{ macros.form_field(form.reply_subject, **{("rw" if user.reply_enabled else "readonly"): ""}) }} {{ macros.form_field(form.reply_body, rows=10, diff --git a/core/admin/mailu/ui/views/base.py b/core/admin/mailu/ui/views/base.py index 625c02e1..eb5490bc 100644 --- a/core/admin/mailu/ui/views/base.py +++ b/core/admin/mailu/ui/views/base.py @@ -1,6 +1,7 @@ from mailu import models from mailu.ui import ui, forms, access +from flask import current_app as app import flask import flask_login @@ -49,6 +50,9 @@ def announcement(): flask.flash('Your announcement was sent', 'success') return flask.render_template('announcement.html', form=form) +@ui.route('/webmail', methods=['GET']) +def webmail(): + return flask.redirect(app.config['WEB_WEBMAIL']) @ui.route('/client', methods=['GET']) def client(): diff --git a/core/admin/mailu/ui/views/domains.py b/core/admin/mailu/ui/views/domains.py index 719d3844..f394ce7d 100644 --- a/core/admin/mailu/ui/views/domains.py +++ b/core/admin/mailu/ui/views/domains.py @@ -74,6 +74,8 @@ def domain_details(domain_name): def domain_genkeys(domain_name): domain = models.Domain.query.get(domain_name) or flask.abort(404) domain.generate_dkim_key() + models.db.session.add(domain) + models.db.session.commit() return flask.redirect( flask.url_for(".domain_details", domain_name=domain_name)) diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py index 9d88b54a..b1279d9e 100644 --- a/core/admin/mailu/utils.py +++ b/core/admin/mailu/utils.py @@ -1,11 +1,28 @@ -from mailu import models, limiter +""" Mailu admin app utilities +""" + +try: + import cPickle as pickle +except ImportError: + import pickle + +import hmac +import secrets +import time + +from multiprocessing import Value + +from mailu import limiter import flask import flask_login -import flask_script import flask_migrate import flask_babel +import redis +from flask.sessions import SessionMixin, SessionInterface +from itsdangerous.encoding import want_bytes +from werkzeug.datastructures import CallbackDict from werkzeug.contrib import fixers @@ -15,6 +32,7 @@ login.login_view = "ui.login" @login.unauthorized_handler def handle_needs_login(): + """ redirect unauthorized requests to login page """ return flask.redirect( flask.url_for('ui.login', next=flask.request.endpoint) ) @@ -27,6 +45,7 @@ babel = flask_babel.Babel() @babel.localeselector def get_locale(): + """ selects locale for translation """ translations = list(map(str, babel.list_translations())) flask.session['available_languages'] = translations @@ -41,6 +60,10 @@ def get_locale(): # Proxy fixer class PrefixMiddleware(object): + """ fix proxy headers """ + def __init__(self): + self.app = None + def __call__(self, environ, start_response): prefix = environ.get('HTTP_X_FORWARDED_PREFIX', '') if prefix: @@ -56,3 +79,384 @@ proxy = PrefixMiddleware() # Data migrate migrate = flask_migrate.Migrate() 
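
The session-store code added below builds on a small key/value-store interface (get/put/delete/list). A rough usage sketch of that interface, assuming the classes defined below and an illustrative Redis URL:

    import redis

    store = RedisStore(redis.Redis.from_url('redis://localhost:6379/2'))
    store.put(b'session-key', b'pickled-session-data', ttl=3600)   # expires after one hour
    assert store.get(b'session-key') == b'pickled-session-data'
    store.delete(b'session-key')

    fallback = DictStore()  # in-memory variant: has_ttl is False, entries never expire
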
+ + +# session store (inspired by https://github.com/mbr/flask-kvsession) +class RedisStore: + """ Stores session data in a redis db. """ + + has_ttl = True + + def __init__(self, redisstore): + self.redis = redisstore + + def get(self, key): + """ load item from store. """ + value = self.redis.get(key) + if value is None: + raise KeyError(key) + return value + + def put(self, key, value, ttl=None): + """ save item to store. """ + if ttl: + self.redis.setex(key, int(ttl), value) + else: + self.redis.set(key, value) + + def delete(self, key): + """ delete item from store. """ + self.redis.delete(key) + + def list(self, prefix=None): + """ return list of keys starting with prefix """ + if prefix: + prefix += b'*' + return list(self.redis.scan_iter(match=prefix)) + +class DictStore: + """ Stores session data in a python dict. """ + + has_ttl = False + + def __init__(self): + self.dict = {} + + def get(self, key): + """ load item from store. """ + return self.dict[key] + + def put(self, key, value, ttl_secs=None): + """ save item to store. """ + self.dict[key] = value + + def delete(self, key): + """ delete item from store. """ + try: + del self.dict[key] + except KeyError: + pass + + def list(self, prefix=None): + """ return list of keys starting with prefix """ + if prefix is None: + return list(self.dict.keys()) + return [key for key in self.dict if key.startswith(prefix)] + +class MailuSession(CallbackDict, SessionMixin): + """ Custom flask session storage. """ + + # default modified to false + modified = False + + def __init__(self, key=None, app=None): + + self.app = app or flask.current_app + + initial = None + + key = want_bytes(key) + if parsed := self.app.session_config.parse_key(key, self.app): + try: + initial = pickle.loads(app.session_store.get(key)) + except (KeyError, EOFError, pickle.UnpicklingError): + # either the cookie was manipulated or we did not find the + # session in the backend or the pickled data is invalid. + # => start new session + pass + else: + (self._uid, self._sid, self._created) = parsed + self._key = key + + if initial is None: + # start new session + self.new = True + self._uid = None + self._sid = None + self._created = self.app.session_config.gen_created() + self._key = None + + def _on_update(obj): + obj.modified = True + + CallbackDict.__init__(self, initial, _on_update) + + @property + def saved(self): + """ this reflects if the session was saved. """ + return self._key is not None + + @property + def sid(self): + """ this reflects the session's id. """ + if self._sid is None or self._uid is None or self._created is None: + return None + return b''.join([self._uid, self._sid, self._created]) + + def destroy(self): + """ destroy session for security reasons. """ + + self.delete() + + self._uid = None + self._sid = None + self._created = None + + self.clear() + + self.modified = True + self.new = False + + def regenerate(self): + """ generate new id for session to avoid `session fixation`. """ + + self.delete() + + self._sid = None + self._created = self.app.session_config.gen_created() + + self.modified = True + + def delete(self): + """ Delete stored session. """ + if self.saved: + self.app.session_store.delete(self._key) + self._key = None + + def save(self): + """ Save session to store. 
""" + + set_cookie = False + + # set uid from dict data + if self._uid is None: + self._uid = self.app.session_config.gen_uid(self.get('user_id', '')) + + # create new session id for new or regenerated sessions and force setting the cookie + if self._sid is None: + self._sid = self.app.session_config.gen_sid() + set_cookie = True + + # get new session key + key = self.sid + + # delete old session if key has changed + if key != self._key: + self.delete() + + # remember time to refresh + self['_refresh'] = int(time.time()) + self.app.permanent_session_lifetime.total_seconds()/2 + + # save session + self.app.session_store.put( + key, + pickle.dumps(dict(self)), + self.app.permanent_session_lifetime.total_seconds() + ) + + self._key = key + + self.new = False + self.modified = False + + return set_cookie + + def needs_refresh(self): + """ Checks if server side session needs to be refreshed. """ + + return int(time.time()) > self.get('_refresh', 0) + +class MailuSessionConfig: + """ Stores sessions crypto config """ + + # default size of session key parts + uid_bits = 64 # default if SESSION_KEY_BITS is not set in config + sid_bits = 128 # for now. must be multiple of 8! + time_bits = 32 # for now. must be multiple of 8! + + def __init__(self, app=None): + + if app is None: + app = flask.current_app + + bits = app.config.get('SESSION_KEY_BITS', self.uid_bits) + if not 64 <= bits <= 256: + raise ValueError('SESSION_KEY_BITS must be between 64 and 256!') + + uid_bytes = bits//8 + (bits%8>0) + sid_bytes = self.sid_bits//8 + + key = want_bytes(app.secret_key) + + self._hmac = hmac.new(hmac.digest(key, b'SESSION_UID_HASH', digest='sha256'), digestmod='sha256') + self._uid_len = uid_bytes + self._uid_b64 = len(self._encode(bytes(uid_bytes))) + self._sid_len = sid_bytes + self._sid_b64 = len(self._encode(bytes(sid_bytes))) + self._key_min = self._uid_b64 + self._sid_b64 + self._key_max = self._key_min + len(self._encode(bytes(self.time_bits//8))) + + def gen_sid(self): + """ Generate random session id. """ + return self._encode(secrets.token_bytes(self._sid_len)) + + def gen_uid(self, uid): + """ Generate hashed user id part of session key. """ + _hmac = self._hmac.copy() + _hmac.update(want_bytes(uid)) + return self._encode(_hmac.digest()[:self._uid_len]) + + def gen_created(self, now=None): + """ Generate base64 representation of creation time. """ + return self._encode(int(now or time.time()).to_bytes(8, byteorder='big').lstrip(b'\0')) + + def parse_key(self, key, app=None, validate=False, now=None): + """ Split key into sid, uid and creation time. 
""" + + if not (isinstance(key, bytes) and self._key_min <= len(key) <= self._key_max): + return None + + uid = key[:self._uid_b64] + sid = key[self._uid_b64:self._key_min] + crt = key[self._key_min:] + + # validate if parts are decodeable + created = self._decode(crt) + if created is None or self._decode(uid) is None or self._decode(sid) is None: + return None + + # validate creation time when requested or store does not support ttl + if validate or not app.session_store.has_ttl: + if now is None: + now = int(time.time()) + created = int.from_bytes(created, byteorder='big') + if not created < now < created + app.permanent_session_lifetime.total_seconds(): + return None + + return (uid, sid, crt) + + def _encode(self, value): + return secrets.base64.urlsafe_b64encode(value).rstrip(b'=') + + def _decode(self, value): + try: + return secrets.base64.urlsafe_b64decode(value + b'='*(4-len(value)%4)) + except secrets.binascii.Error: + return None + +class MailuSessionInterface(SessionInterface): + """ Custom flask session interface. """ + + def open_session(self, app, request): + """ Load or create session. """ + return MailuSession(request.cookies.get(app.config['SESSION_COOKIE_NAME'], None), app) + + def save_session(self, app, session, response): + """ Save modified session. """ + + # If the session is modified to be empty, remove the cookie. + # If the session is empty, return without setting the cookie. + if not session: + if session.modified: + session.delete() + response.delete_cookie( + app.session_cookie_name, + domain=self.get_cookie_domain(app), + path=self.get_cookie_path(app), + ) + return + + # Add a "Vary: Cookie" header if the session was accessed + if session.accessed: + response.vary.add('Cookie') + + set_cookie = session.permanent and app.config['SESSION_REFRESH_EACH_REQUEST'] + need_refresh = session.needs_refresh() + + # save modified session or refresh unmodified session + if session.modified or need_refresh: + set_cookie |= session.save() + + # set cookie on refreshed permanent sessions + if need_refresh and session.permanent: + set_cookie = True + + # set or update cookie if necessary + if set_cookie: + response.set_cookie( + app.session_cookie_name, + session.sid, + expires=self.get_expiration_time(app, session), + httponly=self.get_cookie_httponly(app), + domain=self.get_cookie_domain(app), + path=self.get_cookie_path(app), + secure=self.get_cookie_secure(app), + samesite=self.get_cookie_samesite(app) + ) + +class MailuSessionExtension: + """ Server side session handling """ + + @staticmethod + def cleanup_sessions(app=None): + """ Remove invalid or expired sessions. """ + + app = app or flask.current_app + now = int(time.time()) + + count = 0 + for key in app.session_store.list(): + if not app.session_config.parse_key(key, app, validate=True, now=now): + app.session_store.delete(key) + count += 1 + + return count + + @staticmethod + def prune_sessions(uid=None, keep=None, app=None): + """ Remove sessions + uid: remove all sessions (NONE) or sessions belonging to a specific user + keep: keep listed sessions + """ + + keep = keep or set() + app = app or flask.current_app + + prefix = None if uid is None else app.session_config.gen_uid(uid) + + count = 0 + for key in app.session_store.list(prefix): + if key not in keep: + app.session_store.delete(key) + count += 1 + + return count + + def init_app(self, app): + """ Replace session management of application. 
""" + + if app.config.get('MEMORY_SESSIONS'): + # in-memory session store for use in development + app.session_store = DictStore() + + else: + # redis-based session store for use in production + app.session_store = RedisStore( + redis.StrictRedis().from_url(app.config['SESSION_STORAGE_URL']) + ) + + # clean expired sessions oonce on first use in case lifetime was changed + def cleaner(): + with cleaned.get_lock(): + if not cleaned.value: + cleaned.value = True + flask.current_app.logger.error('cleaning') + MailuSessionExtension.cleanup_sessions(app) + + app.before_first_request(cleaner) + + app.session_config = MailuSessionConfig(app) + app.session_interface = MailuSessionInterface() + +cleaned = Value('i', False) +session = MailuSessionExtension() diff --git a/core/admin/package.json b/core/admin/package.json index 72e612bc..b11672c7 100644 --- a/core/admin/package.json +++ b/core/admin/package.json @@ -2,30 +2,31 @@ "name": "mailu", "version": "1.0.0", "description": "Mailu admin assets", - "main": "assest/index.js", + "main": "assets/index.js", + "directories": { + "lib": "lib" + }, "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, "author": "", "license": "ISC", "dependencies": { - "@babel/core": "^7.4.4", - "@babel/preset-env": "^7.4.4", + "@babel/core": "^7.14.6", + "@babel/preset-env": "^7.14.7", "admin-lte": "^3.1.0", - "babel-loader": "^8.0.5", + "babel-loader": "^8.0.6", "css-loader": "^2.1.1", "expose-loader": "^0.7.5", - "file-loader": "^3.0.1", - "jQuery": "^1.7.4", - "less": "^3.9.0", + "jquery": "^3.6.0", + "less": "^3.13.1", "less-loader": "^5.0.0", - "mini-css-extract-plugin": "^0.6.0", - "node-sass": "^4.12.0", - "popper.js": "^1.15.0", - "sass-loader": "^7.1.0", - "style-loader": "^0.23.1", - "url-loader": "^1.1.2", - "webpack": "^4.30.0", - "webpack-cli": "^3.3.2" + "mini-css-extract-plugin": "^1.2.1", + "node-sass": "^4.13.1", + "sass-loader": "^7.3.1", + "select2": "^4.0.13", + "url-loader": "^2.3.0", + "webpack": "^4.33.0", + "webpack-cli": "^3.3.12" } } diff --git a/core/admin/requirements-prod.txt b/core/admin/requirements-prod.txt index 54cf9a14..88ff2981 100644 --- a/core/admin/requirements-prod.txt +++ b/core/admin/requirements-prod.txt @@ -5,7 +5,7 @@ bcrypt==3.1.6 blinker==1.4 cffi==1.12.3 Click==7.0 -cryptography==3.2 +cryptography==3.4.7 decorator==4.4.0 dnspython==1.16.0 dominate==2.3.5 @@ -13,9 +13,9 @@ Flask==1.0.2 Flask-Babel==0.12.2 Flask-Bootstrap==3.3.7.1 Flask-DebugToolbar==0.10.1 -Flask-KVSession==0.6.2 Flask-Limiter==1.0.1 Flask-Login==0.4.1 +flask-marshmallow==0.14.0 Flask-Migrate==2.4.0 Flask-Script==2.0.6 Flask-SQLAlchemy==2.4.0 @@ -25,19 +25,22 @@ idna==2.8 infinity==1.4 intervals==0.8.1 itsdangerous==1.1.0 -Jinja2==2.10.1 +Jinja2==2.11.3 limits==1.3 Mako==1.0.9 MarkupSafe==1.1.1 mysqlclient==1.4.2.post1 +marshmallow==3.10.0 +marshmallow-sqlalchemy==0.24.1 passlib==1.7.4 psycopg2==2.8.2 pycparser==2.19 -pyOpenSSL==19.0.0 +Pygments==2.8.1 +pyOpenSSL==20.0.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2019.1 -PyYAML==5.1 +PyYAML==5.4.1 redis==3.2.1 #alpine3:12 provides six==1.15.0 #six==1.12.0 diff --git a/core/admin/requirements.txt b/core/admin/requirements.txt index abb37234..e1de3b01 100644 --- a/core/admin/requirements.txt +++ b/core/admin/requirements.txt @@ -3,7 +3,6 @@ Flask-Login Flask-SQLAlchemy Flask-bootstrap Flask-Babel -Flask-KVSession Flask-migrate Flask-script Flask-wtf @@ -17,6 +16,7 @@ gunicorn tabulate PyYAML PyOpenSSL +Pygments dnspython bcrypt tenacity @@ -24,3 +24,6 @@ mysqlclient psycopg2 idna 
srslib +marshmallow +flask-marshmallow +marshmallow-sqlalchemy diff --git a/core/admin/start.py b/core/admin/start.py index 2c925e01..0eff3bbe 100755 --- a/core/admin/start.py +++ b/core/admin/start.py @@ -19,7 +19,8 @@ if account is not None and domain is not None and password is not None: os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode)) start_command="".join([ - "gunicorn -w 4 -b :80 ", + "gunicorn --threads ", str(os.cpu_count()), + " -b :80 ", "--access-logfile - " if (log.root.level<=log.INFO) else "", "--error-logfile - ", "--preload ", diff --git a/core/dovecot/Dockerfile b/core/dovecot/Dockerfile index e1c20eff..22145bde 100644 --- a/core/dovecot/Dockerfile +++ b/core/dovecot/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.13 +ARG DISTRO=alpine:3.14 FROM $DISTRO as builder WORKDIR /tmp RUN apk add git build-base automake autoconf libtool dovecot-dev xapian-core-dev icu-dev diff --git a/core/dovecot/conf/dovecot.conf b/core/dovecot/conf/dovecot.conf index 81811cdb..6b97a086 100644 --- a/core/dovecot/conf/dovecot.conf +++ b/core/dovecot/conf/dovecot.conf @@ -21,7 +21,10 @@ mail_access_groups = mail maildir_stat_dirs = yes mailbox_list_index = yes mail_vsize_bg_after_count = 100 -mail_plugins = $mail_plugins quota quota_clone zlib{{ ' ' }} +mail_plugins = $mail_plugins quota quota_clone{{ ' ' }} + {%- if COMPRESSION -%} + zlib{{ ' ' }} + {%- endif %} {%- if (FULL_TEXT_SEARCH or '').lower() not in ['off', 'false', '0'] -%} fts fts_xapian {%- endif %} @@ -50,7 +53,7 @@ plugin { fts_autoindex_exclude = \Trash {% endif %} - {% if COMPRESSION in [ 'gz', 'bz2' ] %} + {% if COMPRESSION in [ 'gz', 'bz2', 'lz4', 'zstd' ] %} zlib_save = {{ COMPRESSION }} {% endif %} diff --git a/core/nginx/Dockerfile b/core/nginx/Dockerfile index 2bc1cfd1..1906ed31 100644 --- a/core/nginx/Dockerfile +++ b/core/nginx/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add --no-cache \ diff --git a/core/nginx/conf/nginx.conf b/core/nginx/conf/nginx.conf index 7a212ebe..5158ca5c 100644 --- a/core/nginx/conf/nginx.conf +++ b/core/nginx/conf/nginx.conf @@ -117,7 +117,7 @@ http { include /overrides/*.conf; # Actual logic - {% if WEB_WEBMAIL != '/' %} + {% if WEB_WEBMAIL != '/' and WEBROOT_REDIRECT != 'none' %} location / { {% if WEBROOT_REDIRECT %} try_files $uri {{ WEBROOT_REDIRECT }}; @@ -136,9 +136,33 @@ http { include /etc/nginx/proxy.conf; client_max_body_size {{ MESSAGE_SIZE_LIMIT|int + 8388608 }}; proxy_pass http://$webmail; + {% if ADMIN == 'true' %} + auth_request /internal/auth/user; + error_page 403 @webmail_login; } - {% endif %} + location {{ WEB_WEBMAIL }}/sso.php { + {% if WEB_WEBMAIL != '/' %} + rewrite ^({{ WEB_WEBMAIL }})$ $1/ permanent; + rewrite ^{{ WEB_WEBMAIL }}/(.*) /$1 break; + {% endif %} + include /etc/nginx/proxy.conf; + client_max_body_size {{ MESSAGE_SIZE_LIMIT|int + 8388608 }}; + auth_request /internal/auth/user; + auth_request_set $user $upstream_http_x_user; + auth_request_set $token $upstream_http_x_user_token; + proxy_set_header X-Remote-User $user; + proxy_set_header X-Remote-User-Token $token; + proxy_pass http://$webmail; + error_page 403 @webmail_login; + } + + location @webmail_login { + return 302 {{ WEB_ADMIN }}/ui/login?next=ui.webmail; + } + {% else %} + } + {% endif %}{% endif %} {% if ADMIN == 'true' %} location {{ WEB_ADMIN }} { return 301 {{ WEB_ADMIN }}/ui; diff --git a/core/none/Dockerfile b/core/none/Dockerfile index 70041dac..51b8d1c5 100644 --- 
a/core/none/Dockerfile +++ b/core/none/Dockerfile @@ -1,6 +1,6 @@ # This is an idle image to dynamically replace any component if disabled. -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO CMD sleep 1000000d diff --git a/core/postfix/Dockerfile b/core/postfix/Dockerfile index af29bf91..062155c1 100644 --- a/core/postfix/Dockerfile +++ b/core/postfix/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add --no-cache \ @@ -12,7 +12,7 @@ RUN pip3 install socrate==0.2.0 RUN pip3 install "podop>0.2.5" # Image specific layers under this line -RUN apk add --no-cache postfix postfix-pcre cyrus-sasl-plain cyrus-sasl-login +RUN apk add --no-cache postfix postfix-pcre cyrus-sasl-login COPY conf /conf COPY start.py /start.py diff --git a/core/postfix/conf/main.cf b/core/postfix/conf/main.cf index 8f35f609..9cd4010e 100644 --- a/core/postfix/conf/main.cf +++ b/core/postfix/conf/main.cf @@ -32,7 +32,7 @@ mydestination = relayhost = {{ RELAYHOST }} {% if RELAYUSER %} smtp_sasl_auth_enable = yes -smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd +smtp_sasl_password_maps = lmdb:/etc/postfix/sasl_passwd smtp_sasl_security_options = noanonymous {% endif %} @@ -58,7 +58,7 @@ tls_ssl_options = NO_COMPRESSION smtp_tls_security_level = {{ OUTBOUND_TLS_LEVEL|default('may') }} smtp_tls_mandatory_protocols = !SSLv2, !SSLv3 smtp_tls_protocols =!SSLv2,!SSLv3 -smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache +smtp_tls_session_cache_database = lmdb:${data_directory}/smtp_scache ############### # Virtual diff --git a/core/postfix/conf/outclean_header_filter.cf b/core/postfix/conf/outclean_header_filter.cf index 03e33ee9..7e0e92d3 100644 --- a/core/postfix/conf/outclean_header_filter.cf +++ b/core/postfix/conf/outclean_header_filter.cf @@ -4,7 +4,7 @@ # Remove the first line of the Received: header. Note that we cannot fully remove the Received: header # because OpenDKIM requires that a header be present when signing outbound mail. The first line is # where the user's home IP address would be. -/^\s*Received:[^\n]*(.*)/ REPLACE Received: from authenticated-user (PRIMARY_HOSTNAME [PUBLIC_IP])$1 +/^\s*Received:[^\n]*(.*)/ REPLACE Received: from authenticated-user ({{OUTCLEAN}} [{{OUTCLEAN_ADDRESS}}])$1 # Remove other typically private information. /^\s*User-Agent:/ IGNORE diff --git a/core/postfix/conf/sasl_passwd b/core/postfix/conf/sasl_passwd index e19d0657..1e32322a 100644 --- a/core/postfix/conf/sasl_passwd +++ b/core/postfix/conf/sasl_passwd @@ -1 +1,2 @@ -{{ RELAYHOST }} {{ RELAYUSER }}:{{ RELAYPASSWORD }} \ No newline at end of file +{{ RELAYHOST }} {{ RELAYUSER }}:{{ RELAYPASSWORD }} + diff --git a/core/postfix/start.py b/core/postfix/start.py index b68303e1..e0c781b7 100755 --- a/core/postfix/start.py +++ b/core/postfix/start.py @@ -8,12 +8,13 @@ import logging as log import sys from podop import run_server +from pwd import getpwnam from socrate import system, conf log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) def start_podop(): - os.setuid(100) + os.setuid(getpwnam('postfix').pw_uid) url = "http://" + os.environ["ADMIN_ADDRESS"] + "/internal/postfix/" # TODO: Remove verbosity setting from Podop? 
run_server(0, "postfix", "/tmp/podop.socket", [ @@ -36,6 +37,15 @@ os.environ["FRONT_ADDRESS"] = system.get_host_address_from_environment("FRONT", os.environ["ADMIN_ADDRESS"] = system.get_host_address_from_environment("ADMIN", "admin") os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment("ANTISPAM_MILTER", "antispam:11332") os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525") +os.environ["OUTCLEAN"] = os.environ["HOSTNAMES"].split(",")[0] +try: + _to_lookup = os.environ["OUTCLEAN"] + # Ensure we lookup a FQDN: @see #1884 + if not _to_lookup.endswith('.'): + _to_lookup += '.' + os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(_to_lookup) +except: + os.environ["OUTCLEAN_ADDRESS"] = "10.10.10.10" for postfix_file in glob.glob("/conf/*.cf"): conf.jinja(postfix_file, os.environ, os.path.join("/etc/postfix", os.path.basename(postfix_file))) diff --git a/core/rspamd/Dockerfile b/core/rspamd/Dockerfile index acaf074e..6706ef14 100644 --- a/core/rspamd/Dockerfile +++ b/core/rspamd/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add --no-cache \ diff --git a/core/rspamd/start.py b/core/rspamd/start.py index bde708f2..e2e72bcb 100755 --- a/core/rspamd/start.py +++ b/core/rspamd/start.py @@ -10,7 +10,6 @@ log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) # Actual startup script -os.environ["FRONT_ADDRESS"] = system.get_host_address_from_environment("FRONT", "front") os.environ["REDIS_ADDRESS"] = system.get_host_address_from_environment("REDIS", "redis") if os.environ.get("ANTIVIRUS") == 'clamav': diff --git a/docs/Dockerfile b/docs/Dockerfile index 70c9c3c4..289697da 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -1,20 +1,28 @@ -ARG DISTRO=alpine:3.8 -FROM $DISTRO - -COPY requirements.txt /requirements.txt +# Convert .rst files to .html in temporary build container +FROM python:3.8-alpine3.14 AS build ARG version=master ENV VERSION=$version -RUN apk add --no-cache nginx curl python3 \ - && pip3 install -r /requirements.txt \ - && mkdir /run/nginx - -COPY ./nginx.conf /etc/nginx/conf.d/default.conf +COPY requirements.txt /requirements.txt COPY . /docs -RUN mkdir -p /build/$VERSION \ - && sphinx-build -W /docs /build/$VERSION +RUN apk add --no-cache --virtual .build-deps \ + gcc musl-dev \ + && pip3 install -r /requirements.txt \ + && mkdir -p /build/$VERSION \ + && sphinx-build -W /docs /build/$VERSION \ + && apk del .build-deps + + +# Build nginx deployment image including generated html +FROM nginx:1.21-alpine + +ARG version=master +ENV VERSION=$version + +COPY ./nginx.conf /etc/nginx/conf.d/default.conf +COPY --from=build /build/$VERSION /build/$VERSION EXPOSE 80/tcp diff --git a/docs/cli.rst b/docs/cli.rst index 8e94026b..957e47e4 100644 --- a/docs/cli.rst +++ b/docs/cli.rst @@ -11,6 +11,8 @@ Managing users and aliases can be done from CLI using commands: * user-import * user-delete * config-update +* config-export +* config-import alias ----- @@ -62,7 +64,7 @@ primary difference with simple `user` command is that password is being imported docker-compose run --rm admin flask mailu user-import myuser example.net '$6$51ebe0cb9f1dab48effa2a0ad8660cb489b445936b9ffd812a0b8f46bca66dd549fea530ce' 'SHA512-CRYPT' user-delete ------------- +----------- .. 
code-block:: bash @@ -94,7 +96,7 @@ where mail-config.yml looks like: without ``--delete-object`` option config-update will only add/update new values but will *not* remove any entries missing in provided YAML input. Users ------ +^^^^^ following are additional parameters that could be defined for users: @@ -113,8 +115,197 @@ following are additional parameters that could be defined for users: * spam_threshold Alias ------ +^^^^^ additional fields: * wildcard + +config-export +------------- + +The purpose of this command is to export the complete configuration in YAML or JSON format. + +.. code-block:: bash + + $ docker-compose exec admin flask mailu config-export --help + + Usage: flask mailu config-export [OPTIONS] [FILTER]... + + Export configuration as YAML or JSON to stdout or file + + Options: + -f, --full Include attributes with default value. + -s, --secrets Include secret attributes (dkim-key, passwords). + -d, --dns Include dns records. + -c, --color Force colorized output. + -o, --output-file FILENAME Save configuration to file. + -j, --json Export configuration in json format. + -?, -h, --help Show this message and exit. + +Only non-default attributes are exported. If you want to export all attributes use ``--full``. +If you want to export plain-text secrets (dkim-keys, passwords) you have to add the ``--secrets`` option. +To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option. + +By default all configuration objects are exported (domain, user, alias, relay). You can specify +filters to export only some objects or attributes (try: ``user`` or ``domain.name``). +Attributes explicitly specified in filters are automatically exported: there is no need to add ``--secrets`` or ``--full``. + +.. code-block:: bash + + $ docker-compose exec admin flask mailu config-export --output mail-config.yml + + $ docker-compose exec admin flask mailu config-export domain.dns_mx domain.dns_spf + + $ docker-compose exec admin flask mailu config-export user.spam_threshold + +config-import +------------- + +This command imports configuration data from an external YAML or JSON source. + +.. code-block:: bash + + $ docker-compose exec admin flask mailu config-import --help + + Usage: flask mailu config-import [OPTIONS] [FILENAME|-] + + Import configuration as YAML or JSON from stdin or file + + Options: + -v, --verbose Increase verbosity. + -s, --secrets Show secret attributes in messages. + -q, --quiet Quiet mode - only show errors. + -c, --color Force colorized output. + -u, --update Update mode - merge input with existing config. + -n, --dry-run Perform a trial run with no changes made. + -?, -h, --help Show this message and exit. + +The current version of docker-compose exec does not pass stdin correctly, so you have to user docker exec instead: + +.. code-block:: bash + + docker exec -i $(docker-compose ps -q admin) flask mailu config-import -nv < mail-config.yml + +mail-config.yml contains the configuration and looks like this: + +.. code-block:: yaml + + domain: + - name: example.com + alternatives: + - alternative.example.com + + user: + - email: foo@example.com + password_hash: '$2b$12$...' + hash_scheme: MD5-CRYPT + + alias: + - email: alias1@example.com + destination: + - user1@example.com + - user2@example.com + + relay: + - name: relay.example.com + comment: test + smtp: mx.example.com + +config-import shows the number of created/modified/deleted objects after import. +To suppress all messages except error messages use ``--quiet``. 
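If you want a quick sanity check of a configuration file before importing it, the YAML can be loaded with PyYAML (already an admin dependency) and inspected for the expected top-level sections. This is only an illustrative pre-flight sketch, not part of Mailu:

.. code-block:: python

    import sys
    import yaml  # PyYAML

    EXPECTED = {'domain', 'user', 'alias', 'relay'}

    def preflight(path):
        """ Parse a config file and report its top-level sections. """
        with open(path) as handle:
            config = yaml.safe_load(handle) or {}
        for section in sorted(set(config) - EXPECTED):
            print('unexpected section:', section)
        for section in sorted(EXPECTED & set(config)):
            print(section, 'entries:', len(config[section]))

    if __name__ == '__main__':
        preflight(sys.argv[1] if len(sys.argv) > 1 else 'mail-config.yml')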
+By adding the ``--verbose`` switch the import gets more detailed and shows exactly what attributes changed. +In all log messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to log secrets. +If you want to test what would be done when importing without committing any changes, use ``--dry-run``. + +By default config-import replaces the whole configuration. ``--update`` allows to modify the existing configuration instead. +New elements will be added and existing elements will be modified. +It is possible to delete a single element or prune all elements from lists and associative arrays using a special notation: + ++-----------------------------+------------------+--------------------------+ +| Delete what? | notation | example | ++=============================+==================+==========================+ +| specific array object | ``- -key: id`` | ``- -name: example.com`` | ++-----------------------------+------------------+--------------------------+ +| specific list item | ``- -id`` | ``- -user1@example.com`` | ++-----------------------------+------------------+--------------------------+ +| all remaining array objects | ``- -key: null`` | ``- -email: null`` | ++-----------------------------+------------------+--------------------------+ +| all remaining list items | ``- -prune-`` | ``- -prune-`` | ++-----------------------------+------------------+--------------------------+ + +The ``-key: null`` notation can also be used to reset an attribute to its default. +To reset *spam_threshold* to it's default *80* use ``-spam_threshold: null``. + +A new dkim key can be generated when adding or modifying a domain, by using the special value +``dkim_key: -generate-``. + +This is a complete YAML template with all additional parameters that can be defined: + +.. code-block:: yaml + + domain: + - name: example.com + alternatives: + - alternative.tld + comment: '' + dkim_key: '' + max_aliases: -1 + max_quota_bytes: 0 + max_users: -1 + signup_enabled: false + + user: + - email: postmaster@example.com + comment: '' + displayed_name: 'Postmaster' + enable_imap: true + enable_pop: false + enabled: true + fetches: + - id: 1 + comment: 'test fetch' + error: null + host: other.example.com + keep: true + last_check: '2020-12-29T17:09:48.200179' + password: 'secret' + hash_password: true + port: 993 + protocol: imap + tls: true + username: fetch-user + forward_destination: + - address@remote.example.com + forward_enabled: true + forward_keep: true + global_admin: true + manager_of: + - example.com + password: '$2b$12$...' + hash_password: true + quota_bytes: 1000000000 + reply_body: '' + reply_enabled: false + reply_enddate: '2999-12-31' + reply_startdate: '1900-01-01' + reply_subject: '' + spam_enabled: true + spam_threshold: 80 + tokens: + - id: 1 + comment: email-client + ip: 192.168.1.1 + password: '$5$rounds=1$...' + + aliases: + - email: email@example.com + comment: '' + destination: + - address@example.com + wildcard: false + + relay: + - name: relay.example.com + comment: '' + smtp: mx.example.com diff --git a/docs/compose/.env b/docs/compose/.env index 432b20b0..4df73080 100644 --- a/docs/compose/.env +++ b/docs/compose/.env @@ -97,7 +97,7 @@ WELCOME_SUBJECT=Welcome to your new email account WELCOME_BODY=Welcome to your new email account, if you can read this, then it is configured properly! 
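Coming back to the config-import deletion notation shown above: after parsing it is ordinary YAML, and the leading minus inside a key or list item simply becomes part of the value, which config-import presumably uses as the deletion marker. A quick PyYAML illustration (not Mailu code):

.. code-block:: python

    import yaml  # PyYAML

    snippet = """
    user:
      - -email: null          # delete all remaining user objects
    alias:
      - email: a@example.com
        destination:
          - -old@example.com   # delete a single list item
          - new@example.com
    """
    print(yaml.safe_load(snippet))
    # -> {'user': [{'-email': None}], 'alias': [{'email': 'a@example.com',
    #    'destination': ['-old@example.com', 'new@example.com']}]}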
# Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/docs/conf.py b/docs/conf.py index 6b19f967..db7008b3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,7 +4,7 @@ import os -extensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode'] +extensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode', 'sphinx_rtd_theme'] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' @@ -36,7 +36,7 @@ html_context = { 'github_user': 'mailu', 'github_repo': 'mailu', 'github_version': version, - 'stable_version': '1.7', + 'stable_version': '1.8', 'versions': [ ('1.5', '/1.5/'), ('1.6', '/1.6/'), diff --git a/docs/configuration.rst b/docs/configuration.rst index e08675a8..21effc52 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -41,7 +41,7 @@ The ``AUTH_RATELIMIT`` holds a security setting for fighting attackers that try to guess user passwords. The value is the limit of failed authentication attempts that a single IP address can perform against IMAP, POP and SMTP authentication endpoints. -If ``AUTH_RATELIMIT_SUBNET`` is ``True`` (which is the default), the ``AUTH_RATELIMIT`` +If ``AUTH_RATELIMIT_SUBNET`` is ``True`` (default: False), the ``AUTH_RATELIMIT`` rules does also apply to auth requests coming from ``SUBNET``, especially for the webmail. If you disable this, ensure that the rate limit on the webmail is enforced in a different way (e.g. roundcube plug-in), otherwise an attacker can simply bypass the limit using webmail. @@ -99,14 +99,19 @@ the localpart for DMARC rua and ruf email addresses. Full-text search is enabled for IMAP is enabled by default. This feature can be disabled (e.g. for performance reasons) by setting the optional variable ``FULL_TEXT_SEARCH`` to ``off``. +.. _web_settings: + Web settings ------------ -The ``WEB_ADMIN`` contains the path to the main admin interface, while -``WEB_WEBMAIL`` contains the path to the Web email client. -The ``WEBROOT_REDIRECT`` redirects all non-found queries to the set path. -An empty ``WEBROOT_REDIRECT`` value disables redirecting and enables classic -behavior of a 404 result when not found. +- ``WEB_ADMIN`` contains the path to the main admin interface + +- ``WEB_WEBMAIL`` contains the path to the Web email client. + +- ``WEBROOT_REDIRECT`` redirects all non-found queries to the set path. + An empty ``WEBROOT_REDIRECT`` value disables redirecting and enables classic behavior of a 404 result when not found. + Alternatively, ``WEBROOT_REDIRECT`` can be set to ``none`` if you are using an Nginx override for ``location /``. + All three options need a leading slash (``/``) to work. .. note:: ``WEBROOT_REDIRECT`` has to point to a valid path on the webserver. @@ -195,4 +200,24 @@ resolved. This can be used to rely on DNS based service discovery with changing When using ``*_ADDRESS``, the hostnames must be full-qualified hostnames. Otherwise nginx will not be able to resolve the hostnames. +Database settings +----------------- + +The admin service stores configurations in a database. + +- ``DB_FLAVOR``: the database type for mailu admin service. (``sqlite``, ``postgresql``, ``mysql``) +- ``DB_HOST``: the database host for mailu admin service. (when not ``sqlite``) +- ``DB_PORT``: the database port for mailu admin service. (when not ``sqlite``) +- ``DB_PW``: the database password for mailu admin service. 
(when not ``sqlite``) +- ``DB_USER``: the database user for mailu admin service. (when not ``sqlite``) +- ``DB_NAME``: the database name for mailu admin service. (when not ``sqlite``) + +The roundcube service stores configurations in a database. + +- ``ROUNDCUBE_DB_FLAVOR``: the database type for roundcube service. (``sqlite``, ``postgresql``, ``mysql``) +- ``ROUNDCUBE_DB_HOST``: the database host for roundcube service. (when not ``sqlite``) +- ``ROUNDCUBE_DB_PORT``: the database port for roundcube service. (when not ``sqlite``) +- ``ROUNDCUBE_DB_PW``: the database password for roundcube service. (when not ``sqlite``) +- ``ROUNDCUBE_DB_USER``: the database user for roundcube service. (when not ``sqlite``) +- ``ROUNDCUBE_DB_NAME``: the database name for roundcube service. (when not ``sqlite``) diff --git a/docs/contributors/environment.rst b/docs/contributors/environment.rst index 26c04d0b..cef71c6c 100644 --- a/docs/contributors/environment.rst +++ b/docs/contributors/environment.rst @@ -178,9 +178,9 @@ In the case of a PR from a fellow team member, a single review is enough to initiate merging. In all other cases, two approving reviews are required. There is also a possibility to set the ``review/need2`` to require a second review. -After Travis successfully tests the PR and the required amount of reviews are acquired, +After the Github Action workflow successfully tests the PR and the required amount of reviews are acquired, Mergify will trigger with a ``bors r+`` command. Bors will batch any approved PR's, -merges them with master in a staging branch where Travis builds and tests the result. +merges them with master in a staging branch where the Github Action workflow builds and tests the result. After a successful test, the actual master gets fast-forwarded to that point. System requirements @@ -201,16 +201,16 @@ us on `Matrix`_. Test images ``````````` -All PR's automatically get build by Travis, controlled by `bors-ng`_. +All PR's automatically get build by a Github Action workflow, controlled by `bors-ng`_. Some primitive auto testing is done. The resulting images get uploaded to Docker hub, under the -tag name ``mailutest/:pr-``. +tag name ``mailuci/:pr-``. For example, to test PR #500 against master, reviewers can use: .. code-block:: bash - export DOCKER_ORG="mailutest" + export DOCKER_ORG="mailuci" export MAILU_VERSION="pr-500" docker-compose pull docker-compose up -d @@ -232,8 +232,8 @@ after Bors confirms a successful build. When bors try fails ``````````````````` -Sometimes Travis fails when another PR triggers a ``bors try`` command, -before Travis cloned the git repository. +Sometimes the Github Action workflow fails when another PR triggers a ``bors try`` command, +before the Github Action workflow cloned the git repository. Inspect the build log in the link provided by *bors-ng* to find out the cause. If you see something like the following error on top of the logs, feel free to write a comment with ``bors retry``. diff --git a/docs/contributors/workflow.rst b/docs/contributors/workflow.rst index 16dcef52..31ffd793 100644 --- a/docs/contributors/workflow.rst +++ b/docs/contributors/workflow.rst @@ -41,7 +41,7 @@ PR Workflow ----------- All pull requests have to be against the main ``master`` branch. -The PR gets build by Travis and some primitive auto-testing is done. +The PR gets build by a Github Action workflow and some primitive auto-testing is done. Test images get uploaded to a separate section in Docker hub. 
Reviewers will check the PR and test the resulting images. See the :ref:`testing` section for more info. diff --git a/docs/database.rst b/docs/database.rst index b2526d6f..0f8318d5 100644 --- a/docs/database.rst +++ b/docs/database.rst @@ -8,7 +8,8 @@ This functionality should still be considered experimental! Mailu Postgresql ---------------- -Mailu optionally comes with a pre-configured Postgresql image. +Mailu optionally comes with a pre-configured Postgresql image, which as of 1.8, is deprecated +and will be removed in 1.9. This images has the following features: - Automatic creation of users, db, extensions and password; diff --git a/docs/faq.rst b/docs/faq.rst index 9c4f1d75..a2c6bd33 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -61,7 +61,7 @@ have to prevent pushing out something quickly. We currently maintain a strict work flow: #. Someone writes a solution and sends a pull request; -#. We use Travis-CI for some very basic building and testing; +#. We use Github actions for some very basic building and testing; #. The pull request needs to be code-reviewed and tested by at least two members from the contributors team. @@ -261,10 +261,14 @@ correct syntax. The following file names will be taken as override configuration - ``main.cf`` as ``$ROOT/overrides/postfix/postfix.cf`` - ``master.cf`` as ``$ROOT/overrides/postfix/postfix.master`` - All ``$ROOT/overrides/postfix/*.map`` files + - For both ``postfix.cf`` and ``postfix.master``, you need to put one configuration per line, as they are fed line-by-line + to postfix. - `Dovecot`_ - ``dovecot.conf`` in dovecot sub-directory; - `Nginx`_ - All ``*.conf`` files in the ``nginx`` sub-directory; - `Rspamd`_ - All files in the ``rspamd`` sub-directory. +To override the root location (``/``) in Nginx ``WEBROOT_REDIRECT`` needs to be set to ``none`` in the env file (see :ref:`web settings `). + *Issue reference:* `206`_, `1368`_. I want to integrate Nextcloud 15 (and newer) with Mailu @@ -495,6 +499,8 @@ follow these steps: logging: driver: journald + options: + tag: mailu-front 2. Add the /etc/fail2ban/filter.d/bad-auth.conf @@ -504,6 +510,7 @@ follow these steps: [Definition] failregex = .* client login failed: .+ client:\ ignoreregex = + journalmatch = CONTAINER_TAG=mailu-front 3. Add the /etc/fail2ban/jail.d/bad-auth.conf @@ -511,8 +518,8 @@ follow these steps: [bad-auth] enabled = true + backend = systemd filter = bad-auth - logpath = /var/log/messages bantime = 604800 findtime = 300 maxretry = 10 diff --git a/docs/kubernetes/mailu/front.yaml b/docs/kubernetes/mailu/front.yaml index a1d5acb2..2fba1026 100644 --- a/docs/kubernetes/mailu/front.yaml +++ b/docs/kubernetes/mailu/front.yaml @@ -1,4 +1,4 @@ -apiVersion: apps/v1beta2 +apiVersion: apps/v1 kind: DaemonSet metadata: name: mailu-front diff --git a/docs/kubernetes/mailu/index.rst b/docs/kubernetes/mailu/index.rst index 5d3502a7..0af3942e 100644 --- a/docs/kubernetes/mailu/index.rst +++ b/docs/kubernetes/mailu/index.rst @@ -3,6 +3,10 @@ Kubernetes setup ================ +> Hold up! +> These instructions are not recommended for setting up Mailu in a production Kubernetes environment. +> Please see [the Helm Chart documentation](https://github.com/Mailu/helm-charts/blob/master/mailu/README.md). 
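As for the bad-auth filter shown a little earlier: to get a feeling for what such an expression matches, it can be exercised outside fail2ban with Python's ``re`` module. Both the pattern and the log line below are simplified stand-ins, not the exact filter or the front container's real log format:

.. code-block:: python

    import re

    # simplified stand-in for the bad-auth failregex above
    pattern = re.compile(r'.* client login failed: .+ client: (\S+)')

    # hypothetical log line, only shaped like what the front container logs
    line = 'nginx: client login failed: "wrong password" client: 203.0.113.7'
    match = pattern.search(line)
    if match:
        print('would ban', match.group(1))  # -> would ban 203.0.113.7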
+
 Prequisites
 -----------
diff --git a/docs/releases.rst b/docs/releases.rst
index 7a15d1fa..6c672538 100644
--- a/docs/releases.rst
+++ b/docs/releases.rst
@@ -1,8 +1,81 @@
 Release notes
 =============
-Mailu 1.8 - 2020-10-02
-----------------------
+Mailu 1.8 - 2021-08-7
+---------------------
+
+The full 1.8 release is finally ready. There have been some changes in the contributors team. Many of its members have stepped back due to changed priorities in their lives.
+We are very grateful for all their contributions and hope to see them back again in the future.
+This is the main reason why it took so long for 1.8 to be fully released.
+
+Fortunately, more people have decided to join the project. Some very nice contributions have been made which will become part of the next 1.9 release.
+We hope that future Mailu releases will be published more quickly now that we have more active contributors again.
+
+For a list of all changes refer to `CHANGELOG.md` in the root folder of the Mailu GitHub project. Please read the 'Override location changes' section further on this page. It contains important information for people who use the overrides folder.
+
+New Functionality & Improvements
+````````````````````````````````
+
+Here’s a short summary of new features:
+
+- Roundcube and Rainloop have been updated.
+- All dependencies have been updated to their latest security releases.
+- Fail2ban documentation has been improved.
+- Switch from client-side (cookie) sessions to server-side sessions, protecting against session-fixation attacks. We recommend that you change your SECRET_KEY after upgrading.
+- Full-text-search is back after having been disabled for a while due to nasty bugs. It can still be disabled via the mailu.env file.
+- Tons of documentation improvements, especially geared towards new users.
+- (Experimental) support for different architectures, such as ARM.
+- Improvements around the webmails, such as CardDAV, GPG, a new skin for the updated Roundcube, and MySQL support for it. Rainloop has been updated, too.
+- Improvements around relaying, such as AUTH LOGIN and non-standard port support.
+- Update to alpine:3.14 as baseimage for most containers.
+- Setup warns users about compose-IPv6 deployments, which have caused open relays in the past.
+- Improved handling of upper- vs. lowercase aliases and user addresses.
+- Improved rate-limiting system.
+- Support for SRS.
+- Japanese localisation is now available.
+
+
+Upgrading
+`````````
+
+Upgrading should run fine as long as you generate a new compose or stack
+configuration and upgrade your mailu.env.
+
+Please note that the shipped image for the PostgreSQL database is deprecated.
+It is no longer maintained as of release 1.8.
+We recommend switching to an external PostgreSQL image as soon as possible.
+
+Override location changes
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you have regenerated the Docker compose and environment files, there are some changes to the configuration overrides.
+Override files are now mounted read-only into the containers. The Dovecot and Postfix overrides have been moved into their own sub-directories. If there are local override files, they will need to be moved from ``overrides/`` to ``overrides/dovecot`` and ``overrides/postfix/``.
+
+Recreate SECRET_KEY after upgrading
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Improvements have been made to protect against session-fixation attacks.
+To be fully protected, it is required to change your SECRET_KEY in Mailu.env after upgrading.
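If you prefer Python over the shell pipeline given below, an equivalent throw-away generator looks like this (assuming the 16-character uppercase alphanumeric format described in the next paragraph):

.. code-block:: python

    import secrets
    import string

    # 16 characters drawn from A-Z and 0-9, matching the documented format
    alphabet = string.ascii_uppercase + string.digits
    print(''.join(secrets.choice(alphabet) for _ in range(16)))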
+A new SECRET_KEY is generated when you recreate your docker-compose.yml & mailu.env file via setup.mailu.io. + +The SECRET_KEY is an uppercase alphanumeric string of length 16. You can manually create such a string via +```cat /dev/urandom | tr -dc 'A-Z0-9' | fold -w ${1:-16} | head -n 1``` + +After changing mailu.env, it is required to recreate all containers for the changes to be propagated. + +Update your DNS SPF Records +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +It has become known that the SPF DNS records generated by the admin interface are not completely standard compliant anymore. Please check the DNS records for your domains and compare them to what the new admin-interface instructs you to use. In most cases, this should be a simple copy-paste operation for you …. + +Fixed hostname for antispam service +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For history to be retained in Rspamd, the antispam container requires a static hostname. When you re-generate your docker-compose.yml file (or helm-chart), this will be covered. + + +Mailu 1.8rc - 2020-10-02 +------------------------ Release 1.8 has come a long way again. Due to corona the project slowed down to a crawl. Fortunately new contributors have joined the team what enabled us to still release Mailu 1.8 this year. diff --git a/docs/requirements.txt b/docs/requirements.txt index 4afd9bb6..f49e26d5 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,3 +2,4 @@ recommonmark Sphinx sphinx-autobuild sphinx-rtd-theme +docutils==0.16 diff --git a/docs/reverse.rst b/docs/reverse.rst index de710dff..29f9e9e1 100644 --- a/docs/reverse.rst +++ b/docs/reverse.rst @@ -154,7 +154,40 @@ Add the respective Traefik labels for your domain/configuration, like If your Traefik is configured to automatically request certificates from *letsencrypt*, then you’ll have a certificate for ``mail.your.doma.in`` now. However, ``mail.your.doma.in`` might only be the location where you want the Mailu web-interfaces to live — your mail should be sent/received from ``your.doma.in``, and this is the ``DOMAIN`` in your ``.env``? -To support that use-case, Traefik can request ``SANs`` for your domain. Lets add something like +To support that use-case, Traefik can request ``SANs`` for your domain. The configuration for this will depend on your Traefik version. + +---- + +Traefik 2.x using labels configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Add the appropriate labels for your domain(s) to the ``front`` container in ``docker-compose.yml``. + +.. code-block:: yaml + + services: + front: + labels: + # Enable TLS + - "traefik.http.routers.mailu-secure.tls" + # Your main domain + - "traefik.http.routers.mailu-secure.tls.domains[0].main=your.doma.in" + # Optional SANs for your main domain + - "traefik.http.routers.mailu-secure.tls.domains[0].sans=mail.your.doma.in,webmail.your.doma.in,smtp.your.doma.in" + # Optionally add other domains + - "traefik.http.routers.mailu-secure.tls.domains[1].main=mail.other.doma.in" + - "traefik.http.routers.mailu-secure.tls.domains[1].sans=mail2.other.doma.in,mail3.other.doma.in" + # Your ACME certificate resolver + - "traefik.http.routers.mailu-secure.tls.certResolver=foo" + +Of course, be sure to define the Certificate Resolver ``foo`` in the static configuration as well. + +Alternatively, you can define SANs in the Traefik static configuration using routers, or in the static configuration using entrypoints. Refer to the Traefik documentation for more details. 
+ +Traefik 1.x with TOML configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Lets add something like .. code-block:: yaml @@ -163,7 +196,11 @@ To support that use-case, Traefik can request ``SANs`` for your domain. Lets add main = "your.doma.in" # this is the same as $TRAEFIK_DOMAIN! sans = ["mail.your.doma.in", "webmail.your.doma.in", "smtp.your.doma.in"] -to your ``traefik.toml``. You might need to clear your ``acme.json``, if a certificate for one of these domains already exists. +to your ``traefik.toml``. + +---- + +You might need to clear your ``acme.json``, if a certificate for one of these domains already exists. You will need some solution which dumps the certificates in ``acme.json``, so you can include them in the ``mailu/front`` container. One such example is ``mailu/traefik-certdumper``, which has been adapted for use in Mailu. You can add it to your ``docker-compose.yml`` like: diff --git a/docs/webadministration.rst b/docs/webadministration.rst index 070eb473..86ce41c0 100644 --- a/docs/webadministration.rst +++ b/docs/webadministration.rst @@ -1,7 +1,7 @@ Web administration interface ============================ -The web administration interface is the main website for maintaining your Mailu installation. +The web administration interface is the main website for maintaining your Mailu installation. For brevity the web administration interface will now be mentioned as admin gui. It offers the following configuration options: @@ -30,13 +30,13 @@ It offers the following configuration options: * Configure all email domains served by Mailu, including: * generating dkim and dmarc keys for a domain. - + * view email domain information on how to configure your SPF, DMARC, DKIM and MX dns records for an email domain. - + * Add new email domains. - + * For existing domains, configure users, quotas, aliases, administrators and alternative domain names. - + * access the webmail site. * lookup settings for configuring your email client. @@ -49,7 +49,7 @@ The admin GUI is by default accessed via the URL `https:///admin`, wh To login the admin GUI enter the email address and password of an user. Only global administrator users have access to all configuration settings and the Rspamd webgui. Other users will be presented with settings for only their account, and domains they are managers of. -To create a user who is a global administrator for a new installation, the Mailu.env file can be adapted. +To create a user who is a global administrator for a new installation, the Mailu.env file can be adapted. For more information see the section 'Admin account - automatic creation' in :ref:`the configuration reference `. The following sections are only accessible for global administrators: @@ -69,7 +69,7 @@ The following sections are only accessible for global administrators: Settings -------- -After logging in the web administration interface, the settings page is loaded. +After logging in the web administration interface, the settings page is loaded. On the settings page the settings of the currently logged in user can be changed. Changes are saved and effective immediately after clicking the Save Settings button at the bottom of the page. @@ -77,27 +77,27 @@ Changes are saved and effective immediately after clicking the Save Settings but Display name ```````````` -On the settings page the displayed name can be changed of the logged in user. +On the settings page the displayed name can be changed of the logged in user. This display name is only used within the web administration interface. 
Antispam ```````` -Under the section `Antispam` the spam filter can be enabled or disabled for the logged in user. By default the spam filter is enabled. +Under the section `Antispam` the spam filter can be enabled or disabled for the logged in user. By default the spam filter is enabled. When the spam filter is disabled, all received email messages will go to the inbox folder of the logged in user. The exception to this rule, are email messages with an extremely high spam score. These email messages are always rejected by Rspamd. When the spam filter is enabled, received email messages will be moved to the logged in user's inbox folder or junk folder depending on the user defined spam filter tolerance. -The user defined spam filter tolerance determines when an email is classified as ham (moved to the inbox folder) or spam (moved to the junk folder). -The default value is 80%. The lower the spam filter tolerance, the more false positives (ham classified as spam). The higher the spam filter tolerance, the more false negatives (spam classified as ham). +The user defined spam filter tolerance determines when an email is classified as ham (moved to the inbox folder) or spam (moved to the junk folder). +The default value is 80%. The lower the spam filter tolerance, the more false positives (ham classified as spam). The higher the spam filter tolerance, the more false negatives (spam classified as ham). For more information see the :ref:`antispam documentation `. Auto-forward ````````````` -Under the section `Auto-forward`, the automatic forwarding of received email messages can be enabled. When enabled, all received email messages are forwarded to the specified email address. +Under the section `Auto-forward`, the automatic forwarding of received email messages can be enabled. When enabled, all received email messages are forwarded to the specified email address. The option "Keep a copy of the emails" can be ticked, to keep a copy of the received email message in the inbox folder. @@ -107,7 +107,7 @@ In the destination textbox, the email addresses can be entered for automatic for Update password --------------- -On the `update password` page, the password of the logged in user can be changed. Changes are effective immediately. +On the `update password` page, the password of the logged in user can be changed. Changes are effective immediately. .. _webadministration_auto-reply: @@ -117,7 +117,7 @@ Auto-reply On the `auto-reply` page, automatic replies can be configured. This is also known as out of office (ooo) or out of facility (oof) replies. -To enable automatic replies tick the checkbox 'Enable automatic reply'. +To enable automatic replies tick the checkbox 'Enable automatic reply'. Under Reply subject the email subject for automatic replies can be configured. When a reply subject is entered, this subject will be used for the automatic reply. @@ -130,12 +130,12 @@ E.g. if the email subject of the received email message is "how are you?", then Fetched accounts ---------------- -This page is only available when the Fetchmail container is part of your Mailu deployment. +This page is only available when the Fetchmail container is part of your Mailu deployment. Fetchmail can be enabled when creating the docker-compose.yml file with the setup utility (https://setup.mailu.io). On the `fetched accounts` page you can configure email accounts from which email messages will be retrieved. -Only unread email messages are retrieved from the specified email account. 
-By default Fetchmail will retrieve email messages every 10 minutes. This can be changed in the Mailu.env file. +Only unread email messages are retrieved from the specified email account. +By default Fetchmail will retrieve email messages every 10 minutes. This can be changed in the Mailu.env file. For more information on changing the polling interval see :ref:`the configuration reference `. @@ -149,7 +149,7 @@ You can add a fetched account by clicking on the `Add an account` button on the * Enable TLS. Tick this setting if the email server requires TLS/SSL instead of STARTTLS. -* Username. The user name for logging in to the email server. Normally this is the email address or the email address' local-part (the part before @). +* Username. The user name for logging in to the email server. Normally this is the email address or the email address' local-part (the part before @). * Password. The password for logging in to the email server. @@ -166,8 +166,8 @@ The purpose of an authentication token is to create a unique and strong password The application will use this authentication token instead of the logged in user's password for sending/receiving email. This allows safe access to the logged in user's email account. At any moment, the authentication token can be deleted so that the application has no access to the logged in user's email account anymore. -By clicking on the New token button on the top right of the page, a new authentication token can be created. On this page the generated authentication token will only be displayed once. -After saving the application token it is not possible anymore to view the unique password. +By clicking on the New token button on the top right of the page, a new authentication token can be created. On this page the generated authentication token will only be displayed once. +After saving the application token it is not possible anymore to view the unique password. The comment field can be used to enter a description for the authentication token. For example the name of the application the application token is created for. @@ -198,9 +198,9 @@ A global administrator can change `any setting` in the admin GUI. Be careful tha Relayed domains --------------- -On the `relayed domains list` page, destination domains can be added that Mailu will relay email messages for without authentication. -This means that for these destination domains, other email clients or email servers can send email via Mailu unauthenticated via port 25 to this destination domain. -For example if the destination domain example.com is added. Any emails to example.com (john@example.com) will be relayed to example.com. +On the `relayed domains list` page, destination domains can be added that Mailu will relay email messages for without authentication. +This means that for these destination domains, other email clients or email servers can send email via Mailu unauthenticated via port 25 to this destination domain. +For example if the destination domain example.com is added. Any emails to example.com (john@example.com) will be relayed to example.com. Example scenario's are: * relay domain from a backup server. @@ -212,30 +212,37 @@ Example scenario's are: On the new relayed domain page the following options can be entered for a new relayed domain: -* Relayed domain name. The domain name that is relayed. Email messages addressed to this domain (To: John@example.com), will be forwarded to this domain. - No authentication is required. +* Relayed domain name. The domain name that is relayed. 
Email messages addressed to this domain (To: John@example.com), will be forwarded to this domain. + No authentication is required. -* Remote host (optional). The SMPT server that will be used for relaying the email message. - When this field is blank, the Mailu server will directly send the email message to the relayed domain. - As value can be entered either a hostname or IP address of the SMPT server. - By default port 25 is used. To use a different port append ":port number" to the Remote Host. For example: - 123.45.67.90:2525. +* Remote host (optional). The host that will be used for relaying the email message. + When this field is blank, the Mailu server will directly send the email message to the mail server of the relayed domain. + When a remote host is specified, it can be prefixed by ``mx:`` or ``lmtp:`` and followed by a port number (``:port``). + + ================ ===================================== ========================= + Remote host Description postfix transport:nexthop + ================ ===================================== ========================= + empty use MX of relay domain smtp:domain + :port use MX of relay domain and use port smtp:domain:port + target resolve A/AAAA of target smtp:[target] + target:port resolve A/AAAA of target and use port smtp:[target]:port + mx:target resolve MX of target smtp:target + mx:target:port resolve MX of target and use port smtp:target:port + lmtp:target resolve A/AAAA of target lmtp:target + lmtp:target:port resolve A/AAAA of target and use port lmtp:target:port + ================ ===================================== ========================= + + `target` can also be an IPv4 or IPv6 address (an IPv6 address must be enclosed in []: ``[2001:DB8::]``). * Comment. A text field where a comment can be entered to describe the entry. Changes are effective immediately after clicking the Save button. -NOTE: Due to bug `1588`_ email messages fail to be relayed if no Remote Host is configured. -As a workaround the HOSTNAME or IP Address of the SMPT server of the relayed domain can be entered as Remote Host. -Please note that no MX lookup is performed when entering a hostname as Remote Host. You can use the MX lookup on mxtoolbox.com to find the hostname and IP Address of the SMTP server. - -.. _`1588`: https://github.com/Mailu/Mailu/issues/1588 - Antispam -------- The menu item Antispam opens the Rspamd webgui. For more information how spam filtering works in Mailu see the :ref:`Spam filtering page `. -The spam filtering page also contains a section that describes how to create a local blacklist for blocking email messages from specific domains. +The spam filtering page also contains a section that describes how to create a local blacklist for blocking email messages from specific domains. The Rspamd webgui offers basic functions for setting metric actions, scores, viewing statistics and learning. The following settings are not persistent and are *lost* when the Antispam container is recreated or restarted: @@ -266,31 +273,31 @@ On the `Mail domains` page all the domains served by Mailu are configured. Via t Details ``````` -This page is also accessible for domain managers. On the details page all DNS settings are displayed for configuring your DNS server. It contains information on what to configure as MX record and SPF record. On this page it is also possible to (re-)generate the keys for DKIM and DMARC. The option for generating keys for DKIM and DMARC is only available for global administrators.
After generating the keys for DKIM and DMARC, this page will also show the DNS records for configuring the DKIM/DMARC records on the DNS server. +This page is also accessible for domain managers. On the details page all DNS settings are displayed for configuring your DNS server. It contains information on what to configure as MX record and SPF record. On this page it is also possible to (re-)generate the keys for DKIM and DMARC. The option for generating keys for DKIM and DMARC is only available for global administrators. After generating the keys for DKIM and DMARC, this page will also show the DNS records for configuring the DKIM/DMARC records on the DNS server. Edit -```` +```` -This page is only accessible for global administrators. On the edit page, the global settings for the domain can be changed. +This page is only accessible for global administrators. On the edit page, the global settings for the domain can be changed. * Maximum user count. The maximum amount of users that can be created under this domain. Once this limit is reached it is not possible anymore to add users to the domain; and it is also not possible for users to self-register. - + * Maximum alias count. The maximum amount of aliases that can be created for an email account. - + * Maximum user quota. The maximum amount of quota that can be assigned to a user. When creating or editing a user, this sets the limit on the maximum amount of quota that can be assigned to the user. - -* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available. - Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account. - If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider. + +* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available. + Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account. + If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider. Use this option with care! - + * Comment. Description for the domain. This description is visible on the parent domains list page. Delete `````` -This page is only accessible for global administrators. This page allows you to delete the domain. The Admin GUI will ask for confirmation if the domain must be really deleted. +This page is only accessible for global administrators. This page allows you to delete the domain. The Admin GUI will ask for confirmation if the domain must be really deleted. Users @@ -326,7 +333,7 @@ For adding a new user the following options can be configured. * Enabled. Tick this checkbox to enable the user account. When an user is disabled, the user is unable to login to the Admin GUI or webmail or access his email via IMAP/POP3 or send mail. The email inbox of the user is still retained. This option can be used to temporarily suspend an user account. - + * Quota. The maximum quota for the user's email box. * Allow IMAP access. When ticked, allows email retrieval via the IMAP protocol. @@ -337,7 +344,7 @@ For adding a new user the following options can be configured. Aliases ``````` -This page is also accessible for domain managers. On the aliases page, aliases can be added for email addresses. 
An alias is a way to disguise another email address. +This page is also accessible for domain managers. On the aliases page, aliases can be added for email addresses. An alias is a way to disguise another email address. Everything sent to an alias email address is actually received in the primary email account's inbox of the destination email address. Aliases can diversify a single email account without having to create multiple email addresses (users). It is also possible to add multiple email addresses to the destination field. All incoming mails will be sent to each user's inbox in this case. @@ -348,11 +355,11 @@ The following options are available when adding an alias: * Use SQL LIKE Syntax (e.g. for catch-all aliases). When this option is ticked, you can use SQL LIKE syntax as alias. The SQL LIKE syntax is used to match text values against a pattern using wildcards. There are two wildcards that can be used with SQL LIKE syntax: - + * % - The percent sign represents zero, one, or multiple characters * _ - The underscore represents a single character - - Examples are: + + Examples are: * a% - Finds any values that start with "a" * %a - Finds any values that end with "a" * %or% - Finds any values that have "or" in any position @@ -369,7 +376,7 @@ The following options are available when adding an alias: Managers ```````` -This page is also accessible for domain managers. On the `managers list` page, managers can be added for the domain and can be deleted. +This page is also accessible for domain managers. On the `managers list` page, managers can be added to the domain and can be deleted. Managers have access to configuration settings of the domain. On the `add manager` page you can click on the manager email text box to access a drop down list of users that can be made a manager of the domain. @@ -377,11 +384,11 @@ On the `add manager` page you can click on the manager email text box to access Alternatives ```````````` -This page is only accessible for global administrators. On the alternatives page, alternative domains can be added for the domain. +This page is only accessible for global administrators. On the alternatives page, alternative domains can be added for the domain. An alternative domain acts as a copy of a given domain. -Everything sent to an alternative domain, is actually received in the domain the alternative is created for. -This allows you to receive emails for multiple domains while using a single domain. -For example if the main domain has the email address user@example.com, and the alternative domain is mymail.com, +Everything sent to an alternative domain is actually received in the domain the alternative is created for. +This allows you to receive emails for multiple domains while using a single domain. +For example, if the main domain has the email address user@example.com and the alternative domain is mymail.com, then email sent to user@mymail.com will end up in the email box of user@example.com. New domain `````````` @@ -392,16 +399,16 @@ This page is only accessible for global administrators. Via this page a new doma * domain name. The name of the domain. * Maximum user count. The maximum amount of users that can be created under this domain. Once this limit is reached it is not possible anymore to add users to the domain; and it is also not possible for users to self-register. - + * Maximum alias count. The maximum amount of aliases that can be made for an email account. - + * Maximum user quota. The maximum amount of quota that can be assigned to a user.
When creating or editing a user, this sets the limit on the maximum amount of quota that can be assigned to the user. - -* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available. - Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account. - If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider. + +* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available. + Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account. + If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider. Use this option with care! - + * Comment. Description for the domain. This description is visible on the parent domains list page. @@ -414,7 +421,7 @@ The menu item `Webmail` opens the webmail page. This option is only available if Client setup ------------ -The menu item `Client setup` shows all settings for configuring your email client for connecting to Mailu. +The menu item `Client setup` shows all settings for configuring your email client for connecting to Mailu. Website diff --git a/optional/clamav/Dockerfile b/optional/clamav/Dockerfile index 1132845f..20cebcdc 100644 --- a/optional/clamav/Dockerfile +++ b/optional/clamav/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add --no-cache \ diff --git a/optional/fetchmail/Dockerfile b/optional/fetchmail/Dockerfile index a707a54a..506e409a 100644 --- a/optional/fetchmail/Dockerfile +++ b/optional/fetchmail/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images diff --git a/optional/postgresql/Dockerfile b/optional/postgresql/Dockerfile index 95048147..0f5034da 100644 --- a/optional/postgresql/Dockerfile +++ b/optional/postgresql/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add --no-cache \ diff --git a/optional/postgresql/start.py b/optional/postgresql/start.py index 1f2f2a2b..e34e157e 100755 --- a/optional/postgresql/start.py +++ b/optional/postgresql/start.py @@ -2,7 +2,6 @@ import anosql import psycopg2 -import jinja2 import glob import os import subprocess @@ -38,7 +37,6 @@ if not os.listdir("/data"): rec.write("restore_command = 'gunzip < /backup/wal_archive/%f > %p'\n") rec.write("standby_mode = off\n") os.system("chown postgres:postgres /data/recovery.conf") - #os.system("sudo -u postgres pg_ctl start -D /data -o '-h \"''\" '") else: # Bootstrap the database os.system("sudo -u postgres initdb -D /data") diff --git a/optional/radicale/Dockerfile b/optional/radicale/Dockerfile index 400b1a3f..13761164 100644 --- a/optional/radicale/Dockerfile +++ b/optional/radicale/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images diff --git a/optional/unbound/Dockerfile b/optional/unbound/Dockerfile index abb45420..2b472d44 100644 --- a/optional/unbound/Dockerfile +++ b/optional/unbound/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.12 +ARG DISTRO=alpine:3.14 FROM $DISTRO # python3 shared with most images RUN apk add 
--no-cache \ diff --git a/optional/unbound/unbound.conf b/optional/unbound/unbound.conf index 8abd4325..6c8fc64d 100644 --- a/optional/unbound/unbound.conf +++ b/optional/unbound/unbound.conf @@ -2,7 +2,7 @@ server: verbosity: 1 interface: 0.0.0.0 interface: ::0 - logfile: /dev/stdout + logfile: "" do-ip4: yes do-ip6: yes do-udp: yes diff --git a/setup/Dockerfile b/setup/Dockerfile index 2b3c3c6c..5775ab6b 100644 --- a/setup/Dockerfile +++ b/setup/Dockerfile @@ -1,4 +1,4 @@ -ARG DISTRO=alpine:3.10 +ARG DISTRO=alpine:3.14 FROM $DISTRO RUN mkdir -p /app diff --git a/setup/flavors/compose/docker-compose.yml b/setup/flavors/compose/docker-compose.yml index 155e1180..08bba13b 100644 --- a/setup/flavors/compose/docker-compose.yml +++ b/setup/flavors/compose/docker-compose.yml @@ -85,6 +85,7 @@ services: antispam: image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}rspamd:${MAILU_VERSION:-{{ version }}} + hostname: antispam restart: always env_file: {{ env }} volumes: diff --git a/setup/flavors/compose/mailu.env b/setup/flavors/compose/mailu.env index 44452e36..d45f5517 100644 --- a/setup/flavors/compose/mailu.env +++ b/setup/flavors/compose/mailu.env @@ -86,7 +86,7 @@ WELCOME_SUBJECT={{ welcome_subject or 'Welcome to your new email account' }} WELCOME_BODY={{ welcome_body or 'Welcome to your new email account, if you can read this, then it is configured properly!' }} # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION={{ compression }} # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL={{ compression_level }} @@ -175,3 +175,10 @@ DB_HOST={{ db_url }} DB_NAME={{ db_name }} {% endif %} +{% if (postgresql == 'external' or db_flavor == 'mysql') and webmail_type == 'roundcube' %} +ROUNDCUBE_DB_FLAVOR={{ db_flavor }} +ROUNDCUBE_DB_USER={{ roundcube_db_user }} +ROUNDCUBE_DB_PW={{ roundcube_db_pw }} +ROUNDCUBE_DB_HOST={{ roundcube_db_url }} +ROUNDCUBE_DB_NAME={{ roundcube_db_name }} +{% endif %} diff --git a/setup/flavors/stack/docker-compose.yml b/setup/flavors/stack/docker-compose.yml index d9c5cd4f..df1fe7b4 100644 --- a/setup/flavors/stack/docker-compose.yml +++ b/setup/flavors/stack/docker-compose.yml @@ -70,6 +70,7 @@ services: antispam: image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}rspamd:${MAILU_VERSION:-{{ version }}} + hostname: antispam env_file: {{ env }} volumes: - "{{ root }}/filter:/var/lib/rspamd" diff --git a/setup/requirements.txt b/setup/requirements.txt index b6bf2120..f2eb2157 100644 --- a/setup/requirements.txt +++ b/setup/requirements.txt @@ -1,4 +1,4 @@ -flask -flask-bootstrap -redis -gunicorn +Flask==1.0.2 +Flask-Bootstrap==3.3.7.1 +gunicorn==19.9.0 +redis==3.2.1 diff --git a/setup/server.py b/setup/server.py index 0d58fa25..5be1fc83 100644 --- a/setup/server.py +++ b/setup/server.py @@ -54,11 +54,11 @@ def build_app(path): @app.context_processor def app_context(): return dict( - versions=os.getenv("VERSIONS","master").split(','), + versions=os.getenv("VERSIONS","master").split(','), stable_version = os.getenv("stable_version", "master") ) - prefix_bp = flask.Blueprint(version, __name__) + prefix_bp = flask.Blueprint(version.replace(".", "_"), __name__) prefix_bp.jinja_loader = jinja2.ChoiceLoader([ jinja2.FileSystemLoader(os.path.join(path, "templates")), jinja2.FileSystemLoader(os.path.join(path, "flavors")) diff --git a/setup/static/render.js b/setup/static/render.js index a817c4f0..0a0a6675 100644 --- a/setup/static/render.js +++ 
b/setup/static/render.js @@ -57,6 +57,13 @@ $(document).ready(function() { $("#db_pw").prop('required',true); $("#db_url").prop('required',true); $("#db_name").prop('required',true); + if ($("#webmail").val() == 'roundcube') { + $("#roundcube_external_db").show(); + $("#roundcube_db_user").prop('required',true); + $("#roundcube_db_pw").prop('required',true); + $("#roundcube_db_url").prop('required',true); + $("#roundcube_db_name").prop('required',true); + } } else if (this.value == 'mysql') { $("#postgres_db").hide(); $("#external_db").show(); @@ -64,6 +71,13 @@ $(document).ready(function() { $("#db_pw").prop('required',true); $("#db_url").prop('required',true); $("#db_name").prop('required',true); + if ($("#webmail").val() == 'roundcube') { + $("#roundcube_external_db").show(); + $("#roundcube_db_user").prop('required',true); + $("#roundcube_db_pw").prop('required',true); + $("#roundcube_db_url").prop('required',true); + $("#roundcube_db_name").prop('required',true); + } } }); $("#external_psql").change(function() { @@ -73,6 +87,13 @@ $(document).ready(function() { $("#db_pw").prop('required',true); $("#db_url").prop('required',true); $("#db_name").prop('required',true); + if ($("#webmail").val() == 'roundcube') { + $("#roundcube_external_db").show(); + $("#roundcube_db_user").prop('required',true); + $("#roundcube_db_pw").prop('required',true); + $("#roundcube_db_url").prop('required',true); + $("#roundcube_db_name").prop('required',true); + } } else { $("#external_db").hide(); } diff --git a/setup/templates/steps/compose/02_services.html b/setup/templates/steps/compose/02_services.html index 20d4d7cb..5118c304 100644 --- a/setup/templates/steps/compose/02_services.html +++ b/setup/templates/steps/compose/02_services.html @@ -59,7 +59,7 @@ the security implications caused by such an increase of attack surface.

Fetchmail allows users to retrieve mail from an external mail-server via IMAP/POP3 and puts it in their inbox. - + diff --git a/setup/templates/steps/config.html b/setup/templates/steps/config.html index 29d8dddd..72b83915 100644 --- a/setup/templates/steps/config.html +++ b/setup/templates/steps/config.html @@ -50,8 +50,8 @@ Or in plain english: if receivers start to classify your mail as spam, this post

-

/ minute +

/ minute

@@ -83,7 +83,7 @@ manage your email domains, users, etc.

- + diff --git a/setup/templates/steps/database.html b/setup/templates/steps/database.html index ad5411ab..0a2f94f3 100644 --- a/setup/templates/steps/database.html +++ b/setup/templates/steps/database.html @@ -28,7 +28,7 @@
diff --git a/setup/templates/steps/stack/02_services.html b/setup/templates/steps/stack/02_services.html index 3f5186b0..6fce0ae6 100644 --- a/setup/templates/steps/stack/02_services.html +++ b/setup/templates/steps/stack/02_services.html @@ -55,7 +55,7 @@ the security implications caused by such an increase of attack surface.

Fetchmail allows users to retrieve mail from an external mail-server via IMAP/POP3 and puts it in their inbox. - + diff --git a/tests/build_arm.sh b/tests/build_arm.sh index 04836ddb..32dba421 100755 --- a/tests/build_arm.sh +++ b/tests/build_arm.sh @@ -1,6 +1,6 @@ #!/bin/bash -x -ALPINE_VER="3.10" +ALPINE_VER="3.14" DISTRO="balenalib/rpi-alpine:$ALPINE_VER" # Used for webmails QEMU="arm" diff --git a/tests/compose/core/mailu.env b/tests/compose/core/mailu.env index b13e57c5..a78515b8 100644 --- a/tests/compose/core/mailu.env +++ b/tests/compose/core/mailu.env @@ -92,7 +92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/compose/fetchmail/mailu.env b/tests/compose/fetchmail/mailu.env index 636a09a9..afb57751 100644 --- a/tests/compose/fetchmail/mailu.env +++ b/tests/compose/fetchmail/mailu.env @@ -92,7 +92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/compose/filters/00_create_users.sh b/tests/compose/filters/00_create_users.sh new file mode 100755 index 00000000..3c581685 --- /dev/null +++ b/tests/compose/filters/00_create_users.sh @@ -0,0 +1,5 @@ +echo "Creating user required for next test ..." +# Should not fail and update the password; update mode +docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu admin admin mailu.io 'password' --mode=update || exit 1 +docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu user user mailu.io 'password' || exit 1 +echo "User created successfully" diff --git a/tests/compose/filters/mailu.env b/tests/compose/filters/mailu.env index b6d5ca8f..4c8c219d 100644 --- a/tests/compose/filters/mailu.env +++ b/tests/compose/filters/mailu.env @@ -92,7 +92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/compose/rainloop/mailu.env b/tests/compose/rainloop/mailu.env index 9c31c8bb..1f5fce0c 100644 --- a/tests/compose/rainloop/mailu.env +++ b/tests/compose/rainloop/mailu.env @@ -51,7 +51,7 @@ DISABLE_STATISTICS=False ################################### # Expose the admin interface (value: true, false) -ADMIN=true +ADMIN=false # Choose which webmail to run if any (values: roundcube, rainloop, none) WEBMAIL=rainloop @@ -92,7 +92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/compose/roundcube/mailu.env b/tests/compose/roundcube/mailu.env index dc503268..ba153ac2 100644 --- a/tests/compose/roundcube/mailu.env +++ b/tests/compose/roundcube/mailu.env @@ -51,7 +51,7 @@ DISABLE_STATISTICS=False ################################### # Expose the admin interface (value: true, false) -ADMIN=true +ADMIN=false # Choose which webmail to run if any (values: roundcube, rainloop, none) WEBMAIL=roundcube @@ -92,7 
+92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/compose/webdav/mailu.env b/tests/compose/webdav/mailu.env index 90fb25b1..939f453b 100644 --- a/tests/compose/webdav/mailu.env +++ b/tests/compose/webdav/mailu.env @@ -92,7 +92,7 @@ DMARC_RUF=admin # Maildir Compression -# choose compression-method, default: none (value: bz2, gz) +# choose compression-method, default: none (value: gz, bz2, lz4, zstd) COMPRESSION= # change compression-level, default: 6 (value: 1-9) COMPRESSION_LEVEL= diff --git a/tests/deploy.sh b/tests/deploy.sh index 21aec444..abb37b6b 100755 --- a/tests/deploy.sh +++ b/tests/deploy.sh @@ -3,14 +3,5 @@ # Skip deploy for staging branch [ "$TRAVIS_BRANCH" = "staging" ] && exit 0 -# Retag in case of `bors try` -if [ "$TRAVIS_BRANCH" = "testing" ]; then - export DOCKER_ORG="mailutest" - # Commit message is like "Try #99". - # This sets the version tag to "pr-99" - export MAILU_VERSION="pr-${TRAVIS_COMMIT_MESSAGE//[!0-9]/}" - docker-compose -f tests/build.yml build -fi - docker login -u $DOCKER_UN -p $DOCKER_PW docker-compose -f tests/build.yml push diff --git a/towncrier/newsfragments/1194.feature b/towncrier/newsfragments/1194.feature new file mode 100644 index 00000000..0cd2a9e9 --- /dev/null +++ b/towncrier/newsfragments/1194.feature @@ -0,0 +1 @@ +Add a credential cache to speedup authentication requests. diff --git a/towncrier/newsfragments/1294.bugfix b/towncrier/newsfragments/1294.bugfix new file mode 100644 index 00000000..68bb7a8a --- /dev/null +++ b/towncrier/newsfragments/1294.bugfix @@ -0,0 +1 @@ +Ensure that the podop socket is always owned by the postfix user (wasn't the case when build using non-standard base images... typically for arm64) diff --git a/towncrier/newsfragments/1503.doc b/towncrier/newsfragments/1503.doc new file mode 100644 index 00000000..9c59feb7 --- /dev/null +++ b/towncrier/newsfragments/1503.doc @@ -0,0 +1 @@ +Add documentation for Traefik 2 in Reverse Proxy \ No newline at end of file diff --git a/towncrier/newsfragments/1604.feature b/towncrier/newsfragments/1604.feature new file mode 100644 index 00000000..2b47791a --- /dev/null +++ b/towncrier/newsfragments/1604.feature @@ -0,0 +1 @@ +Add cli commands config-import and config-export diff --git a/towncrier/newsfragments/1660.bugfix b/towncrier/newsfragments/1660.bugfix deleted file mode 100644 index a90fb099..00000000 --- a/towncrier/newsfragments/1660.bugfix +++ /dev/null @@ -1 +0,0 @@ -Don't replace nested headers (typically in attached emails) diff --git a/towncrier/newsfragments/1686.bugfix b/towncrier/newsfragments/1686.bugfix deleted file mode 100644 index 932d7d7c..00000000 --- a/towncrier/newsfragments/1686.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix letsencrypt access to certbot for the mail-letsencrypt flavour diff --git a/towncrier/newsfragments/1694.feature b/towncrier/newsfragments/1694.feature new file mode 100644 index 00000000..f7e2013e --- /dev/null +++ b/towncrier/newsfragments/1694.feature @@ -0,0 +1 @@ +Support configuring lz4 and zstd compression for dovecot. 
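As a rough illustration of the new ``COMPRESSION`` values documented in mailu.env above (gz, bz2, lz4, zstd), the sketch below shows how such settings could be validated and rendered into a Dovecot zlib-plugin snippet. This is not taken from the Mailu dovecot image: the helper name and the Dovecot option names ``zlib_save`` / ``zlib_save_level`` are assumptions used only for illustration::

    # Sketch only: validate COMPRESSION/COMPRESSION_LEVEL from mailu.env and
    # render a Dovecot zlib-plugin snippet. Option names are assumed, not
    # copied from the Mailu dovecot start script.
    import os

    VALID_METHODS = {"gz", "bz2", "lz4", "zstd"}

    def compression_snippet(env=os.environ):
        method = env.get("COMPRESSION", "").strip()
        level = env.get("COMPRESSION_LEVEL", "").strip() or "6"
        if not method:
            return ""  # compression disabled (the default)
        if method not in VALID_METHODS:
            raise ValueError("unsupported COMPRESSION value: %r" % method)
        return (
            "mail_plugins = $mail_plugins zlib\n"
            "plugin {\n"
            "  zlib_save = %s\n"
            "  zlib_save_level = %s\n"
            "}\n" % (method, level)
        )

    if __name__ == "__main__":
        # e.g. COMPRESSION=zstd COMPRESSION_LEVEL=3
        print(compression_snippet({"COMPRESSION": "zstd", "COMPRESSION_LEVEL": "3"}))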
diff --git a/towncrier/newsfragments/1720.bugfix b/towncrier/newsfragments/1720.bugfix deleted file mode 100644 index 0bf2b8e6..00000000 --- a/towncrier/newsfragments/1720.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Fix CVE-2020-25275 and CVE-2020-24386 by using alpine 3.13 for -dovecot which contains a fixed dovecot version. diff --git a/towncrier/newsfragments/1760.bugfix b/towncrier/newsfragments/1760.bugfix new file mode 100644 index 00000000..9d6f38af --- /dev/null +++ b/towncrier/newsfragments/1760.bugfix @@ -0,0 +1,2 @@ +Fix CVE-2021-23240, CVE-2021-3156 and CVE-2021-23239 for postgresql +by force-upgrading sudo. diff --git a/towncrier/newsfragments/1783.misc b/towncrier/newsfragments/1783.misc deleted file mode 100644 index 2ee4c97f..00000000 --- a/towncrier/newsfragments/1783.misc +++ /dev/null @@ -1 +0,0 @@ -Switch from client side sessions (cookies) to server-side sessions (Redis). This simplies the security model a lot and allows for an easier recovery should a cookie ever land in the hands of an attacker. diff --git a/towncrier/newsfragments/1828.misc b/towncrier/newsfragments/1828.misc new file mode 100644 index 00000000..09da59ad --- /dev/null +++ b/towncrier/newsfragments/1828.misc @@ -0,0 +1 @@ +Switched from Travis to Github actions for CI/CD. Improved CI workflow to perform all tests in parallel. diff --git a/towncrier/newsfragments/1830.misc b/towncrier/newsfragments/1830.misc new file mode 100644 index 00000000..6de3aff1 --- /dev/null +++ b/towncrier/newsfragments/1830.misc @@ -0,0 +1 @@ +Make CI tests run in parallel. diff --git a/towncrier/newsfragments/1831.bugfix b/towncrier/newsfragments/1831.bugfix new file mode 100644 index 00000000..1094be34 --- /dev/null +++ b/towncrier/newsfragments/1831.bugfix @@ -0,0 +1 @@ +Fix roundcube environment configuration for databases \ No newline at end of file diff --git a/towncrier/newsfragments/1851.feature b/towncrier/newsfragments/1851.feature new file mode 100644 index 00000000..e01f5cb4 --- /dev/null +++ b/towncrier/newsfragments/1851.feature @@ -0,0 +1 @@ +Remove cyrus-sasl-plain as it's not packaged by alpine anymore. SASL-login is still available and used when relaying. diff --git a/towncrier/newsfragments/1917.bugfix b/towncrier/newsfragments/1917.bugfix new file mode 100644 index 00000000..68187d61 --- /dev/null +++ b/towncrier/newsfragments/1917.bugfix @@ -0,0 +1 @@ +Alpine has removed support for btree and hash in postfix... 
please use lmdb instead diff --git a/towncrier/newsfragments/224.feature b/towncrier/newsfragments/224.feature new file mode 100644 index 00000000..9a2f479b --- /dev/null +++ b/towncrier/newsfragments/224.feature @@ -0,0 +1 @@ +Add instructions on how to create DNS records for email client auto-configuration (RFC6186 style) diff --git a/towncrier/newsfragments/783.feature b/towncrier/newsfragments/783.feature new file mode 100644 index 00000000..fcafceef --- /dev/null +++ b/towncrier/newsfragments/783.feature @@ -0,0 +1 @@ +Centralize the authentication of webmails behind the admin interface diff --git a/webmails/rainloop/Dockerfile b/webmails/rainloop/Dockerfile index ee040f25..9814413d 100644 --- a/webmails/rainloop/Dockerfile +++ b/webmails/rainloop/Dockerfile @@ -3,7 +3,7 @@ ARG QEMU=other # NOTE: only add file if building for arm FROM ${ARCH}php:7.4-apache as build_arm -ONBUILD COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static +ONBUILD COPY --from=balenalib/rpi-alpine:3.14 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static FROM ${ARCH}php:7.4-apache as build_other @@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y \ # Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, rainloop, roundcube RUN pip3 install socrate -ENV RAINLOOP_URL https://github.com/RainLoop/rainloop-webmail/releases/download/v1.14.0/rainloop-community-1.14.0.zip +ENV RAINLOOP_URL https://github.com/RainLoop/rainloop-webmail/releases/download/v1.16.0/rainloop-community-1.16.0.zip RUN apt-get update && apt-get install -y \ unzip python3-jinja2 \ @@ -35,6 +35,7 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists COPY include.php /var/www/html/include.php +COPY sso.php /var/www/html/sso.php COPY php.ini /php.ini COPY application.ini /application.ini diff --git a/webmails/rainloop/application.ini b/webmails/rainloop/application.ini index 5bd9943d..0504f174 100644 --- a/webmails/rainloop/application.ini +++ b/webmails/rainloop/application.ini @@ -8,6 +8,10 @@ allow_admin_panel = Off [labs] allow_gravatar = Off +{% if ADMIN == "true" %} +custom_login_link='sso.php' +custom_logout_link='{{ WEB_ADMIN }}/ui/logout' +{% endif %} [contacts] enable = On diff --git a/webmails/rainloop/sso.php b/webmails/rainloop/sso.php new file mode 100644 index 00000000..2415f45c --- /dev/null +++ b/webmails/rainloop/sso.php @@ -0,0 +1,31 @@ + /usr/local/etc/php/conf.d/timezone.ini \ && rm -rf /var/www/html/ \ && cd /var/www \ - && curl -L -O ${ROUNDCUBE_URL} \ - && curl -L -O ${CARDDAV_URL} \ - && tar -xf *.tar.gz \ - && tar -xf *.tar.bz2 \ - && rm -f *.tar.gz \ - && rm -f *.tar.bz2 \ + && curl -sL ${ROUNDCUBE_URL} | tar xz \ + && curl -sL ${CARDDAV_URL} | tar xz \ && mv roundcubemail-* html \ && mv carddav html/plugins/ \ && cd html \ @@ -46,6 +42,7 @@ RUN apt-get update && apt-get install -y \ COPY php.ini /php.ini COPY config.inc.php /var/www/html/config/ +COPY mailu.php /var/www/html/plugins/mailu/mailu.php COPY start.py /start.py EXPOSE 80/tcp diff --git a/webmails/roundcube/config.inc.php b/webmails/roundcube/config.inc.php index 627b96a7..797f229c 100644 --- a/webmails/roundcube/config.inc.php +++ b/webmails/roundcube/config.inc.php @@ -36,7 +36,11 @@ $config['managesieve_host'] = $imap; $config['managesieve_usetls'] = false; // Customization settings -$config['support_url'] = getenv('WEB_ADMIN') ? '../..' . 
getenv('WEB_ADMIN') : ''; +if (filter_var(getenv('ADMIN'), FILTER_VALIDATE_BOOLEAN, FILTER_NULL_ON_FAILURE)) { + array_push($config['plugins'], 'mailu'); + $config['support_url'] = getenv('WEB_ADMIN') ? '../..' . getenv('WEB_ADMIN') : ''; + $config['sso_logout_url'] = getenv('WEB_ADMIN').'/ui/logout'; +} $config['product_name'] = 'Mailu Webmail'; // We access the IMAP and SMTP servers locally with internal names, SSL diff --git a/webmails/roundcube/mailu.php b/webmails/roundcube/mailu.php new file mode 100644 index 00000000..bb4d65e9 --- /dev/null +++ b/webmails/roundcube/mailu.php @@ -0,0 +1,59 @@ +add_hook('startup', array($this, 'startup')); + $this->add_hook('authenticate', array($this, 'authenticate')); + $this->add_hook('login_after', array($this, 'login')); + $this->add_hook('login_failed', array($this, 'login_failed')); + $this->add_hook('logout_after', array($this, 'logout')); + } + + function startup($args) + { + if (empty($_SESSION['user_id'])) { + $args['action'] = 'login'; + } + + return $args; + } + + function authenticate($args) + { + if (!in_array('HTTP_X_REMOTE_USER', $_SERVER) || !in_array('HTTP_X_REMOTE_USER_TOKEN', $_SERVER)) { + header('HTTP/1.0 403 Forbidden'); + die(); + } + $args['user'] = $_SERVER['HTTP_X_REMOTE_USER']; + $args['pass'] = $_SERVER['HTTP_X_REMOTE_USER_TOKEN']; + + $args['cookiecheck'] = false; + $args['valid'] = true; + + return $args; + } + + function logout($args) { + // Redirect to global SSO logout path. + $this->load_config(); + + $sso_logout_url = rcmail::get_instance()->config->get('sso_logout_url'); + header("Location: " . $sso_logout_url, true); + exit; + } + + function login($args) + { + header('Location: index.php'); + exit(); + } + function login_failed($args) + { + header('Location: sso.php'); + exit(); + } + +} diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py index 649f3324..cd42ba06 100755 --- a/webmails/roundcube/start.py +++ b/webmails/roundcube/start.py @@ -8,41 +8,42 @@ import subprocess log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) -os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576)) +os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT")) * 0.66 / 1048576)) -db_flavor=os.environ.get("ROUNDCUBE_DB_FLAVOR",os.environ.get("DB_FLAVOR","sqlite")) -if db_flavor=="sqlite": - os.environ["DB_DSNW"]="sqlite:////data/roundcube.db" -elif db_flavor=="mysql": - os.environ["DB_DSNW"]="mysql://%s:%s@%s/%s" % ( - os.environ.get("ROUNDCUBE_DB_USER","roundcube"), +db_flavor = os.environ.get("ROUNDCUBE_DB_FLAVOR", "sqlite") +if db_flavor == "sqlite": + os.environ["DB_DSNW"] = "sqlite:////data/roundcube.db" +elif db_flavor == "mysql": + os.environ["DB_DSNW"] = "mysql://%s:%s@%s/%s" % ( + os.environ.get("ROUNDCUBE_DB_USER", "roundcube"), os.environ.get("ROUNDCUBE_DB_PW"), - os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")), - os.environ.get("ROUNDCUBE_DB_NAME","roundcube") - ) -elif db_flavor=="postgresql": - os.environ["DB_DSNW"]="pgsql://%s:%s@%s/%s" % ( - os.environ.get("ROUNDCUBE_DB_USER","roundcube"), + os.environ.get("ROUNDCUBE_DB_HOST", "database"), + os.environ.get("ROUNDCUBE_DB_NAME", "roundcube") + ) +elif db_flavor == "postgresql": + os.environ["DB_DSNW"] = "pgsql://%s:%s@%s/%s" % ( + os.environ.get("ROUNDCUBE_DB_USER", "roundcube"), os.environ.get("ROUNDCUBE_DB_PW"), - os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")), - os.environ.get("ROUNDCUBE_DB_NAME","roundcube") - ) + 
os.environ.get("ROUNDCUBE_DB_HOST", "database"), + os.environ.get("ROUNDCUBE_DB_NAME", "roundcube") + ) else: - print("Unknown ROUNDCUBE_DB_FLAVOR: %s",db_flavor) + print("Unknown ROUNDCUBE_DB_FLAVOR: %s", db_flavor) exit(1) - - conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/roundcube.ini") # Create dirs, setup permissions os.system("mkdir -p /data/gpg /var/www/html/logs") os.system("touch /var/www/html/logs/errors.log") os.system("chown -R www-data:www-data /var/www/html/logs") +os.system("chmod -R a+rX /var/www/html/") +os.system("ln -sf /var/www/html/index.php /var/www/html/sso.php") try: print("Initializing database") - result=subprocess.check_output(["/var/www/html/bin/initdb.sh","--dir","/var/www/html/SQL"],stderr=subprocess.STDOUT) + result = subprocess.check_output(["/var/www/html/bin/initdb.sh", "--dir", "/var/www/html/SQL"], + stderr=subprocess.STDOUT) print(result.decode()) except subprocess.CalledProcessError as e: if "already exists" in e.stdout.decode(): @@ -53,7 +54,7 @@ except subprocess.CalledProcessError as e: try: print("Upgrading database") - subprocess.check_call(["/var/www/html/bin/update.sh","--version=?","-y"],stderr=subprocess.STDOUT) + subprocess.check_call(["/var/www/html/bin/update.sh", "--version=?", "-y"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: quit(1) @@ -61,7 +62,7 @@ except subprocess.CalledProcessError as e: os.system("chown -R www-data:www-data /data") # Tail roundcube logs -subprocess.Popen(["tail","-f","-n","0","/var/www/html/logs/errors.log"]) +subprocess.Popen(["tail", "-f", "-n", "0", "/var/www/html/logs/errors.log"]) # Run apache os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"])