2460: Switch to a base image containing base tools and the podop and socrate libs r=mergify[bot] a=ghostwheel42

## What type of PR?

enhancement of build process

## What does this PR do?

Changes build.hcl to build core images using a base image.
Also adds an "assets" base image for the admin container.


Co-authored-by: Alexander Graf <ghostwheel42@users.noreply.github.com>
Co-authored-by: Pierre Jaury <pierre@jaury.eu>
Co-authored-by: kaiyou <pierre@jaury.eu>
Co-authored-by: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
main
bors[bot] 2 years ago committed by GitHub
commit 5703e97c73
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -61,7 +61,7 @@ jobs:
echo "RELEASE=true" >> $GITHUB_ENV
echo "DEPLOY=true" >> $GITHUB_ENV
echo "RELEASE=true" >> $GITHUB_ENV
- name: Derive PINNED_MAILU_VERSION for staging for master
- name: Derive PINNED_MAILU_VERSION for master
if: env.BRANCH == 'master'
shell: bash
env:
@ -98,4 +98,4 @@ jobs:
#else
# pinned_version=$root_version.$(expr $patch_version + 1)
#fi
#echo "PINNED_MAILU_VERSION=$pinned_version" >> $GITHUB_ENV
#echo "PINNED_MAILU_VERSION=$pinned_version" >> $GITHUB_ENV

@ -58,15 +58,15 @@ on:
required: true
type: string
deploy:
description: Deploy to docker hub. Happens for all branches but staging
description: Deploy to docker hub. Happens for all branches but staging. Use string true or false.
default: true
required: false
type: boolean
type: string
release:
description: 'Tag and create the github release. Only happens for branch x.y (release branch)'
description: Tag and create the github release. Use string true or false.
default: false
required: false
type: boolean
type: string
env:
HCL_FILE: ./tests/build.hcl
@ -84,11 +84,61 @@ jobs:
- name: Create matrix
id: targets
run: |
echo ::set-output name=matrix::$(docker buildx bake -f ${{env.HCL_FILE}} --print | jq -cr '.group.default.targets')
echo matrix=$(docker buildx bake -f ${{env.HCL_FILE}} --print | jq -cr '.group.default.targets') >> $GITHUB_OUTPUT
- name: Show matrix
run: |
echo ${{ steps.targets.outputs.matrix }}
## This job builds the base image. The base image is used by all other images.
build-base-image:
name: Build base image
needs:
- targets
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v3
- name: Retrieve global variables
shell: bash
run: |
echo "BRANCH=${{ inputs.branch }}" >> $GITHUB_ENV
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
string: ${{ github.repository_owner }}
- name: Build all docker images
env:
DOCKER_ORG: ghcr.io/${{ steps.string.outputs.lowercase }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
with:
files: ${{env.HCL_FILE}}
targets: base
load: false
push: false
set: |
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }},mode=max
*.platform=${{ inputs.architecture }}
# This job builds all the images. The build cache is stored in the github actions cache.
# In further jobs, this cache is used to quickly rebuild the images.
build:
@ -96,6 +146,7 @@ jobs:
if: inputs.architecture == 'linux/amd64'
needs:
- targets
- build-base-image
strategy:
fail-fast: false
matrix:
@ -113,26 +164,25 @@ jobs:
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Configure actions/cache@v3 action for storing build cache in the ${{ runner.temp }}/cache folder
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/${{ matrix.target }}
key: ${{ github.ref }}-${{ inputs.mailu_version }}-${{ matrix.target }}-${{ github.run_id }}
restore-keys: |
${{ github.ref }}-${{ inputs.mailu_version }}-${{ matrix.target }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
string: ${{ github.repository_owner }}
- name: Build all docker images
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
DOCKER_ORG: ghcr.io/${{ steps.string.outputs.lowercase }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
@ -140,10 +190,11 @@ jobs:
files: ${{env.HCL_FILE}}
targets: ${{ matrix.target }}
load: false
push: false
push: true
set: |
*.cache-from=type=local,src=${{ runner.temp }}/cache/${{ matrix.target }}
*.cache-to=type=local,dest=${{ runner.temp }}/cache/${{ matrix.target }},mode=max
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache,mode=max
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}
*.platform=${{ inputs.architecture }}
# This job builds all the images. The build cache is stored in the github actions cache.
@ -153,6 +204,7 @@ jobs:
if: inputs.architecture != 'linux/amd64'
needs:
- targets
- build-base-image
strategy:
fail-fast: false
matrix:
@ -170,26 +222,25 @@ jobs:
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Configure actions/cache@v3 action for storing build cache in the ${{ runner.temp }}/cache folder
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/${{ matrix.target }}
key: ${{ github.ref }}-${{ inputs.mailu_version }}-${{ matrix.target }}-${{ github.run_id }}
restore-keys: |
${{ github.ref }}-${{ inputs.mailu_version }}-${{ matrix.target }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
string: ${{ github.repository_owner }}
- name: Build all docker images
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
DOCKER_ORG: ghcr.io/${{ steps.string.outputs.lowercase }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
@ -199,8 +250,10 @@ jobs:
load: false
push: false
set: |
*.cache-from=type=local,src=${{ runner.temp }}/cache/${{ matrix.target }}
*.cache-to=type=local,dest=${{ runner.temp }}/cache/${{ matrix.target }},mode=max
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache-arm
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:buildcache-arm,mode=max
*.cache-from=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm
*.cache-to=type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/base:${{ hashFiles('core/base/Dockerfile','core/base/requirements-prod.txt') }}-arm,mode=max
*.platform=${{ inputs.architecture }}
# This job runs all the tests.
@ -234,112 +287,22 @@ jobs:
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Configure /cache for image docs
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/docs
key: ${{ github.ref }}-${{ inputs.mailu_version }}-docs-${{ github.run_id }}
- name: Configure /cache for image setup
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/setup
key: ${{ github.ref }}-${{ inputs.mailu_version }}-setup-${{ github.run_id }}
- name: Configure /cache for image admin
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/admin
key: ${{ github.ref }}-${{ inputs.mailu_version }}-admin-${{ github.run_id }}
- name: Configure /cache for image antispam
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antispam
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antispam-${{ github.run_id }}
- name: Configure /cache for image front
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/front
key: ${{ github.ref }}-${{ inputs.mailu_version }}-front-${{ github.run_id }}
- name: Configure /cache for image imap
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/imap
key: ${{ github.ref }}-${{ inputs.mailu_version }}-imap-${{ github.run_id }}
- name: Configure /cache for image smtp
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/smtp
key: ${{ github.ref }}-${{ inputs.mailu_version }}-smtp-${{ github.run_id }}
- name: Configure /cache for image snappymail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/snappymail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-snappymail-${{ github.run_id }}
- name: Configure /cache for image roundcube
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/roundcube
key: ${{ github.ref }}-${{ inputs.mailu_version }}-roundcube-${{ github.run_id }}
- name: Configure /cache for image antivirus
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antivirus
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antivirus-${{ github.run_id }}
- name: Configure /cache for image fetchmail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/fetchmail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-fetchmail-${{ github.run_id }}
- name: Configure /cache for image resolver
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/resolver
key: ${{ github.ref }}-${{ inputs.mailu_version }}-resolver-${{ github.run_id }}
- name: Configure /cache for image traefik-certdumper
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/traefik-certdumper
key: ${{ github.ref }}-${{ inputs.mailu_version }}-traefik-certdumper-${{ github.run_id }}
- name: Configure /cache for image webdav
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/webdav
key: ${{ github.ref }}-${{ inputs.mailu_version }}-webdav-${{ github.run_id }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Build docker images for testing from cache
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
files: ${{env.HCL_FILE}}
load: true
push: false
set: |
*.cache-from=type=local,src=${{ runner.temp }}/cache/docs
*.cache-from=type=local,src=${{ runner.temp }}/cache/setup
*.cache-from=type=local,src=${{ runner.temp }}/cache/admin
*.cache-from=type=local,src=${{ runner.temp }}/cache/antispam
*.cache-from=type=local,src=${{ runner.temp }}/cache/front
*.cache-from=type=local,src=${{ runner.temp }}/cache/imap
*.cache-from=type=local,src=${{ runner.temp }}/cache/smtp
*.cache-from=type=local,src=${{ runner.temp }}/cache/snappymail
*.cache-from=type=local,src=${{ runner.temp }}/cache/roundcube
*.cache-from=type=local,src=${{ runner.temp }}/cache/antivirus
*.cache-from=type=local,src=${{ runner.temp }}/cache/fetchmail
*.cache-from=type=local,src=${{ runner.temp }}/cache/resolver
*.cache-from=type=local,src=${{ runner.temp }}/cache/traefik-certdumper
*.cache-from=type=local,src=${{ runner.temp }}/cache/webdav
*.platform=${{ inputs.architecture }}
string: ${{ github.repository_owner }}
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@ -347,12 +310,10 @@ jobs:
- name: Test ${{ matrix.target }}
run: python tests/compose/test.py ${{ matrix.target }} ${{ matrix.time }}
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
DOCKER_ORG: ghcr.io/${{ steps.string.outputs.lowercase }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
# This job deploys the docker images to the docker repository. The build.hcl file contains logic that determines what tags are pushed.
# E.g. for master only the :master and :latest tags are pushed.
deploy:
name: Deploy images
# Deploying is not required for staging
@ -361,6 +322,10 @@ jobs:
needs:
- build
- tests
strategy:
fail-fast: false
matrix:
target: ["setup", "docs", "fetchmail", "roundcube", "admin", "traefik-certdumper", "radicale", "clamav", "rspamd", "postfix", "dovecot", "unbound", "nginx", "snappymail"]
steps:
- uses: actions/checkout@v3
- name: Retrieve global variables
@ -370,76 +335,6 @@ jobs:
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Configure /cache for image docs
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/docs
key: ${{ github.ref }}-${{ inputs.mailu_version }}-docs-${{ github.run_id }}
- name: Configure /cache for image setup
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/setup
key: ${{ github.ref }}-${{ inputs.mailu_version }}-setup-${{ github.run_id }}
- name: Configure /cache for image admin
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/admin
key: ${{ github.ref }}-${{ inputs.mailu_version }}-admin-${{ github.run_id }}
- name: Configure /cache for image antispam
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antispam
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antispam-${{ github.run_id }}
- name: Configure /cache for image front
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/front
key: ${{ github.ref }}-${{ inputs.mailu_version }}-front-${{ github.run_id }}
- name: Configure /cache for image imap
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/imap
key: ${{ github.ref }}-${{ inputs.mailu_version }}-imap-${{ github.run_id }}
- name: Configure /cache for image smtp
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/smtp
key: ${{ github.ref }}-${{ inputs.mailu_version }}-smtp-${{ github.run_id }}
- name: Configure /cache for image snappymail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/snappymail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-snappymail-${{ github.run_id }}
- name: Configure /cache for image roundcube
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/roundcube
key: ${{ github.ref }}-${{ inputs.mailu_version }}-roundcube-${{ github.run_id }}
- name: Configure /cache for image antivirus
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antivirus
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antivirus-${{ github.run_id }}
- name: Configure /cache for image fetchmail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/fetchmail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-fetchmail-${{ github.run_id }}
- name: Configure /cache for image resolver
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/resolver
key: ${{ github.ref }}-${{ inputs.mailu_version }}-resolver-${{ github.run_id }}
- name: Configure /cache for image traefik-certdumper
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/traefik-certdumper
key: ${{ github.ref }}-${{ inputs.mailu_version }}-traefik-certdumper-${{ github.run_id }}
- name: Configure /cache for image webdav
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/webdav
key: ${{ github.ref }}-${{ inputs.mailu_version }}-webdav-${{ github.run_id }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
@ -450,31 +345,19 @@ jobs:
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Deploy images to docker hub. Build.hcl contains the logic for the tags that are pushed.
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
files: ${{env.HCL_FILE}}
push: true
set: |
*.cache-from=type=local,src=${{ runner.temp }}/cache/docs
*.cache-from=type=local,src=${{ runner.temp }}/cache/setup
*.cache-from=type=local,src=${{ runner.temp }}/cache/admin
*.cache-from=type=local,src=${{ runner.temp }}/cache/antispam
*.cache-from=type=local,src=${{ runner.temp }}/cache/front
*.cache-from=type=local,src=${{ runner.temp }}/cache/imap
*.cache-from=type=local,src=${{ runner.temp }}/cache/smtp
*.cache-from=type=local,src=${{ runner.temp }}/cache/snappymail
*.cache-from=type=local,src=${{ runner.temp }}/cache/roundcube
*.cache-from=type=local,src=${{ runner.temp }}/cache/antivirus
*.cache-from=type=local,src=${{ runner.temp }}/cache/fetchmail
*.cache-from=type=local,src=${{ runner.temp }}/cache/resolver
*.cache-from=type=local,src=${{ runner.temp }}/cache/traefik-certdumper
*.cache-from=type=local,src=${{ runner.temp }}/cache/webdav
*.platform=${{ inputs.architecture }}
string: ${{ github.repository_owner }}
- name: Push image to Docker
shell: bash
run: |
if [ '${{ env.MAILU_VERSION }}' == 'master' ]; then pinned_mailu_version='master'; else pinned_mailu_version=${{ env.PINNED_MAILU_VERSION}}; fi;
docker buildx imagetools create \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:${{ env.MAILU_VERSION }} \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:$pinned_mailu_version \
ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:${{ env.MAILU_VERSION }}
deploy-arm:
name: Deploy images for arm
@ -483,6 +366,10 @@ jobs:
runs-on: self-hosted
needs:
- build-arm
strategy:
fail-fast: false
matrix:
target: ["setup", "docs", "fetchmail", "roundcube", "admin", "traefik-certdumper", "radicale", "clamav", "rspamd", "postfix", "dovecot", "unbound", "nginx", "snappymail"]
steps:
- uses: actions/checkout@v3
- name: Retrieve global variables
@ -492,76 +379,6 @@ jobs:
echo "MAILU_VERSION=${{ inputs.mailu_version }}" >> $GITHUB_ENV
echo "PINNED_MAILU_VERSION=${{ inputs.pinned_mailu_version }}" >> $GITHUB_ENV
echo "DOCKER_ORG=${{ inputs.docker_org }}" >> $GITHUB_ENV
- name: Configure /cache for image docs
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/docs
key: ${{ github.ref }}-${{ inputs.mailu_version }}-docs-${{ github.run_id }}
- name: Configure /cache for image setup
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/setup
key: ${{ github.ref }}-${{ inputs.mailu_version }}-setup-${{ github.run_id }}
- name: Configure /cache for image admin
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/admin
key: ${{ github.ref }}-${{ inputs.mailu_version }}-admin-${{ github.run_id }}
- name: Configure /cache for image antispam
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antispam
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antispam-${{ github.run_id }}
- name: Configure /cache for image front
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/front
key: ${{ github.ref }}-${{ inputs.mailu_version }}-front-${{ github.run_id }}
- name: Configure /cache for image imap
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/imap
key: ${{ github.ref }}-${{ inputs.mailu_version }}-imap-${{ github.run_id }}
- name: Configure /cache for image smtp
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/smtp
key: ${{ github.ref }}-${{ inputs.mailu_version }}-smtp-${{ github.run_id }}
- name: Configure /cache for image snappymail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/snappymail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-snappymail-${{ github.run_id }}
- name: Configure /cache for image roundcube
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/roundcube
key: ${{ github.ref }}-${{ inputs.mailu_version }}-roundcube-${{ github.run_id }}
- name: Configure /cache for image antivirus
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/antivirus
key: ${{ github.ref }}-${{ inputs.mailu_version }}-antivirus-${{ github.run_id }}
- name: Configure /cache for image fetchmail
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/fetchmail
key: ${{ github.ref }}-${{ inputs.mailu_version }}-fetchmail-${{ github.run_id }}
- name: Configure /cache for image resolver
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/resolver
key: ${{ github.ref }}-${{ inputs.mailu_version }}-resolver-${{ github.run_id }}
- name: Configure /cache for image traefik-certdumper
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/traefik-certdumper
key: ${{ github.ref }}-${{ inputs.mailu_version }}-traefik-certdumper-${{ github.run_id }}
- name: Configure /cache for image webdav
uses: actions/cache@v3
with:
path: ${{ runner.temp }}/cache/webdav
key: ${{ github.ref }}-${{ inputs.mailu_version }}-webdav-${{ github.run_id }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- uses: crazy-max/ghaction-github-runtime@v2
@ -572,31 +389,19 @@ jobs:
with:
username: ${{ secrets.Docker_Login }}
password: ${{ secrets.Docker_Password }}
- name: Deploy images to docker hub. Build.hcl contains the logic for the tags that are pushed.
env:
DOCKER_ORG: ${{ env.DOCKER_ORG }}
MAILU_VERSION: ${{ env.MAILU_VERSION }}
PINNED_MAILU_VERSION: ${{ env.PINNED_MAILU_VERSION }}
uses: docker/bake-action@v2
- name: Helper to convert docker org to lowercase
id: string
uses: ASzc/change-string-case-action@v2
with:
files: ${{env.HCL_FILE}}
push: true
set: |
*.cache-from=type=local,src=${{ runner.temp }}/cache/docs
*.cache-from=type=local,src=${{ runner.temp }}/cache/setup
*.cache-from=type=local,src=${{ runner.temp }}/cache/admin
*.cache-from=type=local,src=${{ runner.temp }}/cache/antispam
*.cache-from=type=local,src=${{ runner.temp }}/cache/front
*.cache-from=type=local,src=${{ runner.temp }}/cache/imap
*.cache-from=type=local,src=${{ runner.temp }}/cache/smtp
*.cache-from=type=local,src=${{ runner.temp }}/cache/snappymail
*.cache-from=type=local,src=${{ runner.temp }}/cache/roundcube
*.cache-from=type=local,src=${{ runner.temp }}/cache/antivirus
*.cache-from=type=local,src=${{ runner.temp }}/cache/fetchmail
*.cache-from=type=local,src=${{ runner.temp }}/cache/resolver
*.cache-from=type=local,src=${{ runner.temp }}/cache/traefik-certdumper
*.cache-from=type=local,src=${{ runner.temp }}/cache/webdav
*.platform=${{ inputs.architecture }}
string: ${{ github.repository_owner }}
- name: Push image to Docker
shell: bash
run: |
if [ '${{ env.MAILU_VERSION }}' == 'master-arm' ]; then pinned_mailu_version='master-arm'; else pinned_mailu_version=${{ env.PINNED_MAILU_VERSION}}; fi;
docker buildx imagetools create \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:${{ env.MAILU_VERSION }} \
--tag ${{ inputs.docker_org }}/${{ matrix.target }}:$pinned_mailu_version \
ghcr.io/${{ steps.string.outputs.lowercase }}/${{ matrix.target }}:${{ env.MAILU_VERSION }}
#This job creates a tagged release. A tag is created for the pinned version x.y.z. The GH release refers to this tag.
tag-release:

@ -82,7 +82,7 @@ jobs:
echo "PINNED_MAILU_VERSION=staging" >> $GITHUB_ENV
echo "DEPLOY=false" >> $GITHUB_ENV
echo "RELEASE=false" >> $GITHUB_ENV
- name: Derive PINNED_MAILU_VERSION for staging for master
- name: Derive PINNED_MAILU_VERSION for master
if: env.BRANCH == 'master'
shell: bash
env:
@ -131,4 +131,4 @@ jobs:
#else
# pinned_version=$root_version.$(expr $patch_version + 1)
#fi
#echo "PINNED_MAILU_VERSION=$pinned_version" >> $GITHUB_ENV
#echo "PINNED_MAILU_VERSION=$pinned_version" >> $GITHUB_ENV

4
.gitignore vendored

@ -1,5 +1,5 @@
*.pyc
*.mo
**/*.pyc
**/*.mo
__pycache__
pip-selfcheck.json
/core/admin/lib*

@ -1,61 +1,31 @@
# First stage to build assets
ARG DISTRO=alpine:3.14.5
# syntax=docker/dockerfile-upstream:1.4.3
FROM node:16-alpine3.16 as assets
COPY package.json ./
RUN set -eu \
&& npm config set update-notifier false \
&& npm install --no-fund
COPY webpack.config.js ./
COPY assets ./assets
RUN set -eu \
&& sed -i 's/#007bff/#55a5d9/' node_modules/admin-lte/build/scss/_bootstrap-variables.scss \
&& for l in ca da de:de-DE en:en-GB es:es-ES eu fr:fr-FR he hu is it:it-IT ja nb_NO:no-NB nl:nl-NL pl pt:pt-PT ru sv:sv-SE zh; do \
cp node_modules/datatables.net-plugins/i18n/${l#*:}.json assets/${l%:*}.json; \
done \
&& node_modules/.bin/webpack-cli --color
# Actual application
FROM $DISTRO
ARG VERSION
ENV TZ Etc/UTC
# admin image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN set -eu \
&& apk add --no-cache python3 py3-pip py3-wheel git bash tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache libressl mariadb-connector-c postgresql-libs
RUN mkdir -p /app
WORKDIR /app
COPY --from=assets /work/static/ ./mailu/static/
COPY requirements-prod.txt requirements.txt
RUN set -eu \
&& apk add --no-cache libressl curl postgresql-libs mariadb-connector-c \
&& pip install --no-cache-dir -r requirements.txt --only-binary=:all: --no-binary=Flask-bootstrap,PyYAML,SQLAlchemy \
|| ( apk add --no-cache --virtual build-dep libressl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev cargo \
&& pip install --upgrade pip \
&& pip install -r requirements.txt \
&& apk del --no-cache build-dep )
COPY audit.py /
COPY start.py /
COPY --from=assets static ./mailu/static
COPY mailu ./mailu
COPY migrations ./migrations
COPY start.py /start.py
COPY audit.py /audit.py
COPY migrations/ ./migrations/
RUN pybabel compile -d mailu/translations
COPY mailu/ ./mailu/
RUN set -euxo pipefail \
; venv/bin/pybabel compile -d mailu/translations
RUN echo $VERSION >/version
EXPOSE 80/tcp
HEALTHCHECK CMD curl -skfLo /dev/null http://localhost/sso/login?next=ui.index
VOLUME ["/data","/dkim"]
ENV FLASK_APP mailu
ENV FLASK_APP=mailu
CMD /start.py
HEALTHCHECK CMD curl -f -L http://localhost/sso/login?next=ui.index || exit 1
RUN echo $VERSION >> /version

@ -0,0 +1,16 @@
# syntax=docker/dockerfile-upstream:1.4.3
FROM node:16-alpine3.16
WORKDIR /work
COPY content/ ./
RUN set -euxo pipefail \
&& npm config set update-notifier false \
&& npm install --no-audit --no-fund \
&& sed -i 's/#007bff/#55a5d9/' node_modules/admin-lte/build/scss/_bootstrap-variables.scss \
&& for l in ca da de:de-DE en:en-GB es:es-ES eu fr:fr-FR he hu is it:it-IT ja nb_NO:no-NB nl:nl-NL pl pt:pt-PT ru sv:sv-SE zh; do \
cp node_modules/datatables.net-plugins/i18n/${l#*:}.json assets/${l%:*}.json; \
done \
&& node_modules/.bin/webpack-cli --color

Before

Width:  |  Height:  |  Size: 4.8 KiB

After

Width:  |  Height:  |  Size: 4.8 KiB

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import sys
import tabulate

@ -1,28 +0,0 @@
Flask
Flask-Login
Flask-SQLAlchemy
Flask-bootstrap
Flask-Babel
Flask-migrate
Flask-script
Flask-wtf
Flask-debugtoolbar
limits
redis
WTForms-Components
socrate
passlib
gunicorn
tabulate
PyYAML
PyOpenSSL
Pygments
dnspython
tenacity
mysql-connector-python
idna
srslib
marshmallow
flask-marshmallow
marshmallow-sqlalchemy
xmltodict

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import logging as log

@ -0,0 +1,58 @@
# syntax=docker/dockerfile-upstream:1.4.3
# base system image (intermediate)
ARG DISTRO=alpine:3.14.5
FROM $DISTRO as system
ENV TZ=Etc/UTC LANG=C.UTF-8
ARG MAILU_UID=1000
ARG MAILU_GID=1000
RUN set -euxo pipefail \
; addgroup -Sg ${MAILU_GID} mailu \
; adduser -Sg ${MAILU_UID} -G mailu -h /app -g "mailu app" -s /bin/bash mailu \
; apk add --no-cache bash ca-certificates curl python3 tzdata
WORKDIR /app
CMD /bin/bash
# build virtual env (intermediate)
FROM system as build
ARG MAILU_ENV=prod
ENV VIRTUAL_ENV=/app/venv
COPY requirements-build.txt ./
RUN set -euxo pipefail \
; apk add --no-cache py3-pip \
; python3 -m venv ${VIRTUAL_ENV} \
; ${VIRTUAL_ENV}/bin/pip install --no-cache-dir -r requirements-build.txt \
; apk del -r py3-pip
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
COPY requirements-${MAILU_ENV}.txt ./
COPY libs/ libs/
RUN set -euxo pipefail \
; machine="$(uname -m)"; deps="" \
; [[ "${machine}" == arm* || "${machine}" == aarch64 ]] && deps="${deps} build-base gcc libffi-dev python3-dev" \
; [[ "${machine}" == armv7* ]] && deps="${deps} cargo libressl-dev mariadb-connector-c-dev postgresql-dev" \
; [[ "${deps}" ]] && apk add --virtual .build-deps ${deps} \
; pip install -r requirements-${MAILU_ENV}.txt \
; apk -e info -q .build-deps && apk del -r .build-deps \
; rm -rf /root/.cache /tmp/*.pem
# base mailu image
FROM system
COPY --from=build /app/venv/ /app/venv/
ENV VIRTUAL_ENV=/app/venv
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"

@ -0,0 +1,20 @@
.DS_Store
.idea
tmp
*.bak
*~
.*.swp
__pycache__/
*.pyc
*.pyo
*.egg-info/
.build
.env*
.venv
*.code-workspace
build/

@ -0,0 +1,7 @@
This project is open source, and your contributions are all welcome. There are mostly three different ways one can contribute to the project:
1. use Podop, either on test or on production servers, and report meaningful bugs when you find some;
2. write and publish, or contribute to mail distributions based on Podop, like Mailu;
3. contribute code and/or configuration to the repository (see [the development guidelines](https://mailu.io/contributors/guide.html) for details);
Either way, keep in mind that the code you write must be licensed under the same conditions as the project itself. Additionally, all contributors are considered equal co-authors of the project.

@ -0,0 +1,25 @@
MIT License
Copyright (c) 2018 All Podop contributors at the date
This software consists of voluntary contributions made by multiple individuals.
For exact contribution history, see the revision history available at
https://github.com/Mailu/podop.git
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,2 @@
include README.md
include LICENSE.md

@ -0,0 +1,112 @@
Podop is a piece of middleware designed to run between Postfix or Dovecot
on one side, any Python implementation of a table lookup protocol on the
other side.
It is thus able to forward Postfix maps and Dovecot dicts to the same
(or multiple) backends in order to write a single, more flexible backend
for a mail distribution.
Examples
========
- Connect Postfix to a DNS lookup so that every domain that has a proper MX
record to your Postfix is actually accepted as a local domain
- Connect both Postfix and Dovecot to an HTTP microservice to run a high
availability microservice-based mail service
- Use a single database server running any Python-compatible API for both
your Postfix and Dovecot servers
Configure Podop tables
======================
Podop tables are configured through CLI arguments when running the server.
You must provide a ``--name`` for the table, a ``--type`` for the table and
a ``--param`` that parametrizes the map.
URL table
---------
The URL table will initiate an HTTP GET request for read access and an HTTP
POST request for write access to a table. The table is parametrized with
a template URL containing ``§`` (or ``{}``) for inserting the table key.
```
--name test --type url --param http://microservice/api/v1/map/tests/§
```
GET requests should return ``200`` and a JSON-encoded object
that will be passed either to Postfix or Dovecot. They should return ``4XX``
for access issues that will result in lookup miss, and ``5XX`` for backend
issues that will result in a temporary failure.
POST requests will contain a JSON-encoded object in the request body, that
will be saved in the table.
Postfix usage
=============
In order to access Podop tables from Postfix, you should setup ``socketmap``
Postfix maps. For instance, in order to access the ``test`` table on a Podop
socket at ``/tmp/podop.socket``, use the following setup:
```
virtual_alias_maps = socketmap:unix:/tmp/podop.socket:test
```
Multiple maps or identical maps can be configured for various usages.
```
virtual_alias_maps = socketmap:unix:/tmp/podop.socket:alias
virtual_mailbox_domains = socketmap:unix:/tmp/podop.socket:domain
virtual_mailbox_maps = socketmap:unix:/tmp/podop.socket:alias
```
In order to simplify the configuration, you can setup a shortcut.
```
podop = socketmap:unix:/tmp/podop.socket
virtual_alias_maps = ${podop}:alias
virtual_mailbox_domains = ${podop}:domain
virtual_mailbox_maps = ${podop}:alias
```
Dovecot usage
=============
In order to access Podop tables from Dovecot, you should setup a ``proxy``
Dovecot dictionary. For instance, in order to access the ``test`` table on
a Podop socket at ``/tmp/podop.socket``, use the following setup:
```
mail_attribute_dict = proxy:/tmp/podop.socket:test
```
Multiple maps or identical maps can be configured for various usages.
```
mail_attribute_dict = proxy:/tmp/podop.socket:meta
passdb {
driver = dict
args = /etc/dovecot/auth.conf
}
userdb {
driver = dict
args = /etc/dovecot/auth.conf
}
# then in auth.conf
uri = proxy:/tmp/podop.socket:auth
iterate_disable = yes
default_pass_scheme = plain
password_key = passdb/%u
user_key = userdb/%u
```
Contributing
============
Podop is free software, open to suggestions and contributions. All
components are free software and compatible with the MIT license. All
the code is placed under the MIT license.

@ -0,0 +1,46 @@
""" Podop is a *Po*stfix and *Do*vecot proxy
It is able to proxify postfix maps and dovecot dicts to any table
"""
import asyncio
import logging
import sys
from podop import postfix, dovecot, table
# Map of supported server protocols, selected via --mode on the CLI.
SERVER_TYPES = {
    "postfix": postfix.SocketmapProtocol,
    "dovecot": dovecot.DictProtocol,
}

# Map of supported table backends, selected via --type on the CLI.
TABLE_TYPES = {
    "url": table.UrlTable,
}


def run_server(verbosity, server_type, socket, tables):
    """ Run the server, given its type, socket path and table list.

    verbosity   -- integer; 0 logs at WARNING, 1 at INFO, 2+ at DEBUG
    server_type -- key into SERVER_TYPES ("postfix" or "dovecot")
    socket      -- path of the unix socket to listen on
    tables      -- iterable of (name, type, param) tuples, one per table
    """
    # Instantiate one table backend per configured table
    table_map = {}
    for name, table_type, param in tables:
        table_map[name] = TABLE_TYPES[table_type](param)
    # verbosity 0 -> 30 (WARNING), 1 -> 20 (INFO), 2 -> 10 (DEBUG), 3+ -> 0
    logging.basicConfig(
        stream=sys.stderr, level=max(3 - verbosity, 0) * 10,
        format='%(name)s (%(levelname)s): %(message)s')
    # Serve on the unix socket until interrupted
    loop = asyncio.get_event_loop()
    server = loop.run_until_complete(loop.create_unix_server(
        SERVER_TYPES[server_type].factory(table_map), socket
    ))
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    server.close()
    loop.run_until_complete(server.wait_closed())
    loop.close()

@ -0,0 +1,165 @@
""" Dovecot dict proxy implementation
"""
import asyncio
import logging
import json
class DictProtocol(asyncio.Protocol):
    """ Protocol to answer Dovecot dict requests, as implemented in Dict proxy.

    Only a subset of operations is handled properly by this proxy: hello,
    lookup and transaction-based set.

    There is very little documentation about the protocol, most of it was
    reverse-engineered from :

    https://github.com/dovecot/core/blob/master/src/dict/dict-connection.c
    https://github.com/dovecot/core/blob/master/src/dict/dict-commands.c
    https://github.com/dovecot/core/blob/master/src/lib-dict/dict-client.h
    """

    # Value type codes announced in the hello message: 0 is str, 1 is int
    DATA_TYPES = {0: str, 1: int}

    def __init__(self, table_map):
        self.table_map = table_map
        # Minor and major versions are not properly checked yet, but stored
        # anyway
        self.major_version = None
        self.minor_version = None
        # Every connection starts with specifying which table is used, dovecot
        # tables are called dicts
        self.dict = None
        # Dictionary of pending set operations per transaction id
        self.transactions = {}
        super(DictProtocol, self).__init__()

    def connection_made(self, transport):
        logging.info('Connect {}'.format(transport.get_extra_info('peername')))
        self.transport = transport

    def data_received(self, data):
        logging.debug("Received {}".format(data))
        results = []
        # Every command is separated by "\n"
        for line in data.split(b"\n"):
            # A command must at least have a type and one argument
            if len(line) < 2:
                continue
            # The command function will handle the command itself
            command = DictProtocol.COMMANDS.get(line[0])
            if command is None:
                logging.warning('Unknown command {}'.format(line[0]))
                return self.transport.abort()
            # Args are separated by "\t"
            args = line[1:].strip().split(b"\t")
            try:
                future = command(self, *args)
                if future:
                    results.append(future)
            except Exception:
                logging.exception("Error when processing request")
                return self.transport.abort()
        # For asyncio consistency, wait for all results to fire before
        # actually returning control
        return asyncio.gather(*results)

    def process_hello(self, major, minor, value_type, user, dict_name):
        """ Process a dict protocol hello message
        """
        self.major, self.minor = int(major), int(minor)
        self.value_type = DictProtocol.DATA_TYPES[int(value_type)]
        self.user = user.decode("utf8")
        self.dict = self.table_map[dict_name.decode("ascii")]
        logging.debug("Client {}.{} type {}, user {}, dict {}".format(
            self.major, self.minor, self.value_type, self.user, dict_name))

    async def process_lookup(self, key):
        """ Process a dict lookup message
        """
        logging.debug("Looking up {}".format(key))
        # Priv and shared keys are handled slightly differently
        key_type, key = key.decode("utf8").split("/", 1)
        try:
            result = await self.dict.get(
                key, ns=(self.user if key_type == "priv" else None)
            )
            if type(result) is str:
                response = result.encode("utf8")
            elif type(result) is bytes:
                response = result
            else:
                response = json.dumps(result).encode("ascii")
            return self.reply(b"O", response)
        except KeyError:
            return self.reply(b"N")

    def process_begin(self, transaction_id):
        """ Process a dict begin message
        """
        self.transactions[transaction_id] = {}

    def process_set(self, transaction_id, key, value):
        """ Process a dict set message
        """
        # Nothing is actually set until everything is commited
        self.transactions[transaction_id][key] = value

    async def process_commit(self, transaction_id):
        """ Process a dict commit message
        """
        # Actually handle all set operations from the transaction store
        # (the original kept an unused `results` accumulator here; removed)
        for key, value in self.transactions[transaction_id].items():
            logging.debug("Storing {}={}".format(key, value))
            key_type, key = key.decode("utf8").split("/", 1)
            await self.dict.set(
                key, json.loads(value),
                ns=(self.user if key_type == "priv" else None)
            )
        # Remove stored transaction
        del self.transactions[transaction_id]
        return self.reply(b"O", transaction_id)

    def reply(self, command, *args):
        logging.debug("Replying {} with {}".format(command, args))
        self.transport.write(command)
        self.transport.write(b"\t".join(map(tabescape, args)))
        self.transport.write(b"\n")

    @classmethod
    def factory(cls, table_map):
        """ Provide a protocol factory for a given map instance.
        """
        return lambda: cls(table_map)

    # Dispatch table from command byte to handler method
    COMMANDS = {
        ord("H"): process_hello,
        ord("L"): process_lookup,
        ord("B"): process_begin,
        ord("C"): process_commit,
        ord("S"): process_set
    }
def tabescape(unescaped):
    """ Escape a string using the specific Dovecot tabescape

    See: https://github.com/dovecot/core/blob/master/src/lib/strescape.c
    """
    # \x01 must be escaped first, otherwise the \x01 bytes introduced by
    # the later substitutions would be escaped a second time.
    escaped = unescaped.replace(b"\x01", b"\x011")
    escaped = escaped.replace(b"\x00", b"\x010")
    escaped = escaped.replace(b"\t", b"\x01t")
    escaped = escaped.replace(b"\n", b"\x01n")
    return escaped.replace(b"\r", b"\x01r")
def tabunescape(escaped):
    """ Unescape a string using the specific Dovecot tabescape

    See: https://github.com/dovecot/core/blob/master/src/lib/strescape.c
    """
    # Exact reverse order of tabescape: \x011 -> \x01 must come last so the
    # freshly restored \x01 bytes are not mistaken for escape prefixes.
    unescaped = escaped.replace(b"\x01r", b"\r")
    unescaped = unescaped.replace(b"\x01n", b"\n")
    unescaped = unescaped.replace(b"\x01t", b"\t")
    unescaped = unescaped.replace(b"\x010", b"\x00")
    return unescaped.replace(b"\x011", b"\x01")

@ -0,0 +1,116 @@
""" Postfix map proxy implementation
"""
import asyncio
import logging
class NetstringProtocol(asyncio.Protocol):
    """ Netstring asyncio protocol implementation.

    For protocol details, see https://cr.yp.to/proto/netstrings.txt
    """

    # Length of the smallest allocated buffer, larger buffers will be
    # allocated dynamically
    BASE_BUFFER = 1024

    # Maximum length of a buffer, will crash when exceeded
    MAX_BUFFER = 65535

    def __init__(self):
        super(NetstringProtocol, self).__init__()
        self.init_buffer()

    def init_buffer(self):
        """ Reset the parser state, ready for the next netstring. """
        self.len = None  # None when waiting for a length to be sent
        self.separator = -1  # -1 when not yet detected (str.find)
        self.index = 0  # write position, relative to the buffer
        self.buffer = bytearray(NetstringProtocol.BASE_BUFFER)

    def data_received(self, data):
        # Grow the buffer if the incoming chunk does not fit
        missing = len(data) - len(self.buffer) + self.index
        if missing > 0:
            if len(self.buffer) + missing > NetstringProtocol.MAX_BUFFER:
                raise IOError("Not enough space when decoding netstring")
            # BUGFIX: was buffer.append(bytearray(...)), which raises
            # TypeError because bytearray.append takes a single integer;
            # extend() concatenates the zero-filled growth instead.
            self.buffer.extend(bytearray(missing + 1))
        new_index = self.index + len(data)
        self.buffer[self.index:new_index] = data
        self.index = new_index
        # Try to detect a length at the beginning of the string
        if self.len is None:
            self.separator = self.buffer.find(0x3a)
            if self.separator != -1 and self.buffer[:self.separator].isdigit():
                self.len = int(self.buffer[:self.separator], 10)
        # Then get the complete string
        if self.len is not None:
            if self.index - self.separator == self.len + 2:
                string = self.buffer[self.separator + 1:self.index - 1]
                self.init_buffer()
                self.string_received(string)

    def string_received(self, string):
        """ A new netstring was received
        """
        pass

    def send_string(self, string):
        """ Send a netstring
        """
        logging.debug("Replying {}".format(string))
        self.transport.write(str(len(string)).encode('ascii'))
        self.transport.write(b':')
        self.transport.write(string)
        self.transport.write(b',')


class SocketmapProtocol(NetstringProtocol):
    """ Protocol to answer Postfix socketmap and proxify lookups to
    an outside object.

    See http://www.postfix.org/socketmap_table.5.html for details on the
    protocol.

    A table map must be provided as a dictionary to lookup tables.
    """

    def __init__(self, table_map):
        self.table_map = table_map
        super(SocketmapProtocol, self).__init__()

    def connection_made(self, transport):
        logging.info('Connect {}'.format(transport.get_extra_info('peername')))
        self.transport = transport

    def string_received(self, string):
        # The postfix format contains a space for separating the map name and
        # the key
        logging.debug("Received {}".format(string))
        space = string.find(0x20)
        if space != -1:
            name = string[:space].decode('ascii')
            key = string[space+1:].decode('utf8')
            return asyncio.ensure_future(self.process_request(name, key))

    async def process_request(self, name, key):
        """ Process a request by querying the provided map.
        """
        logging.debug("Request {}/{}".format(name, key))
        try:
            # BUGFIX: was table_map.get(name), which returns None for an
            # unknown map and never raises KeyError, so the dedicated
            # 'TEMP no such map' reply below was unreachable.
            table = self.table_map[name]
        except KeyError:
            return self.send_string(b'TEMP no such map')
        try:
            result = await table.get(key)
            return self.send_string(b'OK ' + str(result).encode('utf8'))
        except KeyError:
            return self.send_string(b'NOTFOUND ')
        except Exception:
            logging.exception("Error when processing request")
            return self.send_string(b'TEMP unknown error')

    @classmethod
    def factory(cls, table_map):
        """ Provide a protocol factory for a given map instance.
        """
        return lambda: cls(table_map)

@ -0,0 +1,55 @@
""" Table lookup backends for podop
"""
import aiohttp
import logging
from urllib.parse import quote
class UrlTable(object):
    """ Resolve an entry by querying a parametrized GET URL.
    """

    def __init__(self, url_pattern):
        """ url_pattern must contain a format ``{}`` so the key is injected in
        the url before the query, the ``§`` character will be replaced with
        ``{}`` for easier setup.
        """
        self.url_pattern = url_pattern.replace('§', '{}')

    async def get(self, key, ns=None):
        """ Get the given key in the provided namespace
        """
        logging.debug("Table get {}".format(key))
        if ns is not None:
            key += "/" + ns
        async with aiohttp.ClientSession() as session:
            url = self.url_pattern.format(quote(key))
            async with session.get(url) as request:
                if request.status == 200:
                    result = await request.json()
                    logging.debug("Table get {} is {}".format(key, result))
                    return result
                if request.status == 404:
                    # 404 maps to a lookup miss for the caller
                    raise KeyError()
                # any other status is a backend error
                raise Exception(request.status)

    async def set(self, key, value, ns=None):
        """ Set a value for the given key in the provided namespace
        """
        logging.debug("Table set {} to {}".format(key, value))
        if ns is not None:
            key += "/" + ns
        async with aiohttp.ClientSession() as session:
            url = self.url_pattern.format(quote(key))
            await session.post(url, json=value)

    async def iter(self, cat):
        """ Iterate the given key (experimental)
        """
        logging.debug("Table iter {}".format(cat))
        async with aiohttp.ClientSession() as session:
            async with session.get(self.url_pattern.format(cat)) as request:
                if request.status == 200:
                    return await request.json()

@ -0,0 +1,33 @@
#!/usr/bin/env python
import argparse
from podop import run_server, SERVER_TYPES, TABLE_TYPES
def main():
    """ Parse CLI arguments and launch a podop server accordingly.
    """
    cli = argparse.ArgumentParser("Postfix and Dovecot proxy")
    cli.add_argument("--socket", required=True,
                     help="path to the listening unix socket")
    cli.add_argument("--mode", choices=SERVER_TYPES.keys(), required=True,
                     help="select which server will connect to Podop")
    cli.add_argument("--name", action="append",
                     help="name of each configured table")
    cli.add_argument("--type", choices=TABLE_TYPES.keys(), action="append",
                     help="type of each configured table")
    cli.add_argument("--param", action="append",
                     help="mandatory param for each table configured")
    cli.add_argument("-v", "--verbose", dest="verbosity",
                     action="count", default=0,
                     help="increases log verbosity for each occurence.")
    options = cli.parse_args()
    # Tables arrive as parallel --name/--type/--param lists; zip them back
    # together into (name, type, param) tuples for run_server.
    tables = zip(options.name, options.type, options.param) if options.name else []
    run_server(options.verbosity, options.mode, options.socket, tables)


if __name__ == "__main__":
    main()

@ -0,0 +1,23 @@
#!/usr/bin/env python

from setuptools import setup

# The PyPI long description comes straight from the project README
with open("README.md", "r") as readme:
    long_description = readme.read()

setup(
    name="podop",
    version="0.2.5",
    description="Postfix and Dovecot proxy",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Pierre Jaury",
    author_email="pierre@jaury.eu",
    url="https://github.com/mailu/podop.git",
    packages=["podop"],
    include_package_data=True,
    scripts=["scripts/podop"],
    install_requires=[
        "aiohttp",
    ],
)

@ -0,0 +1,22 @@
.DS_Store
.idea
tmp
*.bak
*~
.*.swp
__pycache__/
*.pyc
*.pyo
*.egg-info/
.build
.env*
.venv
*.code-workspace
venv/
build/
dist/

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2019 Mailu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,2 @@
include README.md
include LICENSE.md

@ -0,0 +1,24 @@
Socrate is a simple Python module providing a set of utility functions for
Python daemon applications.
The scope includes:
- configuration utilities (configuration parsing, etc.)
- system utilities (access to DNS, stats, etc.)
Setup
======
Socrate is available on PyPI, simply run:
```
pip install socrate
```
Contributing
============
Socrate is free software, open to suggestions and contributions. All
components are free software and compatible with the MIT license. All
the code is placed under the MIT license.

@ -0,0 +1,24 @@
#!/usr/bin/env python

# distutils is deprecated (PEP 632); use setuptools directly, matching the
# sibling podop setup.py. install_requires is a setuptools feature anyway.
from setuptools import setup

# The PyPI long description comes straight from the project README
with open("README.md", "r") as readme:
    long_description = readme.read()

setup(
    name="socrate",
    version="0.2.0",
    description="Socrate daemon utilities",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Pierre Jaury",
    author_email="pierre@jaury.eu",
    url="https://github.com/mailu/socrate.git",
    packages=["socrate"],
    include_package_data=True,
    install_requires=[
        "jinja2",
        "tenacity",
    ],
)

@ -0,0 +1,55 @@
import jinja2
import importlib
def jinja(source, environ, destination=None):
    """ Render a Jinja configuration file, supports file handle or path

    source      -- template, either an open file handle or a path string
    environ     -- mapping of variables made available to the template
    destination -- optional output, either an open file handle or a path
                   string; when None the result is only returned

    Returns the rendered text. Handles opened here (i.e. when a path
    string was given) are closed again even if rendering fails;
    caller-provided handles are left open.
    """
    close_source = close_destination = False
    if type(source) is str:
        source = open(source, "r")
        close_source = True
    if type(destination) is str:
        destination = open(destination, "w")
        close_destination = True
    try:
        result = jinja2.Template(source.read()).render(environ)
        if destination is not None:
            destination.write(result)
    finally:
        # only close what we opened ourselves (previously these handles
        # leaked if Template/render raised)
        if close_source:
            source.close()
        if close_destination:
            destination.close()
    return result
def merge(*objects):
    """ Merge simple python objects, which only consist of
    strings, integers, bools, lists and dicts
    """
    first_type = type(objects[0])
    # Refuse to merge values of differing types
    for obj in objects:
        if type(obj) is not first_type:
            raise ValueError("Cannot merge mixed typed objects")
    # A single object merges to itself, whatever its type
    if len(objects) == 1:
        return objects[0]
    if first_type is dict:
        # Recursively merge values key by key, in first-seen key order
        merged = {}
        for obj in objects:
            for key in obj.keys():
                merged[key] = merge(*[o[key] for o in objects if key in o])
        return merged
    if first_type is list:
        # Lists merge by concatenation
        combined = []
        for obj in objects:
            combined.extend(obj)
        return combined
    raise ValueError("Cannot merge objects of type {}: {}".format(
        first_type, objects))
def resolve_function(function, cache={}):
    """ Resolve a fully qualified function name in Python, and caches
    the result

    NOTE: the mutable default argument is deliberate; it acts as a
    per-process memoization cache shared across calls.
    """
    if function not in cache:
        module_name, attribute = function.rsplit(".", 1)
        module = importlib.import_module(module_name)
        cache[function] = getattr(module, attribute)
    return cache[function]

@ -0,0 +1,38 @@
import socket
import tenacity
from os import environ
import logging as log
@tenacity.retry(stop=tenacity.stop_after_attempt(100),
                wait=tenacity.wait_random(min=2, max=5))
def resolve_hostname(hostname):
    """ This function uses system DNS to resolve a hostname.
    It is capable of retrying in case the host is not immediately available
    """
    try:
        # Prefer the lowest address family (sorted by family), take the
        # first resolved address
        return sorted(socket.getaddrinfo(hostname, None, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_PASSIVE), key=lambda s: s[0])[0][4][0]
    except Exception as e:
        # log.warn is a deprecated alias of log.warning
        log.warning("Unable to lookup '%s': %s", hostname, e)
        raise e


def resolve_address(address):
    """ This function is identical to ``resolve_hostname`` but also supports
    resolving an address, i.e. including a port.
    """
    hostname, *rest = address.rsplit(":", 1)
    ip_address = resolve_hostname(hostname)
    if ":" in ip_address:
        # bracket bare IPv6 addresses so the port suffix stays unambiguous
        ip_address = "[{}]".format(ip_address)
    return ip_address + "".join(":" + port for port in rest)


def get_host_address_from_environment(name, default):
    """ This function looks up an environment variable ``{{ name }}_ADDRESS``.
    If it's defined, it is returned unmodified. If it's undefined, an environment
    variable ``HOST_{{ name }}`` is looked up and resolved to an ip address.
    If this is also not defined, the default is resolved to an ip address.
    """
    if "{}_ADDRESS".format(name) in environ:
        return environ.get("{}_ADDRESS".format(name))
    return resolve_address(environ.get("HOST_{}".format(name), default))

@ -0,0 +1,117 @@
import unittest
import io
import os
from socrate import conf, system
class TestConf(unittest.TestCase):
    """ Test configuration functions
    """

    # (expected, *inputs) tuples exercised by test_merge
    MERGE_EXPECTATIONS = [
        ({"a": "1", "b": "2", "c": "3", "d": "4"},
         {"a": "1", "b": "2"},
         {"c": "3", "d": "4"}),
        ({"a": [1, 2, 3, 4, 5], "b": "4"},
         {"a": [1, 2, 3], "b": "4"},
         {"a": [4, 5]}),
        ({"a": {"x": "1", "y": "2", "z": 3}, "b": 4, "c": "5"},
         {"a": {"x": "1", "y": "2"}, "b": 4},
         {"a": {"z": 3}, "c": "5"})
    ]

    def test_jinja(self):
        # Rendering from a file-like object, both returning and writing
        template = "Test {{ variable }}"
        environ = {"variable": "ok"}
        self.assertEqual(conf.jinja(io.StringIO(template), environ), "Test ok")
        output = io.StringIO()
        conf.jinja(io.StringIO(template), environ, output)
        self.assertEqual(output.getvalue(), "Test ok")

    def test_merge(self):
        for expected, *parts in TestConf.MERGE_EXPECTATIONS:
            self.assertEqual(expected, conf.merge(*parts))

    def test_merge_failure(self):
        # Conflicting scalar values and mixed types must both raise
        with self.assertRaises(ValueError):
            conf.merge({"a": 1}, {"a": 2})
        with self.assertRaises(ValueError):
            conf.merge(1, "a")

    def test_resolve(self):
        self.assertEqual(conf.resolve_function("unittest.TestCase"),
                         unittest.TestCase)
        self.assertEqual(conf.resolve_function("unittest.util.strclass"),
                         unittest.util.strclass)

    def test_resolve_failure(self):
        with self.assertRaises(AttributeError):
            conf.resolve_function("unittest.inexistant")
        with self.assertRaises(ModuleNotFoundError):
            conf.resolve_function("inexistant.function")
class TestSystem(unittest.TestCase):
    """ Test the system functions

    NOTE(review): these tests resolve real sslip.io names and therefore
    need working DNS to pass.
    """

    def test_resolve_hostname(self):
        # sslip.io resolves <ip-with-dashes>.sslip.io to that IP
        self.assertEqual(system.resolve_hostname("1.2.3.4.sslip.io"),
                         "1.2.3.4")
        self.assertEqual(system.resolve_hostname("2001-db8--f00.sslip.io"),
                         "2001:db8::f00")

    def test_resolve_address(self):
        self.assertEqual(system.resolve_address("1.2.3.4.sslip.io:80"),
                         "1.2.3.4:80")
        self.assertEqual(system.resolve_address("2001-db8--f00.sslip.io:80"),
                         "[2001:db8::f00]:80")

    def test_get_host_address_from_environment(self):
        # Start from a clean environment
        for variable in ("TEST_ADDRESS", "HOST_TEST"):
            os.environ.pop(variable, None)
        # if nothing is set, the default must be resolved
        self.assertEqual(
            system.get_host_address_from_environment("TEST", "1.2.3.4.sslip.io:80"),
            "1.2.3.4:80")
        # if HOST is set, the HOST must be resolved
        os.environ["HOST_TEST"] = "1.2.3.5.sslip.io:80"
        self.assertEqual(
            system.get_host_address_from_environment("TEST", "1.2.3.4.sslip.io:80"),
            "1.2.3.5:80")
        # if ADDRESS is set, the ADDRESS must be returned unresolved
        os.environ["TEST_ADDRESS"] = "1.2.3.6.sslip.io:80"
        self.assertEqual(
            system.get_host_address_from_environment("TEST", "1.2.3.4.sslip.io:80"),
            "1.2.3.6.sslip.io:80")


if __name__ == "__main__":
    unittest.main()

@ -0,0 +1,3 @@
pip==22.3
setuptools==65.5.0
wheel==0.37.1

@ -0,0 +1,53 @@
# core/base
libs/podop
libs/socrate
# core/admin
alembic
Babel
click
dnspython
Flask
Flask-Babel
Flask-Bootstrap
Flask-DebugToolbar
Flask-Login
flask-marshmallow
Flask-Migrate
Flask-SQLAlchemy
Flask-WTF
gunicorn
idna
itsdangerous
limits
marshmallow
marshmallow-sqlalchemy
mysql-connector-python
passlib
psycopg2-binary
Pygments
pyOpenSSL
PyYAML
redis
SQLAlchemy
srslib
tabulate
tenacity
validators
Werkzeug
WTForms
WTForms-Components
xmltodict
# core/nginx
watchdog
# core/postfix
postfix-mta-sts-resolver
# optional/fetchmail
requests
# optional/radicale
radicale

@ -1,19 +1,24 @@
aiodns==3.0.0
aiohttp==3.8.3
aiosignal==1.2.0
alembic==1.7.4
appdirs==1.4.4
async-timeout==4.0.2
attrs==22.1.0
Babel==2.9.1
bcrypt==3.2.0
blinker==1.4
CacheControl==0.12.9
certifi==2021.10.8
# cffi==1.15.0
cffi==1.15.1
chardet==4.0.0
charset-normalizer==2.0.12
click==8.0.3
colorama==0.4.4
contextlib2==21.6.0
cryptography==35.0.0
decorator==5.1.0
# distlib==0.3.1
# distro==1.5.0
defusedxml==0.7.1
dnspython==2.1.0
dominate==2.6.0
email-validator==1.1.3
@ -28,6 +33,7 @@ Flask-Migrate==3.1.0
Flask-Script==2.0.6
Flask-SQLAlchemy==2.5.1
Flask-WTF==0.15.1
frozenlist==1.3.1
greenlet==1.1.2
gunicorn==20.1.0
html5lib==1.1
@ -38,31 +44,34 @@ itsdangerous==2.0.1
Jinja2==3.0.2
limits==1.5.1
lockfile==0.12.2
Mako==1.1.5
Mako==1.2.3
MarkupSafe==2.0.1
marshmallow==3.14.0
marshmallow-sqlalchemy==0.26.1
msgpack==1.0.2
# mysqlclient==2.0.3
multidict==6.0.2
mysql-connector-python==8.0.25
ordered-set==4.0.2
# packaging==20.9
passlib==1.7.4
# pep517==0.10.0
podop @ file:///app/libs/podop
postfix-mta-sts-resolver==1.0.1
progress==1.6
#psycopg2==2.9.1
protobuf==4.21.7
psycopg2-binary==2.9.3
pycares==4.2.2
pycparser==2.20
Pygments==2.10.0
pyOpenSSL==21.0.0
pyparsing==3.0.4
python-dateutil==2.8.2
pytz==2021.3
PyYAML==6.0
Radicale==3.1.8
redis==3.5.3
requests==2.26.0
retrying==1.3.3
# six==1.15.0
socrate==0.2.0
six==1.16.0
socrate @ file:///app/libs/socrate
SQLAlchemy==1.4.26
srslib==0.1.4
tabulate==0.8.9
@ -71,8 +80,11 @@ toml==0.10.2
urllib3==1.26.7
validators==0.18.2
visitor==0.1.3
vobject==0.9.6.1
watchdog==2.1.9
webencodings==0.5.1
Werkzeug==2.0.2
WTForms==2.3.3
WTForms-Components==0.10.5
xmltodict==0.12.0
yarl==1.8.1

@ -1,36 +1,23 @@
ARG DISTRO=alpine:3.14.5
# syntax=docker/dockerfile-upstream:1.4.3
# dovecot image
FROM base
FROM $DISTRO
ARG VERSION
ENV TZ Etc/UTC
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip git bash py3-multidict py3-yarl tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache dovecot dovecot-fts-xapian dovecot-lmtpd dovecot-pigeonhole-plugin dovecot-pop3d dovecot-submissiond rspamd-client xapian-core \
; mkdir /var/lib/dovecot
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, snappymail, roundcube
RUN pip3 install socrate==0.2.0
COPY conf/ /conf/
COPY start.py /
# Shared layer between dovecot and postfix
RUN apk add --no-cache --virtual .build-deps gcc musl-dev python3-dev \
&& pip3 install "podop>0.2.5" \
&& apk del .build-deps
# Image specific layers under this line
RUN apk add --no-cache \
dovecot dovecot-lmtpd dovecot-pop3d dovecot-submissiond dovecot-pigeonhole-plugin rspamd-client xapian-core dovecot-fts-xapian \
&& mkdir /var/lib/dovecot
COPY conf /conf
COPY start.py /start.py
RUN echo $VERSION >/version
EXPOSE 110/tcp 143/tcp 993/tcp 4190/tcp 2525/tcp
HEALTHCHECK --start-period=350s CMD echo QUIT|nc localhost 110|grep "Dovecot ready."
VOLUME ["/mail"]
CMD /start.py
HEALTHCHECK --start-period=350s CMD echo QUIT|nc localhost 110|grep "Dovecot ready."
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import glob

@ -1,34 +1,33 @@
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
# syntax=docker/dockerfile-upstream:1.4.3
# build static assets (intermediate)
FROM base as static
COPY static/ /static/
RUN set -euxo pipefail \
; gzip -k9 /static/*.ico /static/*.txt \
; chmod a+rX-w -R /static
# nginx image
FROM base
ARG VERSION
ENV TZ Etc/UTC
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip git bash py3-multidict \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache certbot nginx nginx-mod-mail openssl
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, snappymail, roundcube
RUN pip3 install socrate==0.2.0
# Image specific layers under this line
RUN apk add --no-cache certbot nginx nginx-mod-mail openssl curl tzdata \
&& pip3 install watchdog
COPY conf /conf
COPY static /static
COPY conf/ /conf/
COPY --from=static /static/ /static/
COPY *.py /
RUN gzip -k9 /static/*.ico /static/*.txt; chmod a+rX -R /static
RUN echo $VERSION >/version
EXPOSE 80/tcp 443/tcp 110/tcp 143/tcp 465/tcp 587/tcp 993/tcp 995/tcp 25/tcp 10025/tcp 10143/tcp
VOLUME ["/certs"]
VOLUME ["/overrides"]
HEALTHCHECK --start-period=60s CMD curl -skfLo /dev/null http://localhost/health
VOLUME ["/certs", "/overrides"]
CMD /start.py
HEALTHCHECK CMD curl -k -f -L http://localhost/health || exit 1
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
"""
Certificate watcher which reloads nginx or reconfigures it, depending on what
happens to externally supplied certificates. Only executed by start.py in case

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import logging as log

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import time

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import subprocess

@ -1,6 +1,14 @@
# This is an idle image to dynamically replace any component if disabled.
# syntax=docker/dockerfile-upstream:1.4.3
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
# idle image (to dynamically replace any disabled component)
FROM base
CMD sleep 1000000d
ARG VERSION=local
LABEL version=$VERSION
RUN echo $VERSION >/version
HEALTHCHECK CMD true
USER app
CMD ["/bin/bash", "-c", "sleep infinity"]

@ -1,40 +1,22 @@
ARG DISTRO=alpine:3.14.5
# syntax=docker/dockerfile-upstream:1.4.3
FROM $DISTRO
ARG VERSION
ENV TZ Etc/UTC
# postfix image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip git bash py3-multidict py3-yarl tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache cyrus-sasl-login logrotate postfix postfix-pcre rsyslog
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, snappymail, roundcube
RUN pip3 install socrate==0.2.0
COPY conf/ /conf/
COPY start.py /
# Shared layer between dovecot and postfix
RUN apk add --no-cache --virtual .build-deps gcc musl-dev python3-dev \
&& pip3 install "podop>0.2.5" \
&& apk del .build-deps
# Image specific layers under this line
# Building pycares from source requires py3-wheel and libffi-dev packages
RUN pip install --no-cache-dir --only-binary=:all: postfix-mta-sts-resolver==1.0.1 || (apk add --no-cache --virtual .build-deps gcc musl-dev python3-dev py3-wheel libffi-dev \
&& pip3 install postfix-mta-sts-resolver==1.0.1 \
&& apk del .build-deps )
RUN apk add --no-cache postfix postfix-pcre cyrus-sasl-login rsyslog logrotate
COPY conf /conf
COPY start.py /start.py
RUN echo $VERSION >/version
EXPOSE 25/tcp 10025/tcp
HEALTHCHECK --start-period=350s CMD echo QUIT|nc localhost 25|grep "220 .* ESMTP Postfix"
VOLUME ["/queue"]
CMD /start.py
HEALTHCHECK --start-period=350s CMD echo QUIT|nc localhost 25|grep "220 .* ESMTP Postfix"
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import glob

@ -1,31 +1,23 @@
ARG DISTRO=alpine:3.15
FROM $DISTRO
ARG VERSION
ENV TZ Etc/UTC
# syntax=docker/dockerfile-upstream:1.4.3
# rspamd image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip git bash py3-multidict tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache rspamd rspamd-controller rspamd-fuzzy rspamd-proxy \
; mkdir /run/rspamd
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, snappymail, roundcube
RUN pip3 install socrate==0.2.0
COPY conf/ /conf/
COPY start.py /
# Image specific layers under this line
RUN apk add --no-cache rspamd rspamd-controller rspamd-proxy rspamd-fuzzy ca-certificates curl
RUN mkdir /run/rspamd
COPY conf/ /conf
COPY start.py /start.py
RUN echo $VERSION >/version
EXPOSE 11332/tcp 11334/tcp 11335/tcp
HEALTHCHECK --start-period=350s CMD curl -skfLo /dev/null http://localhost:11334/
VOLUME ["/var/lib/rspamd"]
CMD /start.py
HEALTHCHECK --start-period=350s CMD curl -f -L http://localhost:11334/ || exit 1
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import glob

@ -312,35 +312,6 @@ The following must be done on every PR or after every new commit to an existing
If git opens an editor for a commit message just save and exit as-is. If you have a merge conflict,
see above and do the complete procedure from ``git fetch`` onward again.
Web administration
------------------
The administration Web interface requires a proper dev environment that can easily be set up using
``virtualenv`` (make sure you are using Python 3):
.. code-block:: bash
cd core/admin
virtualenv .
source bin/activate
pip install -r requirements.txt
You can then export the path to the development database (use four slashes for absolute path):
.. code-block:: bash
export SQLALCHEMY_DATABASE_URI=sqlite:////path/to/dev.db
And finally run the server with debug enabled:
.. code-block:: bash
python run.py
Any change to the files will automatically restart the Web server and reload the files.
When using the development environment, a debugging toolbar is displayed on the right side
of the screen, that you can open to access query details, internal variables, etc.
Documentation
-------------

@ -1,26 +1,22 @@
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
ARG VERSION
# syntax=docker/dockerfile-upstream:1.4.3
ENV TZ Etc/UTC
# clamav image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip bash tzdata \
&& pip3 install --upgrade pip
# Image specific layers under this line
RUN apk add --no-cache clamav rsyslog wget clamav-libunrar
RUN set -euxo pipefail \
; apk add --no-cache clamav clamav-libunrar rsyslog wget
COPY conf /etc/clamav
COPY start.py /start.py
COPY health.sh /health.sh
COPY conf/ /etc/clamav/
COPY start.py /
RUN echo $VERSION >/version
EXPOSE 3310/tcp
HEALTHCHECK --start-period=350s CMD echo PING|nc localhost 3310|grep "PONG"
VOLUME ["/data"]
CMD /start.py
HEALTHCHECK --start-period=350s CMD /health.sh
RUN echo $VERSION >> /version

@ -1,8 +0,0 @@
#!/bin/sh
if [ "$(echo PING | nc localhost 3310)" = "PONG" ]; then
echo "ping successful"
else
echo "ping failed"
exit 1
fi

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import logging as log

@ -1,23 +1,21 @@
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
ARG VERSION
# syntax=docker/dockerfile-upstream:1.4.3
ENV TZ Etc/UTC
# fetchmail image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip bash tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache fetchmail openssl \
; mkdir -p /data
# Image specific layers under this line
RUN apk add --no-cache fetchmail ca-certificates openssl \
&& pip3 install requests
COPY fetchmail.py /
RUN mkdir -p /data
RUN echo $VERSION >/version
COPY fetchmail.py /fetchmail.py
HEALTHCHECK --start-period=350s CMD ["/bin/sh", "-c", "ps ax | grep [/]fetchmail.py"]
VOLUME ["/var/lib/rspamd"]
CMD ["/fetchmail.py"]
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import time
import os

@ -1,27 +1,18 @@
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
ARG VERSION
# syntax=docker/dockerfile-upstream:1.4.3
ENV TZ Etc/UTC
# webdav image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip bash tzdata \
&& pip3 install --upgrade pip
COPY radicale.conf /
# Image specific layers under this line
RUN apk add --no-cache curl \
&& pip3 install pytz radicale~=3.0
COPY radicale.conf /radicale.conf
RUN echo $VERSION >/version
EXPOSE 5232/tcp
HEALTHCHECK CMD curl -f -L http://localhost:5232/ || exit 1
VOLUME ["/data"]
CMD radicale -S -C /radicale.conf
HEALTHCHECK CMD curl -f -L http://localhost:5232/ || exit 1
RUN echo $VERSION >> /version

@ -1,16 +1,22 @@
# syntax=docker/dockerfile-upstream:1.4.3
# cert dumper image
FROM ldez/traefik-certs-dumper
ARG VERSION
ENV TZ Etc/UTC
ENV LANG C.UTF-8
ARG VERSION
LABEL version=$VERSION
RUN apk --no-cache add inotify-tools util-linux bash tzdata
RUN set -euxo pipefail \
; apk add --no-cache bash inotify-tools tzdata util-linux
COPY run.sh /
RUN echo $VERSION >/version
VOLUME ["/traefik"]
VOLUME ["/output"]
ENTRYPOINT ["/run.sh"]
RUN echo $VERSION >> /version

@ -1,33 +1,24 @@
ARG DISTRO=alpine:3.14.5
FROM $DISTRO
ARG VERSION
# syntax=docker/dockerfile-upstream:1.4.3
ENV TZ Etc/UTC
# resolver image
FROM base
ARG VERSION=local
LABEL version=$VERSION
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip git bash py3-multidict tzdata \
&& pip3 install --upgrade pip
RUN set -euxo pipefail \
; apk add --no-cache bind-tools unbound \
; curl -so /etc/unbound/root.hints https://www.internic.net/domain/named.cache \
; chown root:unbound /etc/unbound \
; chmod 775 /etc/unbound \
; /usr/sbin/unbound-anchor -a /etc/unbound/trusted-key.key || true
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, snappymail, roundcube
RUN pip3 install socrate==0.2.0
COPY unbound.conf /
COPY start.py /
# Image specific layers under this line
RUN apk add --no-cache unbound curl bind-tools \
&& curl -o /etc/unbound/root.hints https://www.internic.net/domain/named.cache \
&& chown root:unbound /etc/unbound \
&& chmod 775 /etc/unbound \
&& apk del --no-cache curl \
&& /usr/sbin/unbound-anchor -a /etc/unbound/trusted-key.key | true
COPY start.py /start.py
COPY unbound.conf /unbound.conf
RUN echo $VERSION >/version
EXPOSE 53/udp 53/tcp
HEALTHCHECK CMD dig @127.0.0.1 || exit 1
CMD /start.py
HEALTHCHECK CMD dig @127.0.0.1 || exit 1
RUN echo $VERSION >> /version

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import logging as log

@ -78,12 +78,25 @@ function "tag" {
# docker buildx bake -f tests\build.hcl docs
#-----------------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------------
# Base images
# -----------------------------------------------------------------------------------------
target "base" {
inherits = ["defaults"]
context = "core/base/"
}
target "assets" {
inherits = ["defaults"]
context = "core/admin/assets/"
}
# -----------------------------------------------------------------------------------------
# Documentation and setup images
# -----------------------------------------------------------------------------------------
target "docs" {
inherits = ["defaults"]
context = "docs"
context = "docs/"
tags = tag("docs")
args = {
version = "${MAILU_VERSION}"
@ -93,7 +106,7 @@ target "docs" {
target "setup" {
inherits = ["defaults"]
context="setup"
context = "setup/"
tags = tag("setup")
}
@ -102,37 +115,56 @@ target "setup" {
# -----------------------------------------------------------------------------------------
target "none" {
inherits = ["defaults"]
context="core/none"
context = "core/none/"
contexts = {
base = "target:base"
}
tags = tag("none")
}
target "admin" {
inherits = ["defaults"]
context="core/admin"
context = "core/admin/"
contexts = {
base = "target:base"
assets = "target:assets"
}
tags = tag("admin")
}
target "antispam" {
inherits = ["defaults"]
context="core/rspamd"
context = "core/rspamd/"
contexts = {
base = "target:base"
}
tags = tag("rspamd")
}
target "front" {
inherits = ["defaults"]
context="core/nginx"
context = "core/nginx/"
contexts = {
base = "target:base"
}
tags = tag("nginx")
}
target "imap" {
inherits = ["defaults"]
context="core/dovecot"
context = "core/dovecot/"
contexts = {
base = "target:base"
}
tags = tag("dovecot")
}
target "smtp" {
inherits = ["defaults"]
context="core/postfix"
context = "core/postfix/"
contexts = {
base = "target:base"
}
tags = tag("postfix")
}
@ -141,13 +173,13 @@ target "smtp" {
# -----------------------------------------------------------------------------------------
target "snappymail" {
inherits = ["defaults"]
context="webmails/snappymail"
context = "webmails/snappymail/"
tags = tag("snappymail")
}
target "roundcube" {
inherits = ["defaults"]
context="webmails/roundcube"
context = "webmails/roundcube/"
tags = tag("roundcube")
}
@ -156,30 +188,42 @@ target "roundcube" {
# -----------------------------------------------------------------------------------------
target "antivirus" {
inherits = ["defaults"]
context="optional/clamav"
context = "optional/clamav/"
contexts = {
base = "target:base"
}
tags = tag("clamav")
}
target "fetchmail" {
inherits = ["defaults"]
context="optional/fetchmail"
context = "optional/fetchmail/"
contexts = {
base = "target:base"
}
tags = tag("fetchmail")
}
target "resolver" {
inherits = ["defaults"]
context="optional/unbound"
context = "optional/unbound/"
contexts = {
base = "target:base"
}
tags = tag("unbound")
}
target "traefik-certdumper" {
inherits = ["defaults"]
context="optional/traefik-certdumper"
context = "optional/traefik-certdumper/"
tags = tag("traefik-certdumper")
}
target "webdav" {
inherits = ["defaults"]
context="optional/radicale"
context = "optional/radicale/"
contexts = {
base = "target:base"
}
tags = tag("radicale")
}
}

Loading…
Cancel
Save